Dataset columns:
- text (string): snippet source, lengths 89 to 104k characters
- code_tokens (list): tokenized form of text
- avg_line_len (float64): 7.91 to 980
- score (float64): 0 to 630
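A minimal sketch of how a dump with this schema could be loaded and inspected with pandas; the parquet file name is a placeholder, not something named in this dump.

import pandas as pd

# Placeholder path; point this at wherever the dump actually lives.
df = pd.read_parquet("code_snippets.parquet")

# The four columns summarized above.
print(df.dtypes)

# Example filter: short-lined, higher-scoring snippets.
subset = df[(df["avg_line_len"] < 40) & (df["score"] > 15)]
print(subset[["avg_line_len", "score"]].describe())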
def exit(self):
    """Use carefully to cause the Minecraft service to exit (and hopefully restart).

    Likely to throw communication errors so wrap in exception handler.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((self.server2, self.port2))
    self._hello(sock)

    comms.send_message(sock, ("<Exit>" + self._get_token() + "</Exit>").encode())
    reply = comms.recv_message(sock)
    sock.close()
    ok, = struct.unpack('!I', reply)
    return ok != 0
[ "def", "exit", "(", "self", ")", ":", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", "sock", ".", "connect", "(", "(", "self", ".", "server2", ",", "self", ".", "port2", ")", ")", "self", ".", "_hello", "(", "sock", ")", "comms", ".", "send_message", "(", "sock", ",", "(", "\"<Exit>\"", "+", "self", ".", "_get_token", "(", ")", "+", "\"</Exit>\"", ")", ".", "encode", "(", ")", ")", "reply", "=", "comms", ".", "recv_message", "(", "sock", ")", "sock", ".", "close", "(", ")", "ok", ",", "=", "struct", ".", "unpack", "(", "'!I'", ",", "reply", ")", "return", "ok", "!=", "0" ]
40.923077
17.615385
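The reply handling in `exit` hinges on `struct.unpack('!I', ...)`, which decodes one big-endian unsigned 32-bit integer; a self-contained illustration with a made-up reply payload:

import struct

# A 4-byte big-endian payload like the service might send back (made up here).
reply = b'\x00\x00\x00\x01'
ok, = struct.unpack('!I', reply)  # the trailing comma unpacks the 1-tuple
print(ok != 0)  # True -> non-zero status, i.e. the exit request was accepted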
def get_tree(ident_hash, baked=False):
    """Return a tree structure of the Collection"""
    id, version = get_id_n_version(ident_hash)

    stmt = _get_sql('get-tree.sql')
    args = dict(id=id, version=version, baked=baked)
    with db_connect() as db_conn:
        with db_conn.cursor() as cursor:
            cursor.execute(stmt, args)
            try:
                tree = cursor.fetchone()[0]
            except TypeError:
                raise NotFound(ident_hash)

    if tree is None:
        raise NotFound(ident_hash)

    return tree
[ "def", "get_tree", "(", "ident_hash", ",", "baked", "=", "False", ")", ":", "id", ",", "version", "=", "get_id_n_version", "(", "ident_hash", ")", "stmt", "=", "_get_sql", "(", "'get-tree.sql'", ")", "args", "=", "dict", "(", "id", "=", "id", ",", "version", "=", "version", ",", "baked", "=", "baked", ")", "with", "db_connect", "(", ")", "as", "db_conn", ":", "with", "db_conn", ".", "cursor", "(", ")", "as", "cursor", ":", "cursor", ".", "execute", "(", "stmt", ",", "args", ")", "try", ":", "tree", "=", "cursor", ".", "fetchone", "(", ")", "[", "0", "]", "except", "TypeError", ":", "raise", "NotFound", "(", "ident_hash", ")", "if", "tree", "is", "None", ":", "raise", "NotFound", "(", "ident_hash", ")", "return", "tree" ]
29.555556
13.611111
def get_path(self, path, query=None):
    """Make a GET request, optionally including a query, to a relative path.

    The path of the request includes a path on top of the base URL
    assigned to the endpoint.

    Parameters
    ----------
    path : str
        The path to request, relative to the endpoint
    query : DataQuery, optional
        The query to pass when making the request

    Returns
    -------
    resp : requests.Response
        The server's response to the request

    See Also
    --------
    get_query, get, url_path
    """
    return self.get(self.url_path(path), query)
[ "def", "get_path", "(", "self", ",", "path", ",", "query", "=", "None", ")", ":", "return", "self", ".", "get", "(", "self", ".", "url_path", "(", "path", ")", ",", "query", ")" ]
27.291667
19.75
def _computeforceArray(self, dr_dx, dtheta_dx, dphi_dx, R, z, phi):
    """
    NAME:
        _computeforceArray
    PURPOSE:
        evaluate the forces in the x direction for a given array of coordinates
    INPUT:
        dr_dx - the derivative of r with respect to the chosen variable x
        dtheta_dx - the derivative of theta with respect to the chosen variable x
        dphi_dx - the derivative of phi with respect to the chosen variable x
        R - Cylindrical Galactocentric radius
        z - vertical height
        phi - azimuth
    OUTPUT:
        The forces in the x direction
    HISTORY:
        2016-06-02 - Written - Aladdin
    """
    R = nu.array(R, dtype=float)
    z = nu.array(z, dtype=float)
    phi = nu.array(phi, dtype=float)
    shape = (R * z * phi).shape
    if shape == ():
        dPhi_dr, dPhi_dtheta, dPhi_dphi = \
            self._computeforce(R, z, phi)
        return dr_dx * dPhi_dr + dtheta_dx * dPhi_dtheta + dPhi_dphi * dphi_dx

    R = R * nu.ones(shape)
    z = z * nu.ones(shape)
    phi = phi * nu.ones(shape)
    force = nu.zeros(shape, float)
    dr_dx = dr_dx * nu.ones(shape)
    dtheta_dx = dtheta_dx * nu.ones(shape)
    dphi_dx = dphi_dx * nu.ones(shape)

    li = _cartesian(shape)
    for i in range(li.shape[0]):
        j = tuple(nu.split(li[i], li.shape[1]))
        dPhi_dr, dPhi_dtheta, dPhi_dphi = \
            self._computeforce(R[j][0], z[j][0], phi[j][0])
        force[j] = dr_dx[j][0] * dPhi_dr + dtheta_dx[j][0] * dPhi_dtheta + dPhi_dphi * dphi_dx[j][0]
    return force
[ "def", "_computeforceArray", "(", "self", ",", "dr_dx", ",", "dtheta_dx", ",", "dphi_dx", ",", "R", ",", "z", ",", "phi", ")", ":", "R", "=", "nu", ".", "array", "(", "R", ",", "dtype", "=", "float", ")", "z", "=", "nu", ".", "array", "(", "z", ",", "dtype", "=", "float", ")", "phi", "=", "nu", ".", "array", "(", "phi", ",", "dtype", "=", "float", ")", "shape", "=", "(", "R", "*", "z", "*", "phi", ")", ".", "shape", "if", "shape", "==", "(", ")", ":", "dPhi_dr", ",", "dPhi_dtheta", ",", "dPhi_dphi", "=", "self", ".", "_computeforce", "(", "R", ",", "z", ",", "phi", ")", "return", "dr_dx", "*", "dPhi_dr", "+", "dtheta_dx", "*", "dPhi_dtheta", "+", "dPhi_dphi", "*", "dphi_dx", "R", "=", "R", "*", "nu", ".", "ones", "(", "shape", ")", "z", "=", "z", "*", "nu", ".", "ones", "(", "shape", ")", "phi", "=", "phi", "*", "nu", ".", "ones", "(", "shape", ")", "force", "=", "nu", ".", "zeros", "(", "shape", ",", "float", ")", "dr_dx", "=", "dr_dx", "*", "nu", ".", "ones", "(", "shape", ")", "dtheta_dx", "=", "dtheta_dx", "*", "nu", ".", "ones", "(", "shape", ")", "dphi_dx", "=", "dphi_dx", "*", "nu", ".", "ones", "(", "shape", ")", "li", "=", "_cartesian", "(", "shape", ")", "for", "i", "in", "range", "(", "li", ".", "shape", "[", "0", "]", ")", ":", "j", "=", "tuple", "(", "nu", ".", "split", "(", "li", "[", "i", "]", ",", "li", ".", "shape", "[", "1", "]", ")", ")", "dPhi_dr", ",", "dPhi_dtheta", ",", "dPhi_dphi", "=", "self", ".", "_computeforce", "(", "R", "[", "j", "]", "[", "0", "]", ",", "z", "[", "j", "]", "[", "0", "]", ",", "phi", "[", "j", "]", "[", "0", "]", ")", "force", "[", "j", "]", "=", "dr_dx", "[", "j", "]", "[", "0", "]", "*", "dPhi_dr", "+", "dtheta_dx", "[", "j", "]", "[", "0", "]", "*", "dPhi_dtheta", "+", "dPhi_dphi", "*", "dphi_dx", "[", "j", "]", "[", "0", "]", "return", "force" ]
41.487179
20.538462
def get_license_assignment_manager(service_instance):
    '''
    Returns the license assignment manager.

    service_instance
        The Service Instance Object from which to obtain the license manager.
    '''
    log.debug('Retrieving license assignment manager')
    try:
        lic_assignment_manager = \
            service_instance.content.licenseManager.licenseAssignmentManager
    except vim.fault.NoPermission as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(
            'Not enough permissions. Required privilege: '
            '{0}'.format(exc.privilegeId))
    except vim.fault.VimFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareApiError(exc.msg)
    except vmodl.RuntimeFault as exc:
        log.exception(exc)
        raise salt.exceptions.VMwareRuntimeError(exc.msg)
    if not lic_assignment_manager:
        raise salt.exceptions.VMwareObjectRetrievalError(
            'License assignment manager was not retrieved')
    return lic_assignment_manager
[ "def", "get_license_assignment_manager", "(", "service_instance", ")", ":", "log", ".", "debug", "(", "'Retrieving license assignment manager'", ")", "try", ":", "lic_assignment_manager", "=", "service_instance", ".", "content", ".", "licenseManager", ".", "licenseAssignmentManager", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "if", "not", "lic_assignment_manager", ":", "raise", "salt", ".", "exceptions", ".", "VMwareObjectRetrievalError", "(", "'License assignment manager was not retrieved'", ")", "return", "lic_assignment_manager" ]
37.555556
17.185185
def datatable_df(self):
    """ returns the dataframe representation of the symbol's final data """
    data = self._all_datatable_data()
    adf = pd.DataFrame(data)
    adf.columns = self.dt_all_cols
    return self._finish_df(adf, 'ALL')
[ "def", "datatable_df", "(", "self", ")", ":", "data", "=", "self", ".", "_all_datatable_data", "(", ")", "adf", "=", "pd", ".", "DataFrame", "(", "data", ")", "adf", ".", "columns", "=", "self", ".", "dt_all_cols", "return", "self", ".", "_finish_df", "(", "adf", ",", "'ALL'", ")" ]
43.333333
4.666667
def com_google_fonts_check_metadata_subsets_order(family_metadata):
    """METADATA.pb subsets should be alphabetically ordered."""
    expected = list(sorted(family_metadata.subsets))

    if list(family_metadata.subsets) != expected:
        yield FAIL, ("METADATA.pb subsets are not sorted "
                     "in alphabetical order: Got ['{}']"
                     " and expected ['{}']").format("', '".join(family_metadata.subsets),
                                                    "', '".join(expected))
    else:
        yield PASS, "METADATA.pb subsets are sorted in alphabetical order."
[ "def", "com_google_fonts_check_metadata_subsets_order", "(", "family_metadata", ")", ":", "expected", "=", "list", "(", "sorted", "(", "family_metadata", ".", "subsets", ")", ")", "if", "list", "(", "family_metadata", ".", "subsets", ")", "!=", "expected", ":", "yield", "FAIL", ",", "(", "\"METADATA.pb subsets are not sorted \"", "\"in alphabetical order: Got ['{}']\"", "\" and expected ['{}']\"", ")", ".", "format", "(", "\"', '\"", ".", "join", "(", "family_metadata", ".", "subsets", ")", ",", "\"', '\"", ".", "join", "(", "expected", ")", ")", "else", ":", "yield", "PASS", ",", "\"METADATA.pb subsets are sorted in alphabetical order.\"" ]
51.272727
22.636364
def terrain_data_send(self, lat, lon, grid_spacing, gridbit, data, force_mavlink1=False):
    '''
    Terrain data sent from GCS. The lat/lon and grid_spacing must be the
    same as a lat/lon from a TERRAIN_REQUEST

    lat          : Latitude of SW corner of first grid (degrees *10^7) (int32_t)
    lon          : Longitude of SW corner of first grid (in degrees *10^7) (int32_t)
    grid_spacing : Grid spacing in meters (uint16_t)
    gridbit      : bit within the terrain request mask (uint8_t)
    data         : Terrain data in meters AMSL (int16_t)
    '''
    return self.send(self.terrain_data_encode(lat, lon, grid_spacing, gridbit, data),
                     force_mavlink1=force_mavlink1)
[ "def", "terrain_data_send", "(", "self", ",", "lat", ",", "lon", ",", "grid_spacing", ",", "gridbit", ",", "data", ",", "force_mavlink1", "=", "False", ")", ":", "return", "self", ".", "send", "(", "self", ".", "terrain_data_encode", "(", "lat", ",", "lon", ",", "grid_spacing", ",", "gridbit", ",", "data", ")", ",", "force_mavlink1", "=", "force_mavlink1", ")" ]
65.846154
44.615385
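The `degrees *10^7` convention in the `terrain_data_send` docstring means callers scale floating-point coordinates to int32 before sending; the coordinates below are illustrative values, not from the source:

# Scale decimal degrees to the int32 "degrees * 10^7" wire format.
lat = int(47.3977419 * 1e7)  # 473977419
lon = int(8.5455938 * 1e7)   # 85455938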
def register(self, username, password):
    """Register a new user.

    Parameters
    ----------
    username: str
        The username.
    password: str
        The password.

    Returns
    -------
    bool
        True if the new user is successfully registered, False otherwise.
    result
        An empty dict if the new user is successfully registered, a dict
        containing the error string with the key ERROR_KEY otherwise.

    Note
    ----
    Possible error strings are listed as below:

    - ERROR_INVALID_USERNAME
    - ERROR_USERNAME_ALREADY_EXISTS.format(username)
    - ERROR_WEAK_PASSWORD
    """
    result = {pytwis_constants.ERROR_KEY: None}

    # Check the username.
    if not Pytwis._check_username(username):
        result[pytwis_constants.ERROR_KEY] = pytwis_constants.ERROR_INVALID_USERNAME
        return (False, result)

    # Check the password.
    if not Pytwis._check_password(password):
        result[pytwis_constants.ERROR_KEY] = pytwis_constants.ERROR_WEAK_PASSWORD
        return (False, result)

    # Update the username-to-userid mapping.
    with self._rc.pipeline() as pipe:
        while True:
            try:
                # Put a watch on the Hash 'users': username -> user-id, in case that
                # multiple clients are registering with the same username.
                pipe.watch(pytwis_constants.USERS_KEY)
                username_exists = pipe.hexists(pytwis_constants.USERS_KEY, username)
                if username_exists:
                    result[pytwis_constants.ERROR_KEY] = \
                        pytwis_constants.ERROR_USERNAME_ALREADY_EXISTS.format(username)
                    return (False, result)

                # Get the next user-id. If the key "next_user_id" doesn't exist,
                # it will be created and initialized as 0, and then incremented by 1.
                userid = pipe.incr(pytwis_constants.NEXT_USER_ID_KEY)

                # Set the username-to-userid pair in USERS_HASH_KEY.
                pipe.multi()
                pipe.hset(pytwis_constants.USERS_KEY, username, userid)
                pipe.execute()

                break
            except WatchError:
                continue

        # Generate the authentication secret.
        auth_secret = secrets.token_hex()
        userid_profile_key = pytwis_constants.USER_PROFILE_KEY_FORMAT.format(userid)

        # Generate the password hash.
        # The format of the password hash looks like "method$salt$hash".
        password_hash = generate_password_hash(password,
                                               method=pytwis_constants.PASSWORD_HASH_METHOD)

        pipe.multi()
        # Update the authentication_secret-to-userid mapping.
        pipe.hset(pytwis_constants.AUTHS_KEY, auth_secret, userid)
        # Create the user profile.
        pipe.hmset(userid_profile_key,
                   {pytwis_constants.USERNAME_KEY: username,
                    pytwis_constants.PASSWORD_HASH_KEY: password_hash,
                    pytwis_constants.AUTH_KEY: auth_secret})
        pipe.execute()

    return (True, result)
[ "def", "register", "(", "self", ",", "username", ",", "password", ")", ":", "result", "=", "{", "pytwis_constants", ".", "ERROR_KEY", ":", "None", "}", "# Check the username.", "if", "not", "Pytwis", ".", "_check_username", "(", "username", ")", ":", "result", "[", "pytwis_constants", ".", "ERROR_KEY", "]", "=", "pytwis_constants", ".", "ERROR_INVALID_USERNAME", "return", "(", "False", ",", "result", ")", "# Check the password.", "if", "not", "Pytwis", ".", "_check_password", "(", "password", ")", ":", "result", "[", "pytwis_constants", ".", "ERROR_KEY", "]", "=", "pytwis_constants", ".", "ERROR_WEAK_PASSWORD", "return", "(", "False", ",", "result", ")", "# Update the username-to-userid mapping.", "with", "self", ".", "_rc", ".", "pipeline", "(", ")", "as", "pipe", ":", "while", "True", ":", "try", ":", "# Put a watch on the Hash 'users': username -> user-id, in case that", "# multiple clients are registering with the same username.", "pipe", ".", "watch", "(", "pytwis_constants", ".", "USERS_KEY", ")", "username_exists", "=", "pipe", ".", "hexists", "(", "pytwis_constants", ".", "USERS_KEY", ",", "username", ")", "if", "username_exists", ":", "result", "[", "pytwis_constants", ".", "ERROR_KEY", "]", "=", "pytwis_constants", ".", "ERROR_USERNAME_ALREADY_EXISTS", ".", "format", "(", "username", ")", "return", "(", "False", ",", "result", ")", "# Get the next user-id. If the key \"next_user_id\" doesn't exist,", "# it will be created and initialized as 0, and then incremented by 1.", "userid", "=", "pipe", ".", "incr", "(", "pytwis_constants", ".", "NEXT_USER_ID_KEY", ")", "# Set the username-to-userid pair in USERS_HASH_KEY.", "pipe", ".", "multi", "(", ")", "pipe", ".", "hset", "(", "pytwis_constants", ".", "USERS_KEY", ",", "username", ",", "userid", ")", "pipe", ".", "execute", "(", ")", "break", "except", "WatchError", ":", "continue", "# Generate the authentication secret.", "auth_secret", "=", "secrets", ".", "token_hex", "(", ")", "userid_profile_key", "=", "pytwis_constants", ".", "USER_PROFILE_KEY_FORMAT", ".", "format", "(", "userid", ")", "# Generate the password hash.", "# The format of the password hash looks like \"method$salt$hash\".", "password_hash", "=", "generate_password_hash", "(", "password", ",", "method", "=", "pytwis_constants", ".", "PASSWORD_HASH_METHOD", ")", "pipe", ".", "multi", "(", ")", "# Update the authentication_secret-to-userid mapping.", "pipe", ".", "hset", "(", "pytwis_constants", ".", "AUTHS_KEY", ",", "auth_secret", ",", "userid", ")", "# Create the user profile.", "pipe", ".", "hmset", "(", "userid_profile_key", ",", "{", "pytwis_constants", ".", "USERNAME_KEY", ":", "username", ",", "pytwis_constants", ".", "PASSWORD_HASH_KEY", ":", "password_hash", ",", "pytwis_constants", ".", "AUTH_KEY", ":", "auth_secret", "}", ")", "pipe", ".", "execute", "(", ")", "return", "(", "True", ",", "result", ")" ]
39.642857
24.02381
def InjectionStatistics(campaign=0, clobber=False, model='nPLD', plot=True,
                        show=True, **kwargs):
    '''
    Computes and plots the statistics for injection/recovery tests.

    :param int campaign: The campaign number. Default 0
    :param str model: The :py:obj:`everest` model name
    :param bool plot: Default :py:obj:`True`
    :param bool show: Show the plot? Default :py:obj:`True`. \
           If :py:obj:`False`, returns the `fig, ax` instances.
    :param bool clobber: Overwrite existing files? Default :py:obj:`False`
    '''
    # Compute the statistics
    stars = GetK2Campaign(campaign, epics_only=True)
    if type(campaign) is int:
        outfile = os.path.join(EVEREST_SRC, 'missions', 'k2', 'tables',
                               'c%02d_%s.inj' % (campaign, model))
    else:
        outfile = os.path.join(EVEREST_SRC, 'missions', 'k2', 'tables',
                               'c%04.1f_%s.inj' % (campaign, model))
    if clobber or not os.path.exists(outfile):
        with open(outfile, 'w') as f:
            print("EPIC Depth UControl URecovered" +
                  " MControl MRecovered", file=f)
            print("--------- ---------- ---------- ----------" +
                  " ---------- ----------", file=f)
            for i, _ in enumerate(stars):
                sys.stdout.write('\rProcessing target %d/%d...' %
                                 (i + 1, len(stars)))
                sys.stdout.flush()
                path = os.path.join(EVEREST_DAT, 'k2', 'c%02d' % int(campaign),
                                    ('%09d' % stars[i])[:4] + '00000',
                                    ('%09d' % stars[i])[4:])

                # Loop over all depths
                for depth in [0.01, 0.001, 0.0001]:
                    try:
                        # Unmasked
                        data = np.load(os.path.join(
                            path, '%s_Inject_U%g.npz' % (model, depth)))
                        assert depth == data['inject'][()]['depth'], ""
                        ucontrol = data['inject'][()]['rec_depth_control']
                        urecovered = data['inject'][()]['rec_depth']

                        # Masked
                        data = np.load(os.path.join(
                            path, '%s_Inject_M%g.npz' % (model, depth)))
                        assert depth == data['inject'][()]['depth'], ""
                        mcontrol = data['inject'][()]['rec_depth_control']
                        mrecovered = data['inject'][()]['rec_depth']

                        # Log it
                        print("{:>09d} {:>13.8f} {:>13.8f} {:>13.8f} {:>13.8f} {:>13.8f}".format(
                            stars[i], depth, ucontrol, urecovered,
                            mcontrol, mrecovered), file=f)
                    except:
                        pass
            print("")

    if plot:
        # Load the statistics
        try:
            epic, depth, ucontrol, urecovered, mcontrol, mrecovered = \
                np.loadtxt(outfile, unpack=True, skiprows=2)
        except ValueError:
            raise Exception("No targets to plot.")

        # Normalize to the injected depth
        ucontrol /= depth
        urecovered /= depth
        mcontrol /= depth
        mrecovered /= depth

        # Set up the plot
        fig, ax = pl.subplots(3, 2, figsize=(9, 12))
        fig.subplots_adjust(hspace=0.29)
        ax[0, 0].set_title(r'Unmasked', fontsize=18)
        ax[0, 1].set_title(r'Masked', fontsize=18)
        ax[0, 0].set_ylabel(
            r'$D_0 = 10^{-2}$', rotation=90, fontsize=18, labelpad=10)
        ax[1, 0].set_ylabel(
            r'$D_0 = 10^{-3}$', rotation=90, fontsize=18, labelpad=10)
        ax[2, 0].set_ylabel(
            r'$D_0 = 10^{-4}$', rotation=90, fontsize=18, labelpad=10)

        # Define some useful stuff for plotting
        depths = [1e-2, 1e-3, 1e-4]
        ranges = [(0.75, 1.25), (0.5, 1.5), (0., 2.)]
        nbins = [30, 30, 20]
        ymax = [0.4, 0.25, 0.16]
        xticks = [[0.75, 0.875, 1., 1.125, 1.25],
                  [0.5, 0.75, 1., 1.25, 1.5],
                  [0., 0.5, 1., 1.5, 2.0]]

        # Plot
        for i in range(3):
            # Indices for this plot
            idx = np.where(depth == depths[i])
            for j, control, recovered in zip([0, 1],
                                             [ucontrol[idx], mcontrol[idx]],
                                             [urecovered[idx], mrecovered[idx]]):
                # Control
                ax[i, j].hist(control, bins=nbins[i], range=ranges[i],
                              color='r', histtype='step',
                              weights=np.ones_like(control) / len(control))
                # Recovered
                ax[i, j].hist(recovered, bins=nbins[i], range=ranges[i],
                              color='b', histtype='step',
                              weights=np.ones_like(recovered) / len(recovered))
                # Indicate center
                ax[i, j].axvline(1., color='k', ls='--')
                # Indicate the fraction above and below
                if len(recovered):
                    au = len(np.where(recovered > ranges[i][1])[0]) / len(recovered)
                    al = len(np.where(recovered < ranges[i][0])[0]) / len(recovered)
                    ax[i, j].annotate('%.2f' % al, xy=(0.01, 0.93),
                                      xycoords='axes fraction',
                                      xytext=(0.1, 0.93), ha='left',
                                      va='center', color='b',
                                      arrowprops=dict(arrowstyle="->", color='b'))
                    ax[i, j].annotate('%.2f' % au, xy=(0.99, 0.93),
                                      xycoords='axes fraction',
                                      xytext=(0.9, 0.93), ha='right',
                                      va='center', color='b',
                                      arrowprops=dict(arrowstyle="->", color='b'))
                if len(control):
                    cu = len(np.where(control > ranges[i][1])[0]) / len(control)
                    cl = len(np.where(control < ranges[i][0])[0]) / len(control)
                    ax[i, j].annotate('%.2f' % cl, xy=(0.01, 0.86),
                                      xycoords='axes fraction',
                                      xytext=(0.1, 0.86), ha='left',
                                      va='center', color='r',
                                      arrowprops=dict(arrowstyle="->", color='r'))
                    ax[i, j].annotate('%.2f' % cu, xy=(0.99, 0.86),
                                      xycoords='axes fraction',
                                      xytext=(0.9, 0.86), ha='right',
                                      va='center', color='r',
                                      arrowprops=dict(arrowstyle="->", color='r'))
                # Indicate the median
                if len(recovered):
                    ax[i, j].annotate('M = %.2f' % np.median(recovered),
                                      xy=(0.35, 0.5), ha='right',
                                      xycoords='axes fraction',
                                      color='b', fontsize=16)
                if len(control):
                    ax[i, j].annotate('M = %.2f' % np.median(control),
                                      xy=(0.65, 0.5), ha='left',
                                      xycoords='axes fraction',
                                      color='r', fontsize=16)
                # Tweaks
                ax[i, j].set_xticks(xticks[i])
                ax[i, j].set_xlim(xticks[i][0], xticks[i][-1])
                ax[i, j].set_ylim(-0.005, ymax[i])
                ax[i, j].set_xlabel(r'$D/D_0$', fontsize=16)
                ax[i, j].get_yaxis().set_major_locator(MaxNLocator(5))
                for tick in ax[i, j].get_xticklabels() + \
                        ax[i, j].get_yticklabels():
                    tick.set_fontsize(14)
        if show:
            pl.show()
        else:
            return fig, ax
[ "def", "InjectionStatistics", "(", "campaign", "=", "0", ",", "clobber", "=", "False", ",", "model", "=", "'nPLD'", ",", "plot", "=", "True", ",", "show", "=", "True", ",", "*", "*", "kwargs", ")", ":", "# Compute the statistics", "stars", "=", "GetK2Campaign", "(", "campaign", ",", "epics_only", "=", "True", ")", "if", "type", "(", "campaign", ")", "is", "int", ":", "outfile", "=", "os", ".", "path", ".", "join", "(", "EVEREST_SRC", ",", "'missions'", ",", "'k2'", ",", "'tables'", ",", "'c%02d_%s.inj'", "%", "(", "campaign", ",", "model", ")", ")", "else", ":", "outfile", "=", "os", ".", "path", ".", "join", "(", "EVEREST_SRC", ",", "'missions'", ",", "'k2'", ",", "'tables'", ",", "'c%04.1f_%s.inj'", "%", "(", "campaign", ",", "model", ")", ")", "if", "clobber", "or", "not", "os", ".", "path", ".", "exists", "(", "outfile", ")", ":", "with", "open", "(", "outfile", ",", "'w'", ")", "as", "f", ":", "print", "(", "\"EPIC Depth UControl URecovered\"", "+", "\" MControl MRecovered\"", ",", "file", "=", "f", ")", "print", "(", "\"--------- ---------- ---------- ----------\"", "+", "\" ---------- ----------\"", ",", "file", "=", "f", ")", "for", "i", ",", "_", "in", "enumerate", "(", "stars", ")", ":", "sys", ".", "stdout", ".", "write", "(", "'\\rProcessing target %d/%d...'", "%", "(", "i", "+", "1", ",", "len", "(", "stars", ")", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "path", "=", "os", ".", "path", ".", "join", "(", "EVEREST_DAT", ",", "'k2'", ",", "'c%02d'", "%", "int", "(", "campaign", ")", ",", "(", "'%09d'", "%", "stars", "[", "i", "]", ")", "[", ":", "4", "]", "+", "'00000'", ",", "(", "'%09d'", "%", "stars", "[", "i", "]", ")", "[", "4", ":", "]", ")", "# Loop over all depths", "for", "depth", "in", "[", "0.01", ",", "0.001", ",", "0.0001", "]", ":", "try", ":", "# Unmasked", "data", "=", "np", ".", "load", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'%s_Inject_U%g.npz'", "%", "(", "model", ",", "depth", ")", ")", ")", "assert", "depth", "==", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'depth'", "]", ",", "\"\"", "ucontrol", "=", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'rec_depth_control'", "]", "urecovered", "=", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'rec_depth'", "]", "# Masked", "data", "=", "np", ".", "load", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'%s_Inject_M%g.npz'", "%", "(", "model", ",", "depth", ")", ")", ")", "assert", "depth", "==", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'depth'", "]", ",", "\"\"", "mcontrol", "=", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'rec_depth_control'", "]", "mrecovered", "=", "data", "[", "'inject'", "]", "[", "(", ")", "]", "[", "'rec_depth'", "]", "# Log it", "print", "(", "\"{:>09d} {:>13.8f} {:>13.8f} {:>13.8f} {:>13.8f} {:>13.8f}\"", ".", "format", "(", "stars", "[", "i", "]", ",", "depth", ",", "ucontrol", ",", "urecovered", ",", "mcontrol", ",", "mrecovered", ")", ",", "file", "=", "f", ")", "except", ":", "pass", "print", "(", "\"\"", ")", "if", "plot", ":", "# Load the statistics", "try", ":", "epic", ",", "depth", ",", "ucontrol", ",", "urecovered", ",", "mcontrol", ",", "mrecovered", "=", "np", ".", "loadtxt", "(", "outfile", ",", "unpack", "=", "True", ",", "skiprows", "=", "2", ")", "except", "ValueError", ":", "raise", "Exception", "(", "\"No targets to plot.\"", ")", "# Normalize to the injected depth", "ucontrol", "/=", "depth", "urecovered", "/=", "depth", "mcontrol", "/=", "depth", "mrecovered", "/=", 
"depth", "# Set up the plot", "fig", ",", "ax", "=", "pl", ".", "subplots", "(", "3", ",", "2", ",", "figsize", "=", "(", "9", ",", "12", ")", ")", "fig", ".", "subplots_adjust", "(", "hspace", "=", "0.29", ")", "ax", "[", "0", ",", "0", "]", ".", "set_title", "(", "r'Unmasked'", ",", "fontsize", "=", "18", ")", "ax", "[", "0", ",", "1", "]", ".", "set_title", "(", "r'Masked'", ",", "fontsize", "=", "18", ")", "ax", "[", "0", ",", "0", "]", ".", "set_ylabel", "(", "r'$D_0 = 10^{-2}$'", ",", "rotation", "=", "90", ",", "fontsize", "=", "18", ",", "labelpad", "=", "10", ")", "ax", "[", "1", ",", "0", "]", ".", "set_ylabel", "(", "r'$D_0 = 10^{-3}$'", ",", "rotation", "=", "90", ",", "fontsize", "=", "18", ",", "labelpad", "=", "10", ")", "ax", "[", "2", ",", "0", "]", ".", "set_ylabel", "(", "r'$D_0 = 10^{-4}$'", ",", "rotation", "=", "90", ",", "fontsize", "=", "18", ",", "labelpad", "=", "10", ")", "# Define some useful stuff for plotting", "depths", "=", "[", "1e-2", ",", "1e-3", ",", "1e-4", "]", "ranges", "=", "[", "(", "0.75", ",", "1.25", ")", ",", "(", "0.5", ",", "1.5", ")", ",", "(", "0.", ",", "2.", ")", "]", "nbins", "=", "[", "30", ",", "30", ",", "20", "]", "ymax", "=", "[", "0.4", ",", "0.25", ",", "0.16", "]", "xticks", "=", "[", "[", "0.75", ",", "0.875", ",", "1.", ",", "1.125", ",", "1.25", "]", ",", "[", "0.5", ",", "0.75", ",", "1.", ",", "1.25", ",", "1.5", "]", ",", "[", "0.", ",", "0.5", ",", "1.", ",", "1.5", ",", "2.0", "]", "]", "# Plot", "for", "i", "in", "range", "(", "3", ")", ":", "# Indices for this plot", "idx", "=", "np", ".", "where", "(", "depth", "==", "depths", "[", "i", "]", ")", "for", "j", ",", "control", ",", "recovered", "in", "zip", "(", "[", "0", ",", "1", "]", ",", "[", "ucontrol", "[", "idx", "]", ",", "mcontrol", "[", "idx", "]", "]", ",", "[", "urecovered", "[", "idx", "]", ",", "mrecovered", "[", "idx", "]", "]", ")", ":", "# Control", "ax", "[", "i", ",", "j", "]", ".", "hist", "(", "control", ",", "bins", "=", "nbins", "[", "i", "]", ",", "range", "=", "ranges", "[", "i", "]", ",", "color", "=", "'r'", ",", "histtype", "=", "'step'", ",", "weights", "=", "np", ".", "ones_like", "(", "control", ")", "/", "len", "(", "control", ")", ")", "# Recovered", "ax", "[", "i", ",", "j", "]", ".", "hist", "(", "recovered", ",", "bins", "=", "nbins", "[", "i", "]", ",", "range", "=", "ranges", "[", "i", "]", ",", "color", "=", "'b'", ",", "histtype", "=", "'step'", ",", "weights", "=", "np", ".", "ones_like", "(", "recovered", ")", "/", "len", "(", "recovered", ")", ")", "# Indicate center", "ax", "[", "i", ",", "j", "]", ".", "axvline", "(", "1.", ",", "color", "=", "'k'", ",", "ls", "=", "'--'", ")", "# Indicate the fraction above and below", "if", "len", "(", "recovered", ")", ":", "au", "=", "len", "(", "np", ".", "where", "(", "recovered", ">", "ranges", "[", "i", "]", "[", "1", "]", ")", "[", "0", "]", ")", "/", "len", "(", "recovered", ")", "al", "=", "len", "(", "np", ".", "where", "(", "recovered", "<", "ranges", "[", "i", "]", "[", "0", "]", ")", "[", "0", "]", ")", "/", "len", "(", "recovered", ")", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", "'%.2f'", "%", "al", ",", "xy", "=", "(", "0.01", ",", "0.93", ")", ",", "xycoords", "=", "'axes fraction'", ",", "xytext", "=", "(", "0.1", ",", "0.93", ")", ",", "ha", "=", "'left'", ",", "va", "=", "'center'", ",", "color", "=", "'b'", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "\"->\"", ",", "color", "=", "'b'", ")", ")", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", 
"'%.2f'", "%", "au", ",", "xy", "=", "(", "0.99", ",", "0.93", ")", ",", "xycoords", "=", "'axes fraction'", ",", "xytext", "=", "(", "0.9", ",", "0.93", ")", ",", "ha", "=", "'right'", ",", "va", "=", "'center'", ",", "color", "=", "'b'", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "\"->\"", ",", "color", "=", "'b'", ")", ")", "if", "len", "(", "control", ")", ":", "cu", "=", "len", "(", "np", ".", "where", "(", "control", ">", "ranges", "[", "i", "]", "[", "1", "]", ")", "[", "0", "]", ")", "/", "len", "(", "control", ")", "cl", "=", "len", "(", "np", ".", "where", "(", "control", "<", "ranges", "[", "i", "]", "[", "0", "]", ")", "[", "0", "]", ")", "/", "len", "(", "control", ")", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", "'%.2f'", "%", "cl", ",", "xy", "=", "(", "0.01", ",", "0.86", ")", ",", "xycoords", "=", "'axes fraction'", ",", "xytext", "=", "(", "0.1", ",", "0.86", ")", ",", "ha", "=", "'left'", ",", "va", "=", "'center'", ",", "color", "=", "'r'", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "\"->\"", ",", "color", "=", "'r'", ")", ")", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", "'%.2f'", "%", "cu", ",", "xy", "=", "(", "0.99", ",", "0.86", ")", ",", "xycoords", "=", "'axes fraction'", ",", "xytext", "=", "(", "0.9", ",", "0.86", ")", ",", "ha", "=", "'right'", ",", "va", "=", "'center'", ",", "color", "=", "'r'", ",", "arrowprops", "=", "dict", "(", "arrowstyle", "=", "\"->\"", ",", "color", "=", "'r'", ")", ")", "# Indicate the median", "if", "len", "(", "recovered", ")", ":", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", "'M = %.2f'", "%", "np", ".", "median", "(", "recovered", ")", ",", "xy", "=", "(", "0.35", ",", "0.5", ")", ",", "ha", "=", "'right'", ",", "xycoords", "=", "'axes fraction'", ",", "color", "=", "'b'", ",", "fontsize", "=", "16", ")", "if", "len", "(", "control", ")", ":", "ax", "[", "i", ",", "j", "]", ".", "annotate", "(", "'M = %.2f'", "%", "np", ".", "median", "(", "control", ")", ",", "xy", "=", "(", "0.65", ",", "0.5", ")", ",", "ha", "=", "'left'", ",", "xycoords", "=", "'axes fraction'", ",", "color", "=", "'r'", ",", "fontsize", "=", "16", ")", "# Tweaks", "ax", "[", "i", ",", "j", "]", ".", "set_xticks", "(", "xticks", "[", "i", "]", ")", "ax", "[", "i", ",", "j", "]", ".", "set_xlim", "(", "xticks", "[", "i", "]", "[", "0", "]", ",", "xticks", "[", "i", "]", "[", "-", "1", "]", ")", "ax", "[", "i", ",", "j", "]", ".", "set_ylim", "(", "-", "0.005", ",", "ymax", "[", "i", "]", ")", "ax", "[", "i", ",", "j", "]", ".", "set_xlabel", "(", "r'$D/D_0$'", ",", "fontsize", "=", "16", ")", "ax", "[", "i", ",", "j", "]", ".", "get_yaxis", "(", ")", ".", "set_major_locator", "(", "MaxNLocator", "(", "5", ")", ")", "for", "tick", "in", "ax", "[", "i", ",", "j", "]", ".", "get_xticklabels", "(", ")", "+", "ax", "[", "i", ",", "j", "]", ".", "get_yticklabels", "(", ")", ":", "tick", ".", "set_fontsize", "(", "14", ")", "if", "show", ":", "pl", ".", "show", "(", ")", "else", ":", "return", "fig", ",", "ax" ]
44.347594
22.283422
def _init_content_type_params(self):
    """ Return the Content-Type request header parameters

    Convert all of the semi-colon separated parameters into
    a dict of key/vals. If for some stupid reason duplicate
    & conflicting params are present then the last one wins.

    If a particular content-type param is non-compliant
    by not being a simple key=val pair then it is skipped.

    If no content-type header or params are present then
    return an empty dict.

    :return: dict
    """
    ret = {}

    if self.content_type:
        params = self.content_type.split(';')[1:]
        for param in params:
            try:
                key, val = param.split('=')
                ret[naked(key)] = naked(val)
            except ValueError:
                continue
    return ret
[ "def", "_init_content_type_params", "(", "self", ")", ":", "ret", "=", "{", "}", "if", "self", ".", "content_type", ":", "params", "=", "self", ".", "content_type", ".", "split", "(", "';'", ")", "[", "1", ":", "]", "for", "param", "in", "params", ":", "try", ":", "key", ",", "val", "=", "param", ".", "split", "(", "'='", ")", "ret", "[", "naked", "(", "key", ")", "]", "=", "naked", "(", "val", ")", "except", "ValueError", ":", "continue", "return", "ret" ]
29.689655
20.586207
def attributes(attrs):
    """Returns an attribute list, constructed from the dictionary attrs."""
    attrs = attrs or {}
    ident = attrs.get("id", "")
    classes = attrs.get("classes", [])
    keyvals = [[x, attrs[x]] for x in attrs if (x != "classes" and x != "id")]
    return [ident, classes, keyvals]
[ "def", "attributes", "(", "attrs", ")", ":", "attrs", "=", "attrs", "or", "{", "}", "ident", "=", "attrs", ".", "get", "(", "\"id\"", ",", "\"\"", ")", "classes", "=", "attrs", ".", "get", "(", "\"classes\"", ",", "[", "]", ")", "keyvals", "=", "[", "[", "x", ",", "attrs", "[", "x", "]", "]", "for", "x", "in", "attrs", "if", "(", "x", "!=", "\"classes\"", "and", "x", "!=", "\"id\"", ")", "]", "return", "[", "ident", ",", "classes", ",", "keyvals", "]" ]
34.444444
11.888889
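`attributes` is pure Python with no external dependencies, so its contract is easy to check directly; the input dict below is made up for illustration:

# "id" and "classes" are pulled out; everything else becomes key/val pairs.
attrs = {"id": "fig1", "classes": ["wide"], "width": "50%"}
print(attributes(attrs))
# ['fig1', ['wide'], [['width', '50%']]]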
def get(self, id_or_url, default=None):
    """Fetch and return the spreadsheet with the given id or url.

    Args:
        id_or_url (str): unique alphanumeric id or URL of the spreadsheet
    Returns:
        New SpreadSheet instance or given default if none is found
    Raises:
        ValueError: if a URL is given from which no id could be extracted
    """
    if '/' in id_or_url:
        id = urls.SheetUrl.from_string(id_or_url).id
    else:
        id = id_or_url
    try:
        return self[id]
    except KeyError:
        return default
[ "def", "get", "(", "self", ",", "id_or_url", ",", "default", "=", "None", ")", ":", "if", "'/'", "in", "id_or_url", ":", "id", "=", "urls", ".", "SheetUrl", ".", "from_string", "(", "id_or_url", ")", ".", "id", "else", ":", "id", "=", "id_or_url", "try", ":", "return", "self", "[", "id", "]", "except", "KeyError", ":", "return", "default" ]
33.333333
20.111111
def changed(self, node=None, allowcache=False):
    """
    Returns whether the node is up-to-date with respect to the BuildInfo
    stored last time it was built.

    For File nodes this is basically a wrapper around Node.changed(),
    but we allow the return value to get cached after the reference
    to the Executor got released in release_target_info().

    @see: Node.changed()
    """
    if node is None:
        try:
            return self._memo['changed']
        except KeyError:
            pass

    has_changed = SCons.Node.Node.changed(self, node)
    if allowcache:
        self._memo['changed'] = has_changed
    return has_changed
[ "def", "changed", "(", "self", ",", "node", "=", "None", ",", "allowcache", "=", "False", ")", ":", "if", "node", "is", "None", ":", "try", ":", "return", "self", ".", "_memo", "[", "'changed'", "]", "except", "KeyError", ":", "pass", "has_changed", "=", "SCons", ".", "Node", ".", "Node", ".", "changed", "(", "self", ",", "node", ")", "if", "allowcache", ":", "self", ".", "_memo", "[", "'changed'", "]", "=", "has_changed", "return", "has_changed" ]
33.142857
18.571429
def get_cgi_parameter_str_or_none(form: cgi.FieldStorage,
                                  key: str) -> Optional[str]:
    """
    Extracts a string parameter from a CGI form, or ``None`` if the key
    doesn't exist or the string is zero-length.
    """
    s = get_cgi_parameter_str(form, key)
    if s is None or len(s) == 0:
        return None
    return s
[ "def", "get_cgi_parameter_str_or_none", "(", "form", ":", "cgi", ".", "FieldStorage", ",", "key", ":", "str", ")", "->", "Optional", "[", "str", "]", ":", "s", "=", "get_cgi_parameter_str", "(", "form", ",", "key", ")", "if", "s", "is", "None", "or", "len", "(", "s", ")", "==", "0", ":", "return", "None", "return", "s" ]
35.3
13.5
def valid_identifiers(self):
    """Get a list of all valid identifiers for the current context.

    Returns:
        list(str): A list of all of the valid identifiers for this context
    """
    funcs = list(utils.find_all(self.contexts[-1])) + list(self.builtins)
    return funcs
[ "def", "valid_identifiers", "(", "self", ")", ":", "funcs", "=", "list", "(", "utils", ".", "find_all", "(", "self", ".", "contexts", "[", "-", "1", "]", ")", ")", "+", "list", "(", "self", ".", "builtins", ")", "return", "funcs" ]
33.444444
23.444444
async def _bind_key_to_queue(self, routing_key: AnyStr, queue_name: AnyStr) -> None:
    """
    Bind to queue with specified routing key.

    :param routing_key: Routing key to bind with.
    :param queue_name: Name of the queue
    :return: The result of the ``queue_bind`` call
    """
    logger.info("Binding key='%s'", routing_key)

    result = await self._channel.queue_bind(
        exchange_name=self._exchange_name,
        queue_name=queue_name,
        routing_key=routing_key,
    )
    return result
[ "async", "def", "_bind_key_to_queue", "(", "self", ",", "routing_key", ":", "AnyStr", ",", "queue_name", ":", "AnyStr", ")", "->", "None", ":", "logger", ".", "info", "(", "\"Binding key='%s'\"", ",", "routing_key", ")", "result", "=", "await", "self", ".", "_channel", ".", "queue_bind", "(", "exchange_name", "=", "self", ".", "_exchange_name", ",", "queue_name", "=", "queue_name", ",", "routing_key", "=", "routing_key", ",", ")", "return", "result" ]
33.6875
14.8125
def _CheckStatusAnalysisProcess(self, pid):
    """Checks the status of an analysis process.

    Args:
        pid (int): process ID (PID) of a registered analysis process.

    Raises:
        KeyError: if the process is not registered with the engine.
    """
    # TODO: Refactor this method, simplify and separate concerns (monitoring
    # vs management).
    self._RaiseIfNotRegistered(pid)

    if pid in self._completed_analysis_processes:
        status_indicator = definitions.STATUS_INDICATOR_COMPLETED
        process_status = {
            'processing_status': status_indicator}
        used_memory = 0
    else:
        process = self._processes_per_pid[pid]

        process_status = self._QueryProcessStatus(process)
        if process_status is None:
            process_is_alive = False
        else:
            process_is_alive = True

        process_information = self._process_information_per_pid[pid]
        used_memory = process_information.GetUsedMemory() or 0

        if self._worker_memory_limit and used_memory > self._worker_memory_limit:
            logger.warning((
                'Process: {0:s} (PID: {1:d}) killed because it exceeded the '
                'memory limit: {2:d}.').format(
                    process.name, pid, self._worker_memory_limit))
            self._KillProcess(pid)

        if isinstance(process_status, dict):
            self._rpc_errors_per_pid[pid] = 0
            status_indicator = process_status.get('processing_status', None)

            if status_indicator == definitions.STATUS_INDICATOR_COMPLETED:
                self._completed_analysis_processes.add(pid)
        else:
            rpc_errors = self._rpc_errors_per_pid.get(pid, 0) + 1
            self._rpc_errors_per_pid[pid] = rpc_errors

            if rpc_errors > self._MAXIMUM_RPC_ERRORS:
                process_is_alive = False

            if process_is_alive:
                rpc_port = process.rpc_port.value
                logger.warning((
                    'Unable to retrieve process: {0:s} (PID: {1:d}) status via '
                    'RPC socket: http://localhost:{2:d}').format(
                        process.name, pid, rpc_port))

                processing_status_string = 'RPC error'
                status_indicator = definitions.STATUS_INDICATOR_RUNNING
            else:
                processing_status_string = 'killed'
                status_indicator = definitions.STATUS_INDICATOR_KILLED

            process_status = {
                'processing_status': processing_status_string}

    self._UpdateProcessingStatus(pid, process_status, used_memory)

    if status_indicator in definitions.ERROR_STATUS_INDICATORS:
        logger.error((
            'Process {0:s} (PID: {1:d}) is not functioning correctly. '
            'Status code: {2!s}.').format(
                process.name, pid, status_indicator))

        self._TerminateProcessByPid(pid)
[ "def", "_CheckStatusAnalysisProcess", "(", "self", ",", "pid", ")", ":", "# TODO: Refactor this method, simplify and separate concerns (monitoring", "# vs management).", "self", ".", "_RaiseIfNotRegistered", "(", "pid", ")", "if", "pid", "in", "self", ".", "_completed_analysis_processes", ":", "status_indicator", "=", "definitions", ".", "STATUS_INDICATOR_COMPLETED", "process_status", "=", "{", "'processing_status'", ":", "status_indicator", "}", "used_memory", "=", "0", "else", ":", "process", "=", "self", ".", "_processes_per_pid", "[", "pid", "]", "process_status", "=", "self", ".", "_QueryProcessStatus", "(", "process", ")", "if", "process_status", "is", "None", ":", "process_is_alive", "=", "False", "else", ":", "process_is_alive", "=", "True", "process_information", "=", "self", ".", "_process_information_per_pid", "[", "pid", "]", "used_memory", "=", "process_information", ".", "GetUsedMemory", "(", ")", "or", "0", "if", "self", ".", "_worker_memory_limit", "and", "used_memory", ">", "self", ".", "_worker_memory_limit", ":", "logger", ".", "warning", "(", "(", "'Process: {0:s} (PID: {1:d}) killed because it exceeded the '", "'memory limit: {2:d}.'", ")", ".", "format", "(", "process", ".", "name", ",", "pid", ",", "self", ".", "_worker_memory_limit", ")", ")", "self", ".", "_KillProcess", "(", "pid", ")", "if", "isinstance", "(", "process_status", ",", "dict", ")", ":", "self", ".", "_rpc_errors_per_pid", "[", "pid", "]", "=", "0", "status_indicator", "=", "process_status", ".", "get", "(", "'processing_status'", ",", "None", ")", "if", "status_indicator", "==", "definitions", ".", "STATUS_INDICATOR_COMPLETED", ":", "self", ".", "_completed_analysis_processes", ".", "add", "(", "pid", ")", "else", ":", "rpc_errors", "=", "self", ".", "_rpc_errors_per_pid", ".", "get", "(", "pid", ",", "0", ")", "+", "1", "self", ".", "_rpc_errors_per_pid", "[", "pid", "]", "=", "rpc_errors", "if", "rpc_errors", ">", "self", ".", "_MAXIMUM_RPC_ERRORS", ":", "process_is_alive", "=", "False", "if", "process_is_alive", ":", "rpc_port", "=", "process", ".", "rpc_port", ".", "value", "logger", ".", "warning", "(", "(", "'Unable to retrieve process: {0:s} (PID: {1:d}) status via '", "'RPC socket: http://localhost:{2:d}'", ")", ".", "format", "(", "process", ".", "name", ",", "pid", ",", "rpc_port", ")", ")", "processing_status_string", "=", "'RPC error'", "status_indicator", "=", "definitions", ".", "STATUS_INDICATOR_RUNNING", "else", ":", "processing_status_string", "=", "'killed'", "status_indicator", "=", "definitions", ".", "STATUS_INDICATOR_KILLED", "process_status", "=", "{", "'processing_status'", ":", "processing_status_string", "}", "self", ".", "_UpdateProcessingStatus", "(", "pid", ",", "process_status", ",", "used_memory", ")", "if", "status_indicator", "in", "definitions", ".", "ERROR_STATUS_INDICATORS", ":", "logger", ".", "error", "(", "(", "'Process {0:s} (PID: {1:d}) is not functioning correctly. '", "'Status code: {2!s}.'", ")", ".", "format", "(", "process", ".", "name", ",", "pid", ",", "status_indicator", ")", ")", "self", ".", "_TerminateProcessByPid", "(", "pid", ")" ]
34.61039
21.792208
def extract_input(pipe_def=None, pipe_generator=None):
    """Extract inputs required by a pipe"""
    if pipe_def:
        pyinput = gen_input(pipe_def)
    elif pipe_generator:
        pyinput = pipe_generator(Context(describe_input=True))
    else:
        raise Exception('Must supply at least one kwarg!')

    return sorted(list(pyinput))
[ "def", "extract_input", "(", "pipe_def", "=", "None", ",", "pipe_generator", "=", "None", ")", ":", "if", "pipe_def", ":", "pyinput", "=", "gen_input", "(", "pipe_def", ")", "elif", "pipe_generator", ":", "pyinput", "=", "pipe_generator", "(", "Context", "(", "describe_input", "=", "True", ")", ")", "else", ":", "raise", "Exception", "(", "'Must supply at least one kwarg!'", ")", "return", "sorted", "(", "list", "(", "pyinput", ")", ")" ]
33.5
17.6
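Calling `extract_input` with neither kwarg exercises its guard clause:

try:
    extract_input()
except Exception as err:
    print(err)  # Must supply at least one kwarg!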
def _set_seed(self):
    """ Set random seed for numpy and tensorflow packages """
    if self.flags['SEED'] is not None:
        tf.set_random_seed(self.flags['SEED'])
        np.random.seed(self.flags['SEED'])
[ "def", "_set_seed", "(", "self", ")", ":", "if", "self", ".", "flags", "[", "'SEED'", "]", "is", "not", "None", ":", "tf", ".", "set_random_seed", "(", "self", ".", "flags", "[", "'SEED'", "]", ")", "np", ".", "random", ".", "seed", "(", "self", ".", "flags", "[", "'SEED'", "]", ")" ]
44.6
7.6
def _xread(self, streams, timeout=0, count=None, latest_ids=None):
    """Wraps up common functionality between ``xread()`` and ``xread_group()``

    You should probably be using ``xread()`` or ``xread_group()`` directly.
    """
    if latest_ids is None:
        latest_ids = ['$'] * len(streams)
    if len(streams) != len(latest_ids):
        raise ValueError(
            'The streams and latest_ids parameters must be of the '
            'same length'
        )

    count_args = [b'COUNT', count] if count else []
    if timeout is None:
        block_args = []
    elif not isinstance(timeout, int):
        raise TypeError(
            "timeout argument must be int, not {!r}".format(timeout))
    else:
        block_args = [b'BLOCK', timeout]
    return block_args + count_args + [b'STREAMS'] + streams + latest_ids
[ "def", "_xread", "(", "self", ",", "streams", ",", "timeout", "=", "0", ",", "count", "=", "None", ",", "latest_ids", "=", "None", ")", ":", "if", "latest_ids", "is", "None", ":", "latest_ids", "=", "[", "'$'", "]", "*", "len", "(", "streams", ")", "if", "len", "(", "streams", ")", "!=", "len", "(", "latest_ids", ")", ":", "raise", "ValueError", "(", "'The streams and latest_ids parameters must be of the '", "'same length'", ")", "count_args", "=", "[", "b'COUNT'", ",", "count", "]", "if", "count", "else", "[", "]", "if", "timeout", "is", "None", ":", "block_args", "=", "[", "]", "elif", "not", "isinstance", "(", "timeout", ",", "int", ")", ":", "raise", "TypeError", "(", "\"timeout argument must be int, not {!r}\"", ".", "format", "(", "timeout", ")", ")", "else", ":", "block_args", "=", "[", "b'BLOCK'", ",", "timeout", "]", "return", "block_args", "+", "count_args", "+", "[", "b'STREAMS'", "]", "+", "streams", "+", "latest_ids" ]
38.695652
17.782609
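To make the return shape of `_xread` concrete without a Redis connection or class instance, here is a standalone rendition of its argument-building logic (validation elided; `build_xread_args` is a hypothetical name):

def build_xread_args(streams, timeout=0, count=None, latest_ids=None):
    # Mirrors the list construction in _xread above.
    if latest_ids is None:
        latest_ids = ['$'] * len(streams)
    count_args = [b'COUNT', count] if count else []
    block_args = [] if timeout is None else [b'BLOCK', timeout]
    return block_args + count_args + [b'STREAMS'] + streams + latest_ids

print(build_xread_args(['stream-a'], timeout=1000, count=10))
# [b'BLOCK', 1000, b'COUNT', 10, b'STREAMS', 'stream-a', '$']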
def fit(self, X, y, random_state=np.random):
    """Create constraints from labels and learn the LSML model.

    Parameters
    ----------
    X : (n x d) matrix
        Input data, where each row corresponds to a single instance.

    y : (n) array-like
        Data labels.

    random_state : numpy.random.RandomState, optional
        If provided, controls random number generation.
    """
    if self.num_labeled != 'deprecated':
        warnings.warn('"num_labeled" parameter is not used.'
                      ' It has been deprecated in version 0.5.0 and will be'
                      ' removed in 0.6.0', DeprecationWarning)
    X, y = self._prepare_inputs(X, y, ensure_min_samples=2)
    num_constraints = self.num_constraints
    if num_constraints is None:
        num_classes = len(np.unique(y))
        num_constraints = 20 * num_classes**2

    c = Constraints(y)
    pos_neg = c.positive_negative_pairs(num_constraints, same_length=True,
                                        random_state=random_state)
    return _BaseLSML._fit(self, X[np.column_stack(pos_neg)],
                          weights=self.weights)
[ "def", "fit", "(", "self", ",", "X", ",", "y", ",", "random_state", "=", "np", ".", "random", ")", ":", "if", "self", ".", "num_labeled", "!=", "'deprecated'", ":", "warnings", ".", "warn", "(", "'\"num_labeled\" parameter is not used.'", "' It has been deprecated in version 0.5.0 and will be'", "'removed in 0.6.0'", ",", "DeprecationWarning", ")", "X", ",", "y", "=", "self", ".", "_prepare_inputs", "(", "X", ",", "y", ",", "ensure_min_samples", "=", "2", ")", "num_constraints", "=", "self", ".", "num_constraints", "if", "num_constraints", "is", "None", ":", "num_classes", "=", "len", "(", "np", ".", "unique", "(", "y", ")", ")", "num_constraints", "=", "20", "*", "num_classes", "**", "2", "c", "=", "Constraints", "(", "y", ")", "pos_neg", "=", "c", ".", "positive_negative_pairs", "(", "num_constraints", ",", "same_length", "=", "True", ",", "random_state", "=", "random_state", ")", "return", "_BaseLSML", ".", "_fit", "(", "self", ",", "X", "[", "np", ".", "column_stack", "(", "pos_neg", ")", "]", ",", "weights", "=", "self", ".", "weights", ")" ]
37.724138
18.62069
def retrieve_loadbalancer_stats(self, loadbalancer, **_params):
    """Retrieves stats for a certain load balancer."""
    return self.get(self.lbaas_loadbalancer_path_stats % (loadbalancer),
                    params=_params)
[ "def", "retrieve_loadbalancer_stats", "(", "self", ",", "loadbalancer", ",", "*", "*", "_params", ")", ":", "return", "self", ".", "get", "(", "self", ".", "lbaas_loadbalancer_path_stats", "%", "(", "loadbalancer", ")", ",", "params", "=", "_params", ")" ]
59
15
def infer_issubclass(callnode, context=None):
    """Infer issubclass() calls

    :param nodes.Call callnode: an `issubclass` call
    :param InferenceContext context: the context for the inference
    :rtype nodes.Const: Boolean Const value of the `issubclass` call
    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode)
    if call.keyword_arguments:
        # issubclass doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
    if len(call.positional_arguments) != 2:
        raise UseInferenceDefault(
            "Expected two arguments, got {count}".format(
                count=len(call.positional_arguments)
            )
        )
    # The left hand argument is the obj to be checked
    obj_node, class_or_tuple_node = call.positional_arguments

    try:
        obj_type = next(obj_node.infer(context=context))
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    if not isinstance(obj_type, nodes.ClassDef):
        raise UseInferenceDefault("TypeError: arg 1 must be class")

    # The right hand argument is the class(es) that the given
    # object is to be checked against.
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    try:
        issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except MroError as exc:
        raise UseInferenceDefault from exc
    return nodes.Const(issubclass_bool)
[ "def", "infer_issubclass", "(", "callnode", ",", "context", "=", "None", ")", ":", "call", "=", "arguments", ".", "CallSite", ".", "from_call", "(", "callnode", ")", "if", "call", ".", "keyword_arguments", ":", "# issubclass doesn't support keyword arguments", "raise", "UseInferenceDefault", "(", "\"TypeError: issubclass() takes no keyword arguments\"", ")", "if", "len", "(", "call", ".", "positional_arguments", ")", "!=", "2", ":", "raise", "UseInferenceDefault", "(", "\"Expected two arguments, got {count}\"", ".", "format", "(", "count", "=", "len", "(", "call", ".", "positional_arguments", ")", ")", ")", "# The left hand argument is the obj to be checked", "obj_node", ",", "class_or_tuple_node", "=", "call", ".", "positional_arguments", "try", ":", "obj_type", "=", "next", "(", "obj_node", ".", "infer", "(", "context", "=", "context", ")", ")", "except", "InferenceError", "as", "exc", ":", "raise", "UseInferenceDefault", "from", "exc", "if", "not", "isinstance", "(", "obj_type", ",", "nodes", ".", "ClassDef", ")", ":", "raise", "UseInferenceDefault", "(", "\"TypeError: arg 1 must be class\"", ")", "# The right hand argument is the class(es) that the given", "# object is to be checked against.", "try", ":", "class_container", "=", "_class_or_tuple_to_container", "(", "class_or_tuple_node", ",", "context", "=", "context", ")", "except", "InferenceError", "as", "exc", ":", "raise", "UseInferenceDefault", "from", "exc", "try", ":", "issubclass_bool", "=", "helpers", ".", "object_issubclass", "(", "obj_type", ",", "class_container", ",", "context", ")", "except", "AstroidTypeError", "as", "exc", ":", "raise", "UseInferenceDefault", "(", "\"TypeError: \"", "+", "str", "(", "exc", ")", ")", "from", "exc", "except", "MroError", "as", "exc", ":", "raise", "UseInferenceDefault", "from", "exc", "return", "nodes", ".", "Const", "(", "issubclass_bool", ")" ]
40
17.534884
def _query_postgres(self):
    """
    Queries Postgres and returns a cursor to the results.
    """
    postgres = PostgresHook(postgres_conn_id=self.postgres_conn_id)
    conn = postgres.get_conn()
    cursor = conn.cursor()
    cursor.execute(self.sql, self.parameters)
    return cursor
[ "def", "_query_postgres", "(", "self", ")", ":", "postgres", "=", "PostgresHook", "(", "postgres_conn_id", "=", "self", ".", "postgres_conn_id", ")", "conn", "=", "postgres", ".", "get_conn", "(", ")", "cursor", "=", "conn", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "self", ".", "sql", ",", "self", ".", "parameters", ")", "return", "cursor" ]
34.888889
12.222222
def install(packages, options=None, fatal=False):
    """Install one or more packages."""
    cmd = ['yum', '--assumeyes']
    if options is not None:
        cmd.extend(options)
    cmd.append('install')
    if isinstance(packages, six.string_types):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Installing {} with options: {}".format(packages, options))
    _run_yum_command(cmd, fatal)
[ "def", "install", "(", "packages", ",", "options", "=", "None", ",", "fatal", "=", "False", ")", ":", "cmd", "=", "[", "'yum'", ",", "'--assumeyes'", "]", "if", "options", "is", "not", "None", ":", "cmd", ".", "extend", "(", "options", ")", "cmd", ".", "append", "(", "'install'", ")", "if", "isinstance", "(", "packages", ",", "six", ".", "string_types", ")", ":", "cmd", ".", "append", "(", "packages", ")", "else", ":", "cmd", ".", "extend", "(", "packages", ")", "log", "(", "\"Installing {} with options: {}\"", ".", "format", "(", "packages", ",", "options", ")", ")", "_run_yum_command", "(", "cmd", ",", "fatal", ")" ]
35.076923
12.384615
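A standalone rendition of the command-list construction in `install`, so the resulting yum invocation is visible without running it (`build_yum_cmd` is a hypothetical name, and `str` replaces `six.string_types`, assuming Python 3):

def build_yum_cmd(packages, options=None):
    # Mirrors the list building in install() above, minus logging/execution.
    cmd = ['yum', '--assumeyes']
    if options is not None:
        cmd.extend(options)
    cmd.append('install')
    if isinstance(packages, str):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    return cmd

print(build_yum_cmd(['vim', 'git'], options=['--nogpgcheck']))
# ['yum', '--assumeyes', '--nogpgcheck', 'install', 'vim', 'git']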
def list_all_zip_codes_geo_zones(cls, **kwargs):
    """List ZipCodesGeoZones

    Return a list of ZipCodesGeoZones.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.list_all_zip_codes_geo_zones(async=True)
    >>> result = thread.get()

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[ZipCodesGeoZone]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._list_all_zip_codes_geo_zones_with_http_info(**kwargs)
    else:
        (data) = cls._list_all_zip_codes_geo_zones_with_http_info(**kwargs)
        return data
[ "def", "list_all_zip_codes_geo_zones", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async'", ")", ":", "return", "cls", ".", "_list_all_zip_codes_geo_zones_with_http_info", "(", "*", "*", "kwargs", ")", "else", ":", "(", "data", ")", "=", "cls", ".", "_list_all_zip_codes_geo_zones_with_http_info", "(", "*", "*", "kwargs", ")", "return", "data" ]
39.130435
15.695652
def __validate_path_parameters(self, field, path_parameters):
    """Verifies that all path parameters correspond to an existing subfield.

    Args:
        field: An instance of a subclass of messages.Field. Should be the root
            level property name in each path parameter in path_parameters. For
            example, if the field is called 'foo', then each path parameter
            should begin with 'foo.'.
        path_parameters: A list of Strings representing URI parameter variables.

    Raises:
        TypeError: If one of the path parameters does not start with field.name.
    """
    for param in path_parameters:
        segment_list = param.split('.')
        if segment_list[0] != field.name:
            raise TypeError('Subfield %r can\'t come from field %r.'
                            % (param, field.name))
        self.__validate_simple_subfield(field.name, field, segment_list[1:])
[ "def", "__validate_path_parameters", "(", "self", ",", "field", ",", "path_parameters", ")", ":", "for", "param", "in", "path_parameters", ":", "segment_list", "=", "param", ".", "split", "(", "'.'", ")", "if", "segment_list", "[", "0", "]", "!=", "field", ".", "name", ":", "raise", "TypeError", "(", "'Subfield %r can\\'t come from field %r.'", "%", "(", "param", ",", "field", ".", "name", ")", ")", "self", ".", "__validate_simple_subfield", "(", "field", ".", "name", ",", "field", ",", "segment_list", "[", "1", ":", "]", ")" ]
45.947368
22.947368
def __extend_token_object(self, token_object, is_denormalize=True,
                          func_denormalizer=denormalize_text):
    # type: (TokenizedResult,bool,Callable[[str],str])->Tuple
    """This method creates dict object from token object."""
    assert isinstance(token_object, TokenizedResult)
    if is_denormalize:
        if token_object.is_feature == True:
            if token_object.is_surface == True:
                token = (func_denormalizer(token_object.word_surface), token_object.tuple_pos)
            else:
                token = (func_denormalizer(token_object.word_stem), token_object.tuple_pos)
        else:
            if token_object.is_surface == True:
                token = func_denormalizer(token_object.word_surface)
            else:
                token = func_denormalizer(token_object.word_stem)
    else:
        if token_object.is_feature == True:
            if token_object.is_surface == True:
                token = (token_object.word_surface, token_object.tuple_pos)
            else:
                token = (token_object.word_stem, token_object.tuple_pos)
        else:
            if token_object.is_surface == True:
                token = token_object.word_surface
            else:
                token = token_object.word_stem

    return token
[ "def", "__extend_token_object", "(", "self", ",", "token_object", ",", "is_denormalize", "=", "True", ",", "func_denormalizer", "=", "denormalize_text", ")", ":", "# type: (TokenizedResult,bool,Callable[[str],str])->Tuple", "assert", "isinstance", "(", "token_object", ",", "TokenizedResult", ")", "if", "is_denormalize", ":", "if", "token_object", ".", "is_feature", "==", "True", ":", "if", "token_object", ".", "is_surface", "==", "True", ":", "token", "=", "(", "func_denormalizer", "(", "token_object", ".", "word_surface", ")", ",", "token_object", ".", "tuple_pos", ")", "else", ":", "token", "=", "(", "func_denormalizer", "(", "token_object", ".", "word_stem", ")", ",", "token_object", ".", "tuple_pos", ")", "else", ":", "if", "token_object", ".", "is_surface", "==", "True", ":", "token", "=", "func_denormalizer", "(", "token_object", ".", "word_surface", ")", "else", ":", "token", "=", "func_denormalizer", "(", "token_object", ".", "word_stem", ")", "else", ":", "if", "token_object", ".", "is_feature", "==", "True", ":", "if", "token_object", ".", "is_surface", "==", "True", ":", "token", "=", "(", "token_object", ".", "word_surface", ",", "token_object", ".", "tuple_pos", ")", "else", ":", "token", "=", "(", "token_object", ".", "word_stem", ",", "token_object", ".", "tuple_pos", ")", "else", ":", "if", "token_object", ".", "is_surface", "==", "True", ":", "token", "=", "token_object", ".", "word_surface", "else", ":", "token", "=", "token_object", ".", "word_stem", "return", "token" ]
44.40625
21.09375
def _construct_number_token(self, d: Dict, nlp) -> List[Dict]:
        """
        Construct a number token
        Args:
            d: Dict
            nlp

        Returns: List[Dict]
        """
        result = []
        if not d["numbers"]:
            this_token = {attrs.LIKE_NUM: True}
            result.append(this_token)
            if d["length"]:
                result = self._add_length_constrain(result, d["length"])
        elif len(d["numbers"]) == 1:
            this_token = {attrs.ORTH: str(d["numbers"][0])}
            result.append(this_token)
        else:
            global FLAG_ID
            number_set = set(d["numbers"])

            def is_selected_number(x):
                return x in number_set

            FLAG_DICT[FLAG_ID] = nlp.vocab.add_flag(is_selected_number)
            this_token = {FLAG_DICT[FLAG_ID]: True}
            FLAG_ID += 1
            result.append(this_token)
        result = self._add_common_constrain(result, d)
        return result
[ "def", "_construct_number_token", "(", "self", ",", "d", ":", "Dict", ",", "nlp", ")", "->", "List", "[", "Dict", "]", ":", "result", "=", "[", "]", "if", "not", "d", "[", "\"numbers\"", "]", ":", "this_token", "=", "{", "attrs", ".", "LIKE_NUM", ":", "True", "}", "result", ".", "append", "(", "this_token", ")", "if", "d", "[", "\"length\"", "]", ":", "result", "=", "self", ".", "_add_length_constrain", "(", "result", ",", "d", "[", "\"length\"", "]", ")", "elif", "len", "(", "d", "[", "\"numbers\"", "]", ")", "==", "1", ":", "this_token", "=", "{", "attrs", ".", "ORTH", ":", "str", "(", "d", "[", "\"numbers\"", "]", "[", "0", "]", ")", "}", "result", ".", "append", "(", "this_token", ")", "else", ":", "global", "FLAG_ID", "number_set", "=", "set", "(", "d", "[", "\"numbers\"", "]", ")", "def", "is_selected_number", "(", "x", ")", ":", "return", "x", "in", "number_set", "FLAG_DICT", "[", "FLAG_ID", "]", "=", "nlp", ".", "vocab", ".", "add_flag", "(", "is_selected_number", ")", "this_token", "=", "{", "FLAG_DICT", "[", "FLAG_ID", "]", ":", "True", "}", "FLAG_ID", "+=", "1", "result", ".", "append", "(", "this_token", ")", "result", "=", "self", ".", "_add_common_constrain", "(", "result", ",", "d", ")", "return", "result" ]
30.1875
16.625
def new_address(self, sender=None, nonce=None):
        """Create a fresh 160-bit address"""
        if sender is not None and nonce is None:
            nonce = self.get_nonce(sender)
        new_address = self.calculate_new_address(sender, nonce)
        if sender is None and new_address in self:
            return self.new_address(sender, nonce)
        return new_address
[ "def", "new_address", "(", "self", ",", "sender", "=", "None", ",", "nonce", "=", "None", ")", ":", "if", "sender", "is", "not", "None", "and", "nonce", "is", "None", ":", "nonce", "=", "self", ".", "get_nonce", "(", "sender", ")", "new_address", "=", "self", ".", "calculate_new_address", "(", "sender", ",", "nonce", ")", "if", "sender", "is", "None", "and", "new_address", "in", "self", ":", "return", "self", ".", "new_address", "(", "sender", ",", "nonce", ")", "return", "new_address" ]
41
12.666667
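The address space here is Ethereum-style (160-bit). Below is a minimal sketch of what a `calculate_new_address` helper conventionally computes, keccak256 of the RLP-encoded (sender, nonce) pair; the `rlp` (pyrlp) and `pycryptodome` packages are assumptions, and the real helper in this codebase may differ.

import rlp                      # assumed dependency: pyrlp
from Crypto.Hash import keccak  # assumed dependency: pycryptodome

def calculate_new_address(sender: bytes, nonce: int) -> bytes:
    # Conventional Ethereum CREATE derivation: the last 20 bytes
    # (160 bits) of keccak256(rlp([sender, nonce])).
    digest = keccak.new(digest_bits=256)
    digest.update(rlp.encode([sender, nonce]))
    return digest.digest()[-20:]

print(calculate_new_address(bytes(20), 0).hex())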
def route(bp, *args, **kwargs): """Journey route decorator Enables simple serialization, deserialization and validation of Flask routes with the help of Marshmallow. :param bp: :class:`flask.Blueprint` object :param args: args to pass along to `Blueprint.route` :param kwargs: - :strict_slashes: Enable / disable strict slashes (default False) - :validate: Enable / disable body/query validation (default True) - :_query: Unmarshal Query string into this schema - :_body: Unmarshal JSON body into this schema - :marshal_with: Serialize the output with this schema :raises: - ValidationError if the query parameters or JSON body fails validation """ kwargs['strict_slashes'] = kwargs.pop('strict_slashes', False) body = _validate_schema(kwargs.pop('_body', None)) query = _validate_schema(kwargs.pop('_query', None)) output = _validate_schema(kwargs.pop('marshal_with', None)) validate = kwargs.pop('validate', True) def decorator(f): @bp.route(*args, **kwargs) @wraps(f) def wrapper(*inner_args, **inner_kwargs): """If a schema (_body and/or _query) was supplied to the route decorator, the deserialized :class`marshmallow.Schema` object is injected into the decorated function's kwargs.""" try: if query is not None: query.strict = validate url = furl(request.url) inner_kwargs['_query'] = query.load(data=url.args) if body is not None: body.strict = validate json_data = request.get_json() if json_data is None: # Set json_data to empty dict if body is empty, so it gets picked up by the validator json_data = {} inner_kwargs['_body'] = body.load(data=json_data) except ValidationError as err: return jsonify(err.messages), 422 if output: data = output.dump(f(*inner_args, **inner_kwargs)) return jsonify(data[0]) return f(*inner_args, **inner_kwargs) return f return decorator
[ "def", "route", "(", "bp", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "'strict_slashes'", "]", "=", "kwargs", ".", "pop", "(", "'strict_slashes'", ",", "False", ")", "body", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'_body'", ",", "None", ")", ")", "query", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'_query'", ",", "None", ")", ")", "output", "=", "_validate_schema", "(", "kwargs", ".", "pop", "(", "'marshal_with'", ",", "None", ")", ")", "validate", "=", "kwargs", ".", "pop", "(", "'validate'", ",", "True", ")", "def", "decorator", "(", "f", ")", ":", "@", "bp", ".", "route", "(", "*", "args", ",", "*", "*", "kwargs", ")", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", ":", "\"\"\"If a schema (_body and/or _query) was supplied to the route decorator, the deserialized\n :class`marshmallow.Schema` object is injected into the decorated function's kwargs.\"\"\"", "try", ":", "if", "query", "is", "not", "None", ":", "query", ".", "strict", "=", "validate", "url", "=", "furl", "(", "request", ".", "url", ")", "inner_kwargs", "[", "'_query'", "]", "=", "query", ".", "load", "(", "data", "=", "url", ".", "args", ")", "if", "body", "is", "not", "None", ":", "body", ".", "strict", "=", "validate", "json_data", "=", "request", ".", "get_json", "(", ")", "if", "json_data", "is", "None", ":", "# Set json_data to empty dict if body is empty, so it gets picked up by the validator", "json_data", "=", "{", "}", "inner_kwargs", "[", "'_body'", "]", "=", "body", ".", "load", "(", "data", "=", "json_data", ")", "except", "ValidationError", "as", "err", ":", "return", "jsonify", "(", "err", ".", "messages", ")", ",", "422", "if", "output", ":", "data", "=", "output", ".", "dump", "(", "f", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", ")", "return", "jsonify", "(", "data", "[", "0", "]", ")", "return", "f", "(", "*", "inner_args", ",", "*", "*", "inner_kwargs", ")", "return", "f", "return", "decorator" ]
38.12069
21.724138
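A usage sketch for the decorator above, assuming Flask and marshmallow 2.x (the `.strict` attribute and tuple-style dump results match that API). The schema and endpoint names are illustrative; only the `_query` and `marshal_with` keywords come from the docstring.

from flask import Blueprint
from marshmallow import Schema, fields

bp = Blueprint('users', __name__)

class UserQuerySchema(Schema):
    page = fields.Int()

class UserSchema(Schema):
    name = fields.Str()

@route(bp, '/users', methods=['GET'],
       _query=UserQuerySchema(), marshal_with=UserSchema(many=True))
def list_users(_query):
    # _query is the schema's load() result, injected by the wrapper
    return [{'name': 'alice'}]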
def getUpperDetectionLimit(self):
        """Returns the Upper Detection Limit for this service as a float,
        or 0 if the stored value cannot be converted
        """
        udl = self.getField('UpperDetectionLimit').get(self)
        try:
            return float(udl)
        except ValueError:
            return 0
[ "def", "getUpperDetectionLimit", "(", "self", ")", ":", "udl", "=", "self", ".", "getField", "(", "'UpperDetectionLimit'", ")", ".", "get", "(", "self", ")", "try", ":", "return", "float", "(", "udl", ")", "except", "ValueError", ":", "return", "0" ]
33.375
12.5
def lock(self):
        '''
        Try to lock the file:
        - the function will wait until the file is unlocked if 'wait' was defined as locktype
        - the function will raise an AlreadyLocked exception if 'lock' was defined as locktype
        '''

        # Open file
        self.__fd = open(self.__lockfile, "w")

        # Get it locked
        if self.__locktype == "wait":
            # Try to get it locked until ready
            fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX)
        elif self.__locktype == "lock":
            # Try to get the locker if can not raise an exception
            try:
                fcntl.flock(self.__fd.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
            except IOError:
                raise AlreadyLocked("File is already locked")
[ "def", "lock", "(", "self", ")", ":", "# Open file", "self", ".", "__fd", "=", "open", "(", "self", ".", "__lockfile", ",", "\"w\"", ")", "# Get it locked", "if", "self", ".", "__locktype", "==", "\"wait\"", ":", "# Try to get it locked until ready", "fcntl", ".", "flock", "(", "self", ".", "__fd", ".", "fileno", "(", ")", ",", "fcntl", ".", "LOCK_EX", ")", "elif", "self", ".", "__locktype", "==", "\"lock\"", ":", "# Try to get the locker if can not raise an exception", "try", ":", "fcntl", ".", "flock", "(", "self", ".", "__fd", ".", "fileno", "(", ")", ",", "fcntl", ".", "LOCK_EX", "|", "fcntl", ".", "LOCK_NB", ")", "except", "IOError", ":", "raise", "AlreadyLocked", "(", "\"File is already locked\"", ")" ]
38.4
23.2
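A self-contained sketch of the two fcntl modes used above, blocking ('wait') versus fail-fast ('lock'), runnable on Unix; the lock-file path is a placeholder.

import fcntl

with open('/tmp/demo.lock', 'w') as fd:
    # 'wait' mode: block until the exclusive lock becomes available
    fcntl.flock(fd.fileno(), fcntl.LOCK_EX)
    fcntl.flock(fd.fileno(), fcntl.LOCK_UN)

    # 'lock' mode: try once, fail immediately if another process holds it
    try:
        fcntl.flock(fd.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print('file is already locked')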
def argument_kind(args):
    # type: (List[Argument]) -> Optional[str]
    """Return the kind of an argument, based on one or more descriptions of the argument.

    Return None if the arguments do not all have the same kind.
    """
    kinds = set(arg.kind for arg in args)
    if len(kinds) != 1:
        return None
    return kinds.pop()
[ "def", "argument_kind", "(", "args", ")", ":", "# type: (List[Argument]) -> Optional[str]", "kinds", "=", "set", "(", "arg", ".", "kind", "for", "arg", "in", "args", ")", "if", "len", "(", "kinds", ")", "!=", "1", ":", "return", "None", "return", "kinds", ".", "pop", "(", ")" ]
32.8
13.6
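A minimal self-contained check of the behavior, assuming argument_kind above is importable; the stand-in Argument type is illustrative (the real Argument class lives elsewhere in this codebase).

from collections import namedtuple

Argument = namedtuple('Argument', ['name', 'kind'])

same = [Argument('x', 'ARG_POS'), Argument('y', 'ARG_POS')]
mixed = [Argument('x', 'ARG_POS'), Argument('y', 'ARG_OPT')]

assert argument_kind(same) == 'ARG_POS'  # every argument has the same kind
assert argument_kind(mixed) is None      # kinds differ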
def order_executed(self, orderDict):
        ''' Callback for executed orders '''
        for orderId, order in orderDict.items():
            if order.security in self.__trakers.keys():
                self.__trakers[order.security].orderExecuted(orderId)
[ "def", "order_executed", "(", "self", ",", "orderDict", ")", ":", "for", "orderId", ",", "order", "in", "orderDict", ".", "items", "(", ")", ":", "if", "order", ".", "security", "in", "self", ".", "__trakers", ".", "keys", "(", ")", ":", "self", ".", "__trakers", "[", "order", ".", "security", "]", ".", "orderExecuted", "(", "orderId", ")" ]
50.4
12
def _process_changes(self, newRev, branch): """ Read changes since last change. - Read list of commit hashes. - Extract details from each commit. - Add changes to database. """ # initial run, don't parse all history if not self.lastRev: return rebuild = False if newRev in self.lastRev.values(): if self.buildPushesWithNoCommits: existingRev = self.lastRev.get(branch) if existingRev is None: # This branch was completely unknown, rebuild log.msg('gitpoller: rebuilding {} for new branch "{}"'.format( newRev, branch)) rebuild = True elif existingRev != newRev: # This branch is known, but it now points to a different # commit than last time we saw it, rebuild. log.msg('gitpoller: rebuilding {} for updated branch "{}"'.format( newRev, branch)) rebuild = True # get the change list revListArgs = (['--format=%H', '{}'.format(newRev)] + ['^' + rev for rev in sorted(self.lastRev.values())] + ['--']) self.changeCount = 0 results = yield self._dovccmd('log', revListArgs, path=self.workdir) # process oldest change first revList = results.split() revList.reverse() if rebuild and not revList: revList = [newRev] self.changeCount = len(revList) self.lastRev[branch] = newRev if self.changeCount: log.msg('gitpoller: processing {} changes: {} from "{}" branch "{}"'.format( self.changeCount, revList, self.repourl, branch)) for rev in revList: dl = defer.DeferredList([ self._get_commit_timestamp(rev), self._get_commit_author(rev), self._get_commit_files(rev), self._get_commit_comments(rev), ], consumeErrors=True) results = yield dl # check for failures failures = [r[1] for r in results if not r[0]] if failures: for failure in failures: log.err( failure, "while processing changes for {} {}".format(newRev, branch)) # just fail on the first error; they're probably all related! failures[0].raiseException() timestamp, author, files, comments = [r[1] for r in results] yield self.master.data.updates.addChange( author=author, revision=bytes2unicode(rev, encoding=self.encoding), files=files, comments=comments, when_timestamp=timestamp, branch=bytes2unicode(self._removeHeads(branch)), project=self.project, repository=bytes2unicode(self.repourl, encoding=self.encoding), category=self.category, src='git')
[ "def", "_process_changes", "(", "self", ",", "newRev", ",", "branch", ")", ":", "# initial run, don't parse all history", "if", "not", "self", ".", "lastRev", ":", "return", "rebuild", "=", "False", "if", "newRev", "in", "self", ".", "lastRev", ".", "values", "(", ")", ":", "if", "self", ".", "buildPushesWithNoCommits", ":", "existingRev", "=", "self", ".", "lastRev", ".", "get", "(", "branch", ")", "if", "existingRev", "is", "None", ":", "# This branch was completely unknown, rebuild", "log", ".", "msg", "(", "'gitpoller: rebuilding {} for new branch \"{}\"'", ".", "format", "(", "newRev", ",", "branch", ")", ")", "rebuild", "=", "True", "elif", "existingRev", "!=", "newRev", ":", "# This branch is known, but it now points to a different", "# commit than last time we saw it, rebuild.", "log", ".", "msg", "(", "'gitpoller: rebuilding {} for updated branch \"{}\"'", ".", "format", "(", "newRev", ",", "branch", ")", ")", "rebuild", "=", "True", "# get the change list", "revListArgs", "=", "(", "[", "'--format=%H'", ",", "'{}'", ".", "format", "(", "newRev", ")", "]", "+", "[", "'^'", "+", "rev", "for", "rev", "in", "sorted", "(", "self", ".", "lastRev", ".", "values", "(", ")", ")", "]", "+", "[", "'--'", "]", ")", "self", ".", "changeCount", "=", "0", "results", "=", "yield", "self", ".", "_dovccmd", "(", "'log'", ",", "revListArgs", ",", "path", "=", "self", ".", "workdir", ")", "# process oldest change first", "revList", "=", "results", ".", "split", "(", ")", "revList", ".", "reverse", "(", ")", "if", "rebuild", "and", "not", "revList", ":", "revList", "=", "[", "newRev", "]", "self", ".", "changeCount", "=", "len", "(", "revList", ")", "self", ".", "lastRev", "[", "branch", "]", "=", "newRev", "if", "self", ".", "changeCount", ":", "log", ".", "msg", "(", "'gitpoller: processing {} changes: {} from \"{}\" branch \"{}\"'", ".", "format", "(", "self", ".", "changeCount", ",", "revList", ",", "self", ".", "repourl", ",", "branch", ")", ")", "for", "rev", "in", "revList", ":", "dl", "=", "defer", ".", "DeferredList", "(", "[", "self", ".", "_get_commit_timestamp", "(", "rev", ")", ",", "self", ".", "_get_commit_author", "(", "rev", ")", ",", "self", ".", "_get_commit_files", "(", "rev", ")", ",", "self", ".", "_get_commit_comments", "(", "rev", ")", ",", "]", ",", "consumeErrors", "=", "True", ")", "results", "=", "yield", "dl", "# check for failures", "failures", "=", "[", "r", "[", "1", "]", "for", "r", "in", "results", "if", "not", "r", "[", "0", "]", "]", "if", "failures", ":", "for", "failure", "in", "failures", ":", "log", ".", "err", "(", "failure", ",", "\"while processing changes for {} {}\"", ".", "format", "(", "newRev", ",", "branch", ")", ")", "# just fail on the first error; they're probably all related!", "failures", "[", "0", "]", ".", "raiseException", "(", ")", "timestamp", ",", "author", ",", "files", ",", "comments", "=", "[", "r", "[", "1", "]", "for", "r", "in", "results", "]", "yield", "self", ".", "master", ".", "data", ".", "updates", ".", "addChange", "(", "author", "=", "author", ",", "revision", "=", "bytes2unicode", "(", "rev", ",", "encoding", "=", "self", ".", "encoding", ")", ",", "files", "=", "files", ",", "comments", "=", "comments", ",", "when_timestamp", "=", "timestamp", ",", "branch", "=", "bytes2unicode", "(", "self", ".", "_removeHeads", "(", "branch", ")", ")", ",", "project", "=", "self", ".", "project", ",", "repository", "=", "bytes2unicode", "(", "self", ".", "repourl", ",", "encoding", "=", "self", ".", "encoding", ")", ",", "category", "=", 
"self", ".", "category", ",", "src", "=", "'git'", ")" ]
38.78481
18.025316
def initialize_approx_dist(self, phi, start_diffuse, gaussian_latents):
        """ Initializes the approximate distribution for the model

        Parameters
        ----------
        phi : np.ndarray
            Latent variables

        start_diffuse: boolean
            Whether to start from diffuse values or not

        gaussian_latents: LatentVariables object
            Latent variables for the Gaussian approximation

        Returns
        ----------
        BBVI fit object
        """

        # Starting values for approximate distribution
        for i in range(len(self.latent_variables.z_list)):
            approx_dist = self.latent_variables.z_list[i].q
            if isinstance(approx_dist, fam.Normal):
                self.latent_variables.z_list[i].q.mu0 = phi[i]
                self.latent_variables.z_list[i].q.sigma0 = np.exp(-3.0)

        q_list = [k.q for k in self.latent_variables.z_list]

        # Get starting values for states
        T, Z, R, Q = self._ss_matrices(phi)
        H, mu = self.family.approximating_model(phi, T, Z, R, Q,
            gaussian_latents.get_z_values(transformed=True)[0], self.data)
        a, V = self.smoothed_state(self.data, phi, H, mu)
        V[0][0][0] = V[0][0][-1]

        for item in range(self.data_length):
            if start_diffuse is False:
                q_list.append(fam.Normal(a[0][item], np.sqrt(np.abs(V[0][0][item]))))
            else:
                q_list.append(fam.Normal(self.family.itransform(np.mean(self.data)), np.sqrt(np.abs(V[0][0][item]))))

        return q_list
[ "def", "initialize_approx_dist", "(", "self", ",", "phi", ",", "start_diffuse", ",", "gaussian_latents", ")", ":", "# Starting values for approximate distribution", "for", "i", "in", "range", "(", "len", "(", "self", ".", "latent_variables", ".", "z_list", ")", ")", ":", "approx_dist", "=", "self", ".", "latent_variables", ".", "z_list", "[", "i", "]", ".", "q", "if", "isinstance", "(", "approx_dist", ",", "fam", ".", "Normal", ")", ":", "self", ".", "latent_variables", ".", "z_list", "[", "i", "]", ".", "q", ".", "mu0", "=", "phi", "[", "i", "]", "self", ".", "latent_variables", ".", "z_list", "[", "i", "]", ".", "q", ".", "sigma0", "=", "np", ".", "exp", "(", "-", "3.0", ")", "q_list", "=", "[", "k", ".", "q", "for", "k", "in", "self", ".", "latent_variables", ".", "z_list", "]", "# Get starting values for states", "T", ",", "Z", ",", "R", ",", "Q", "=", "self", ".", "_ss_matrices", "(", "phi", ")", "H", ",", "mu", "=", "self", ".", "family", ".", "approximating_model", "(", "phi", ",", "T", ",", "Z", ",", "R", ",", "Q", ",", "gaussian_latents", ".", "get_z_values", "(", "transformed", "=", "True", ")", "[", "0", "]", ",", "self", ".", "data", ")", "a", ",", "V", "=", "self", ".", "smoothed_state", "(", "self", ".", "data", ",", "phi", ",", "H", ",", "mu", ")", "V", "[", "0", "]", "[", "0", "]", "[", "0", "]", "=", "V", "[", "0", "]", "[", "0", "]", "[", "-", "1", "]", "for", "item", "in", "range", "(", "self", ".", "data_length", ")", ":", "if", "start_diffuse", "is", "False", ":", "q_list", ".", "append", "(", "fam", ".", "Normal", "(", "a", "[", "0", "]", "[", "item", "]", ",", "np", ".", "sqrt", "(", "np", ".", "abs", "(", "V", "[", "0", "]", "[", "0", "]", "[", "item", "]", ")", ")", ")", ")", "else", ":", "q_list", ".", "append", "(", "fam", ".", "Normal", "(", "self", ".", "family", ".", "itransform", "(", "np", ".", "mean", "(", "self", ".", "data", ")", ")", ",", "np", ".", "sqrt", "(", "np", ".", "abs", "(", "V", "[", "0", "]", "[", "0", "]", "[", "item", "]", ")", ")", ")", ")", "return", "q_list" ]
37.365854
24.146341
def get_entities(seq, suffix=False): """Gets entities from sequence. Args: seq (list): sequence of labels. Returns: list: list of (chunk_type, chunk_start, chunk_end). Example: >>> from seqeval.metrics.sequence_labeling import get_entities >>> seq = ['B-PER', 'I-PER', 'O', 'B-LOC'] >>> get_entities(seq) [('PER', 0, 1), ('LOC', 3, 3)] """ # for nested list if any(isinstance(s, list) for s in seq): seq = [item for sublist in seq for item in sublist + ['O']] prev_tag = 'O' prev_type = '' begin_offset = 0 chunks = [] for i, chunk in enumerate(seq + ['O']): if suffix: tag = chunk[-1] type_ = chunk.split('-')[0] else: tag = chunk[0] type_ = chunk.split('-')[-1] if end_of_chunk(prev_tag, tag, prev_type, type_): chunks.append((prev_type, begin_offset, i-1)) if start_of_chunk(prev_tag, tag, prev_type, type_): begin_offset = i prev_tag = tag prev_type = type_ return chunks
[ "def", "get_entities", "(", "seq", ",", "suffix", "=", "False", ")", ":", "# for nested list", "if", "any", "(", "isinstance", "(", "s", ",", "list", ")", "for", "s", "in", "seq", ")", ":", "seq", "=", "[", "item", "for", "sublist", "in", "seq", "for", "item", "in", "sublist", "+", "[", "'O'", "]", "]", "prev_tag", "=", "'O'", "prev_type", "=", "''", "begin_offset", "=", "0", "chunks", "=", "[", "]", "for", "i", ",", "chunk", "in", "enumerate", "(", "seq", "+", "[", "'O'", "]", ")", ":", "if", "suffix", ":", "tag", "=", "chunk", "[", "-", "1", "]", "type_", "=", "chunk", ".", "split", "(", "'-'", ")", "[", "0", "]", "else", ":", "tag", "=", "chunk", "[", "0", "]", "type_", "=", "chunk", ".", "split", "(", "'-'", ")", "[", "-", "1", "]", "if", "end_of_chunk", "(", "prev_tag", ",", "tag", ",", "prev_type", ",", "type_", ")", ":", "chunks", ".", "append", "(", "(", "prev_type", ",", "begin_offset", ",", "i", "-", "1", ")", ")", "if", "start_of_chunk", "(", "prev_tag", ",", "tag", ",", "prev_type", ",", "type_", ")", ":", "begin_offset", "=", "i", "prev_tag", "=", "tag", "prev_type", "=", "type_", "return", "chunks" ]
27.461538
19.102564
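The nested-list branch above is easy to miss: sublists are flattened with an 'O' separator, so a chunk can never span two sentences. A doctest-style sketch of that case, using the import shown in the docstring:

from seqeval.metrics.sequence_labeling import get_entities

nested = [['B-PER', 'I-PER'], ['B-LOC']]
# flattened internally to ['B-PER', 'I-PER', 'O', 'B-LOC', 'O']
print(get_entities(nested))  # [('PER', 0, 1), ('LOC', 3, 3)]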
def parse(self, stream, template, predefines=True, orig_filename=None, keep_successful=False, printf=True): """Parse the data stream using the template (e.g. parse the 010 template and interpret the template using the stream as the data source). :stream: The input data stream :template: The template to parse the stream with :keep_successful: Return whatever was successfully parsed before an error. ``_pfp__error`` will contain the exception (if one was raised) :param bool printf: If ``False``, printfs will be noops (default=``True``) :returns: Pfp Dom """ self._dlog("parsing") self._printf = printf self._orig_filename = orig_filename self._stream = stream self._template = template self._template_lines = self._template.split("\n") self._ast = self._parse_string(template, predefines) self._dlog("parsed template into ast") res = self._run(keep_successful) return res
[ "def", "parse", "(", "self", ",", "stream", ",", "template", ",", "predefines", "=", "True", ",", "orig_filename", "=", "None", ",", "keep_successful", "=", "False", ",", "printf", "=", "True", ")", ":", "self", ".", "_dlog", "(", "\"parsing\"", ")", "self", ".", "_printf", "=", "printf", "self", ".", "_orig_filename", "=", "orig_filename", "self", ".", "_stream", "=", "stream", "self", ".", "_template", "=", "template", "self", ".", "_template_lines", "=", "self", ".", "_template", ".", "split", "(", "\"\\n\"", ")", "self", ".", "_ast", "=", "self", ".", "_parse_string", "(", "template", ",", "predefines", ")", "self", ".", "_dlog", "(", "\"parsed template into ast\"", ")", "res", "=", "self", ".", "_run", "(", "keep_successful", ")", "return", "res" ]
43.478261
23.782609
def assertFileSizeAlmostEqual( self, filename, size, places=None, msg=None, delta=None): '''Fail if ``filename`` does not have the given ``size`` as determined by their difference rounded to the given number of decimal ``places`` (default 7) and comparing to zero, or if their difference is greater than a given ``delta``. Parameters ---------- filename : str, bytes, file-like size : int, float places : int msg : str If not provided, the :mod:`marbles.mixins` or :mod:`unittest` standard message will be used. delta : int, float Raises ------ TypeError If ``filename`` is not a str or bytes object and is not file-like. ''' fsize = self._get_file_size(filename) self.assertAlmostEqual( fsize, size, places=places, msg=msg, delta=delta)
[ "def", "assertFileSizeAlmostEqual", "(", "self", ",", "filename", ",", "size", ",", "places", "=", "None", ",", "msg", "=", "None", ",", "delta", "=", "None", ")", ":", "fsize", "=", "self", ".", "_get_file_size", "(", "filename", ")", "self", ".", "assertAlmostEqual", "(", "fsize", ",", "size", ",", "places", "=", "places", ",", "msg", "=", "msg", ",", "delta", "=", "delta", ")" ]
35.615385
21.538462
def add_deploy(state, deploy_func, *args, **kwargs):
    '''
    Prepare & add a deploy to pyinfra.state by executing it on all hosts.

    Args:
        state (``pyinfra.api.State`` obj): the deploy state to add the operation
        deploy_func (function): the operation function from one of the modules,
        ie ``server.user``
        args/kwargs: passed to the operation function
    '''

    frameinfo = get_caller_frameinfo()
    kwargs['frameinfo'] = frameinfo

    for host in state.inventory:
        deploy_func(state, host, *args, **kwargs)
[ "def", "add_deploy", "(", "state", ",", "deploy_func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "frameinfo", "=", "get_caller_frameinfo", "(", ")", "kwargs", "[", "'frameinfo'", "]", "=", "frameinfo", "for", "host", "in", "state", ".", "inventory", ":", "deploy_func", "(", "state", ",", "host", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
33.8125
24.5625
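A usage sketch following the docstring's own ``server.user`` example; the import path matches older pyinfra releases, and the user name and ``home`` keyword are illustrative.

from pyinfra.modules import server  # assumed: pre-1.0 pyinfra layout

# `state` is the current pyinfra.api.State for the inventory
add_deploy(state, server.user, 'myuser', home='/home/myuser')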
def get_notifications(self, login=None, **kwargs): """Get the current notifications of a user. :return: JSON """ _login = kwargs.get( 'login', login or self._login ) _notif_url = NOTIF_URL.format(login=_login) return self._request_api(url=_notif_url).json()
[ "def", "get_notifications", "(", "self", ",", "login", "=", "None", ",", "*", "*", "kwargs", ")", ":", "_login", "=", "kwargs", ".", "get", "(", "'login'", ",", "login", "or", "self", ".", "_login", ")", "_notif_url", "=", "NOTIF_URL", ".", "format", "(", "login", "=", "_login", ")", "return", "self", ".", "_request_api", "(", "url", "=", "_notif_url", ")", ".", "json", "(", ")" ]
27.333333
17.166667
def walk_directories_info(self, relativePath="", fullPath=False, recursive=False):
        """
        Walk the repository relative path and yield a tuple of two items where
        the first item is the directory relative/full path and the second item
        is the directory info. If the directory info is not found on disk, the
        second item will be None.

        :parameters:
            #. relativePath (string): The relative path from which to start the walk.
            #. fullPath (boolean): Whether to return full or relative path.
            #. recursive (boolean): Whether to walk all directory files recursively.
        """
        assert isinstance(fullPath, bool), "fullPath must be boolean"
        assert isinstance(recursive, bool), "recursive must be boolean"
        relativePath = self.to_repo_relative_path(path=relativePath, split=False)
        # walk directories
        for dpath in self.walk_directories_path(relativePath=relativePath, fullPath=False, recursive=recursive):
            dirInfoPath = os.path.join(self.__path,dpath,self.__dirInfo)
            if os.path.isfile(dirInfoPath):
                with open(dirInfoPath, 'rb') as fd:
                    info = pickle.load(fd)
            else:
                info = None
            if fullPath:
                yield (os.path.join(self.__path, dpath), info)
            else:
                yield (dpath, info)
[ "def", "walk_directories_info", "(", "self", ",", "relativePath", "=", "\"\"", ",", "fullPath", "=", "False", ",", "recursive", "=", "False", ")", ":", "assert", "isinstance", "(", "fullPath", ",", "bool", ")", ",", "\"fullPath must be boolean\"", "assert", "isinstance", "(", "recursive", ",", "bool", ")", ",", "\"recursive must be boolean\"", "relativePath", "=", "self", ".", "to_repo_relative_path", "(", "path", "=", "relativePath", ",", "split", "=", "False", ")", "# walk directories", "for", "dpath", "in", "self", ".", "walk_directories_path", "(", "relativePath", "=", "relativePath", ",", "fullPath", "=", "False", ",", "recursive", "=", "recursive", ")", ":", "dirInfoPath", "=", "os", ".", "path", ".", "join", "(", "self", ".", "__path", ",", "dpath", ",", "self", ".", "__dirInfo", ")", "if", "os", ".", "path", ".", "isfile", "(", "dirInfoPath", ")", ":", "with", "open", "(", "dirInfoPath", ",", "'rb'", ")", "as", "fd", ":", "info", "=", "pickle", ".", "load", "(", "fd", ")", "else", ":", "info", "=", "None", "if", "fullPath", ":", "yield", "(", "os", ".", "path", ".", "join", "(", "self", ".", "__path", ",", "dpath", ")", ",", "info", ")", "else", ":", "yield", "(", "dpath", ",", "info", ")" ]
52
26.076923
def F(self, x): """ Classic NFW function in terms of arctanh and arctan :param x: r/Rs :return: """ if isinstance(x, np.ndarray): nfwvals = np.ones_like(x) inds1 = np.where(x < 1) inds2 = np.where(x > 1) nfwvals[inds1] = (1 - x[inds1] ** 2) ** -.5 * np.arctanh((1 - x[inds1] ** 2) ** .5) nfwvals[inds2] = (x[inds2] ** 2 - 1) ** -.5 * np.arctan((x[inds2] ** 2 - 1) ** .5) return nfwvals elif isinstance(x, float) or isinstance(x, int): if x == 1: return 1 if x < 1: return (1 - x ** 2) ** -.5 * np.arctanh((1 - x ** 2) ** .5) else: return (x ** 2 - 1) ** -.5 * np.arctan((x ** 2 - 1) ** .5)
[ "def", "F", "(", "self", ",", "x", ")", ":", "if", "isinstance", "(", "x", ",", "np", ".", "ndarray", ")", ":", "nfwvals", "=", "np", ".", "ones_like", "(", "x", ")", "inds1", "=", "np", ".", "where", "(", "x", "<", "1", ")", "inds2", "=", "np", ".", "where", "(", "x", ">", "1", ")", "nfwvals", "[", "inds1", "]", "=", "(", "1", "-", "x", "[", "inds1", "]", "**", "2", ")", "**", "-", ".5", "*", "np", ".", "arctanh", "(", "(", "1", "-", "x", "[", "inds1", "]", "**", "2", ")", "**", ".5", ")", "nfwvals", "[", "inds2", "]", "=", "(", "x", "[", "inds2", "]", "**", "2", "-", "1", ")", "**", "-", ".5", "*", "np", ".", "arctan", "(", "(", "x", "[", "inds2", "]", "**", "2", "-", "1", ")", "**", ".5", ")", "return", "nfwvals", "elif", "isinstance", "(", "x", ",", "float", ")", "or", "isinstance", "(", "x", ",", "int", ")", ":", "if", "x", "==", "1", ":", "return", "1", "if", "x", "<", "1", ":", "return", "(", "1", "-", "x", "**", "2", ")", "**", "-", ".5", "*", "np", ".", "arctanh", "(", "(", "1", "-", "x", "**", "2", ")", "**", ".5", ")", "else", ":", "return", "(", "x", "**", "2", "-", "1", ")", "**", "-", ".5", "*", "np", ".", "arctan", "(", "(", "x", "**", "2", "-", "1", ")", "**", ".5", ")" ]
37.238095
20.285714
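A standalone sketch of the same piecewise NFW function, handy for checking that the arctanh and arctan branches meet continuously at x = 1:

import numpy as np

def nfw_F(x: float) -> float:
    # arctanh branch inside the scale radius, arctan branch outside;
    # both tend to 1 as x -> 1, matching the x == 1 special case.
    if x == 1:
        return 1.0
    if x < 1:
        return np.arctanh(np.sqrt(1 - x ** 2)) / np.sqrt(1 - x ** 2)
    return np.arctan(np.sqrt(x ** 2 - 1)) / np.sqrt(x ** 2 - 1)

for x in (0.999, 1.0, 1.001):
    print(x, nfw_F(x))  # all three values are close to 1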
def raise_with_traceback(exc, traceback=Ellipsis): """ Raise exception with existing traceback. If traceback is not passed, uses sys.exc_info() to get traceback. """ if traceback == Ellipsis: _, _, traceback = sys.exc_info() raise exc.with_traceback(traceback)
[ "def", "raise_with_traceback", "(", "exc", ",", "traceback", "=", "Ellipsis", ")", ":", "if", "traceback", "==", "Ellipsis", ":", "_", ",", "_", ",", "traceback", "=", "sys", ".", "exc_info", "(", ")", "raise", "exc", ".", "with_traceback", "(", "traceback", ")" ]
35.625
6.875
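A typical use of the helper above: translating a low-level exception into a domain-specific one while preserving the original traceback (the ConfigError class is illustrative):

class ConfigError(Exception):
    pass

try:
    int('not-a-number')
except ValueError:
    # re-raise as ConfigError; traceback defaults to sys.exc_info()
    # because we are still inside the except block
    raise_with_traceback(ConfigError('bad config value'))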
def run_step(context): """Parse input json file and substitute {tokens} from context. Loads json into memory to do parsing, so be aware of big files. Args: context: pypyr.context.Context. Mandatory. - fileFormatJson - in. mandatory. str, path-like, or an iterable (list/tuple) of strings/paths. Each str/path can be a glob, relative or absolute path. - out. optional. path-like. Can refer to a file or a directory. will create directory structure if it doesn't exist. If in-path refers to >1 file (e.g it's a glob or list), out path can only be a directory - it doesn't make sense to write >1 file to the same single file (this is not an appender.) To ensure out_path is read as a directory and not a file, be sure to have the path separator (/) at the end. If out_path is not specified or None, will in-place edit and overwrite the in-files. Returns: None. Raises: FileNotFoundError: take a guess pypyr.errors.KeyNotInContextError: fileFormatJson or fileFormatJson['in'] missing in context. pypyr.errors.KeyInContextHasNoValueError: fileFormatJson or fileFormatJson['in'] exists but is None. """ logger.debug("started") deprecated(context) ObjectRewriterStep(__name__, 'fileFormatJson', context).run_step( JsonRepresenter()) logger.debug("done")
[ "def", "run_step", "(", "context", ")", ":", "logger", ".", "debug", "(", "\"started\"", ")", "deprecated", "(", "context", ")", "ObjectRewriterStep", "(", "__name__", ",", "'fileFormatJson'", ",", "context", ")", ".", "run_step", "(", "JsonRepresenter", "(", ")", ")", "logger", ".", "debug", "(", "\"done\"", ")" ]
38.744186
24.209302
def _thumbnail_resize(self, image, thumb_size, crop=None, bg=None): """Performs the actual image cropping operation with PIL.""" if crop == 'fit': img = ImageOps.fit(image, thumb_size, Image.ANTIALIAS) else: img = image.copy() img.thumbnail(thumb_size, Image.ANTIALIAS) if bg: img = self._bg_square(img, bg) return img
[ "def", "_thumbnail_resize", "(", "self", ",", "image", ",", "thumb_size", ",", "crop", "=", "None", ",", "bg", "=", "None", ")", ":", "if", "crop", "==", "'fit'", ":", "img", "=", "ImageOps", ".", "fit", "(", "image", ",", "thumb_size", ",", "Image", ".", "ANTIALIAS", ")", "else", ":", "img", "=", "image", ".", "copy", "(", ")", "img", ".", "thumbnail", "(", "thumb_size", ",", "Image", ".", "ANTIALIAS", ")", "if", "bg", ":", "img", "=", "self", ".", "_bg_square", "(", "img", ",", "bg", ")", "return", "img" ]
30.538462
22.230769
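The crop='fit' branch crops to the exact target size, while the default branch only scales within it. A minimal standalone comparison with Pillow; the file name is a placeholder.

from PIL import Image, ImageOps

img = Image.open('photo.jpg')

fitted = ImageOps.fit(img, (128, 128), Image.ANTIALIAS)  # exactly 128x128, cropped
scaled = img.copy()
scaled.thumbnail((128, 128), Image.ANTIALIAS)            # at most 128x128, aspect kept

print(fitted.size, scaled.size)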
def lyricsmode(song): """ Returns the lyrics found in lyricsmode.com for the specified mp3 file or an empty string if not found. """ translate = { URLESCAPE: '', ' ': '_' } artist = song.artist.lower() artist = normalize(artist, translate) title = song.title.lower() title = normalize(title, translate) artist = re.sub(r'\_{2,}', '_', artist) title = re.sub(r'\_{2,}', '_', title) if artist[0:4].lower() == 'the ': artist = artist[4:] if artist[0:2].lower() == 'a ': prefix = artist[2] else: prefix = artist[0] url = 'http://www.lyricsmode.com/lyrics/{}/{}/{}.html' url = url.format(prefix, artist, title) soup = get_url(url) content = soup.find(id='lyrics_text') return content.get_text().strip()
[ "def", "lyricsmode", "(", "song", ")", ":", "translate", "=", "{", "URLESCAPE", ":", "''", ",", "' '", ":", "'_'", "}", "artist", "=", "song", ".", "artist", ".", "lower", "(", ")", "artist", "=", "normalize", "(", "artist", ",", "translate", ")", "title", "=", "song", ".", "title", ".", "lower", "(", ")", "title", "=", "normalize", "(", "title", ",", "translate", ")", "artist", "=", "re", ".", "sub", "(", "r'\\_{2,}'", ",", "'_'", ",", "artist", ")", "title", "=", "re", ".", "sub", "(", "r'\\_{2,}'", ",", "'_'", ",", "title", ")", "if", "artist", "[", "0", ":", "4", "]", ".", "lower", "(", ")", "==", "'the '", ":", "artist", "=", "artist", "[", "4", ":", "]", "if", "artist", "[", "0", ":", "2", "]", ".", "lower", "(", ")", "==", "'a '", ":", "prefix", "=", "artist", "[", "2", "]", "else", ":", "prefix", "=", "artist", "[", "0", "]", "url", "=", "'http://www.lyricsmode.com/lyrics/{}/{}/{}.html'", "url", "=", "url", ".", "format", "(", "prefix", ",", "artist", ",", "title", ")", "soup", "=", "get_url", "(", "url", ")", "content", "=", "soup", ".", "find", "(", "id", "=", "'lyrics_text'", ")", "return", "content", ".", "get_text", "(", ")", ".", "strip", "(", ")" ]
25.548387
16.580645
def _is_memory_usage_qualified(self):
        """ Return True if we need a qualified .info display """
        def f(l):
            return 'mixed' in l or 'string' in l or 'unicode' in l
        return any(f(l) for l in self._inferred_type_levels)
[ "def", "_is_memory_usage_qualified", "(", "self", ")", ":", "def", "f", "(", "l", ")", ":", "return", "'mixed'", "in", "l", "or", "'string'", "in", "l", "or", "'unicode'", "in", "l", "return", "any", "(", "f", "(", "l", ")", "for", "l", "in", "self", ".", "_inferred_type_levels", ")" ]
49.8
14.4
def process_header(self, headers):
        """Ignore the incoming header and replace it with the destination header"""
        return [c.name for c in self.source.dest_table.columns][1:]
[ "def", "process_header", "(", "self", ",", "headers", ")", ":", "return", "[", "c", ".", "name", "for", "c", "in", "self", ".", "source", ".", "dest_table", ".", "columns", "]", "[", "1", ":", "]" ]
46.25
18.25
def _try_disconnect(self, ref): """ Called by the weak reference when its target dies. In other words, we can assert that self.weak_subscribers is not None at this time. """ with self.lock: weak = [s[0] for s in self.weak_subscribers] try: index = weak.index(ref) except ValueError: # subscriber was already removed by a call to disconnect() pass else: self.weak_subscribers.pop(index)
[ "def", "_try_disconnect", "(", "self", ",", "ref", ")", ":", "with", "self", ".", "lock", ":", "weak", "=", "[", "s", "[", "0", "]", "for", "s", "in", "self", ".", "weak_subscribers", "]", "try", ":", "index", "=", "weak", ".", "index", "(", "ref", ")", "except", "ValueError", ":", "# subscriber was already removed by a call to disconnect()", "pass", "else", ":", "self", ".", "weak_subscribers", ".", "pop", "(", "index", ")" ]
35.4
15
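The surrounding class evidently registers this method as a weakref death callback; a self-contained sketch of that pattern (on CPython the callback fires as soon as the referent's last reference goes away):

import weakref

class Subscriber:
    pass

def on_dead(ref):
    # invoked by the interpreter when the referent is collected
    print('subscriber died:', ref)

sub = Subscriber()
ref = weakref.ref(sub, on_dead)
del sub        # triggers on_dead(ref)
print(ref())   # None: the weak reference is now dead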
def _create_paths(self, basedir, name=None): """Create datadir and subdir paths.""" if name: datapath = os.path.join(basedir, name) else: datapath = basedir dbpath = os.path.join(datapath, 'db') if not os.path.exists(dbpath): os.makedirs(dbpath) if self.args['verbose']: print('creating directory: %s' % dbpath) return datapath
[ "def", "_create_paths", "(", "self", ",", "basedir", ",", "name", "=", "None", ")", ":", "if", "name", ":", "datapath", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "name", ")", "else", ":", "datapath", "=", "basedir", "dbpath", "=", "os", ".", "path", ".", "join", "(", "datapath", ",", "'db'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "dbpath", ")", ":", "os", ".", "makedirs", "(", "dbpath", ")", "if", "self", ".", "args", "[", "'verbose'", "]", ":", "print", "(", "'creating directory: %s'", "%", "dbpath", ")", "return", "datapath" ]
30
14.857143
def _set_v3host(self, v, load=False): """ Setter method for v3host, mapped from YANG variable /snmp_server/v3host (list) If this variable is read-only (config: false) in the source YANG file, then _set_v3host is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_v3host() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """v3host must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("hostip username",v3host.v3host, yang_name="v3host", rest_name="v3host", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}), is_container='list', yang_name="v3host", rest_name="v3host", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)""", }) self.__v3host = t if hasattr(self, '_set'): self._set()
[ "def", "_set_v3host", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "YANGListType", "(", "\"hostip username\"", ",", "v3host", ".", "v3host", ",", "yang_name", "=", "\"v3host\"", ",", "rest_name", "=", "\"v3host\"", ",", "parent", "=", "self", ",", "is_container", "=", "'list'", ",", "user_ordered", "=", "False", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "yang_keys", "=", "'hostip username'", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Holds IP Address, username, severity level and \\nport number used to send v3 traps and informs'", ",", "u'cli-suppress-list-no'", ":", "None", ",", "u'callpoint'", ":", "u'snmpV3host'", ",", "u'cli-suppress-key-abbreviation'", ":", "None", ",", "u'sort-priority'", ":", "u'24'", "}", "}", ")", ",", "is_container", "=", "'list'", ",", "yang_name", "=", "\"v3host\"", ",", "rest_name", "=", "\"v3host\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Holds IP Address, username, severity level and \\nport number used to send v3 traps and informs'", ",", "u'cli-suppress-list-no'", ":", "None", ",", "u'callpoint'", ":", "u'snmpV3host'", ",", "u'cli-suppress-key-abbreviation'", ":", "None", ",", "u'sort-priority'", ":", "u'24'", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-snmp'", ",", "defining_module", "=", "'brocade-snmp'", ",", "yang_type", "=", "'list'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"v3host must be of a type compatible with list\"\"\"", ",", "'defined-type'", ":", "\"list\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=YANGListType(\"hostip username\",v3host.v3host, yang_name=\"v3host\", rest_name=\"v3host\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='hostip username', extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \\nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}), is_container='list', yang_name=\"v3host\", rest_name=\"v3host\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Holds IP Address, username, severity level and \\nport number used to send v3 traps and informs', u'cli-suppress-list-no': None, u'callpoint': u'snmpV3host', u'cli-suppress-key-abbreviation': None, u'sort-priority': u'24'}}, namespace='urn:brocade.com:mgmt:brocade-snmp', defining_module='brocade-snmp', yang_type='list', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__v3host", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
125.045455
60.090909
def create_vars_from_data(self, dataset, split="train"): """ Create vars given a dataset and set test values. Useful when dataset is already defined. """ from deepy.core.neural_var import NeuralVariable vars = [] if split == "valid": data_split = dataset.valid_set() elif split == "test": data_split = dataset.test_set() else: data_split = dataset.train_set() first_data_piece = list(data_split)[0] for i, numpy_tensor in enumerate(first_data_piece): if numpy_tensor.dtype == "int64": numpy_tensor = numpy_tensor.astype("int32") if numpy_tensor.dtype == "float64": numpy_tensor = numpy_tensor.astype(env.FLOATX) type_map = { 0: "scalar", 1: "vector", 2: "matrix", 3: "tensor3", 4: "tensor4", 5: "tensor5", } tensor_type = type_map[numpy_tensor.ndim] if numpy_tensor.ndim in type_map else type_map[0] if numpy_tensor.dtype.kind == "i": tensor_type = "i" + tensor_type theano_tensor = getattr(TT, tensor_type)("input_{}_{}".format(i + 1, tensor_type)) last_dim = numpy_tensor.shape[-1] var = NeuralVariable(theano_tensor, dim=last_dim) var.set_test_value(numpy_tensor) vars.append(var) return vars
[ "def", "create_vars_from_data", "(", "self", ",", "dataset", ",", "split", "=", "\"train\"", ")", ":", "from", "deepy", ".", "core", ".", "neural_var", "import", "NeuralVariable", "vars", "=", "[", "]", "if", "split", "==", "\"valid\"", ":", "data_split", "=", "dataset", ".", "valid_set", "(", ")", "elif", "split", "==", "\"test\"", ":", "data_split", "=", "dataset", ".", "test_set", "(", ")", "else", ":", "data_split", "=", "dataset", ".", "train_set", "(", ")", "first_data_piece", "=", "list", "(", "data_split", ")", "[", "0", "]", "for", "i", ",", "numpy_tensor", "in", "enumerate", "(", "first_data_piece", ")", ":", "if", "numpy_tensor", ".", "dtype", "==", "\"int64\"", ":", "numpy_tensor", "=", "numpy_tensor", ".", "astype", "(", "\"int32\"", ")", "if", "numpy_tensor", ".", "dtype", "==", "\"float64\"", ":", "numpy_tensor", "=", "numpy_tensor", ".", "astype", "(", "env", ".", "FLOATX", ")", "type_map", "=", "{", "0", ":", "\"scalar\"", ",", "1", ":", "\"vector\"", ",", "2", ":", "\"matrix\"", ",", "3", ":", "\"tensor3\"", ",", "4", ":", "\"tensor4\"", ",", "5", ":", "\"tensor5\"", ",", "}", "tensor_type", "=", "type_map", "[", "numpy_tensor", ".", "ndim", "]", "if", "numpy_tensor", ".", "ndim", "in", "type_map", "else", "type_map", "[", "0", "]", "if", "numpy_tensor", ".", "dtype", ".", "kind", "==", "\"i\"", ":", "tensor_type", "=", "\"i\"", "+", "tensor_type", "theano_tensor", "=", "getattr", "(", "TT", ",", "tensor_type", ")", "(", "\"input_{}_{}\"", ".", "format", "(", "i", "+", "1", ",", "tensor_type", ")", ")", "last_dim", "=", "numpy_tensor", ".", "shape", "[", "-", "1", "]", "var", "=", "NeuralVariable", "(", "theano_tensor", ",", "dim", "=", "last_dim", ")", "var", ".", "set_test_value", "(", "numpy_tensor", ")", "vars", ".", "append", "(", "var", ")", "return", "vars" ]
40.777778
14.5
def f_add_derived_parameter(self, *args, **kwargs): """Adds a derived parameter under the current group. Similar to :func:`~pypet.naturalnaming.ParameterGroup.f_add_parameter` Naming prefixes are added as in :func:`~pypet.naturalnaming.DerivedParameterGroup.f_add_derived_parameter_group` """ return self._nn_interface._add_generic(self, type_name=DERIVED_PARAMETER, group_type_name=DERIVED_PARAMETER_GROUP, args=args, kwargs=kwargs)
[ "def", "f_add_derived_parameter", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_nn_interface", ".", "_add_generic", "(", "self", ",", "type_name", "=", "DERIVED_PARAMETER", ",", "group_type_name", "=", "DERIVED_PARAMETER_GROUP", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")" ]
44.153846
26.846154
def get_current_version(repo_path): """ Given a repo will return the version string, according to semantic versioning, counting as non-backwards compatible commit any one with a message header that matches (case insensitive):: sem-ver: .*break.* And as features any commit with a header matching:: sem-ver: feature And counting any other as a bugfix """ repo = dulwich.repo.Repo(repo_path) tags = get_tags(repo) maj_version = 0 feat_version = 0 fix_version = 0 for commit_sha, children in reversed( get_children_per_first_parent(repo_path).items() ): commit = get_repo_object(repo, commit_sha) maj_version, feat_version, fix_version = get_version( commit=commit, tags=tags, maj_version=maj_version, feat_version=feat_version, fix_version=fix_version, children=children, ) return '%s.%s.%s' % (maj_version, feat_version, fix_version)
[ "def", "get_current_version", "(", "repo_path", ")", ":", "repo", "=", "dulwich", ".", "repo", ".", "Repo", "(", "repo_path", ")", "tags", "=", "get_tags", "(", "repo", ")", "maj_version", "=", "0", "feat_version", "=", "0", "fix_version", "=", "0", "for", "commit_sha", ",", "children", "in", "reversed", "(", "get_children_per_first_parent", "(", "repo_path", ")", ".", "items", "(", ")", ")", ":", "commit", "=", "get_repo_object", "(", "repo", ",", "commit_sha", ")", "maj_version", ",", "feat_version", ",", "fix_version", "=", "get_version", "(", "commit", "=", "commit", ",", "tags", "=", "tags", ",", "maj_version", "=", "maj_version", ",", "feat_version", "=", "feat_version", ",", "fix_version", "=", "fix_version", ",", "children", "=", "children", ",", ")", "return", "'%s.%s.%s'", "%", "(", "maj_version", ",", "feat_version", ",", "fix_version", ")" ]
29.088235
18.794118
def create_python_bundle(self, dirn, arch): """ Create a packaged python bundle in the target directory, by copying all the modules and standard library to the right place. """ # Todo: find a better way to find the build libs folder modules_build_dir = join( self.get_build_dir(arch.arch), 'android-build', 'build', 'lib.linux{}-{}-{}'.format( '2' if self.version[0] == '2' else '', arch.command_prefix.split('-')[0], self.major_minor_version_string )) # Compile to *.pyc/*.pyo the python modules self.compile_python_files(modules_build_dir) # Compile to *.pyc/*.pyo the standard python library self.compile_python_files(join(self.get_build_dir(arch.arch), 'Lib')) # Compile to *.pyc/*.pyo the other python packages (site-packages) self.compile_python_files(self.ctx.get_python_install_dir()) # Bundle compiled python modules to a folder modules_dir = join(dirn, 'modules') c_ext = self.compiled_extension ensure_dir(modules_dir) module_filens = (glob.glob(join(modules_build_dir, '*.so')) + glob.glob(join(modules_build_dir, '*' + c_ext))) info("Copy {} files into the bundle".format(len(module_filens))) for filen in module_filens: info(" - copy {}".format(filen)) copy2(filen, modules_dir) # zip up the standard library stdlib_zip = join(dirn, 'stdlib.zip') with current_directory(join(self.get_build_dir(arch.arch), 'Lib')): stdlib_filens = list(walk_valid_filens( '.', self.stdlib_dir_blacklist, self.stdlib_filen_blacklist)) info("Zip {} files into the bundle".format(len(stdlib_filens))) shprint(sh.zip, stdlib_zip, *stdlib_filens) # copy the site-packages into place ensure_dir(join(dirn, 'site-packages')) ensure_dir(self.ctx.get_python_install_dir()) # TODO: Improve the API around walking and copying the files with current_directory(self.ctx.get_python_install_dir()): filens = list(walk_valid_filens( '.', self.site_packages_dir_blacklist, self.site_packages_filen_blacklist)) info("Copy {} files into the site-packages".format(len(filens))) for filen in filens: info(" - copy {}".format(filen)) ensure_dir(join(dirn, 'site-packages', dirname(filen))) copy2(filen, join(dirn, 'site-packages', filen)) # copy the python .so files into place python_build_dir = join(self.get_build_dir(arch.arch), 'android-build') python_lib_name = 'libpython' + self.major_minor_version_string if self.major_minor_version_string[0] == '3': python_lib_name += 'm' shprint(sh.cp, join(python_build_dir, python_lib_name + '.so'), join(self.ctx.dist_dir, self.ctx.dist_name, 'libs', arch.arch)) info('Renaming .so files to reflect cross-compile') self.reduce_object_file_names(join(dirn, 'site-packages')) return join(dirn, 'site-packages')
[ "def", "create_python_bundle", "(", "self", ",", "dirn", ",", "arch", ")", ":", "# Todo: find a better way to find the build libs folder", "modules_build_dir", "=", "join", "(", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", ",", "'android-build'", ",", "'build'", ",", "'lib.linux{}-{}-{}'", ".", "format", "(", "'2'", "if", "self", ".", "version", "[", "0", "]", "==", "'2'", "else", "''", ",", "arch", ".", "command_prefix", ".", "split", "(", "'-'", ")", "[", "0", "]", ",", "self", ".", "major_minor_version_string", ")", ")", "# Compile to *.pyc/*.pyo the python modules", "self", ".", "compile_python_files", "(", "modules_build_dir", ")", "# Compile to *.pyc/*.pyo the standard python library", "self", ".", "compile_python_files", "(", "join", "(", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", ",", "'Lib'", ")", ")", "# Compile to *.pyc/*.pyo the other python packages (site-packages)", "self", ".", "compile_python_files", "(", "self", ".", "ctx", ".", "get_python_install_dir", "(", ")", ")", "# Bundle compiled python modules to a folder", "modules_dir", "=", "join", "(", "dirn", ",", "'modules'", ")", "c_ext", "=", "self", ".", "compiled_extension", "ensure_dir", "(", "modules_dir", ")", "module_filens", "=", "(", "glob", ".", "glob", "(", "join", "(", "modules_build_dir", ",", "'*.so'", ")", ")", "+", "glob", ".", "glob", "(", "join", "(", "modules_build_dir", ",", "'*'", "+", "c_ext", ")", ")", ")", "info", "(", "\"Copy {} files into the bundle\"", ".", "format", "(", "len", "(", "module_filens", ")", ")", ")", "for", "filen", "in", "module_filens", ":", "info", "(", "\" - copy {}\"", ".", "format", "(", "filen", ")", ")", "copy2", "(", "filen", ",", "modules_dir", ")", "# zip up the standard library", "stdlib_zip", "=", "join", "(", "dirn", ",", "'stdlib.zip'", ")", "with", "current_directory", "(", "join", "(", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", ",", "'Lib'", ")", ")", ":", "stdlib_filens", "=", "list", "(", "walk_valid_filens", "(", "'.'", ",", "self", ".", "stdlib_dir_blacklist", ",", "self", ".", "stdlib_filen_blacklist", ")", ")", "info", "(", "\"Zip {} files into the bundle\"", ".", "format", "(", "len", "(", "stdlib_filens", ")", ")", ")", "shprint", "(", "sh", ".", "zip", ",", "stdlib_zip", ",", "*", "stdlib_filens", ")", "# copy the site-packages into place", "ensure_dir", "(", "join", "(", "dirn", ",", "'site-packages'", ")", ")", "ensure_dir", "(", "self", ".", "ctx", ".", "get_python_install_dir", "(", ")", ")", "# TODO: Improve the API around walking and copying the files", "with", "current_directory", "(", "self", ".", "ctx", ".", "get_python_install_dir", "(", ")", ")", ":", "filens", "=", "list", "(", "walk_valid_filens", "(", "'.'", ",", "self", ".", "site_packages_dir_blacklist", ",", "self", ".", "site_packages_filen_blacklist", ")", ")", "info", "(", "\"Copy {} files into the site-packages\"", ".", "format", "(", "len", "(", "filens", ")", ")", ")", "for", "filen", "in", "filens", ":", "info", "(", "\" - copy {}\"", ".", "format", "(", "filen", ")", ")", "ensure_dir", "(", "join", "(", "dirn", ",", "'site-packages'", ",", "dirname", "(", "filen", ")", ")", ")", "copy2", "(", "filen", ",", "join", "(", "dirn", ",", "'site-packages'", ",", "filen", ")", ")", "# copy the python .so files into place", "python_build_dir", "=", "join", "(", "self", ".", "get_build_dir", "(", "arch", ".", "arch", ")", ",", "'android-build'", ")", "python_lib_name", "=", "'libpython'", "+", "self", ".", "major_minor_version_string", 
"if", "self", ".", "major_minor_version_string", "[", "0", "]", "==", "'3'", ":", "python_lib_name", "+=", "'m'", "shprint", "(", "sh", ".", "cp", ",", "join", "(", "python_build_dir", ",", "python_lib_name", "+", "'.so'", ")", ",", "join", "(", "self", ".", "ctx", ".", "dist_dir", ",", "self", ".", "ctx", ".", "dist_name", ",", "'libs'", ",", "arch", ".", "arch", ")", ")", "info", "(", "'Renaming .so files to reflect cross-compile'", ")", "self", ".", "reduce_object_file_names", "(", "join", "(", "dirn", ",", "'site-packages'", ")", ")", "return", "join", "(", "dirn", ",", "'site-packages'", ")" ]
46.314286
18.771429
def canonic_signame(name_num):
    """Return a signal name for a signal name or signal number.
    Return None if name_num is an int but not a valid signal number,
    and False if name_num is not a number. If name_num is a signal
    name or signal number, the canonic name is returned."""
    signum = lookup_signum(name_num)
    if signum is None:
        # Maybe signame is a number?
        try:
            num = int(name_num)
            signame = lookup_signame(num)
            if signame is None:
                return None
        except:
            return False
        return signame

    signame = name_num.upper()
    if not signame.startswith('SIG'):
        return 'SIG'+signame
    return signame
[ "def", "canonic_signame", "(", "name_num", ")", ":", "signum", "=", "lookup_signum", "(", "name_num", ")", "if", "signum", "is", "None", ":", "# Maybe signame is a number?", "try", ":", "num", "=", "int", "(", "name_num", ")", "signame", "=", "lookup_signame", "(", "num", ")", "if", "signame", "is", "None", ":", "return", "None", "except", ":", "return", "False", "return", "signame", "signame", "=", "name_num", ".", "upper", "(", ")", "if", "not", "signame", ".", "startswith", "(", "'SIG'", ")", ":", "return", "'SIG'", "+", "signame", "return", "signame" ]
34.6
15
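A self-contained sketch of the two lookup helpers the function depends on, built on the standard signal module; the real helpers may behave differently.

import signal

def lookup_signum(name):
    # 'term' or 'SIGTERM' -> 15; None if unknown or not a string
    if not isinstance(name, str):
        return None
    name = name.upper()
    if not name.startswith('SIG'):
        name = 'SIG' + name
    val = getattr(signal, name, None)
    return int(val) if val is not None else None

def lookup_signame(num):
    # 15 -> 'SIGTERM'; None if unknown
    for attr in dir(signal):
        if attr.startswith('SIG') and '_' not in attr:
            if int(getattr(signal, attr)) == num:
                return attr
    return None

print(lookup_signum('term'), lookup_signame(15))  # 15 SIGTERM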
def parse_host_port(host_port): """ Takes a string argument specifying host or host:port. Returns a (hostname, port) or (ip_address, port) tuple. If no port is given, the second (port) element of the returned tuple will be None. host:port argument, for example, is accepted in the forms of: - hostname - hostname:1234 - hostname.domain.tld - hostname.domain.tld:5678 - [1234::5]:5678 - 1234::5 - 10.11.12.13:4567 - 10.11.12.13 """ host, port = None, None # default _s_ = host_port[:] if _s_[0] == "[": if "]" in host_port: host, _s_ = _s_.lstrip("[").rsplit("]", 1) host = ipaddress.IPv6Address(host).compressed if _s_[0] == ":": port = int(_s_.lstrip(":")) else: if len(_s_) > 1: raise ValueError('found ambiguous "{}" port in "{}"'.format(_s_, host_port)) else: if _s_.count(":") == 1: host, _hostport_separator_, port = _s_.partition(":") try: port = int(port) except ValueError as _e_: log.error('host_port "%s" port value "%s" is not an integer.', host_port, port) raise _e_ else: host = _s_ try: if not isinstance(host, ipaddress._BaseAddress): host_ip = ipaddress.ip_address(host).compressed host = host_ip except ValueError: log.debug('"%s" Not an IP address? Assuming it is a hostname.', host) if host != sanitize_host(host): log.error('bad hostname: "%s"', host) raise ValueError('bad hostname: "{}"'.format(host)) return host, port
[ "def", "parse_host_port", "(", "host_port", ")", ":", "host", ",", "port", "=", "None", ",", "None", "# default", "_s_", "=", "host_port", "[", ":", "]", "if", "_s_", "[", "0", "]", "==", "\"[\"", ":", "if", "\"]\"", "in", "host_port", ":", "host", ",", "_s_", "=", "_s_", ".", "lstrip", "(", "\"[\"", ")", ".", "rsplit", "(", "\"]\"", ",", "1", ")", "host", "=", "ipaddress", ".", "IPv6Address", "(", "host", ")", ".", "compressed", "if", "_s_", "[", "0", "]", "==", "\":\"", ":", "port", "=", "int", "(", "_s_", ".", "lstrip", "(", "\":\"", ")", ")", "else", ":", "if", "len", "(", "_s_", ")", ">", "1", ":", "raise", "ValueError", "(", "'found ambiguous \"{}\" port in \"{}\"'", ".", "format", "(", "_s_", ",", "host_port", ")", ")", "else", ":", "if", "_s_", ".", "count", "(", "\":\"", ")", "==", "1", ":", "host", ",", "_hostport_separator_", ",", "port", "=", "_s_", ".", "partition", "(", "\":\"", ")", "try", ":", "port", "=", "int", "(", "port", ")", "except", "ValueError", "as", "_e_", ":", "log", ".", "error", "(", "'host_port \"%s\" port value \"%s\" is not an integer.'", ",", "host_port", ",", "port", ")", "raise", "_e_", "else", ":", "host", "=", "_s_", "try", ":", "if", "not", "isinstance", "(", "host", ",", "ipaddress", ".", "_BaseAddress", ")", ":", "host_ip", "=", "ipaddress", ".", "ip_address", "(", "host", ")", ".", "compressed", "host", "=", "host_ip", "except", "ValueError", ":", "log", ".", "debug", "(", "'\"%s\" Not an IP address? Assuming it is a hostname.'", ",", "host", ")", "if", "host", "!=", "sanitize_host", "(", "host", ")", ":", "log", ".", "error", "(", "'bad hostname: \"%s\"'", ",", "host", ")", "raise", "ValueError", "(", "'bad hostname: \"{}\"'", ".", "format", "(", "host", ")", ")", "return", "host", ",", "port" ]
33.8
20.12
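Grounded in the docstring's accepted forms, parse_host_port behaves like this (IPv6 results come back compressed via the ipaddress module):

parse_host_port('10.11.12.13:4567')     # -> ('10.11.12.13', 4567)
parse_host_port('[1234::5]:5678')       # -> ('1234::5', 5678)
parse_host_port('hostname.domain.tld')  # -> ('hostname.domain.tld', None)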
def user_deleted_from_site_event(event): """ Remove deleted user from all the workspaces where he is a member """ userid = event.principal catalog = api.portal.get_tool('portal_catalog') query = {'object_provides': WORKSPACE_INTERFACE} query['workspace_members'] = userid workspaces = [ IWorkspace(b._unrestrictedGetObject()) for b in catalog.unrestrictedSearchResults(query) ] for workspace in workspaces: workspace.remove_from_team(userid)
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
32.866667
13.466667
def _fetch(self, statement, commit, max_attempts=5): """ Execute a SQL query and return a result. If an error occurs and auto-reconnect is enabled, disconnect and reconnect to the database, then retry up to max_attempts times before re-raising. """ if self._auto_reconnect: attempts = 0 while attempts < max_attempts: try: # Execute statement self._cursor.execute(statement) fetch = self._cursor.fetchall() rows = self._fetch_rows(fetch) if commit: self._commit() # Return a single item if the list only has one item return rows[0] if len(rows) == 1 else rows except Exception as e: attempts += 1 if attempts >= max_attempts: raise e self.reconnect() continue else: # Execute statement self._cursor.execute(statement) fetch = self._cursor.fetchall() rows = self._fetch_rows(fetch) if commit: self._commit() # Return a single item if the list only has one item return rows[0] if len(rows) == 1 else rows
[ "def", "_fetch", "(", "self", ",", "statement", ",", "commit", ",", "max_attempts", "=", "5", ")", ":", "if", "self", ".", "_auto_reconnect", ":", "attempts", "=", "0", "while", "attempts", "<", "max_attempts", ":", "try", ":", "# Execute statement", "self", ".", "_cursor", ".", "execute", "(", "statement", ")", "fetch", "=", "self", ".", "_cursor", ".", "fetchall", "(", ")", "rows", "=", "self", ".", "_fetch_rows", "(", "fetch", ")", "if", "commit", ":", "self", ".", "_commit", "(", ")", "# Return a single item if the list only has one item", "return", "rows", "[", "0", "]", "if", "len", "(", "rows", ")", "==", "1", "else", "rows", "except", "Exception", "as", "e", ":", "if", "attempts", ">=", "max_attempts", ":", "raise", "e", "else", ":", "attempts", "+=", "1", "self", ".", "reconnect", "(", ")", "continue", "else", ":", "# Execute statement", "self", ".", "_cursor", ".", "execute", "(", "statement", ")", "fetch", "=", "self", ".", "_cursor", ".", "fetchall", "(", ")", "rows", "=", "self", ".", "_fetch_rows", "(", "fetch", ")", "if", "commit", ":", "self", ".", "_commit", "(", ")", "# Return a single item if the list only has one item", "return", "rows", "[", "0", "]", "if", "len", "(", "rows", ")", "==", "1", "else", "rows" ]
35.486486
12.783784
def get_policy(self): """ Returns an instance of :attr:`~policy_class`. :return: An instance of the current policy class. :rtype: dockermap.map.policy.base.BasePolicy """ if not self._policy: self._policy = self.policy_class(self._maps, self._clients) return self._policy
[ "def", "get_policy", "(", "self", ")", ":", "if", "not", "self", ".", "_policy", ":", "self", ".", "_policy", "=", "self", ".", "policy_class", "(", "self", ".", "_maps", ",", "self", ".", "_clients", ")", "return", "self", ".", "_policy" ]
33.1
15.7
def _packet_manager(self): """ Watch packet list for timeouts. """ while True: if self._packets: with self._packet_lock: now = time.time() self._packets[:] = \ [packet for packet in self._packets if self._packet_timeout(packet, now)] # c.f. nyquist time.sleep(ACK_RESEND / 2)
[ "def", "_packet_manager", "(", "self", ")", ":", "while", "True", ":", "if", "self", ".", "_packets", ":", "with", "self", ".", "_packet_lock", ":", "now", "=", "time", ".", "time", "(", ")", "self", ".", "_packets", "[", ":", "]", "=", "[", "packet", "for", "packet", "in", "self", ".", "_packets", "if", "self", ".", "_packet_timeout", "(", "packet", ",", "now", ")", "]", "# c.f. nyquist", "time", ".", "sleep", "(", "ACK_RESEND", "/", "2", ")" ]
32.461538
14.384615
def register(coordinator): """Registers this module as a worker with the given coordinator.""" timer_queue = Queue.Queue() coordinator.register(TimerItem, timer_queue) coordinator.worker_threads.append( TimerThread(timer_queue, coordinator.input_queue))
[ "def", "register", "(", "coordinator", ")", ":", "timer_queue", "=", "Queue", ".", "Queue", "(", ")", "coordinator", ".", "register", "(", "TimerItem", ",", "timer_queue", ")", "coordinator", ".", "worker_threads", ".", "append", "(", "TimerThread", "(", "timer_queue", ",", "coordinator", ".", "input_queue", ")", ")" ]
45.333333
8.5
def update_environment(self, environment): """ Updates this channel's remote shell environment. .. note:: This operation is additive - i.e. the current environment is not reset before the given environment variables are set. .. warning:: Servers may silently reject some environment variables; see the warning in `set_environment_variable` for details. :param dict environment: a dictionary containing the name and respective values to set :raises: `.SSHException` -- if any of the environment variables was rejected by the server or the channel was closed """ for name, value in environment.items(): try: self.set_environment_variable(name, value) except SSHException as e: err = 'Failed to set environment variable "{}".' raise SSHException(err.format(name), e)
[ "def", "update_environment", "(", "self", ",", "environment", ")", ":", "for", "name", ",", "value", "in", "environment", ".", "items", "(", ")", ":", "try", ":", "self", ".", "set_environment_variable", "(", "name", ",", "value", ")", "except", "SSHException", "as", "e", ":", "err", "=", "'Failed to set environment variable \"{}\".'", "raise", "SSHException", "(", "err", ".", "format", "(", "name", ")", ",", "e", ")" ]
40.125
21.041667
def compose(self, *args, **kwargs): """ Generate a file from the current template and given arguments. Warning: Make certain to check the formatted editor for correctness! Args: args: Positional arguments to update the template kwargs: Keyword arguments to update the template Returns: editor: An editor containing the formatted template. """ linebreak = kwargs.pop("linebreak", "\n") # Update the internally stored args/kwargs from which formatting arguments come if len(args) > 0: self.args = args self._update(**kwargs) # Format string arguments (for the modified template) fkwargs = {} # Format string keyword arguments modtmpl = [] # The modified template lines #curpos = 0 # Positional argument counter #i = 0 for line in self: cline = copy(line) # If any special formatters exist, handle them for match in self._regex.findall(line): search = "[{}]".format("|".join(match)) name, indent, delim, qual, _ = match if indent != "": indent = " "*int(indent) delim = delim.replace("\\|", "|") # Collect and format the data accordingly data = getattr(self, name, None) # If no data exists, treat as optional if data is None: cline = cline.replace(search, "") continue elif delim.isdigit(): fkwargs[name] = getattr(self, "_fmt_"+name)() else: fkwargs[name] = linebreak.join([indent+k+delim+qual+v+qual for k, v in data.items()]) cline = cline.replace(search, "{"+name+"}") modtmpl.append(cline) modtmpl = "\n".join(modtmpl) print(modtmpl) dct = self.get_kwargs() dct.update(fkwargs) return self._constructor(textobj=modtmpl.format(*self.args, **dct))
[ "def", "compose", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "linebreak", "=", "kwargs", ".", "pop", "(", "\"linebreak\"", ",", "\"\\n\"", ")", "# Update the internally stored args/kwargs from which formatting arguments come\r", "if", "len", "(", "args", ")", ">", "0", ":", "self", ".", "args", "=", "args", "self", ".", "_update", "(", "*", "*", "kwargs", ")", "# Format string arguments (for the modified template)\r", "fkwargs", "=", "{", "}", "# Format string keyword arguments\r", "modtmpl", "=", "[", "]", "# The modified template lines\r", "#curpos = 0 # Positional argument counter\r", "#i = 0\r", "for", "line", "in", "self", ":", "cline", "=", "copy", "(", "line", ")", "# If any special formatters exist, handle them\r", "for", "match", "in", "self", ".", "_regex", ".", "findall", "(", "line", ")", ":", "search", "=", "\"[{}]\"", ".", "format", "(", "\"|\"", ".", "join", "(", "match", ")", ")", "name", ",", "indent", ",", "delim", ",", "qual", ",", "_", "=", "match", "if", "indent", "!=", "\"\"", ":", "indent", "=", "\" \"", "*", "int", "(", "indent", ")", "delim", "=", "delim", ".", "replace", "(", "\"\\\\|\"", ",", "\"|\"", ")", "# Collect and format the data accordingly\r", "data", "=", "getattr", "(", "self", ",", "name", ",", "None", ")", "# If no data exists, treat as optional\r", "if", "data", "is", "None", ":", "cline", "=", "cline", ".", "replace", "(", "search", ",", "\"\"", ")", "continue", "elif", "delim", ".", "isdigit", "(", ")", ":", "fkwargs", "[", "name", "]", "=", "getattr", "(", "self", ",", "\"_fmt_\"", "+", "name", ")", "(", ")", "else", ":", "fkwargs", "[", "name", "]", "=", "linebreak", ".", "join", "(", "[", "indent", "+", "k", "+", "delim", "+", "qual", "+", "v", "+", "qual", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", "]", ")", "cline", "=", "cline", ".", "replace", "(", "search", ",", "\"{\"", "+", "name", "+", "\"}\"", ")", "modtmpl", ".", "append", "(", "cline", ")", "modtmpl", "=", "\"\\n\"", ".", "join", "(", "modtmpl", ")", "print", "(", "modtmpl", ")", "dct", "=", "self", ".", "get_kwargs", "(", ")", "dct", ".", "update", "(", "fkwargs", ")", "return", "self", ".", "_constructor", "(", "textobj", "=", "modtmpl", ".", "format", "(", "*", "self", ".", "args", ",", "*", "*", "dct", ")", ")" ]
42.28
18.04
def fill_subparser(subparser): """Sets up a subparser to download audio of YouTube videos. Adds the compulsory `--youtube-id` flag. Parameters ---------- subparser : :class:`argparse.ArgumentParser` Subparser handling the `youtube_audio` command. """ subparser.add_argument( '--youtube-id', type=str, required=True, help=("The YouTube ID of the video from which to extract audio, " "usually an 11-character string.") ) return download
[ "def", "fill_subparser", "(", "subparser", ")", ":", "subparser", ".", "add_argument", "(", "'--youtube-id'", ",", "type", "=", "str", ",", "required", "=", "True", ",", "help", "=", "(", "\"The YouTube ID of the video from which to extract audio, \"", "\"usually an 11-character string.\"", ")", ")", "return", "download" ]
29.117647
19.235294
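A minimal wiring sketch for the record above, assuming the surrounding CLI creates one sub-parser per downloader and treats the returned download callable as the handler (the video ID below is a hypothetical placeholder):

import argparse

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
youtube_audio = subparsers.add_parser('youtube_audio')
handler = fill_subparser(youtube_audio)  # the fuel download function is returned
args = parser.parse_args(['youtube_audio', '--youtube-id', 'XXXXXXXXXXX'])  # hypothetical 11-character ID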
def predict(oracle, context, ab=None, verbose=False): """Single symbolic prediction given a context, an oracle and an alphabet. :param oracle: a learned vmo object from a symbolic sequence. :param context: the context preceding the predicted symbol :param ab: alphabet :param verbose: whether to show if the context is pruned or not :return: a probability distribution over the alphabet for the prediction. """ if verbose: print("original context: ", context) if ab is None: ab = oracle.get_alphabet() _b, _s, context = _test_context(oracle, context) _lrs = [oracle.lrs[k] for k in oracle.rsfx[_s]] context_state = [] while not context_state: for _i, _l in enumerate(_lrs): if _l >= len(context): context_state.append(oracle.rsfx[_s][_i]) if context_state: break else: context = context[1:] _b, _s = oracle.accept(context) _lrs = [oracle.lrs[k] for k in oracle.rsfx[_s]] if verbose: print("final context: ", context) print("context_state: ", context_state) d_count = len(ab) hist = [1.0] * len(ab) # initialize all histograms with 1s. trn_data = [oracle.data[n] for n in oracle.trn[_s]] for k in trn_data: hist[ab[k]] += 1.0 d_count += 1.0 for i in context_state: d_count, hist = _rsfx_count(oracle, i, d_count, hist, ab) return [hist[idx] / d_count for idx in range(len(hist))], context
[ "def", "predict", "(", "oracle", ",", "context", ",", "ab", "=", "None", ",", "verbose", "=", "False", ")", ":", "if", "verbose", ":", "print", "(", "\"original context: \"", ",", "context", ")", "if", "ab", "is", "None", ":", "ab", "=", "oracle", ".", "get_alphabet", "(", ")", "_b", ",", "_s", ",", "context", "=", "_test_context", "(", "oracle", ",", "context", ")", "_lrs", "=", "[", "oracle", ".", "lrs", "[", "k", "]", "for", "k", "in", "oracle", ".", "rsfx", "[", "_s", "]", "]", "context_state", "=", "[", "]", "while", "not", "context_state", ":", "for", "_i", ",", "_l", "in", "enumerate", "(", "_lrs", ")", ":", "if", "_l", ">=", "len", "(", "context", ")", ":", "context_state", ".", "append", "(", "oracle", ".", "rsfx", "[", "_s", "]", "[", "_i", "]", ")", "if", "context_state", ":", "break", "else", ":", "context", "=", "context", "[", "1", ":", "]", "_b", ",", "_s", "=", "oracle", ".", "accept", "(", "context", ")", "_lrs", "=", "[", "oracle", ".", "lrs", "[", "k", "]", "for", "k", "in", "oracle", ".", "rsfx", "[", "_s", "]", "]", "if", "verbose", ":", "print", "(", "\"final context: \"", ",", "context", ")", "print", "(", "\"context_state: \"", ",", "context_state", ")", "d_count", "=", "len", "(", "ab", ")", "hist", "=", "[", "1.0", "]", "*", "len", "(", "ab", ")", "# initialize all histograms with 1s.", "trn_data", "=", "[", "oracle", ".", "data", "[", "n", "]", "for", "n", "in", "oracle", ".", "trn", "[", "_s", "]", "]", "for", "k", "in", "trn_data", ":", "hist", "[", "ab", "[", "k", "]", "]", "+=", "1.0", "d_count", "+=", "1.0", "for", "i", "in", "context_state", ":", "d_count", ",", "hist", "=", "_rsfx_count", "(", "oracle", ",", "i", ",", "d_count", ",", "hist", ",", "ab", ")", "return", "[", "hist", "[", "idx", "]", "/", "d_count", "for", "idx", "in", "range", "(", "len", "(", "hist", ")", ")", "]", ",", "context" ]
35.214286
18.309524
def add_process(self, command=None, vsplit=False, start_directory=None): """ Add a new process to the current window. (vsplit/hsplit). """ assert command is None or isinstance(command, six.text_type) assert start_directory is None or isinstance(start_directory, six.text_type) window = self.arrangement.get_active_window() pane = self._create_pane(window, command, start_directory=start_directory) window.add_pane(pane, vsplit=vsplit) pane.focus() self.invalidate()
[ "def", "add_process", "(", "self", ",", "command", "=", "None", ",", "vsplit", "=", "False", ",", "start_directory", "=", "None", ")", ":", "assert", "command", "is", "None", "or", "isinstance", "(", "command", ",", "six", ".", "text_type", ")", "assert", "start_directory", "is", "None", "or", "isinstance", "(", "start_directory", ",", "six", ".", "text_type", ")", "window", "=", "self", ".", "arrangement", ".", "get_active_window", "(", ")", "pane", "=", "self", ".", "_create_pane", "(", "window", ",", "command", ",", "start_directory", "=", "start_directory", ")", "window", ".", "add_pane", "(", "pane", ",", "vsplit", "=", "vsplit", ")", "pane", ".", "focus", "(", ")", "self", ".", "invalidate", "(", ")" ]
41.153846
23.307692
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: ParticipantContext for this ParticipantInstance :rtype: twilio.rest.video.v1.room.room_participant.ParticipantContext """ if self._context is None: self._context = ParticipantContext( self._version, room_sid=self._solution['room_sid'], sid=self._solution['sid'], ) return self._context
[ "def", "_proxy", "(", "self", ")", ":", "if", "self", ".", "_context", "is", "None", ":", "self", ".", "_context", "=", "ParticipantContext", "(", "self", ".", "_version", ",", "room_sid", "=", "self", ".", "_solution", "[", "'room_sid'", "]", ",", "sid", "=", "self", ".", "_solution", "[", "'sid'", "]", ",", ")", "return", "self", ".", "_context" ]
39.333333
19.066667
def set_dependencies(ctx, archive_name, dependency=None): ''' Set the dependencies of an archive ''' _generate_api(ctx) kwargs = _parse_dependencies(dependency) var = ctx.obj.api.get_archive(archive_name) var.set_dependencies(dependencies=kwargs)
[ "def", "set_dependencies", "(", "ctx", ",", "archive_name", ",", "dependency", "=", "None", ")", ":", "_generate_api", "(", "ctx", ")", "kwargs", "=", "_parse_dependencies", "(", "dependency", ")", "var", "=", "ctx", ".", "obj", ".", "api", ".", "get_archive", "(", "archive_name", ")", "var", ".", "set_dependencies", "(", "dependencies", "=", "kwargs", ")" ]
24.272727
21.727273
def scale_to_vol(self, vol): """Scale ball to encompass a target volume.""" f = (vol / self.vol_ball) ** (1.0 / self.n) # linear factor self.expand *= f self.radius *= f self.vol_ball = vol
[ "def", "scale_to_vol", "(", "self", ",", "vol", ")", ":", "f", "=", "(", "vol", "/", "self", ".", "vol_ball", ")", "**", "(", "1.0", "/", "self", ".", "n", ")", "# linear factor", "self", ".", "expand", "*=", "f", "self", ".", "radius", "*=", "f", "self", ".", "vol_ball", "=", "vol" ]
32.142857
17.857143
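The linear factor follows from volume scaling as radius**n for an n-ball, so (vol / self.vol_ball)**(1.0 / self.n) is the ratio of new radius to old. A quick numeric check, assuming n = 3 and a doubling of volume:

f = 2.0 ** (1.0 / 3)  # target volume is twice the current one, n = 3
# radius and expand are each multiplied by f (about 1.2599) and vol_ball becomes the target volume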
def uniqueName(self, name): """UIParser.uniqueName(string) -> string Create a unique name from a string. >>> p = UIParser(QtCore, QtGui, QtWidgets) >>> p.uniqueName("foo") 'foo' >>> p.uniqueName("foo") 'foo1' """ try: suffix = self.name_suffixes[name] except KeyError: self.name_suffixes[name] = 0 return name suffix += 1 self.name_suffixes[name] = suffix return "%s%i" % (name, suffix)
[ "def", "uniqueName", "(", "self", ",", "name", ")", ":", "try", ":", "suffix", "=", "self", ".", "name_suffixes", "[", "name", "]", "except", "KeyError", ":", "self", ".", "name_suffixes", "[", "name", "]", "=", "0", "return", "name", "suffix", "+=", "1", "self", ".", "name_suffixes", "[", "name", "]", "=", "suffix", "return", "\"%s%i\"", "%", "(", "name", ",", "suffix", ")" ]
25.5
15.35
def sunionstore(self, destkey, key, *keys): """Add multiple sets and store the resulting set in a key.""" return self.execute(b'SUNIONSTORE', destkey, key, *keys)
[ "def", "sunionstore", "(", "self", ",", "destkey", ",", "key", ",", "*", "keys", ")", ":", "return", "self", ".", "execute", "(", "b'SUNIONSTORE'", ",", "destkey", ",", "key", ",", "*", "keys", ")" ]
58.666667
9
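A usage sketch in the aioredis style this wrapper belongs to, assuming an already-connected client named redis inside a coroutine:

await redis.sadd('s1', 'a', 'b')
await redis.sadd('s2', 'b', 'c')
await redis.sunionstore('dest', 's1', 's2')  # 'dest' now holds the union {'a', 'b', 'c'}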
def _make_defaults_hazard_table(): """Build headers for a table related to hazard classes. :return: A table with headers. :rtype: m.Table """ table = m.Table(style_class='table table-condensed table-striped') row = m.Row() # first row is for colour - we don't use a header here as some tables # do not have colour... row.add(m.Cell(tr(''), header=True)) row.add(m.Cell(tr('Name'), header=True)) row.add(m.Cell(tr('Affected'), header=True)) row.add(m.Cell(tr('Fatality rate'), header=True)) row.add(m.Cell(tr('Displacement rate'), header=True)) row.add(m.Cell(tr('Default values'), header=True)) row.add(m.Cell(tr('Default min'), header=True)) row.add(m.Cell(tr('Default max'), header=True)) table.add(row) return table
[ "def", "_make_defaults_hazard_table", "(", ")", ":", "table", "=", "m", ".", "Table", "(", "style_class", "=", "'table table-condensed table-striped'", ")", "row", "=", "m", ".", "Row", "(", ")", "# first row is for colour - we dont use a header here as some tables", "# do not have colour...", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "''", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Name'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Affected'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Fatality rate'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Displacement rate'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Default values'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Default min'", ")", ",", "header", "=", "True", ")", ")", "row", ".", "add", "(", "m", ".", "Cell", "(", "tr", "(", "'Default max'", ")", ",", "header", "=", "True", ")", ")", "table", ".", "add", "(", "row", ")", "return", "table" ]
38.55
14.75
def get_base_url(config, args): """ Get the API base url. Try Terraform state first, then :py:class:`~.AWSInfo`. :param config: configuration :type config: :py:class:`~.Config` :param args: command line arguments :type args: :py:class:`argparse.Namespace` :return: API base URL :rtype: str """ try: logger.debug('Trying to get Terraform base_url output') runner = TerraformRunner(config, args.tf_path) outputs = runner._get_outputs() base_url = outputs['base_url'] logger.debug("Terraform base_url output: '%s'", base_url) except Exception: logger.info('Unable to find API base_url from Terraform state; ' 'querying AWS.', exc_info=1) aws = AWSInfo(config) base_url = aws.get_api_base_url() logger.debug("AWS api_base_url: '%s'", base_url) if not base_url.endswith('/'): base_url += '/' return base_url
[ "def", "get_base_url", "(", "config", ",", "args", ")", ":", "try", ":", "logger", ".", "debug", "(", "'Trying to get Terraform base_url output'", ")", "runner", "=", "TerraformRunner", "(", "config", ",", "args", ".", "tf_path", ")", "outputs", "=", "runner", ".", "_get_outputs", "(", ")", "base_url", "=", "outputs", "[", "'base_url'", "]", "logger", ".", "debug", "(", "\"Terraform base_url output: '%s'\"", ",", "base_url", ")", "except", "Exception", ":", "logger", ".", "info", "(", "'Unable to find API base_url from Terraform state; '", "'querying AWS.'", ",", "exc_info", "=", "1", ")", "aws", "=", "AWSInfo", "(", "config", ")", "base_url", "=", "aws", ".", "get_api_base_url", "(", ")", "logger", ".", "debug", "(", "\"AWS api_base_url: '%s'\"", ",", "base_url", ")", "if", "not", "base_url", ".", "endswith", "(", "'/'", ")", ":", "base_url", "+=", "'/'", "return", "base_url" ]
34.555556
13.518519
def _ParseAttribute(self, file_object): """Parses a CUPS IPP attribute from a file-like object. Args: file_object (dfvfs.FileIO): file-like object. Returns: tuple[str, object]: attribute name and value. Raises: ParseError: if the attribute cannot be parsed. """ file_offset = file_object.tell() attribute_map = self._GetDataTypeMap('cups_ipp_attribute') try: attribute, _ = self._ReadStructureFromFileObject( file_object, file_offset, attribute_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError( 'Unable to parse attribute with error: {0!s}'.format(exception)) value = None if attribute.tag_value in self._INTEGER_TAG_VALUES: # TODO: correct file offset to point to the start of value_data. value = self._ParseIntegerValue(attribute.value_data, file_offset) elif attribute.tag_value == self._TAG_VALUE_BOOLEAN: value = self._ParseBooleanValue(attribute.value_data) elif attribute.tag_value == self._TAG_VALUE_DATE_TIME: # TODO: correct file offset to point to the start of value_data. value = self._ParseDateTimeValue(attribute.value_data, file_offset) elif attribute.tag_value in self._STRING_WITHOUT_LANGUAGE_VALUES: value = attribute.value_data.decode(self._last_charset_attribute) elif attribute.tag_value in self._ASCII_STRING_VALUES: value = attribute.value_data.decode('ascii') if attribute.tag_value == self._TAG_VALUE_CHARSET: self._last_charset_attribute = value else: value = attribute.value_data return attribute.name, value
[ "def", "_ParseAttribute", "(", "self", ",", "file_object", ")", ":", "file_offset", "=", "file_object", ".", "tell", "(", ")", "attribute_map", "=", "self", ".", "_GetDataTypeMap", "(", "'cups_ipp_attribute'", ")", "try", ":", "attribute", ",", "_", "=", "self", ".", "_ReadStructureFromFileObject", "(", "file_object", ",", "file_offset", ",", "attribute_map", ")", "except", "(", "ValueError", ",", "errors", ".", "ParseError", ")", "as", "exception", ":", "raise", "errors", ".", "ParseError", "(", "'Unable to parse attribute with error: {0!s}'", ".", "format", "(", "exception", ")", ")", "value", "=", "None", "if", "attribute", ".", "tag_value", "in", "self", ".", "_INTEGER_TAG_VALUES", ":", "# TODO: correct file offset to point to the start of value_data.", "value", "=", "self", ".", "_ParseIntegerValue", "(", "attribute", ".", "value_data", ",", "file_offset", ")", "elif", "attribute", ".", "tag_value", "==", "self", ".", "_TAG_VALUE_BOOLEAN", ":", "value", "=", "self", ".", "_ParseBooleanValue", "(", "attribute", ".", "value_data", ")", "elif", "attribute", ".", "tag_value", "==", "self", ".", "_TAG_VALUE_DATE_TIME", ":", "# TODO: correct file offset to point to the start of value_data.", "value", "=", "self", ".", "_ParseDateTimeValue", "(", "attribute", ".", "value_data", ",", "file_offset", ")", "elif", "attribute", ".", "tag_value", "in", "self", ".", "_STRING_WITHOUT_LANGUAGE_VALUES", ":", "value", "=", "attribute", ".", "value_data", ".", "decode", "(", "self", ".", "_last_charset_attribute", ")", "elif", "attribute", ".", "tag_value", "in", "self", ".", "_ASCII_STRING_VALUES", ":", "value", "=", "attribute", ".", "value_data", ".", "decode", "(", "'ascii'", ")", "if", "attribute", ".", "tag_value", "==", "self", ".", "_TAG_VALUE_CHARSET", ":", "self", ".", "_last_charset_attribute", "=", "value", "else", ":", "value", "=", "attribute", ".", "value_data", "return", "attribute", ".", "name", ",", "value" ]
34.340426
23.744681
def iter_events(self, number=-1, etag=None): """Iterate over public events. :param int number: (optional), number of events to return. Default: -1 returns all available events :param str etag: (optional), ETag from a previous request to the same endpoint :returns: generator of :class:`Event <github3.events.Event>`\ s """ url = self._build_url('events') return self._iter(int(number), url, Event, etag=etag)
[ "def", "iter_events", "(", "self", ",", "number", "=", "-", "1", ",", "etag", "=", "None", ")", ":", "url", "=", "self", ".", "_build_url", "(", "'events'", ")", "return", "self", ".", "_iter", "(", "int", "(", "number", ")", ",", "url", ",", "Event", ",", "etag", "=", "etag", ")" ]
43.545455
17.454545
def ExtractCredentialsFromPathSpec(self, path_spec): """Extracts credentials from a path specification. Args: path_spec (PathSpec): path specification to extract credentials from. """ credentials = manager.CredentialsManager.GetCredentials(path_spec) for identifier in credentials.CREDENTIALS: value = getattr(path_spec, identifier, None) if value is None: continue self.SetCredential(path_spec, identifier, value)
[ "def", "ExtractCredentialsFromPathSpec", "(", "self", ",", "path_spec", ")", ":", "credentials", "=", "manager", ".", "CredentialsManager", ".", "GetCredentials", "(", "path_spec", ")", "for", "identifier", "in", "credentials", ".", "CREDENTIALS", ":", "value", "=", "getattr", "(", "path_spec", ",", "identifier", ",", "None", ")", "if", "value", "is", "None", ":", "continue", "self", ".", "SetCredential", "(", "path_spec", ",", "identifier", ",", "value", ")" ]
35.076923
19.923077
def reformat_date(date, new_fmt='%Y-%m-%d'): """ Returns the reformatted date. :param date: The string date with this format %m/%d/%Y :type date: String :param new_fmt: date format string. Default is '%Y-%m-%d' :type new_fmt: String :returns: String :example: >>> reformat_date('05/1/2015', '%d/%m/%Y') '01/05/2015' """ try: if isinstance(date, datetime): return date.strftime(new_fmt) else: fmt = '%m/%d/%Y' return datetime.strptime(date, fmt).strftime(new_fmt) except ValueError: return date
[ "def", "reformat_date", "(", "date", ",", "new_fmt", "=", "'%Y-%m-%d'", ")", ":", "try", ":", "if", "isinstance", "(", "date", ",", "datetime", ")", ":", "return", "date", ".", "strftime", "(", "new_fmt", ")", "else", ":", "fmt", "=", "'%m/%d/%Y'", "return", "datetime", ".", "strptime", "(", "date", ",", "fmt", ")", ".", "strftime", "(", "new_fmt", ")", "except", "ValueError", ":", "return", "date" ]
22.740741
20.037037
def as_obj(func): """ A decorator used to return a JSON response with a dict representation of the model instance. It expects the decorated function to return a Model instance. It then converts the instance to a dict and serializes it into a JSON response Examples: >>> @app.route('/api/shipments/<id>') ... @as_obj ... def get_shipment(id): ... return Shipment.get(id) """ @wraps(func) def wrapper(*args, **kwargs): response = func(*args, **kwargs) return render_json_obj_with_requested_structure(response) return wrapper
[ "def", "as_obj", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "render_json_obj_with_requested_structure", "(", "response", ")", "return", "wrapper" ]
34.833333
17.555556
def number_of_bytes_to_modify(buf_len, fuzz_factor): """Calculate number of bytes to modify. :param buf_len: len of data buffer to fuzz. :param fuzz_factor: degree of fuzzing. :return: number of bytes to change. """ return random.randrange(math.ceil((float(buf_len) / fuzz_factor))) + 1
[ "def", "number_of_bytes_to_modify", "(", "buf_len", ",", "fuzz_factor", ")", ":", "return", "random", ".", "randrange", "(", "math", ".", "ceil", "(", "(", "float", "(", "buf_len", ")", "/", "fuzz_factor", ")", ")", ")", "+", "1" ]
38
12
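Given the random and math imports the module already relies on, the result is uniform over 1..ceil(buf_len / fuzz_factor); a deterministic sketch:

import math, random

random.seed(0)  # seeded only to make the sketch reproducible
n = number_of_bytes_to_modify(1024, fuzz_factor=10)
# math.ceil(1024 / 10) == 103, so n falls somewhere in 1..103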
def __merge_nearest_successors(self, node): """! @brief Find nearest successors and merge them. @param[in] node (non_leaf_node): Node whose two nearest successors should be merged. @return (bool): True if merging has been successfully performed, otherwise False. """ merging_result = False if (node.successors[0].type == cfnode_type.CFNODE_NONLEAF): [nearest_child_node1, nearest_child_node2] = node.get_nearest_successors(self.__type_measurement) if (len(nearest_child_node1.successors) + len(nearest_child_node2.successors) <= self.__branch_factor): node.successors.remove(nearest_child_node2) if (nearest_child_node2.type == cfnode_type.CFNODE_LEAF): self.__leafes.remove(nearest_child_node2) nearest_child_node1.merge(nearest_child_node2) merging_result = True return merging_result
[ "def", "__merge_nearest_successors", "(", "self", ",", "node", ")", ":", "merging_result", "=", "False", "if", "(", "node", ".", "successors", "[", "0", "]", ".", "type", "==", "cfnode_type", ".", "CFNODE_NONLEAF", ")", ":", "[", "nearest_child_node1", ",", "nearest_child_node2", "]", "=", "node", ".", "get_nearest_successors", "(", "self", ".", "__type_measurement", ")", "if", "(", "len", "(", "nearest_child_node1", ".", "successors", ")", "+", "len", "(", "nearest_child_node2", ".", "successors", ")", "<=", "self", ".", "__branch_factor", ")", ":", "node", ".", "successors", ".", "remove", "(", "nearest_child_node2", ")", "if", "(", "nearest_child_node2", ".", "type", "==", "cfnode_type", ".", "CFNODE_LEAF", ")", ":", "self", ".", "__leafes", ".", "remove", "(", "nearest_child_node2", ")", "nearest_child_node1", ".", "merge", "(", "nearest_child_node2", ")", "merging_result", "=", "True", "return", "merging_result" ]
42.6
27.04
def foreach_model(self, fn): """Apply the given function to each model replica in each worker. Returns: List of results from applying the function. """ results = ray.get([w.foreach_model.remote(fn) for w in self.workers]) out = [] for r in results: out.extend(r) return out
[ "def", "foreach_model", "(", "self", ",", "fn", ")", ":", "results", "=", "ray", ".", "get", "(", "[", "w", ".", "foreach_model", ".", "remote", "(", "fn", ")", "for", "w", "in", "self", ".", "workers", "]", ")", "out", "=", "[", "]", "for", "r", "in", "results", ":", "out", ".", "extend", "(", "r", ")", "return", "out" ]
28.666667
20.333333
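A hypothetical call site, assuming self above belongs to a runner object whose workers hold torch-style model replicas:

# count parameters on every replica across all Ray workers
counts = runner.foreach_model(lambda m: sum(p.numel() for p in m.parameters()))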
def get(img, cache_dir=CACHE_DIR, iterative=False): """Validate image input.""" if os.path.isfile(img): wal_img = img elif os.path.isdir(img): if iterative: wal_img = get_next_image(img) else: wal_img = get_random_image(img) else: logging.error("No valid image file found.") sys.exit(1) wal_img = os.path.abspath(wal_img) # Cache the image file path. util.save_file(wal_img, os.path.join(cache_dir, "wal")) logging.info("Using image \033[1;37m%s\033[0m.", os.path.basename(wal_img)) return wal_img
[ "def", "get", "(", "img", ",", "cache_dir", "=", "CACHE_DIR", ",", "iterative", "=", "False", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "img", ")", ":", "wal_img", "=", "img", "elif", "os", ".", "path", ".", "isdir", "(", "img", ")", ":", "if", "iterative", ":", "wal_img", "=", "get_next_image", "(", "img", ")", "else", ":", "wal_img", "=", "get_random_image", "(", "img", ")", "else", ":", "logging", ".", "error", "(", "\"No valid image file found.\"", ")", "sys", ".", "exit", "(", "1", ")", "wal_img", "=", "os", ".", "path", ".", "abspath", "(", "wal_img", ")", "# Cache the image file path.", "util", ".", "save_file", "(", "wal_img", ",", "os", ".", "path", ".", "join", "(", "cache_dir", ",", "\"wal\"", ")", ")", "logging", ".", "info", "(", "\"Using image \\033[1;37m%s\\033[0m.\"", ",", "os", ".", "path", ".", "basename", "(", "wal_img", ")", ")", "return", "wal_img" ]
25.26087
21.652174
def copy_resource(self, container, resource, local_filename): """ Identical to :meth:`dockermap.client.base.DockerClientWrapper.copy_resource` with additional logging. """ self.push_log("Receiving tarball for resource '{0}:{1}' and storing as {2}".format(container, resource, local_filename)) super(DockerFabricClient, self).copy_resource(container, resource, local_filename)
[ "def", "copy_resource", "(", "self", ",", "container", ",", "resource", ",", "local_filename", ")", ":", "self", ".", "push_log", "(", "\"Receiving tarball for resource '{0}:{1}' and storing as {2}\"", ".", "format", "(", "container", ",", "resource", ",", "local_filename", ")", ")", "super", "(", "DockerFabricClient", ",", "self", ")", ".", "copy_resource", "(", "container", ",", "resource", ",", "local_filename", ")" ]
68.333333
38
def save(self, *args, **kwargs): """ Before saving, if slide is for a publication, use publication info for slide's title, subtitle, description. """ if self.publication: publication = self.publication if not self.title: self.title = publication.title if not self.subtitle: first_author = publication.first_author if first_author == publication.last_author: authors = first_author else: authors = '{} et al.'.format(first_author) self.subtitle = '{}, {} ({})'.format(authors, publication.journal, publication.year) if not self.description: self.description = publication.abstract if self.publication.year and not self.pk: delta = timezone.now() - self.publish_datetime if self.publish_datetime <= timezone.now() and delta.days == 0: self.publish_datetime = datetime.datetime( year=int(self.publication.year), month=int(self.publication.month or 1), day=int(self.publication.day or 1), ) super().save(*args, **kwargs)
[ "def", "save", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "publication", ":", "publication", "=", "self", ".", "publication", "if", "not", "self", ".", "title", ":", "self", ".", "title", "=", "publication", ".", "title", "if", "not", "self", ".", "subtitle", ":", "first_author", "=", "publication", ".", "first_author", "if", "first_author", "==", "publication", ".", "last_author", ":", "authors", "=", "first_author", "else", ":", "authors", "=", "'{} et al.'", ".", "format", "(", "first_author", ")", "self", ".", "subtitle", "=", "'{}, {} ({})'", ".", "format", "(", "authors", ",", "publication", ".", "journal", ",", "publication", ".", "year", ")", "if", "not", "self", ".", "description", ":", "self", ".", "description", "=", "publication", ".", "abstract", "if", "self", ".", "publication", ".", "year", "and", "not", "self", ".", "pk", ":", "delta", "=", "timezone", ".", "now", "(", ")", "-", "self", ".", "publish_datetime", "if", "self", ".", "publish_datetime", "<=", "timezone", ".", "now", "(", ")", "and", "delta", ".", "days", "==", "0", ":", "self", ".", "publish_datetime", "=", "datetime", ".", "datetime", "(", "year", "=", "int", "(", "self", ".", "publication", ".", "year", ")", ",", "month", "=", "int", "(", "self", ".", "publication", ".", "month", "or", "1", ")", ",", "day", "=", "int", "(", "self", ".", "publication", ".", "day", "or", "1", ")", ",", ")", "super", "(", ")", ".", "save", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
36.027778
19.972222
def _check_multi_statement_line(self, node, line): """Check for lines containing multiple statements.""" # Do not warn about multiple nested context managers # in with statements. if isinstance(node, nodes.With): return # For try... except... finally..., the two nodes # appear to be on the same line due to how the AST is built. if isinstance(node, nodes.TryExcept) and isinstance( node.parent, nodes.TryFinally ): return if ( isinstance(node.parent, nodes.If) and not node.parent.orelse and self.config.single_line_if_stmt ): return if ( isinstance(node.parent, nodes.ClassDef) and len(node.parent.body) == 1 and self.config.single_line_class_stmt ): return self.add_message("multiple-statements", node=node) self._visited_lines[line] = 2
[ "def", "_check_multi_statement_line", "(", "self", ",", "node", ",", "line", ")", ":", "# Do not warn about multiple nested context managers", "# in with statements.", "if", "isinstance", "(", "node", ",", "nodes", ".", "With", ")", ":", "return", "# For try... except... finally..., the two nodes", "# appear to be on the same line due to how the AST is built.", "if", "isinstance", "(", "node", ",", "nodes", ".", "TryExcept", ")", "and", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "TryFinally", ")", ":", "return", "if", "(", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "If", ")", "and", "not", "node", ".", "parent", ".", "orelse", "and", "self", ".", "config", ".", "single_line_if_stmt", ")", ":", "return", "if", "(", "isinstance", "(", "node", ".", "parent", ",", "nodes", ".", "ClassDef", ")", "and", "len", "(", "node", ".", "parent", ".", "body", ")", "==", "1", "and", "self", ".", "config", ".", "single_line_class_stmt", ")", ":", "return", "self", ".", "add_message", "(", "\"multiple-statements\"", ",", "node", "=", "node", ")", "self", ".", "_visited_lines", "[", "line", "]", "=", "2" ]
36.884615
15.307692
def _create_model_matrices(self): """ Creates model matrices/vectors Returns ---------- None (changes model attributes) """ self.model_Y = self.data self.model_scores = np.zeros((self.X.shape[1], self.model_Y.shape[0]+1))
[ "def", "_create_model_matrices", "(", "self", ")", ":", "self", ".", "model_Y", "=", "self", ".", "data", "self", ".", "model_scores", "=", "np", ".", "zeros", "(", "(", "self", ".", "X", ".", "shape", "[", "1", "]", ",", "self", ".", "model_Y", ".", "shape", "[", "0", "]", "+", "1", ")", ")" ]
27
18.3
def status(self): """ Check if the daemon is currently running. Requires procfs, so it will only work on POSIX-compliant OSes. """ # Get the pid from the pidfile try: pf = file(self.pidfile,'r') pid = int(pf.read().strip()) pf.close() except IOError: pid = None if not pid: return False try: return os.path.exists("/proc/{0}".format(pid)) except OSError: return False
[ "def", "status", "(", "self", ")", ":", "# Get the pid from the pidfile", "try", ":", "pf", "=", "file", "(", "self", ".", "pidfile", ",", "'r'", ")", "pid", "=", "int", "(", "pf", ".", "read", "(", ")", ".", "strip", "(", ")", ")", "pf", ".", "close", "(", ")", "except", "IOError", ":", "pid", "=", "None", "if", "not", "pid", ":", "return", "False", "try", ":", "return", "os", ".", "path", ".", "exists", "(", "\"/proc/{0}\"", ".", "format", "(", "pid", ")", ")", "except", "OSError", ":", "return", "False" ]
19.5
20.5
def _record_extension(self, key, value): """ To structure a record extension property bean """ record_bean = { 'value': value, 'displayName': self._text_bean(key), 'description': self._text_bean(key), 'displayLabel': self._text_bean(key), 'associatedId': str(self.ident) } return record_bean
[ "def", "_record_extension", "(", "self", ",", "key", ",", "value", ")", ":", "record_bean", "=", "{", "'value'", ":", "value", ",", "'displayName'", ":", "self", ".", "_text_bean", "(", "key", ")", ",", "'description'", ":", "self", ".", "_text_bean", "(", "key", ")", ",", "'displayLabel'", ":", "self", ".", "_text_bean", "(", "key", ")", ",", "'associatedId'", ":", "str", "(", "self", ".", "ident", ")", "}", "return", "record_bean" ]
32.333333
9.666667
def _make_metadata_request(self, meta_id, metadata_type=None): """ Get the Metadata. The Session initializes with 'COMPACT-DECODED' as the format type. If that returns a DTD error then we change to the 'STANDARD-XML' format and try again. :param meta_id: The name of the resource, class, or lookup to get metadata for :param metadata_type: The RETS metadata type :return: list """ # If this metadata _request has already happened, return the saved result. key = '{0!s}:{1!s}'.format(metadata_type, meta_id) if key in self.metadata_responses and self.cache_metadata: response = self.metadata_responses[key] else: response = self._request( capability='GetMetadata', options={ 'query': { 'Type': metadata_type, 'ID': meta_id, 'Format': self.metadata_format } } ) self.metadata_responses[key] = response if self.metadata_format == 'COMPACT-DECODED': parser = CompactMetadata() else: parser = StandardXMLetadata() try: return parser.parse(response=response, metadata_type=metadata_type) except RETSException as e: # Remove response from cache self.metadata_responses.pop(key, None) # If the server responds with an invalid parameter for COMPACT-DECODED, try STANDARD-XML if self.metadata_format != 'STANDARD-XML' and e.reply_code in ['20513', '20514']: self.metadata_responses.pop(key, None) self.metadata_format = 'STANDARD-XML' return self._make_metadata_request(meta_id=meta_id, metadata_type=metadata_type) raise RETSException(e.reply_text, e.reply_code)
[ "def", "_make_metadata_request", "(", "self", ",", "meta_id", ",", "metadata_type", "=", "None", ")", ":", "# If this metadata _request has already happened, returned the saved result.", "key", "=", "'{0!s}:{1!s}'", ".", "format", "(", "metadata_type", ",", "meta_id", ")", "if", "key", "in", "self", ".", "metadata_responses", "and", "self", ".", "cache_metadata", ":", "response", "=", "self", ".", "metadata_responses", "[", "key", "]", "else", ":", "response", "=", "self", ".", "_request", "(", "capability", "=", "'GetMetadata'", ",", "options", "=", "{", "'query'", ":", "{", "'Type'", ":", "metadata_type", ",", "'ID'", ":", "meta_id", ",", "'Format'", ":", "self", ".", "metadata_format", "}", "}", ")", "self", ".", "metadata_responses", "[", "key", "]", "=", "response", "if", "self", ".", "metadata_format", "==", "'COMPACT-DECODED'", ":", "parser", "=", "CompactMetadata", "(", ")", "else", ":", "parser", "=", "StandardXMLetadata", "(", ")", "try", ":", "return", "parser", ".", "parse", "(", "response", "=", "response", ",", "metadata_type", "=", "metadata_type", ")", "except", "RETSException", "as", "e", ":", "# Remove response from cache", "self", ".", "metadata_responses", ".", "pop", "(", "key", ",", "None", ")", "# If the server responds with an invalid parameter for COMPACT-DECODED, try STANDARD-XML", "if", "self", ".", "metadata_format", "!=", "'STANDARD-XML'", "and", "e", ".", "reply_code", "in", "[", "'20513'", ",", "'20514'", "]", ":", "self", ".", "metadata_responses", ".", "pop", "(", "key", ",", "None", ")", "self", ".", "metadata_format", "=", "'STANDARD-XML'", "return", "self", ".", "_make_metadata_request", "(", "meta_id", "=", "meta_id", ",", "metadata_type", "=", "metadata_type", ")", "raise", "RETSException", "(", "e", ".", "reply_text", ",", "e", ".", "reply_code", ")" ]
44.97619
21.97619
def get_vertex_string(self, i): """Return a string based on the atom number""" number = self.numbers[i] if number == 0: return Graph.get_vertex_string(self, i) else: # pad with zeros to make sure that string sort is identical to number sort return "%03i" % number
[ "def", "get_vertex_string", "(", "self", ",", "i", ")", ":", "number", "=", "self", ".", "numbers", "[", "i", "]", "if", "number", "==", "0", ":", "return", "Graph", ".", "get_vertex_string", "(", "self", ",", "i", ")", "else", ":", "# pad with zeros to make sure that string sort is identical to number sort", "return", "\"%03i\"", "%", "number" ]
40.5
15.5
def _set_LED(self, status): """ _set_LED: boolean -> None Sets the status of the remote LED """ # DIO pin 1 (LED), active low self.hw.remote_at( dest_addr=self.remote_addr, command='D1', parameter='\x04' if status else '\x05')
[ "def", "_set_LED", "(", "self", ",", "status", ")", ":", "# DIO pin 1 (LED), active low", "self", ".", "hw", ".", "remote_at", "(", "dest_addr", "=", "self", ".", "remote_addr", ",", "command", "=", "'D1'", ",", "parameter", "=", "'\\x04'", "if", "status", "else", "'\\x05'", ")" ]
27.363636
9.545455
def union(self, other, recursive=True, overwrite=False): """ Recursively compute union of data. For dictionaries, items for specific keys will be combined into a list, depending on the status of the overwrite= parameter. For lists, items will be appended and reduced to unique items. This method is meant to be analogous to set.union for composite objects. Args: other (composite): Other composite object to union with. recursive (bool): Whether or not to perform the operation recursively, for all nested composite objects. overwrite (bool): Whether or not to overwrite entries with the same key in a nested dictionary. """ if not isinstance(other, composite): raise AssertionError('Cannot union composite and {} types'.format(type(other))) if self.meta_type != other.meta_type: return composite([self, other]) if self.meta_type == 'list': keep = [] for item in self._list: keep.append(item) for item in other._list: if item not in self._list: keep.append(item) return composite(keep) elif self.meta_type == 'dict': keep = {} for key in list(set(list(self._dict.keys()) + list(other._dict.keys()))): left = self._dict.get(key) right = other._dict.get(key) if recursive and \ isinstance(left, composite) and \ isinstance(right, composite): keep[key] = left.union(right, recursive=recursive, overwrite=overwrite) elif left == right: keep[key] = left elif left is None: keep[key] = right elif right is None: keep[key] = left elif overwrite: keep[key] = right else: keep[key] = composite([left, right]) return composite(keep) return
[ "def", "union", "(", "self", ",", "other", ",", "recursive", "=", "True", ",", "overwrite", "=", "False", ")", ":", "if", "not", "isinstance", "(", "other", ",", "composite", ")", ":", "raise", "AssertionError", "(", "'Cannot union composite and {} types'", ".", "format", "(", "type", "(", "other", ")", ")", ")", "if", "self", ".", "meta_type", "!=", "other", ".", "meta_type", ":", "return", "composite", "(", "[", "self", ",", "other", "]", ")", "if", "self", ".", "meta_type", "==", "'list'", ":", "keep", "=", "[", "]", "for", "item", "in", "self", ".", "_list", ":", "keep", ".", "append", "(", "item", ")", "for", "item", "in", "other", ".", "_list", ":", "if", "item", "not", "in", "self", ".", "_list", ":", "keep", ".", "append", "(", "item", ")", "return", "composite", "(", "keep", ")", "elif", "self", ".", "meta_type", "==", "'dict'", ":", "keep", "=", "{", "}", "for", "key", "in", "list", "(", "set", "(", "list", "(", "self", ".", "_dict", ".", "keys", "(", ")", ")", "+", "list", "(", "other", ".", "_dict", ".", "keys", "(", ")", ")", ")", ")", ":", "left", "=", "self", ".", "_dict", ".", "get", "(", "key", ")", "right", "=", "other", ".", "_dict", ".", "get", "(", "key", ")", "if", "recursive", "and", "isinstance", "(", "left", ",", "composite", ")", "and", "isinstance", "(", "right", ",", "composite", ")", ":", "keep", "[", "key", "]", "=", "left", ".", "union", "(", "right", ",", "recursive", "=", "recursive", ",", "overwrite", "=", "overwrite", ")", "elif", "left", "==", "right", ":", "keep", "[", "key", "]", "=", "left", "elif", "left", "is", "None", ":", "keep", "[", "key", "]", "=", "right", "elif", "right", "is", "None", ":", "keep", "[", "key", "]", "=", "left", "elif", "overwrite", ":", "keep", "[", "key", "]", "=", "right", "else", ":", "keep", "[", "key", "]", "=", "composite", "(", "[", "left", ",", "right", "]", ")", "return", "composite", "(", "keep", ")", "return" ]
42.2
15.52
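A behavioral sketch, assuming the composite constructor wraps plain dicts and lists the way the rest of this module does:

a = composite({'x': 1, 'tags': composite(['red', 'blue'])})
b = composite({'x': 2, 'tags': composite(['blue', 'green'])})
a.union(b)                  # 'x' -> composite([1, 2]), 'tags' -> union ['red', 'blue', 'green']
a.union(b, overwrite=True)  # 'x' -> 2, since the right-hand value wins on conflicting keys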
def started(name=None, user=None, group=None, chroot=None, caps=None, no_caps=False, pidfile=None, enable_core=False, fd_limit=None, verbose=False, debug=False, trace=False, yydebug=False, persist_file=None, control=None, worker_threads=None, *args, **kwargs): ''' Ensures that syslog-ng is started via the given parameters. Users shouldn't use this function if the service module is available on their system. ''' return __salt__['syslog_ng.start'](name=name, user=user, group=group, chroot=chroot, caps=caps, no_caps=no_caps, pidfile=pidfile, enable_core=enable_core, fd_limit=fd_limit, verbose=verbose, debug=debug, trace=trace, yydebug=yydebug, persist_file=persist_file, control=control, worker_threads=worker_threads)
[ "def", "started", "(", "name", "=", "None", ",", "user", "=", "None", ",", "group", "=", "None", ",", "chroot", "=", "None", ",", "caps", "=", "None", ",", "no_caps", "=", "False", ",", "pidfile", "=", "None", ",", "enable_core", "=", "False", ",", "fd_limit", "=", "None", ",", "verbose", "=", "False", ",", "debug", "=", "False", ",", "trace", "=", "False", ",", "yydebug", "=", "False", ",", "persist_file", "=", "None", ",", "control", "=", "None", ",", "worker_threads", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "__salt__", "[", "'syslog_ng.start'", "]", "(", "name", "=", "name", ",", "user", "=", "user", ",", "group", "=", "group", ",", "chroot", "=", "chroot", ",", "caps", "=", "caps", ",", "no_caps", "=", "no_caps", ",", "pidfile", "=", "pidfile", ",", "enable_core", "=", "enable_core", ",", "fd_limit", "=", "fd_limit", ",", "verbose", "=", "verbose", ",", "debug", "=", "debug", ",", "trace", "=", "trace", ",", "yydebug", "=", "yydebug", ",", "persist_file", "=", "persist_file", ",", "control", "=", "control", ",", "worker_threads", "=", "worker_threads", ")" ]
37.5
17.6
def where(self, cond, value, other=None, subset=None, **kwargs): """ Apply a function elementwise, updating the HTML representation with a style which is selected in accordance with the return value of a function. .. versionadded:: 0.21.0 Parameters ---------- cond : callable ``cond`` should take a scalar and return a boolean value : str applied when ``cond`` returns true other : str applied when ``cond`` returns false subset : IndexSlice a valid indexer to limit ``data`` to *before* applying the function. Consider using a pandas.IndexSlice kwargs : dict pass along to ``cond`` Returns ------- self : Styler See Also -------- Styler.applymap """ if other is None: other = '' return self.applymap(lambda val: value if cond(val) else other, subset=subset, **kwargs)
[ "def", "where", "(", "self", ",", "cond", ",", "value", ",", "other", "=", "None", ",", "subset", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "other", "is", "None", ":", "other", "=", "''", "return", "self", ".", "applymap", "(", "lambda", "val", ":", "value", "if", "cond", "(", "val", ")", "else", "other", ",", "subset", "=", "subset", ",", "*", "*", "kwargs", ")" ]
28.361111
20.861111
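Because this is the pandas Styler API, a minimal example follows directly from the docstring:

import pandas as pd

df = pd.DataFrame({'a': [1, -2], 'b': [-3, 4]})
styled = df.style.where(lambda v: v < 0, 'color: red', other='color: black')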
def _sector_erase_program_double_buffer(self, progress_cb=_stub_progress): """! @brief Double-buffered program by performing sector erases.""" actual_sector_erase_count = 0 actual_sector_erase_weight = 0 progress = 0 progress_cb(0.0) # Fill in same flag for all pages. This is done up front so we're not trying # to read from flash while simultaneously programming it. progress = self._scan_pages_for_same(progress_cb) # Erase all sectors up front. self.flash.init(self.flash.Operation.ERASE) for sector in self.sector_list: if sector.are_any_pages_not_same(): # Erase the sector self.flash.erase_sector(sector.addr) # Update progress progress += sector.erase_weight if self.sector_erase_weight > 0: progress_cb(float(progress) / float(self.sector_erase_weight)) self.flash.uninit() # Set up page and buffer info. current_buf = 0 next_buf = 1 page, i = self._next_nonsame_page(0) # Make sure there are actually pages to program differently from current flash contents. if page is not None: self.flash.init(self.flash.Operation.PROGRAM) # Load first page buffer self.flash.load_page_buffer(current_buf, page.addr, page.data) while page is not None: assert page.same is not None # Kick off this page program. current_addr = page.addr current_weight = page.get_program_weight() self.flash.start_program_page_with_buffer(current_buf, current_addr) actual_sector_erase_count += 1 actual_sector_erase_weight += page.get_program_weight() # Get next page and load it. page, i = self._next_nonsame_page(i) if page is not None: self.flash.load_page_buffer(next_buf, page.addr, page.data) # Wait for the program to complete. result = self.flash.wait_for_completion() if result != 0: raise FlashProgramFailure('program_page(0x%x) error: %i' % (current_addr, result), current_addr, result) # Swap buffers. current_buf, next_buf = next_buf, current_buf # Update progress progress += current_weight if self.sector_erase_weight > 0: progress_cb(float(progress) / float(self.sector_erase_weight)) self.flash.uninit() progress_cb(1.0) LOG.debug("Estimated sector erase programmed page count: %i", self.sector_erase_count) LOG.debug("Actual sector erase programmed page count: %i", actual_sector_erase_count) return FlashBuilder.FLASH_SECTOR_ERASE
[ "def", "_sector_erase_program_double_buffer", "(", "self", ",", "progress_cb", "=", "_stub_progress", ")", ":", "actual_sector_erase_count", "=", "0", "actual_sector_erase_weight", "=", "0", "progress", "=", "0", "progress_cb", "(", "0.0", ")", "# Fill in same flag for all pages. This is done up front so we're not trying", "# to read from flash while simultaneously programming it.", "progress", "=", "self", ".", "_scan_pages_for_same", "(", "progress_cb", ")", "# Erase all sectors up front.", "self", ".", "flash", ".", "init", "(", "self", ".", "flash", ".", "Operation", ".", "ERASE", ")", "for", "sector", "in", "self", ".", "sector_list", ":", "if", "sector", ".", "are_any_pages_not_same", "(", ")", ":", "# Erase the sector", "self", ".", "flash", ".", "erase_sector", "(", "sector", ".", "addr", ")", "# Update progress", "progress", "+=", "sector", ".", "erase_weight", "if", "self", ".", "sector_erase_weight", ">", "0", ":", "progress_cb", "(", "float", "(", "progress", ")", "/", "float", "(", "self", ".", "sector_erase_weight", ")", ")", "self", ".", "flash", ".", "uninit", "(", ")", "# Set up page and buffer info.", "current_buf", "=", "0", "next_buf", "=", "1", "page", ",", "i", "=", "self", ".", "_next_nonsame_page", "(", "0", ")", "# Make sure there are actually pages to program differently from current flash contents.", "if", "page", "is", "not", "None", ":", "self", ".", "flash", ".", "init", "(", "self", ".", "flash", ".", "Operation", ".", "PROGRAM", ")", "# Load first page buffer", "self", ".", "flash", ".", "load_page_buffer", "(", "current_buf", ",", "page", ".", "addr", ",", "page", ".", "data", ")", "while", "page", "is", "not", "None", ":", "assert", "page", ".", "same", "is", "not", "None", "# Kick off this page program.", "current_addr", "=", "page", ".", "addr", "current_weight", "=", "page", ".", "get_program_weight", "(", ")", "self", ".", "flash", ".", "start_program_page_with_buffer", "(", "current_buf", ",", "current_addr", ")", "actual_sector_erase_count", "+=", "1", "actual_sector_erase_weight", "+=", "page", ".", "get_program_weight", "(", ")", "# Get next page and load it.", "page", ",", "i", "=", "self", ".", "_next_nonsame_page", "(", "i", ")", "if", "page", "is", "not", "None", ":", "self", ".", "flash", ".", "load_page_buffer", "(", "next_buf", ",", "page", ".", "addr", ",", "page", ".", "data", ")", "# Wait for the program to complete.", "result", "=", "self", ".", "flash", ".", "wait_for_completion", "(", ")", "if", "result", "!=", "0", ":", "raise", "FlashProgramFailure", "(", "'program_page(0x%x) error: %i'", "%", "(", "current_addr", ",", "result", ")", ",", "current_addr", ",", "result", ")", "# Swap buffers.", "current_buf", ",", "next_buf", "=", "next_buf", ",", "current_buf", "# Update progress", "progress", "+=", "current_weight", "if", "self", ".", "sector_erase_weight", ">", "0", ":", "progress_cb", "(", "float", "(", "progress", ")", "/", "float", "(", "self", ".", "sector_erase_weight", ")", ")", "self", ".", "flash", ".", "uninit", "(", ")", "progress_cb", "(", "1.0", ")", "LOG", ".", "debug", "(", "\"Estimated sector erase programmed page count: %i\"", ",", "self", ".", "sector_erase_count", ")", "LOG", ".", "debug", "(", "\"Actual sector erase programmed page count: %i\"", ",", "actual_sector_erase_count", ")", "return", "FlashBuilder", ".", "FLASH_SECTOR_ERASE" ]
38.736842
21.881579
def get_account_user(self, account_id, user_id, **kwargs): # noqa: E501 """Details of the user. # noqa: E501 An endpoint for retrieving details of the user. **Example usage:** `curl https://api.us-east-1.mbedcloud.com/v3/accounts/{accountID}/users/{userID} -H 'Authorization: Bearer API_KEY'` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.get_account_user(account_id, user_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str account_id: Account ID. (required) :param str user_id: The ID of the user to be retrieved. (required) :return: UserInfoResp If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.get_account_user_with_http_info(account_id, user_id, **kwargs) # noqa: E501 else: (data) = self.get_account_user_with_http_info(account_id, user_id, **kwargs) # noqa: E501 return data
[ "def", "get_account_user", "(", "self", ",", "account_id", ",", "user_id", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'asynchronous'", ")", ":", "return", "self", ".", "get_account_user_with_http_info", "(", "account_id", ",", "user_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "get_account_user_with_http_info", "(", "account_id", ",", "user_id", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
54.5
29.409091
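The synchronous/asynchronous split described in the docstring works like this, assuming an instantiated api client and valid IDs:

user = api.get_account_user(account_id, user_id)  # blocking; returns UserInfoResp
thread = api.get_account_user(account_id, user_id, asynchronous=True)
user = thread.get()  # resolve the request thread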