repo
stringlengths
1
29
path
stringlengths
24
332
code
stringlengths
39
579k
overpython
overpython//overpython.pyfile:/overpython.py:function:isAccesible/isAccesible
def isAccesible(path, mode='r'):
    """
    overpython.isAccessible(path, mode='r')

    Check if the file or directory at `path` can be accessed by the
    program using `mode` open flags.

    :param path: Path to test.
    :param mode: Mode string passed to ``open`` (default ``'r'``).
    :return: True if ``open(path, mode)`` succeeds, False otherwise.
    """
    try:
        # Context manager guarantees the handle is closed even if an
        # error occurs after opening; OSError also covers IOError
        # (its alias since Python 3.3).
        with open(path, mode):
            pass
    except OSError:
        return False
    return True
twisted
twisted//internet/interfaces.pyclass:IHostnameResolver/resolveHostName
def resolveHostName(resolutionReceiver, hostName, portNumber=0,
    addressTypes=None, transportSemantics='TCP'):
    """
    Initiate a hostname resolution.

    Interface method: no body — implementations perform the resolution.

    @param resolutionReceiver: an object that will receive each resolved
        address as it arrives.
    @type resolutionReceiver: L{IResolutionReceiver}

    @param hostName: The name of the host to resolve.  If this contains
        non-ASCII code points, they will be converted to IDNA first.
    @type hostName: L{unicode}

    @param portNumber: The port number that the returned addresses should
        include.
    @type portNumber: L{int} greater than or equal to 0 and less than 65536

    @param addressTypes: An iterable of implementors of L{IAddress} that
        are acceptable values for C{resolutionReceiver} to receive to its
        L{addressResolved <IResolutionReceiver.addressResolved>}.  In
        practice, this means an iterable containing
        L{twisted.internet.address.IPv4Address},
        L{twisted.internet.address.IPv6Address}, both, or neither.
    @type addressTypes: L{collections.abc.Iterable} of L{type}

    @param transportSemantics: A string describing the semantics of the
        transport; either C{'TCP'} for stream-oriented transports or
        C{'UDP'} for datagram-oriented; see
        L{twisted.internet.address.IPv6Address.type} and
        L{twisted.internet.address.IPv4Address.type}.
    @type transportSemantics: native L{str}

    @return: The resolution in progress.
    @rtype: L{IResolutionReceiver}
    """
ftw.geo-1.4.2
ftw.geo-1.4.2//ftw/geo/interfaces.pyclass:IGeocodableLocation/getLocationString
def getLocationString():
    """Build a geocodable location string from the content type's
    location related fields.

    Interface method (no body) — implementations return a single string
    suitable for handing to a geocoding service; presumably assembled
    from address/city/country fields of the content object.
    """
streamlit
streamlit//env_util.pyfile:/env_util.py:function:is_executable_in_path/is_executable_in_path
def is_executable_in_path(name):
    """Check if executable ``name`` can be found on the OS PATH.

    :param name: Base name of the executable (e.g. ``'python'``).
    :return: True if an executable with that name is on PATH.
    """
    # shutil.which replaces distutils.spawn.find_executable: distutils
    # is deprecated since 3.10 and removed in Python 3.12.
    from shutil import which
    return which(name) is not None
digsandpaper
digsandpaper//coarse/preprocess/constraint_remap_factory.pyclass:ConstraintReMapSimilarity/subtract_lists
@staticmethod def subtract_lists(similar_articles, blacklisted_articles): """ Helper function to perform a set subtract between the list of articles returned by faiss and blacklisted articles for the given IFP by users :param similar_articles: list of similar articles to the query as returned by FAISS :param blacklisted_articles: blacklisted articles for this query as set by users on SAGE :return: similar_articles - blacklisted_articles """ return list(set(similar_articles) - set(blacklisted_articles))
byteArk-0.0.12
byteArk-0.0.12//byteArk/urllib3/util/ssl_.pyfile:/byteArk/urllib3/util/ssl_.py:function:_const_compare_digest_backport/_const_compare_digest_backport
def _const_compare_digest_backport(a, b): """ Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise. """ result = abs(len(a) - len(b)) for l, r in zip(bytearray(a), bytearray(b)): result |= l ^ r return result == 0
fake-bpy-module-2.79-20200428
fake-bpy-module-2.79-20200428//bpy/ops/curve.pyfile:/bpy/ops/curve.py:function:vertex_add/vertex_add
def vertex_add(location: float=(0.0, 0.0, 0.0)):
    """Add a new control point (linked to only selected end-curve one,
    if any)

    :param location: Location, Location to add new vertex at
    :type location: float

    Stub generated for IDE completion (fake-bpy-module); the real
    operator is implemented in Blender's C code.
    """
    # NOTE(review): the annotation says float but the default is a
    # 3-tuple; this mirrors the upstream generated stub.
    pass
pyrolite
pyrolite//ext/alphamelts/automation.pyfile:/ext/alphamelts/automation.py:function:_enqueue_output/_enqueue_output
def _enqueue_output(out, queue): """ Send output to a queue. Parameters ----------- out Readable output object. queue : :class:`queue.Queue` Queue to send ouptut to. """ for line in iter(out.readline, b''): queue.put(line) out.close()
photovoltaic
photovoltaic//semi.pyfile:/semi.py:function:equilibrium_carrier/equilibrium_carrier
def equilibrium_carrier(N, ni=8600000000.0):
    """Return the majority and minority carrier concentrations (cm-3) of
    a semiconductor at equilibrium, where N is the doping level (cm-3)
    and ni is the intrinsic carrier concentration (cm-3).

    Strictly N and ni just have to be in the same units but (cm-3) is
    almost always used.

    :param N: doping level (cm-3)
    :param ni: intrinsic carrier concentration (cm-3)
    :return: tuple (majority, minority) concentrations (cm-3)
    """
    majority = N
    # Mass-action law: n * p = ni**2, so minority = ni**2 / N.
    # (The original code had the ratio inverted: N / ni**2.)
    minority = ni ** 2 / N
    return majority, minority
forgi-2.0.2
forgi-2.0.2//forgi/threedee/model/_ensemble2.pyfile:/forgi/threedee/model/_ensemble2.py:function:is_stationary_adf/is_stationary_adf
def is_stationary_adf(y):
    """Whether a time series y is stationary or not, via the Augmented
    Dickey-Fuller test.

    :param y: A 1D np.array
    :returns: BOOLEAN — True if the ADF statistic is below the 5%%
        critical value.
    """
    import statsmodels.tsa.stattools as smtools
    result = smtools.adfuller(y)
    statistic = result[0]
    critical_values = result[4]
    return statistic < critical_values['5%']
jadlog-0.1.0
jadlog-0.1.0//jadlog/calcula.pyfile:/jadlog/calcula.py:function:peso_real/peso_real
def peso_real(largura, altura, profundidade, modalidade, peso):
    """Return the billable weight (cubed weight) for Jadlog freight
    cost calculation.

    Modalities:
      0 = Expresso (volumetric divisor 6000)
      4 = Rodoviario (volumetric divisor 3333)

    :param largura: width in centimeters
    :param altura: height in centimeters
    :param profundidade: depth in centimeters
    :param modalidade: modality (0 - Expresso / 4 - Rodoviario)
    :param peso: actual weight in kg (e.g. 27)
    :return: the larger of the cubed weight and the actual weight;
        unknown modalities fall back to the actual weight.
    """
    volume = largura * altura * profundidade
    if modalidade == 0:
        peso_cubagem = volume / 6000
    elif modalidade == 4:
        peso_cubagem = volume / 3333
    else:
        peso_cubagem = 0
    return peso_cubagem if peso_cubagem > peso else peso
markovchain
markovchain//image/traversal.pyclass:Spiral/_rspiral
@staticmethod
def _rspiral(width, height):
    """Reversed spiral generator.

    Yields integer (x, y) points tracing the outer ring of a
    width x height grid, then each successive inner ring.

    Parameters
    ----------
    width : `int`
        Spiral width.
    height : `int`
        Spiral height.

    Returns
    -------
    `generator` of (`int`, `int`)
        Points.
    """
    # Current ring: top-left corner (x0, y0), bottom-right (x1, y1).
    x0 = 0
    y0 = 0
    x1 = width - 1
    y1 = height - 1
    while x0 < x1 and y0 < y1:
        # Walk the four sides of the ring (top, right, bottom, left),
        # stopping one short of each corner so corners are not repeated,
        # then shrink the ring inward by one on every side.
        for x in range(x0, x1):
            yield x, y0
        for y in range(y0, y1):
            yield x1, y
        for x in range(x1, x0, -1):
            yield x, y1
        for y in range(y1, y0, -1):
            yield x0, y
        x0 += 1
        y0 += 1
        x1 -= 1
        y1 -= 1
    # Degenerate final ring: a single column (x0 == x1) or row (y0 == y1).
    if x0 == x1:
        for y in range(y0, y1 + 1):
            yield x0, y
    elif y0 == y1:
        for x in range(x0, x1 + 1):
            yield x, y0
fabric2-2.5.0
fabric2-2.5.0//fabric2/config.pyclass:Config/from_v1
@classmethod
def from_v1(cls, env, **kwargs):
    """
    Alternate constructor which uses Fabric 1's ``env`` dict for settings.

    All keyword arguments besides ``env`` are passed unmolested into the
    primary constructor, with the exception of ``overrides``, which is
    used internally & will end up resembling the data from ``env`` with
    the user-supplied overrides on top.

    .. warning::
        Because your own config overrides will win over data from
        ``env``, make sure you only set values you *intend* to change
        from your v1 environment!

    For details on exactly which ``env`` vars are imported and what they
    become in the new API, please see :ref:`v1-env-var-imports`.

    :param env:
        An explicit Fabric 1 ``env`` dict (technically, any
        ``fabric.utils._AttributeDict`` instance should work) to pull
        configuration from.

    .. versionadded:: 2.4
    """
    # User-supplied overrides win: setdefault below only fills in values
    # the caller did not already provide.
    data = kwargs.pop('overrides', {})
    for subdict in ('connect_kwargs', 'run', 'sudo', 'timeouts'):
        data.setdefault(subdict, {})
    data['run'].setdefault('pty', env.always_use_pty)
    data.setdefault('gateway', env.gateway)
    data.setdefault('forward_agent', env.forward_agent)
    if env.key_filename is not None:
        data['connect_kwargs'].setdefault('key_filename', env.key_filename)
    data['connect_kwargs'].setdefault('allow_agent', not env.no_agent)
    data.setdefault('ssh_config_path', env.ssh_config_path)
    data['sudo'].setdefault('password', env.sudo_password)
    passwd = env.password
    data['connect_kwargs'].setdefault('password', passwd)
    if not data['sudo']['password']:
        # Mirror v1 behavior: fall back to the login password for sudo
        # when no explicit sudo password was configured.
        data['sudo']['password'] = passwd
    data['sudo'].setdefault('prompt', env.sudo_prompt)
    data['timeouts'].setdefault('connect', env.timeout)
    data.setdefault('load_ssh_configs', env.use_ssh_config)
    data['run'].setdefault('warn', env.warn_only)
    kwargs['overrides'] = data
    return cls(**kwargs)
pyboto3-1.4.4
pyboto3-1.4.4//pyboto3/lexmodelbuildingservice.pyfile:/pyboto3/lexmodelbuildingservice.py:function:delete_utterances/delete_utterances
def delete_utterances(botName=None, userId=None):
    """
    Deletes stored utterances for a specific user of a bot.

    Amazon Lex stores the utterances that users send to your bot unless
    the childDirected field in the bot is set to true. Utterances are
    stored for 15 days for use with the operation, and then stored
    indefinitely for use in improving the ability of your bot to respond
    to user input. Use the DeleteStoredUtterances operation to manually
    delete stored utterances for a specific user.

    This operation requires permissions for the lex:DeleteUtterances
    action.

    See also: AWS API Documentation

    :example: response = client.delete_utterances(
        botName='string',
        userId='string'
    )

    :type botName: string
    :param botName: [REQUIRED] The name of the bot that stored the
        utterances.

    :type userId: string
    :param userId: [REQUIRED] The unique identifier for the user that
        made the utterances. This is the user ID that was sent in the or
        operation request that contained the utterance.
    """
    # Generated documentation stub (pyboto3): no runtime implementation.
    pass
treesapp
treesapp//HMMER_domainTblParser.pyfile:/HMMER_domainTblParser.py:function:overlap_length/overlap_length
def overlap_length(r_i: int, r_j: int, q_i: int, q_j: int) ->int:
    """
    Returns the number of positions the query (base) alignment overlaps
    with the reference (projected) alignment.

    :param q_i: query start position
    :param q_j: query end position
    :param r_i: reference start position
    :param r_j: reference end position
    :return: Number of positions the two alignments overlap (0 if they
        are disjoint)
    """
    # Disjoint either way: reference ends before the query starts, OR the
    # query ends before the reference starts. (The original second test
    # `q_i > r_j` duplicated the first condition instead of checking
    # `q_j < r_i`, so a query entirely left of the reference produced a
    # negative "overlap".)
    if r_j < q_i or q_j < r_i:
        return 0
    return min(r_j, q_j) - max(r_i, q_i)
xamcheck_utils-0.0.13
xamcheck_utils-0.0.13//src/xamcheck_utils/conversions.pyfile:/src/xamcheck_utils/conversions.py:function:string2int/string2int
def string2int(s):
    """Convert a numeric string to an int, truncating any fractional
    part.

    >>> string2int('111.111') == 111
    True
    """
    as_float = float(s)
    return int(as_float)
gcspath
gcspath//api.pyclass:GCSPath/cwd
@classmethod
def cwd(cls):
    """Unsupported on the GCS service: GCS has no working-directory
    concept, so this always raises NotImplementedError.
    """
    raise NotImplementedError(cls._NOT_SUPPORTED_MESSAGE.format(method=
        cls.cwd.__qualname__))
selinonlib
selinonlib//migrations/tainted_flow_strategy.pyclass:TaintedFlowStrategy/get_option_names
@classmethod
def get_option_names(cls):
    """Return the names of all options as a list."""
    return [option.name for option in cls.get_options()]
lemoncheesecake-1.4.8
lemoncheesecake-1.4.8//lemoncheesecake/matching/matcher.pyclass:MatchResult/success
@classmethod
def success(cls, description=None):
    """Shortcut: build a MatchResult representing a successful match."""
    matched = True
    return cls(matched, description)
colorclass-2.2.0
colorclass-2.2.0//colorclass/color.pyclass:Color/bgwhite
@classmethod
def bgwhite(cls, string, auto=False):
    """Color-code the entire string with a white background.

    :param str string: String to colorize.
    :param bool auto: Enable auto-color (dark/light terminal).
    :return: Class instance for colorized string.
    :rtype: Color
    """
    colorized = cls.colorize('bgwhite', string, auto=auto)
    return colorized
amazon.ion-0.6.0
amazon.ion-0.6.0//amazon/ion/util.pyclass:_EnumMetaClass/__getitem__
def __getitem__(cls, name):
    """Look up an enumeration member in ``cls._enum_members``.

    NOTE(review): the original docstring said "by integer value", but
    the code indexes with the ``name`` parameter — presumably the member
    name (or whatever keys ``_enum_members`` uses); verify against the
    mapping's construction.
    """
    return cls._enum_members[name]
asn1tools
asn1tools//codecs/per.pyfile:/codecs/per.py:function:size_as_number_of_bytes/size_as_number_of_bytes
def size_as_number_of_bytes(size):
    """Returns the minimum number of bytes needed to fit given positive
    integer (always at least 1, so zero still occupies one byte).
    """
    # bit_length() of 0 is 0; round bits up to whole bytes and clamp to 1.
    bits = size.bit_length()
    return max(1, (bits + 7) // 8)
glimix-core-3.1.8
glimix-core-3.1.8//glimix_core/_util/random.pyfile:/glimix_core/_util/random.py:function:multivariate_normal/multivariate_normal
def multivariate_normal(random, mean, cov):
    """
    Draw one random sample from a multivariate normal distribution.

    Parameters
    ----------
    random : np.random.RandomState instance
        Random state.
    mean : array_like
        Mean of the n-dimensional distribution.
    cov : array_like
        Covariance matrix of the distribution. It must be symmetric and
        positive-definite for proper sampling.

    Returns
    -------
    out : ndarray
        The drawn sample.
    """
    from numpy.linalg import cholesky
    # x = L z + mu, with L the Cholesky factor of cov and z ~ N(0, I).
    factor = cholesky(cov)
    standard_draw = random.randn(factor.shape[0])
    return factor @ standard_draw + mean
fake-bpy-module-2.78-20200428
fake-bpy-module-2.78-20200428//bpy/ops/particle.pyfile:/bpy/ops/particle.py:function:mirror/mirror
def mirror():
    """Duplicate and mirror the selected particles along the local X axis

    Stub generated for IDE completion (fake-bpy-module); the real
    ``bpy.ops.particle.mirror`` operator is implemented in Blender's
    C code.
    """
    pass
pdkit
pdkit//utils.pyfile:/utils.py:function:non_zero_row/non_zero_row
def non_zero_row(arr):
    """Return True iff the row is non-empty and contains no zeros.

    0. Empty row returns False.
    >>> arr = array([])
    >>> non_zero_row(arr)
    False

    1. Row with a zero returns False.
    >>> arr = array([1, 4, 3, 0, 5, -1, -2])
    >>> non_zero_row(arr)
    False

    2. Row with no zeros returns True.
    >>> arr = array([-1, -0.1, 0.001, 2])
    >>> non_zero_row(arr)
    True

    :param arr: array
    :type arr: numpy array
    :return empty: If row is completely free of zeros
    :rtype empty: bool
    """
    if len(arr) == 0:
        return False
    return all(item != 0 for item in arr)
regraph
regraph//rules.pyfile:/rules.py:function:invert_rule_hierarchy/invert_rule_hierarchy
def invert_rule_hierarchy(rule_hierarchy):
    """Get inverted rule hierarchy (swapped lhs and rhs).

    Every rule is replaced by its inverted rule and each homomorphism
    triple (lhs_h, p_h, rhs_h) is reversed to (rhs_h, p_h, lhs_h).
    """
    inverted = {'rules': {}, 'rule_homomorphisms': {}}
    for graph, rule in rule_hierarchy['rules'].items():
        inverted['rules'][graph] = rule.get_inverted_rule()
    homomorphisms = rule_hierarchy['rule_homomorphisms']
    for (source, target), (lhs_h, p_h, rhs_h) in homomorphisms.items():
        inverted['rule_homomorphisms'][source, target] = rhs_h, p_h, lhs_h
    return inverted
kivish-0.2.1
kivish-0.2.1//kivish/node/primary_nodes_locator/main.pyfile:/kivish/node/primary_nodes_locator/main.py:function:count_extra_node_height/count_extra_node_height
def count_extra_node_height(code):
    """Multiline nodes take more than a single line of code; return the
    extra height (0 for single-line nodes, newline count + 1 otherwise).
    """
    newline_count = code.count('\n')
    return newline_count + 1 if newline_count else newline_count
thompson
thompson//_version.pyfile:/_version.py:function:render_pep440_old/render_pep440_old
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces['closest-tag']:
        rendered = pieces['closest-tag']
        if pieces['distance'] or pieces['dirty']:
            rendered += '.post%d' % pieces['distance']
    else:
        # No tag at all: always emit the .post segment.
        rendered = '0.post%d' % pieces['distance']
    if pieces['dirty']:
        rendered += '.dev0'
    return rendered
PyBrain2-0.4.0
PyBrain2-0.4.0//pybrain2/datasets/classification.pyclass:ClassificationDataSet/load_matlab
@classmethod
def load_matlab(cls, fname):
    """Create a dataset by reading a Matlab file containing one variable
    called 'data' which is an array of nSamples * nFeatures + 1 and
    contains the class in the first column.

    :param fname: path of the .mat file, loaded via mlabwrap.
    """
    from mlabwrap import mlab
    d = mlab.load(fname)
    # Column 0 holds the class; remaining columns are the features.
    # NOTE(review): the class column is passed as the FIRST constructor
    # argument — verify against ClassificationDataSet's (input, target)
    # parameter order.
    return cls(d.data[:, (0)], d.data[:, 1:])
Cnc25D-0.1.10
Cnc25D-0.1.10//cnc25d/gearring.pyfile:/cnc25d/gearring.py:function:gearring_self_test/gearring_self_test
def gearring_self_test():
    """
    This is the non-regression test of gearring.
    Look at the simulation Tk window to check errors.

    Returns a list of [test_name, command_line_arguments] pairs; each
    entry exercises one parameter combination of the gearring design.
    """
    r_tests = [
        ['simplest test',
         '--gear_tooth_nb 25 --gear_module 10 --holder_diameter 300.0 --cnc_router_bit_radius 2.0'],
        ['no tooth',
         '--gear_tooth_nb 0 --gear_primitive_diameter 100.0 --holder_diameter 120.0 --cnc_router_bit_radius 2.0 --holder_crenel_number 7'],
        ['no holder-hole',
         '--gear_tooth_nb 30 --gear_module 10 --holder_diameter 360.0 --holder_crenel_width 20.0 --holder_crenel_skin_width 20.0 --cnc_router_bit_radius 2.0 --holder_hole_diameter 0.0'],
        ['no crenel',
         '--gear_tooth_nb 29 --gear_module 10 --holder_diameter 340.0 --holder_crenel_width 20.0 --holder_crenel_number 0'],
        ['marked holder-hole',
         '--gear_tooth_nb 33 --gear_module 10 --holder_crenel_number 8 --holder_hole_mark_nb 1 --holder_hole_diameter 14.0 --holder_crenel_position 10.0'],
        ['double hole only',
         '--gear_tooth_nb 37 --gear_module 10 --holder_crenel_number 8 --holder_double_hole_mark_nb 2 --holder_double_hole_diameter 6.0 --holder_double_hole_length 12.0 --holder_hole_diameter 0.0'],
        ['single and double hole',
         '--gear_tooth_nb 37 --gear_module 10 --holder_crenel_number 8 --holder_double_hole_mark_nb 3 --holder_double_hole_diameter 4.0 --holder_double_hole_length 20.0 --holder_hole_diameter 8.0 --holder_double_hole_position 4.0'],
        ['small crenel',
         '--gear_tooth_nb 30 --gear_module 10 --holder_diameter 360.0 --holder_crenel_width 20.0 --holder_crenel_number 1 --holder_hole_diameter 0.0 --holder_crenel_position 0.0 --holder_crenel_height 5.0'],
        ['narrow crenel',
         '--gear_tooth_nb 30 --gear_module 10 --holder_diameter 360.0 --holder_crenel_width 20.0 --holder_crenel_number 4 --holder_position_angle 0.785 --holder_hole_diameter 0.0 --holder_crenel_position 0.0 --holder_crenel_height 5.0'],
        ['crenel-B',
         '--gear_tooth_nb 51 --gear_module 1.0 --holder_diameter 59.0 --holder_crenel_width 2.0 --holder_crenel_number 6 --holder_hole_diameter 4.1 --holder_hole_B_diameter 1.1 --holder_crenel_position 30.5 --holder_crenel_height 0.5 --holder_crenel_position 3.5 --holder_crenel_B_position 1.0 --holder_hole_B_crenel_list 2 5 --cnc_router_bit_radius 0.05 --gear_router_bit_radius 0.05 --holder_crenel_router_bit_radius 0.1 --holder_crenel_skin_width 3.0'],
        ['output dxf',
         '--gear_tooth_nb 30 --gear_module 10 --holder_diameter 360.0 --holder_crenel_width 20.0 --holder_crenel_number 2 --holder_position_angle 0.785 --holder_hole_diameter 0.0 --holder_crenel_position 0.0 --holder_crenel_height 5.0 --output_file_basename test_output/gearring_self_test.dxf'],
        ['last test',
         '--gear_tooth_nb 30 --gear_module 10.0 --holder_diameter 340.0']]
    return r_tests
perfmetrics-3.0.0
perfmetrics-3.0.0//src/perfmetrics/interfaces.pyclass:IStatsdClient/close
def close():
    """
    Release resources (sockets) held by this object.

    Interface method (no body) — implementations perform the actual
    cleanup.

    .. versionadded:: 3.0
    """
thelper-0.4.7
thelper-0.4.7//thelper/utils.pyfile:/thelper/utils.py:function:str2bool/str2bool
def str2bool(s):
    """Converts a string to a boolean.

    If the lower case version of the provided string matches any of
    'true', '1', or 'yes', then the function returns ``True``. Booleans
    pass through unchanged; numbers are truthy when non-zero.
    """
    if isinstance(s, bool):
        return s
    if isinstance(s, (int, float)):
        return s != 0
    if isinstance(s, str):
        return s.lower() in ('true', '1', 'yes')
    raise AssertionError('unrecognized input type')
octavia-5.0.1
octavia-5.0.1//octavia/common/jinja/haproxy/combined_listeners/jinja_cfg.pyclass:JinjaTemplater/_transform_member
@staticmethod def _transform_member(member, feature_compatibility): """Transforms a member into an object that will be processed by the templating system """ return {'id': member.id, 'address': member.ip_address, 'protocol_port': member.protocol_port, 'weight': member.weight, 'enabled': member. enabled, 'subnet_id': member.subnet_id, 'operating_status': member. operating_status, 'monitor_address': member.monitor_address, 'monitor_port': member.monitor_port, 'backup': member.backup}
sympy
sympy//integrals/risch.pyfile:/integrals/risch.py:function:frac_in/frac_in
def frac_in(f, t, **kwargs):
    """
    Returns the tuple (fa, fd), where fa and fd are Polys in t.

    This is a common idiom in the Risch Algorithm functions, so we
    abstract it out here. f should be a basic expression, a Poly, or a
    tuple (fa, fd), where fa and fd are either basic expressions or
    Polys, and f == fa/fd. **kwargs are applied to Poly.
    """
    cancel = kwargs.pop('cancel', False)
    # A (numerator, denominator) pair: recombine, then re-split in t.
    if type(f) is tuple:
        fa, fd = f
        f = fa.as_expr() / fd.as_expr()
    fa, fd = f.as_expr().as_numer_denom()
    fa, fd = fa.as_poly(t, **kwargs), fd.as_poly(t, **kwargs)
    if cancel:
        fa, fd = fa.cancel(fd, include=True)
    # as_poly returns None when the expression is not polynomial in t.
    if fa is None or fd is None:
        raise ValueError('Could not turn %s into a fraction in %s.' % (f, t))
    return fa, fd
tornado-rest-framework-1.0.9
tornado-rest-framework-1.0.9//rest_framework/filters/filters.pyclass:Filter/get_join_fields
@staticmethod
def get_join_fields(qs):
    """Collect the fields of every model joined into the query ``qs``.

    :param qs: query object whose ``_joins`` maps to iterables of join
        descriptors (each with a ``dest`` model).
    :return: dict mapping '<dest_model_name>.<field_name>' to the field
        object, for every join destination model.
    """
    join_model_fields = {}
    for join_models in qs._joins.values():
        for jm in join_models:
            dest_meta = jm.dest._meta
            dest_name = dest_meta.name
            for f in dest_meta.sorted_fields:
                # Qualify each field name with its model name.
                join_model_fields['%s.%s' % (dest_name, f.name)] = f
    return join_model_fields
alchemy_mock
alchemy_mock//utils.pyfile:/utils.py:function:copy_and_update/copy_and_update
def copy_and_update(target, updater):
    """Copy dictionary and update it, all in one operation.

    For example::

        >>> a = {'foo': 'bar'}
        >>> b = copy_and_update(a, {1: 2})
        >>> a is b
        False
        >>> b == {'foo': 'bar', 1: 2}
        True
    """
    # Copy first so the caller's dict is never mutated.
    merged = target.copy()
    merged.update(updater)
    return merged
cis_interface
cis_interface//schema.pyfile:/schema.py:function:clear_schema/clear_schema
def clear_schema():
    """Clear global schema.

    Resets the module-level ``_schema`` cache to None — presumably so
    the next consumer rebuilds it from scratch.
    """
    global _schema
    _schema = None
mcuuidButWorks
mcuuidButWorks//tools.pyfile:/tools.py:function:is_valid_minecraft_username/is_valid_minecraft_username
def is_valid_minecraft_username(username):
    """https://help.mojang.com/customer/portal/articles/928638-minecraft-usernames

    Valid names are 3-16 characters of ASCII letters, digits and
    underscore (case-insensitive).
    """
    allowed_chars = 'abcdefghijklmnopqrstuvwxyz1234567890_'
    lowered = username.lower()
    if not 3 <= len(lowered) <= 16:
        return False
    return all(ch in allowed_chars for ch in lowered)
autonomie_base-4.3.1
autonomie_base-4.3.1//autonomie_base/utils/ascii.pyfile:/autonomie_base/utils/ascii.py:function:force_unicode/force_unicode
def force_unicode(value):
    """
    return an utf-8 unicode entry

    NOTE(review): Python 2-only code — on Python 3 ``str`` has no
    ``.decode``, so a str input would raise AttributeError. Presumably
    ``value`` is a py2 byte string here; confirm before porting.
    """
    if isinstance(value, str):
        value = value.decode('utf-8')
    return value
pycorn-0.19
pycorn-0.19//examplescripts/pycorn-bin.pyfile:/examplescripts/pycorn-bin.py:function:expander/expander
def expander(min_val, max_val, perc):
    """Expand the [min_val, max_val] range outward on both sides by
    ``perc`` of its width.
    """
    margin = abs(max_val - min_val) * perc
    return min_val - margin, max_val + margin
wmcore-1.1.19.2
wmcore-1.1.19.2//src/python/WMCore/WMSpec/WMWorkloadTools.pyfile:/src/python/WMCore/WMSpec/WMWorkloadTools.py:function:parsePileupConfig/parsePileupConfig
def parsePileupConfig(mcPileup, dataPileup):
    """
    _parsePileupConfig_

    If the pileup config is defined as MCPileup and DataPileup then make
    sure we get the usual dictionary as
    PileupConfig : {'mc': ['/mc_pd/procds/tier'],
                    'data': ['/data_pd/procds/tier']}
    """
    sources = {'mc': mcPileup, 'data': dataPileup}
    # Keep only the sources that were actually supplied, each wrapped in
    # a single-element list.
    return {kind: [dataset] for kind, dataset in sources.items()
            if dataset is not None}
stheno-0.3.4
stheno-0.3.4//stheno/kernel.pyfile:/stheno/kernel.py:function:expand/expand
def expand(xs):
    """Expand a sequence to the same element repeated twice if there is
    only one element.

    Args:
        xs (sequence): Sequence to expand.

    Returns:
        object: `xs * 2` or `xs`.
    """
    if len(xs) == 1:
        return xs * 2
    return xs
biosteam-2.17.0
biosteam-2.17.0//biosteam/_heat_utility.pyclass:HeatUtility/get_suitable_heating_agent
@classmethod
def get_suitable_heating_agent(cls, T_pinch):
    """
    Return a heating agent that works at the pinch temperature.

    Parameters
    ----------
    T_pinch : float
        Pinch temperature [K].

    Raises
    ------
    RuntimeError
        If no registered agent is hotter than the pinch temperature.
    """
    suitable = next((agent for agent in cls.heating_agents
                     if T_pinch < agent.T), None)
    if suitable is None:
        raise RuntimeError(f'no heating agent that can heat over {T_pinch} K')
    return suitable
lookout
lookout//style/typos/research/dev_utils.pyfile:/style/typos/research/dev_utils.py:function:correction_score/correction_score
def correction_score(typos, corrections):
    """
    Equal to score_at_k(typos, corrections, 1).

    Counts, per row: tp = corrupted & corrected right, fn = corrupted &
    corrected wrong, tn = clean & unchanged, fp = clean & "corrected".
    """
    assert typos.shape[0] == corrections.shape[0]
    scores = {'tp': 0, 'fp': 0, 'tn': 0, 'fn': 0}
    for idx in typos.index:
        hit = corrections[idx] == typos.loc[idx, 'identifier']
        if typos.loc[idx, 'corrupted']:
            scores['tp' if hit else 'fn'] += 1
        else:
            scores['tn' if hit else 'fp'] += 1
    return scores
django-arrange-0.0.3.3
django-arrange-0.0.3.3//arrange/utils.pyfile:/arrange/utils.py:function:resolve_labels/resolve_labels
def resolve_labels(cls):
    """
    Returns app, model, app_model, module_app and module_app_model
    labels for provided class.

    XXX: There has to be a better way to do this.
    """
    module_app = cls.__module__.replace('.models', '')
    app = '.'.join(module_app.split('.')[1:])
    model = cls._meta.object_name
    return {
        'module_app': module_app,
        'app': app,
        'model': model,
        'app_model': '%s.%s' % (app, model),
        'module_app_model': '%s.%s' % (module_app, model),
    }
pandoc-1.0.2
pandoc-1.0.2//.lib/setuptools/command/alias.pyfile:/.lib/setuptools/command/alias.py:function:shquote/shquote
def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    # repr() the argument when it contains shell-special characters or
    # any whitespace (split() != [arg] detects embedded/edge whitespace).
    has_special = any(c in arg for c in ('"', "'", '\\', '#'))
    if has_special or arg.split() != [arg]:
        return repr(arg)
    return arg
cis_interface-0.7.10
cis_interface-0.7.10//cis_interface/communication/FileComm.pyclass:FileComm/new_comm_kwargs
@classmethod
def new_comm_kwargs(cls, *args, **kwargs):
    """Initialize communication with a new queue, defaulting the address
    to 'file.txt' when none was given.
    """
    if 'address' not in kwargs:
        kwargs['address'] = 'file.txt'
    return args, kwargs
itunes_last_export
itunes_last_export//server_management.pyfile:/server_management.py:function:get_pageinfo/get_pageinfo
def get_pageinfo(response, tracktype='recenttracks'):
    """Check how many pages of tracks the user has.

    :param response: json page given by the server
    :param tracktype: Type of information to download from the server,
        can be either 'recenttracks' or 'lovedtracks'
    :return: Number of total pages to import
    """
    attributes = response[tracktype]['@attr']
    return int(attributes['totalPages'])
brabbel-0.4.4
brabbel-0.4.4//brabbel/parser.pyfile:/brabbel/parser.py:function:_make_list/_make_list
def _make_list(element=''): """Returns a list element :element: Parsed element as a string representation of a list :returns: List element """ listing = [] for e in element: listing.append(e) return [listing]
Diofant-0.11.0
Diofant-0.11.0//diofant/matrices/dense.pyfile:/diofant/matrices/dense.py:function:zeros/zeros
def zeros(r, c=None, cls=None):
    """Returns a matrix of zeros with ``r`` rows and ``c`` columns;
    if ``c`` is omitted a square matrix will be returned.

    :param r: number of rows.
    :param c: number of columns (defaults to ``r``, per the class method).
    :param cls: matrix class to build; defaults to the dense Matrix.

    See Also
    ========
    diofant.matrices.dense.ones
    diofant.matrices.dense.eye
    diofant.matrices.dense.diag
    """
    if cls is None:
        # Imported lazily to avoid a circular import at module load time.
        from . import Matrix as cls
    return cls.zeros(r, c)
optenum
optenum//options.pyclass:OptionsMeta/get_dict
def get_dict(cls, key_field, *fields):
    """
    Retrieve as dict of {key_field: *fields} mapping.

    If `fields` is single value, returns `{key:value}` mapping.
    If `fields` is **tuple**, returns `{key: (tuple of fields value)}`
    mapping.

    :param key_field: name of Option field for dict key. Only `code`
        and `name` are available.
    :param fields: Names of Option field for values. Can be tuple or
        single value. Impact value part of return dict.
    :return: dict of {key_field: *fields} mapping.
    :raises NameError: for an invalid key or value field name.
    :raises ValueError: when no fields are given or a field repeats.
    """
    if key_field not in ['code', 'name']:
        raise NameError(
            "'%s' is not correct key field. Only 'code' and 'name' can be key field."
             % str(key_field))
    if len(fields) == 0:
        raise ValueError('No fields argument found.')
    # Validate the value fields and reject duplicates before building.
    found_fields = set()
    for f in fields:
        if f not in ['code', 'name', 'text']:
            raise NameError(
                "'%s' is incorrect Option field. Only 'code', 'name' and 'text' are available."
                 % str(f))
        if f in found_fields:
            raise ValueError("Duplicated fields '%s' found." % str(f))
        found_fields.add(f)
    items = {}
    # Name-mangled private accessor: resolves to
    # cls._OptionsMeta__get_name_options_mapping inside this class.
    for o in cls.__get_name_options_mapping().values():
        key = getattr(o, key_field)
        value = []
        for f in fields:
            value.append(getattr(o, f))
        # Single field -> scalar value; multiple fields -> tuple.
        items[key] = value[0] if len(value) == 1 else tuple(value)
    return items
plone.app.layout-3.4.2
plone.app.layout-3.4.2//plone/app/layout/globals/interfaces.pyclass:ITools/workflow
def workflow():
    """The portal_workflow tool.

    Interface accessor (no body) — implementations return the
    portal_workflow tool.
    """
openstack-interpreter-0.4.2
openstack-interpreter-0.4.2//openstack_interpreter/common/output.pyfile:/openstack_interpreter/common/output.py:function:newline_list_formatter/newline_list_formatter
def newline_list_formatter(text_list, wrap=None):
    """Format a list with a newline between elements.

    ``wrap`` is accepted for interface compatibility but unused.
    Falsy input (None, empty list) yields an empty string.
    """
    items = text_list or []
    return '\n'.join(items)
ll
ll//misc.pyfile:/misc.py:function:format_class/format_class
def format_class(obj):
    """
    Format the name of the class of ``obj``.

    Example::

        >>> misc.format_class(42)
        'int'
        >>> misc.format_class(open('README.rst', 'rb'))
        '_io.BufferedReader'
    """
    klass = obj.__class__
    # Builtins are shown bare; everything else is module-qualified.
    if klass.__module__ in ('builtins', 'exceptions'):
        return klass.__qualname__
    return f'{klass.__module__}.{klass.__qualname__}'
eo-learn-core-0.7.3
eo-learn-core-0.7.3//eolearn/core/utilities.pyfile:/eolearn/core/utilities.py:function:bgr_to_rgb/bgr_to_rgb
def bgr_to_rgb(bgr):
    """Converts Blue, Green, Red to Red, Green, Blue by swapping the
    first and third entries of the last axis."""
    channel_order = [2, 1, 0]
    return bgr[..., channel_order]
ddtrace
ddtrace//contrib/flask/helpers.pyfile:/contrib/flask/helpers.py:function:get_current_span/get_current_span
def get_current_span(pin, root=False):
    """Helper to get the current span from the provided pin's current
    call context.

    Returns None when the pin is missing/disabled or there is no call
    context; otherwise the current span (or the root span if ``root``).
    """
    if not pin or not pin.enabled():
        return None
    context = pin.tracer.get_call_context()
    if not context:
        return None
    return context.get_current_root_span() if root else context.get_current_span()
nexusformat
nexusformat//nexus/tree.pyfile:/nexus/tree.py:function:is_iterable/is_iterable
def is_iterable(obj):
    """Return True if the object is a list or a tuple.

    Parameters
    ----------
    obj : list or tuple
        Object to be tested.

    Returns
    -------
    bool
        True if the object is a list or a tuple.
    """
    return isinstance(obj, (list, tuple))
juju-2.7.1
juju-2.7.1//juju/bundle.pyclass:AddApplicationChange/method
@staticmethod def method(): """method returns an associated ID for the Juju API call. """ return 'deploy'
autocti
autocti//pipeline/tagging.pyfile:/pipeline/tagging.py:function:parallel_front_edge_mask_rows_tag_from_parallel_front_edge_mask_rows/parallel_front_edge_mask_rows_tag_from_parallel_front_edge_mask_rows
def parallel_front_edge_mask_rows_tag_from_parallel_front_edge_mask_rows(
    parallel_front_edge_mask_rows):
    """Generate a parallel_front_edge_mask_rows tag, to customize phase
    names based on the number of rows in the charge injection region at
    the front edge of the parallel clocking direction that are masked
    during the fit.

    This changes the phase name 'phase_name' as follows:

    parallel_front_edge_mask_rows = None -> phase_name
    parallel_front_edge_mask_rows = (0, 10) -> phase_name_parallel_front_edge_mask_rows_(0,10)
    parallel_front_edge_mask_rows = (20, 60) -> phase_name_parallel_front_edge_mask_rows_(20,60)
    """
    # Guard clause with identity comparison (`is None`, not `== None`).
    if parallel_front_edge_mask_rows is None:
        return ''
    x0 = str(parallel_front_edge_mask_rows[0])
    x1 = str(parallel_front_edge_mask_rows[1])
    return '_par_front_mask_rows_(' + x0 + ',' + x1 + ')'
chemics-20.4
chemics-20.4//chemics/gas_viscosity.pyfile:/chemics/gas_viscosity.py:function:_mu/_mu
def _mu(df, formula, temp, cas, full): """ Helper for the `mu_gas` function to determine gas viscosity. Parameters ---------- df : dataframe Dataframe from inorganic or organic data formula : str Molecular formula for the gas temp : float Gas temperature cas : str CAS number full : bool Flag to print more information Returns ------- mu_gas : float Viscosity of gas [micropoise] mu_gas, cas, tmin, tmax, a, b, c, d : tuple Additional values returned only if full=True. """ if cas: df = df[df['CAS No.'] == str(cas)] cas = df.loc[formula]['CAS No.'] tmin = df.loc[formula]['temperature, Tmin (K)'] tmax = df.loc[formula]['temperature, Tmax (K)'] a = df.loc[formula]['A'] b = df.loc[formula]['B'] c = df.loc[formula]['C'] d = df.loc[formula]['D'] if temp < tmin or temp > tmax: raise ValueError( f'Temperature out of range. Applicable values are {tmin} - {tmax} K for {formula} gas.' ) mu = a + b * temp + c * temp ** 2 + d * temp ** 3 if full: return mu, cas, tmin, tmax, a, b, c, d else: return mu
operun.crm-2.3.0
operun.crm-2.3.0//src/operun/crm/setuphandlers.pyfile:/src/operun/crm/setuphandlers.py:function:uninstall/uninstall
def uninstall(context):
    """Uninstall script.

    Currently performs no actions; kept as a hook for future cleanup.
    """
    return None
tootlogger
tootlogger//_version.pyfile:/_version.py:function:render_pep440_pre/render_pep440_pre
def render_pep440_pre(pieces):
    """Render version pieces as TAG[.post.devDISTANCE] -- no -dirty marker.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces['closest-tag']
    distance = pieces['distance']
    if not tag:
        return '0.post.dev%d' % distance
    if distance:
        return tag + '.post.dev%d' % distance
    return tag
survive
survive//utils/plotting.pyfile:/utils/plotting.py:function:add_legend/add_legend
def add_legend(ax, legend_kwargs=None):
    """Add a legend to a plot.

    Parameters
    ----------
    ax : matplotlib.axes.Axes
        The axes on which to add the legend.
    legend_kwargs : dict, optional
        Keyword parameters forwarded to
        :meth:`matplotlib.axes.Axes.legend`; they override the defaults
        (loc='best', frameon=True, shadow=True).

    Returns
    -------
    legend : matplotlib.legend.Legend
        The :class:`matplotlib.legend.Legend` instance added to the plot.
    """
    # Start from the defaults and let caller-supplied kwargs win.
    params = dict(loc='best', frameon=True, shadow=True)
    params.update(legend_kwargs or {})
    return ax.legend(**params)
mayloop-1.0.0
mayloop-1.0.0//mayloop/imported/twisted/internet_interfaces.pyclass:IResolver/lookupAllRecords
def lookupAllRecords(name, timeout=None):
    """
    Perform an ALL_RECORD lookup.

    Interface method declaration only -- there is no implementation here.

    @type name: C{str}
    @param name: DNS name to resolve.

    @type timeout: Sequence of C{int}
    @param timeout: Number of seconds after which to reissue the query.
        When the last timeout expires, the query is considered failed.

    @rtype: L{Deferred}
    @return: A L{Deferred} which fires with a three-tuple of lists of
        L{twisted.names.dns.RRHeader} instances.  The first element of the
        tuple gives answers.  The second element of the tuple gives
        authorities.  The third element of the tuple gives additional
        information.  The L{Deferred} may instead fail with one of the
        exceptions defined in L{twisted.names.error} or with
        C{NotImplementedError}.
    """
revision-0.1.1
revision-0.1.1//revision/util.pyfile:/revision/util.py:function:touch_file/touch_file
def touch_file(file_path):
    """Create a new, empty file (or leave an existing file untouched).

    Opens in append mode, so existing content is never truncated.

    :param file_path: path of the file to create.
    :type file_path: str
    """
    # `with` guarantees the handle is closed even if an error interrupts
    # execution, unlike the bare open(...).close() pattern.
    with open(file_path, 'a'):
        pass
prestools
prestools//bioinf.pyfile:/bioinf.py:function:hamming_distance/hamming_distance
def hamming_distance(seq_1: str, seq_2: str, ignore_case: bool=False) ->int:
    """Calculate the Hamming distance between two sequences.

    Positions where either sequence contains a gap character ('-') are
    not counted as differences.

    Args:
        seq_1: first sequence to compare
        seq_2: second sequence to compare
        ignore_case: ignore case when comparing sequences (default: False)

    Returns:
        distance: Hamming distance

    Raises:
        ValueError: if the two sequences have different lengths.
    """
    if len(seq_1) != len(seq_2):
        raise ValueError(
            'Cannot calculate Hamming distance of sequences with different lengths.'
            )
    if ignore_case:
        seq_1 = seq_1.casefold()
        seq_2 = seq_2.casefold()
    # zip pairs characters directly; a generator avoids building a
    # throwaway list just to feed sum().
    return sum(1 for a, b in zip(seq_1, seq_2)
               if a != b and a != '-' and b != '-')
sam
sam//common.pyfile:/common.py:function:IPStringtoInt/IPStringtoInt
def IPStringtoInt(ip):
    """Convert a dotted-decimal IP string into a 32-bit unsigned integer.

    Tolerates an optional "/mask" suffix (which is stripped) and fewer than
    four octets; missing or unparsable octets contribute zero bits.

    Args:
        ip: dotted decimal ip address, like 12.34.56.78

    Returns:
        The IP address as a simple 32-bit unsigned integer
    """
    address = ip.split('/')[0]  # drop any CIDR mask suffix
    parts = address.split('.')
    ip_int = 0
    for i in range(4):
        ip_int <<= 8
        if len(parts) > i:
            try:
                ip_int += int(parts[i])
            except ValueError:
                # Bare `except:` replaced with the specific exception int()
                # raises; non-numeric octets are still treated as 0.
                pass
    return ip_int
bpy
bpy//ops/scene.pyfile:/ops/scene.py:function:render_view_add/render_view_add
def render_view_add():
    """Add a render view.

    No-op placeholder body; presumably the real operator is supplied by the
    host application at runtime.
    """
    pass
colorpalette-0.3.0
colorpalette-0.3.0//colorpalette/utils.pyfile:/colorpalette/utils.py:function:rgb_shade/rgb_shade
def rgb_shade(rbg, percent):
    """Return the colour value shaded (darkened) by the given percent.

    NOTE(review): the parameter is spelled ``rbg`` (likely a typo of
    ``rgb``); the name is kept so keyword callers are not broken.
    """
    shaded = rbg * (100 - percent)
    return shaded / 100
obs
obs//libs/utils.pyfile:/libs/utils.py:function:get_bucket_key/get_bucket_key
def get_bucket_key(uri):
    """Return (bucket, key) parsed from an S3 URI.

    The 's3://' scheme prefix is optional; the key is '' when the URI
    contains no path component after the bucket.
    """
    prefix = 's3://'
    if uri.startswith(prefix):
        uri = uri[len(prefix):]
    # partition splits on the first '/' only, leaving the rest as the key.
    bucket, _, key = uri.partition('/')
    return bucket, key
digital_rf-2.6.3
digital_rf-2.6.3//digital_rf/ringbuffer.pyclass:TimeExpirer/_queue_duration
@staticmethod def _queue_duration(queue): """Get time span in milliseconds of files in a queue.""" try: oldkey, _ = queue[0] newkey, _ = queue[-1] except IndexError: return 0 return newkey - oldkey
ibmsecurity-2020.5.6.0
ibmsecurity-2020.5.6.0//ibmsecurity/isam/aac/server_connections/ws.pyfile:/ibmsecurity/isam/aac/server_connections/ws.py:function:_create_json/_create_json
def _create_json(name, description, locked, connection): """ Create a JSON to be used for the REST API call """ json = {'connection': connection, 'type': 'ws', 'name': name, 'description': description, 'locked': locked} return json
ScopeFoundry-1.1.1
ScopeFoundry-1.1.1//h5_io.pyfile:/h5_io.py:function:h5_create_emd_dataset/h5_create_emd_dataset
def h5_create_emd_dataset(name, h5parent, shape=None, data=None, maxshape=
    None, dim_arrays=None, dim_names=None, dim_units=None, **kwargs):
    """
    create an EMD dataset v0.2 inside h5parent
    returns an h5 group emd_grp

    to access N-dim dataset:        emd_grp['data']
    to access a specific dimension array: emd_grp['dim1']

    HDF5 Hierarchy:
    ---------------
    * h5parent
        * name [emd_grp] (<--returned)
            - emd_group_type = 1
            D data [shape = shape]
            D dim1 [shape = shape[0]]
                - name
                - units
            ...
            D dimN [shape = shape[-1]]

    Parameters
    ----------
    h5parent : parent HDF5 group
    shape : Dataset shape of N dimensions. Required if "data" isn't provided.
    data : Provide data to initialize the dataset. If used, you can omit
        shape and dtype arguments.

    Keyword Args:
    dtype : Numpy dtype or string. If omitted, dtype('f') will be used.
        Required if "data" isn't provided; otherwise, overrides data array's
        dtype.
    dim_arrays : optional, a list of N dimension arrays
    dim_names : optional, a list of N strings naming the dataset dimensions
    dim_units : optional, a list of N strings specifying units of dataset
        dimensions

    Other keyword arguments follow from h5py.File.create_dataset

    Returns
    -------
    emd_grp : h5 group containing dataset and dimension arrays, see
        hierarchy above
    """
    # Stamp the EMD format version (v0.2) on the file's root group.
    h5parent.file['/'].attrs['version_major'] = 0
    h5parent.file['/'].attrs['version_minor'] = 2
    emd_grp = h5parent.create_group(name)
    emd_grp.attrs['emd_group_type'] = 1
    if data is not None:
        # Provided data determines the dataset shape, overriding `shape`.
        shape = data.shape
    # NOTE(review): data_dset is created but otherwise unused; the dataset
    # remains reachable as emd_grp['data'].
    data_dset = emd_grp.create_dataset('data', shape=shape, maxshape=
        maxshape, data=data, **kwargs)
    # Every optional per-dimension argument must match the dataset rank.
    if dim_arrays is not None:
        assert len(dim_arrays) == len(shape)
    if dim_names is not None:
        assert len(dim_names) == len(shape)
    if dim_units is not None:
        assert len(dim_units) == len(shape)
    if maxshape is not None:
        assert len(maxshape) == len(shape)
    # Create one 1-D dimension dataset ('dim1' .. 'dimN') per data axis.
    for ii in range(len(shape)):
        if dim_arrays is not None:
            dim_array = dim_arrays[ii]
            dim_dtype = dim_array.dtype
        else:
            dim_array = None
            dim_dtype = float
        if dim_names is not None:
            dim_name = dim_names[ii]
        else:
            dim_name = 'dim' + str(ii + 1)
        if dim_units is not None:
            dim_unit = dim_units[ii]
        else:
            dim_unit = None
        if maxshape is not None:
            # Trailing comma builds a 1-tuple to match the 1-D dim dataset.
            dim_maxshape = maxshape[ii],
        else:
            dim_maxshape = None
        dim_dset = emd_grp.create_dataset('dim' + str(ii + 1), shape=(shape
            [ii],), dtype=dim_dtype, data=dim_array, maxshape=dim_maxshape)
        dim_dset.attrs['name'] = dim_name
        if dim_unit is not None:
            dim_dset.attrs['unit'] = dim_unit
    return emd_grp
toolstack-0.1.0
toolstack-0.1.0//toolstack/text/text_preprocessing.pyclass:TextPreprocessing/remove_digits
@staticmethod def remove_digits(df, column): """ Clean text, remove digits Parameters ---------- df : DataFrame The df to perform case operation on column : string, int The column on which the operation has to be performed Returns ------- List """ return df[column].apply(lambda x: ' '.join([x for x in x.split() if not x.isdigit()])).values.tolist()
bryte
bryte//universal.pyfile:/universal.py:function:getfileline/getfileline
def getfileline(file, linenumber):
    """Return the line at index *linenumber* from *file*.

    The index is 0-based and may be negative (counted from the end), as
    with any list index; the trailing newline is preserved.
    """
    with open(file) as handle:
        return handle.readlines()[linenumber]
borgbackup-1.1.11
borgbackup-1.1.11//src/borg/platform/base.pyfile:/src/borg/platform/base.py:function:process_alive/process_alive
def process_alive(host, pid, thread):
    """Check if the (host, pid, thread_id) combination corresponds to a
    potentially alive process.

    Base declaration only: always raises, so platform-specific code must
    supply the real implementation.
    """
    raise NotImplementedError
plone.app.layout-3.4.2
plone.app.layout-3.4.2//plone/app/layout/globals/interfaces.pyclass:IContextState/is_structural_folder
def is_structural_folder():
    """True if this is a structural folder.

    Interface method declaration only -- there is no implementation here.
    """
kur-0.7.0
kur-0.7.0//kur/loss/categorical_crossentropy.pyclass:CategoricalCrossentropy/get_name
@classmethod def get_name(cls): """ Returns the name of the loss function. """ return 'categorical_crossentropy'
wsapi4plone.core-0.2
wsapi4plone.core-0.2//wsapi4plone/core/browser/interfaces.pyclass:IApplicationAPI/put_object
def put_object(params, path=''):
    """
    Put or set the given params on an object of path or context.

    The params keys should map to the values associated with it from the
    get_object or get_schema methods.

    Interface method declaration only -- there is no implementation here.
    """
mlfinlab
mlfinlab//labeling/tail_sets.pyclass:TailSetLabels/_negative_tail_set
@staticmethod def _negative_tail_set(row): """ Takes as input a row from the vol_adj_price DataFrame and then returns a list of names of the securites in the negative tail set, for this specific row date. This method is used in an apply() setting. :param row: (Series) of volatility adjusted prices. :return: (list) of securities in the negative tail set. """ return list(row[row == -1].index)
fixedpoint
fixedpoint//properties.pyclass:PropertyResolver/__new__
def __new__(cls) ->'PropertyResolver':
    """Initialize and force only a single instance.

    Singleton pattern: the first call caches the instance on the class
    under the name-mangled attribute ``_PropertyResolver__instance``;
    subsequent calls find it via the ``try`` lookup and return it.
    """
    try:
        # Raises AttributeError only on the very first call.
        cls.__instance
    except AttributeError:
        cls.__instance = super().__new__(cls)
        # Per-singleton flag; presumably toggled elsewhere to ignore
        # property mismatches -- TODO confirm against the class's users.
        cls.__instance.__ignore_mismatch = False
    return cls.__instance
spreg
spreg//diagnostics.pyfile:/diagnostics.py:function:constant_check/constant_check
def constant_check(array):
    """
    Checks to see numpy array includes a constant.

    Parameters
    ----------
    array           : array
                      an array of variables to be inspected

    Returns
    -------
    constant        : boolean
                      true signifies the presence of a constant

    Example
    -------

    >>> import numpy as np
    >>> import libpysal
    >>> from libpysal import examples
    >>> import spreg
    >>> from spreg import OLS
    >>> db = libpysal.io.open(examples.get_path("columbus.dbf"),"r")
    >>> y = np.array(db.by_col("CRIME"))
    >>> y = np.reshape(y, (49,1))
    >>> X = []
    >>> X.append(db.by_col("INC"))
    >>> X.append(db.by_col("HOVAL"))
    >>> X = np.array(X).T
    >>> reg = OLS(y,X)
    >>> spreg.constant_check(reg.x)
    True
    """
    n, k = array.shape
    # A column is constant iff its min equals its max; any() short-circuits
    # on the first constant column, like the original loop's `break`, and
    # returns a plain Python bool.
    return any(array[:, j].min() == array[:, j].max() for j in range(k))
staplelib
staplelib//commands.pyfile:/commands.py:function:int_to_page_alpha/int_to_page_alpha
def int_to_page_alpha(pageno, base):
    """Return the alphabetic page number for PAGENO starting at BASE (a or A).

    Adobe defines them as A to Z, then AA to ZZ, and so on: the letter
    cycles every 26 pages while its repeat count grows with each cycle.
    """
    repeats, letter_index = divmod(pageno - 1, 26)
    letter = chr(ord(base) + letter_index)
    return letter * (repeats + 1)
brandelion
brandelion//cli/analyze.pyfile:/cli/analyze.py:function:_proportion/_proportion
def _proportion(a, b): """ Return the len(a & b) / len(a) """ return 1.0 * len(a & b) / len(a)
ulmo-0.8.5
ulmo-0.8.5//ulmo/util/pytables.pyfile:/ulmo/util/pytables.py:function:_update_row_with_dict/_update_row_with_dict
def _update_row_with_dict(row, dict): """sets the values of row to be the values found in dict""" for k, v in dict.items(): row.__setitem__(k, v)
lazyscraper-0.1.2
lazyscraper-0.1.2//lazyscraper/patterns.pyfile:/lazyscraper/patterns.py:function:pattern_extract_forms/pattern_extract_forms
def pattern_extract_forms(tree, nodeclass, nodeid, fields):
    """Extract web forms (with their inputs, textareas, buttons and selects)
    from a parsed page.

    :param tree: parsed element tree of the page (xpath/iterdescendants API).
    :param nodeclass: unused in this pattern; part of the shared signature.
    :param nodeid: unused in this pattern; part of the shared signature.
    :param fields: unused in this pattern; part of the shared signature.
    :return: dict with 'total' (form count) and 'list' (one dict per form).

    NOTE(review): ``attrib.has_key(...)`` works on lxml attribute maps but
    would fail on plain Python 3 dicts -- confirm the tree library in use.
    """
    res = []
    # Per-tag whitelists of attributes worth extracting.
    formattrlist = ['name', 'id', 'action', 'class', 'method']
    inputattrlist = ['name', 'id', 'type', 'class', 'value', 'src', 'size']
    textarealist = ['name', 'id', 'size', 'class']
    buttonlist = ['name', 'id', 'value', 'class']
    selectlist = ['name', 'id', 'multiple', 'size', 'class']
    optionlist = ['value', 'selected', 'class']
    tagnames = [('input', inputattrlist), ('textarea', textarealist), (
        'button', buttonlist), ('select', selectlist)]
    allforms = tree.xpath('//form')
    for form in allforms:
        fkey = {}
        # Copy the whitelisted attributes of the <form> element itself.
        for k in formattrlist:
            if form.attrib.has_key(k):
                fkey[k] = form.attrib[k]
        # Walk all descendants, collecting every known control element.
        for tag in form.iterdescendants():
            if not hasattr(tag, 'tag'):
                continue
            for tagname, tlist in tagnames:
                if tag.tag == tagname:
                    # Controls are grouped per tag name under the form dict.
                    if not tagname in fkey.keys():
                        fkey[tagname] = []
                    tval = {'text': tag.text}
                    for k in tlist:
                        if tag.attrib.has_key(k):
                            tval[k] = tag.attrib[k]
                    if tag.tag == 'select':
                        # A <select> additionally carries its <option> list.
                        tval['options'] = []
                        options = tag.xpath('option')
                        for o in options:
                            optionval = {'text': o.text}
                            for k in optionlist:
                                if o.attrib.has_key(k):
                                    optionval[k] = o.attrib[k]
                            tval['options'].append(optionval)
                    fkey[tagname].append(tval)
        res.append(fkey)
    return {'total': len(res), 'list': res}
PyCO2SYS-1.3.0
PyCO2SYS-1.3.0//PyCO2SYS/equilibria/p1atm.pyfile:/PyCO2SYS/equilibria/p1atm.py:function:kH2CO3_SWS_MPL02/kH2CO3_SWS_MPL02
def kH2CO3_SWS_MPL02(TempK, Sal):
    """Carbonic acid dissociation constants (K1, K2) following MPL02.

    Takes temperature in kelvin and salinity; evaluates the quadratic
    pK fits in Celsius and returns K1, K2 = 10**-pK1, 10**-pK2.
    """
    TempC = TempK - 273.15
    pK1 = 6.359 - 0.00664 * Sal - 0.01322 * TempC + 4.989e-05 * TempC ** 2
    pK2 = 9.867 - 0.01314 * Sal - 0.01904 * TempC + 2.448e-05 * TempC ** 2
    return 10.0 ** -pK1, 10.0 ** -pK2
mrjob-0.7.2
mrjob-0.7.2//mrjob/spark/harness.pyfile:/mrjob/spark/harness.py:function:_discard_key_and_flatten_values/_discard_key_and_flatten_values
def _discard_key_and_flatten_values(rdd, sort_values=False):
    """Drop keys from (key, [line1, line2, ...]) pairs and flatten the lists.

    Helper function for :py:func:`_run_combiner` and
    :py:func:`_shuffle_and_sort`. Lines belonging to the same list are kept
    in the same partition (``preservesPartitioning=True``). When
    *sort_values* is true, each list of lines is sorted before flattening.
    """
    def extract_lines(key_and_lines):
        lines = key_and_lines[1]
        return sorted(lines) if sort_values else lines
    return rdd.flatMap(extract_lines, preservesPartitioning=True)
hackedit-1.0a2
hackedit-1.0a2//hackedit/vendor/future/backports/email/encoders.pyfile:/hackedit/vendor/future/backports/email/encoders.py:function:encode_7or8bit/encode_7or8bit
def encode_7or8bit(msg):
    """Set the Content-Transfer-Encoding header to 7bit or 8bit."""
    payload = msg.get_payload()
    if payload is None:
        msg['Content-Transfer-Encoding'] = '7bit'
        return
    # Probe whether the payload is pure ASCII.
    is_ascii = True
    try:
        if isinstance(payload, str):
            payload.encode('ascii')
        else:
            payload.decode('ascii')
    except UnicodeError:
        is_ascii = False
    if is_ascii:
        msg['Content-Transfer-Encoding'] = '7bit'
        if not isinstance(payload, str):
            # Re-set bytes payloads as str, preserving raw bytes via
            # surrogateescape.
            msg.set_payload(payload.decode('ascii', 'surrogateescape'))
    else:
        # Non-ASCII is normally 8bit, except iso-2022-* charsets which
        # encode everything into 7 bits.
        charset = msg.get_charset()
        output_cset = charset and charset.output_charset
        if output_cset and output_cset.lower().startswith('iso-2022-'):
            msg['Content-Transfer-Encoding'] = '7bit'
        else:
            msg['Content-Transfer-Encoding'] = '8bit'
ironic-15.0.0
ironic-15.0.0//ironic/drivers/modules/irmc/boot.pyfile:/ironic/drivers/modules/irmc/boot.py:function:_get_iso_name/_get_iso_name
def _get_iso_name(node, label): """Returns the ISO file name for a given node. :param node: the node for which ISO file name is to be provided. :param label: a string used as a base name for the ISO file. """ return '%s-%s.iso' % (label, node.uuid)
quorum-0.5.36
quorum-0.5.36//src/quorum/model.pyclass:Model/wrap
@classmethod
def wrap(cls, models, build=True, handler=None, **kwargs):
    """ "Wraps" the provided sequence (or single set) of model based data
    into a sequence of models (or a single model) so that proper business
    logic may be used for operations on top of that data.

    If the extra ``handler`` argument is passed it is called for each model
    that is wrapped, adding an extra "layer" for transformation. The
    additional named arguments allow extra values to be set at creation of
    the wrapped object.

    This operation is specially useful for API based environments where
    client side business logic is meant to be added to the static data.

    :type models: List
    :param models: Sequence (or single) set of models to be wrapped around
        instances of the current class.
    :type build: bool
    :param build: If the "custom" build operation should be performed after
        the wrap, so that new custom attributes may be injected into the
        resulting instance.
    :type handler: Function
    :param handler: Handler called for each model after the build process,
        allowing an extra transform operation at runtime.
    :rtype: List
    :return: The sequence of models (or single model) representing the
        provided set of dictionary models sent as arguments.
    """
    single = not isinstance(models, (list, tuple))
    sequence = [models] if single else models
    wrapped = []
    for item in sequence:
        # Only plain dictionaries carry wrappable model data.
        if not isinstance(item, dict):
            continue
        instance = cls(model=item, **kwargs)
        if handler:
            handler(instance.model)
        if build:
            cls.build(instance.model, map=False)
        wrapped.append(instance)
    if single:
        return wrapped[0] if wrapped else None
    return wrapped
xmltool-0.5.2
xmltool-0.5.2//xmltool/elements.pyclass:MultipleMixin/get_child_class
@classmethod
def get_child_class(cls, tagname):
    """Return the choice class under which *tagname* can be added.

    For example, if it's an element of a list, the list class is returned;
    ``None`` when no choice class accepts the tag name.
    """
    for candidate in cls._choice_classes:
        # Membership test over the creatable tag names replaces the
        # original inner equality loop.
        if tagname in candidate._get_creatable_class_by_tagnames():
            return candidate
proteofav-0.2.3
proteofav-0.2.3//proteofav/dssp.pyfile:/proteofav/dssp.py:function:_add_dssp_ss_reduced/_add_dssp_ss_reduced
def _add_dssp_ss_reduced(data): """ Utility that adds a new column to the table. Adds a reduced-stated Secondary Structure (SS). :param data: pandas DataFrame object :return: returns a modified pandas DataFrame """ table = data alphas = ['H'] betas = ['E'] coils = ['G', 'I', 'B', 'C', 'T', 'S', '', ' '] sss = [] for ix in table.index: ss = table.loc[ix, 'SS'] if ss in alphas: ss = 'H' elif ss in betas: ss = 'E' elif ss in coils: ss = 'C' else: ss = '-' sss.append(ss) table['SS_CLASS'] = sss return table
spawn-0.3.0
spawn-0.3.0//versioneer.pyfile:/versioneer.py:function:render_pep440_pre/render_pep440_pre
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    suffix = '.post.dev%d' % pieces['distance']
    if not pieces['closest-tag']:
        return '0' + suffix
    if not pieces['distance']:
        return pieces['closest-tag']
    return pieces['closest-tag'] + suffix
qg_spider_sdk
qg_spider_sdk//pipe/website_man.pyclass:WebsiteMan/register_regain_callback
@classmethod def register_regain_callback(cls, func): """ 注册网站暂停回调,参数为site_code """ cls.website_regain_callback.add(func)
noaodatalab-2.18.8
noaodatalab-2.18.8//vos/html2text.pyfile:/vos/html2text.py:function:google_has_height/google_has_height
def google_has_height(style):
    """Return True if the element's style has the 'height' attribute
    explicitly defined.

    :param style: style container for the element; anything supporting the
        ``in`` operator (mapping of style properties, or raw style string).
    :return: bool
    """
    # The membership test already yields a bool; no if/else ladder needed.
    return 'height' in style
netdef
netdef//Controllers/SystemMonitorController.pyfile:/Controllers/SystemMonitorController.py:function:get_clean_mount_point_name/get_clean_mount_point_name
def get_clean_mount_point_name(node):
    """Return *node* with path separators rewritten for use as a node name.

    POSIX mount points ('/'-separated) become dot-separated with a 'root'
    prefix; Windows-style names have ':\\' removed. Trailing dots are
    stripped in both cases.

    Example:

    .. code-block:: python

        for disk in psutil.disk_partitions():
            print (get_clean_mount_point_name(disk.mountpoint))

    :param str node: name of mountpoint
    :returns: new node name
    """
    if '/' in node:
        dotted = node.replace('/', '.')
        return 'root' + dotted.rstrip('.')
    if '\\' in node:
        return node.replace(':\\', '').rstrip('.')
    return node
sriov-netplan-shim-0.1.1
sriov-netplan-shim-0.1.1//sriov_netplan_shim/pci.pyfile:/sriov_netplan_shim/pci.py:function:format_pci_addr/format_pci_addr
def format_pci_addr(pci_addr: str) ->str:
    """Format a PCI address with 0 fill for parts

    :param: pci_addr: unformatted PCI address
    :type: str
    :returns: formatted PCI address
    :rtype: str
    """
    domain, bus, slot_func = pci_addr.split(':')
    slot, func = slot_func.split('.')
    # Zero-pad domain to 4 digits, bus and slot to 2; function unchanged.
    return f'{domain.zfill(4)}:{bus.zfill(2)}:{slot.zfill(2)}.{func}'
six-1.15.0
six-1.15.0//six.pyfile:/six.py:function:_add_doc/_add_doc
def _add_doc(func, doc): """Add documentation to a function.""" func.__doc__ = doc
fake-bpy-module-2.80-20200428
fake-bpy-module-2.80-20200428//gpu/matrix.pyfile:/gpu/matrix.py:function:translate/translate
def translate(offset: list):
    """Translate the current stack matrix.

    (Docstring fix: the summary previously said "Scale", contradicting the
    function name and parameter.) No-op placeholder body; presumably the
    real implementation is supplied by the host application at runtime.

    :param offset: Translate the current stack matrix.
    :type offset: list
    """
    pass