Dataset schema:

    column       type            range / values
    -----------  --------------  ------------------
    Unnamed: 0   int64           0 - 10k
    function     string          lengths 79 - 138k
    label        string (class)  20 values
    info         string          lengths 42 - 261
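Each row below is one record: a numeric index, a Python function with its exception type masked as __HOLE__, the label naming the exception that fills the mask, and the record's origin path in the ETHPy150Open corpus. A minimal sketch of how a row might be consumed — the pandas usage and the file name are illustrative assumptions, not part of this dump:

    # Hypothetical loader: assumes the dump is available as a pandas-readable
    # file with the four columns described in the schema above.
    import pandas as pd

    df = pd.read_parquet("exception_dataset.parquet")  # hypothetical file name
    row = df.iloc[0]
    # Restore the original source by substituting the label into the mask:
    restored = row["function"].replace("__HOLE__", row["label"])
    print(row["info"])      # origin of the function in the source corpus
    print(restored)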
6,900
def validate(self, value):
    try:
        return float(value)
    except (__HOLE__, TypeError):
        raise ValidationError(_('Not a float'))
ValueError
dataset/ETHPy150Open fiam/wapi/validators.py/FloatValidator.validate
6,901
def validate_type(value_type, value):
    """Validates a value given its type"""
    try:
        return TYPE_VALIDATORS[value_type](value)
    except __HOLE__:
        return value
KeyError
dataset/ETHPy150Open fiam/wapi/validators.py/validate_type
6,902
def get_type_validator(value_type):
    """Returns the validator for the given type"""
    try:
        return TYPE_VALIDATORS[value_type]
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open fiam/wapi/validators.py/get_type_validator
6,903
def _isstr(s):
    try:
        _basestring = basestring
    except __HOLE__:
        _basestring = str
    return isinstance(s, _basestring)

# If any parameter is a string, parse it as JSON
NameError
dataset/ETHPy150Open memphis-iis/GLUDB/gludb/versioning.py/_isstr
6,904
def search_subschemasubentry_s(self, dn=''):
    """
    Returns the distinguished name of the sub schema sub entry
    for a part of a DIT specified by dn.

    None as result indicates that the DN of the sub schema sub entry
    could not be determined.
    """
    try:
        r = self.search_s(
            dn, ldap.SCOPE_BASE, '(objectClass=*)', ['subschemaSubentry']
        )
    except (ldap.NO_SUCH_OBJECT, ldap.NO_SUCH_ATTRIBUTE, ldap.INSUFFICIENT_ACCESS):
        r = []
    except ldap.UNDEFINED_TYPE:
        return None
    try:
        if r:
            e = ldap.cidict.cidict(r[0][1])
            search_subschemasubentry_dn = e.get('subschemaSubentry', [None])[0]
            if search_subschemasubentry_dn is None:
                if dn:
                    # Try to find sub schema sub entry in root DSE
                    return self.search_subschemasubentry_s(dn='')
                else:
                    # If dn was already root DSE we can return here
                    return None
            else:
                return search_subschemasubentry_dn
    except __HOLE__:
        return None
IndexError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/python-ldap-2.3.13/Lib/ldap/ldapobject.py/SimpleLDAPObject.search_subschemasubentry_s
6,905
def logLabel(self, preferredRole='*', lang=None):
    try:
        return self._logLabel
    except __HOLE__:
        self._logLabel = self.genLabel(role=preferredRole, strip=True) or self.id or self.xlinkLabel
        return self._logLabel
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelFormulaResource.logLabel
6,906
@property
def groupFilterRelationships(self):
    try:
        return self._groupFilterRelationships
    except __HOLE__:
        self._groupFilterRelationships = self.modelXbrl.relationshipSet(XbrlConst.variableSetFilter).fromModelObject(self)
        return self._groupFilterRelationships
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelVariableSet.groupFilterRelationships
6,907
def evaluate(self, xpCtx):
    try:
        return xpCtx.evaluate(self.testProg)
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelValueAssertion.evaluate
6,908
def evaluate(self, xpCtx, typeQname):
    try:
        return xpCtx.evaluateAtomicValue(self.selectProg, typeQname)
    except __HOLE__:
        return None
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelParameter.evaluate
6,909
@property
def parameterQname(self):  # cannot overload with element's qname, needed for schema particle validation
    try:
        return self._parameterQname
    except __HOLE__:
        self._parameterQname = self.prefixedNameQname(self.name)
        return self._parameterQname
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelParameter.parameterQname
6,910
@property
def asType(self):
    try:
        return self._asType
    except __HOLE__:
        self._asType = self.prefixedNameQname(self.get("as"))
        return self._asType
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelParameter.asType
6,911
def variableRefs(self, progs=[], varRefSet=None):
    try:
        return self._variableRefs
    except __HOLE__:
        self._variableRefs = super(ModelFactVariable, self).variableRefs(self.fallbackValueProg, varRefSet)
        return self._variableRefs
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelFactVariable.variableRefs
6,912
@property
def filterRelationships(self):
    try:
        return self._filterRelationships
    except __HOLE__:
        rels = []  # order so conceptName filter is first (if any) (may want more sorting in future)
        for rel in self.modelXbrl.relationshipSet(XbrlConst.variableFilter).fromModelObject(self):
            if isinstance(rel.toModelObject, ModelConceptName):
                rels.insert(0, rel)  # put conceptName filters first
            else:
                rels.append(rel)
        self._filterRelationships = rels
        return rels
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelFactVariable.filterRelationships
6,913
def evalTest(self, xpCtx):
    try:
        return xpCtx.evaluateBooleanValue(self.testProg)
    except __HOLE__:
        return True  # true if no test attribute because predicate has no filtering action
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelPrecondition.evalTest
6,914
def hasNoFilterVariableDependencies(self, xpCtx):
    try:
        return self._hasNoVariableDependencies
    except __HOLE__:
        self._hasNoVariableDependencies = len(self.variableRefs() - xpCtx.parameterQnames) == 0
        return self._hasNoVariableDependencies
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelFilter.hasNoFilterVariableDependencies
6,915
@property
def isFilterShared(self):
    try:
        return self._isFilterShared
    except __HOLE__:
        self._isFilterShared = len(self.modelXbrl.relationshipSet("XBRL-formulae").toModelObject(self)) > 1
        return self._isFilterShared
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelFilter.isFilterShared
6,916
def evalTest(self, xpCtx, fact):
    try:
        return xpCtx.evaluateBooleanValue(self.testProg, fact)
    except __HOLE__:
        return True  # true if no test attribute because predicate has no filtering action
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelTestFilter.evalTest
6,917
@property
def rePattern(self):
    try:
        return self._rePattern
    except __HOLE__:
        self._rePattern = re.compile(self.pattern)
        return self._rePattern
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelPatternFilter.rePattern
6,918
def aspectsCovered(self, varBinding, xpCtx=None):
    try:
        return self._aspectsCovered
    except __HOLE__:
        self._aspectsCovered = set()
        self._dimsExcluded = set()
        self.isAll = False
        self.allDimensions = False
        for aspectElt in XmlUtil.children(self, XbrlConst.acf, "aspect"):
            aspect = XmlUtil.text(aspectElt)
            if aspect == "all":
                self.isAll = True
                self.allDimensions = True
                self._aspectsCovered |= {
                    Aspect.LOCATION, Aspect.CONCEPT, Aspect.ENTITY_IDENTIFIER,
                    Aspect.PERIOD, Aspect.UNIT, Aspect.NON_XDT_SEGMENT,
                    Aspect.NON_XDT_SCENARIO, Aspect.COMPLETE_SEGMENT,
                    Aspect.COMPLETE_SCENARIO}
            elif aspect == "dimensions":
                self.allDimensions = True
            else:
                self._aspectsCovered.add(aspectFromToken[aspect])
        for dimElt in XmlUtil.descendants(self, XbrlConst.acf, "qname"):
            dimAspect = qname(dimElt, XmlUtil.text(dimElt))
            if dimElt.getparent().localName == "excludeDimension":
                self._dimsExcluded.add(dimAspect)
            else:
                self._aspectsCovered.add(dimAspect)
        if xpCtx:  # provided during validate formula checking
            for dimProgs, isExcluded in ((self.includedDimQnameProgs, False),
                                         (self.excludedDimQnameProgs, True)):
                for dimProg in dimProgs:
                    dimAspect = xpCtx.evaluateAtomicValue(dimProg, 'xs:QName')
                    if isExcluded:
                        self._dimsExcluded.add(dimAspect)
                    else:
                        self._aspectsCovered.add(dimAspect)
        return self._aspectsCovered
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelAspectCover.aspectsCovered
6,919
@property
def conceptQnames(self):
    try:
        return self._conceptQnames
    except __HOLE__:
        self._conceptQnames = set()
        for qnameElt in XmlUtil.descendants(self, XbrlConst.cf, "qname"):
            self._conceptQnames.add(qname(qnameElt, XmlUtil.text(qnameElt)))
        return self._conceptQnames
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelConceptName.conceptQnames
6,920
def evalQnames(self, xpCtx, fact):
    try:
        return set(xpCtx.evaluateAtomicValue(exprProg, 'xs:QName', fact)
                   for exprProg in self.qnameExpressionProgs)
    except __HOLE__:
        return set()
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelConceptName.evalQnames
6,921
@property
def generations(self):
    try:
        return _INT(XmlUtil.childText(self, XbrlConst.crf, "generations"))
    except (__HOLE__, ValueError):
        if self.axis in ('sibling', 'child', 'parent'):
            return 1
        return 0
TypeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelConceptRelation.generations
6,922
@property
def aspectName(self):
    try:
        return self._aspectName
    except __HOLE__:
        self._aspectName = self.localName[5].lower() + self.localName[6:]
        return self._aspectName
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelMatchFilter.aspectName
6,923
@property
def matchAny(self):
    try:
        return self._matchAny
    except __HOLE__:
        self._matchAny = self.get("matchAny") in ("true", "1")
        return self._matchAny
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelMatchFilter.matchAny
6,924
@property
def aspect(self):
    try:
        return self._aspect
    except __HOLE__:
        self._aspect = aspectFromToken[self.aspectName]
        if self._aspect == Aspect.DIMENSIONS:
            self._aspect = self.dimension
        return self._aspect
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelMatchFilter.aspect
6,925
@property
def dimQname(self):
    try:
        return self._dimQname
    except __HOLE__:
        dQn = XmlUtil.child(XmlUtil.child(self, XbrlConst.df, "dimension"), XbrlConst.df, "qname")
        self._dimQname = qname(dQn, XmlUtil.text(dQn)) if dQn is not None else None
        return self._dimQname
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelExplicitDimension.dimQname
6,926
def variableRefs(self, progs=[], varRefSet=None):  # no subclass calls this
    try:
        return super(ModelMessage, self).variableRefs(self.expressionProgs, varRefSet)
    except __HOLE__:
        return set()  # no expressions
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelMessage.variableRefs
6,927
@property
def name(self):
    try:
        return self._name
    except __HOLE__:
        self._name = self.get("name")
        return self._name
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionSignature.name
6,928
@property
def functionQname(self):  # cannot overload qname, needed for element and particle validation
    try:
        return self._functionQname
    except __HOLE__:
        self._functionQname = self.prefixedNameQname(self.name)
        return self._functionQname
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionSignature.functionQname
6,929
@property
def outputType(self):
    try:
        return self._outputType
    except __HOLE__:
        self._outputType = self.prefixedNameQname(self.get("output"))
        return self._outputType
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionSignature.outputType
6,930
@property
def inputTypes(self):
    try:
        return self._inputTypes
    except __HOLE__:
        self._inputTypes = [elt.prefixedNameQname(elt.get("type"))
                            for elt in XmlUtil.children(self, XbrlConst.variable, "input")]
        return self._inputTypes
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionSignature.inputTypes
6,931
@property
def inputNames(self):
    try:
        return self._inputNames
    except __HOLE__:
        self._inputNames = [qname(elt, elt.get("name"))
                            for elt in XmlUtil.children(self, XbrlConst.cfi, "input")]
        return self._inputNames
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionImplementation.inputNames
6,932
@property
def stepExpressions(self):
    try:
        return self._stepExpressions
    except __HOLE__:
        self._stepExpressions = [(qname(elt, elt.get("name")), elt.text)
                                 for elt in XmlUtil.children(self, XbrlConst.cfi, "step")]
        return self._stepExpressions
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionImplementation.stepExpressions
6,933
@property
def outputExpression(self):
    try:
        return self._outputExpression
    except __HOLE__:
        outputElt = XmlUtil.child(self, XbrlConst.cfi, "output")
        self._outputExpression = XmlUtil.text(outputElt) if outputElt is not None else None
        return self._outputExpression
AttributeError
dataset/ETHPy150Open Arelle/Arelle/arelle/ModelFormulaObject.py/ModelCustomFunctionImplementation.outputExpression
6,934
def __init__(self, environment='production', api_base=None,
             json=False, _create_session=True):
    """Sets up Requests Session to be used for all connections to T1.

    :param environment: str to look up API Base to use. e.g. 'production'
        for https://api.mediamath.com/api/v2.0
    :param api_base: str API domain. should be the qualified domain name
        without trailing slash. e.g. "api.mediamath.com".
    :param _create_session: bool flag to create a Requests Session.
        Should only be used for initial T1 instantiation.
    """
    if api_base is None:
        try:
            Connection.__setattr__(self, 'api_base', API_BASES[environment])
        except __HOLE__:
            raise ClientError("Environment: {!r}, does not exist."
                              .format(environment))
    else:
        Connection.__setattr__(self, 'api_base', api_base)
    Connection.__setattr__(self, 'json', json)
    if json:
        Connection.__setattr__(self, '_parser', JSONParser)
    else:
        Connection.__setattr__(self, '_parser', XMLParser)
    if _create_session:
        Connection.__setattr__(self, 'session', Session())
        self.session.headers['User-Agent'] = _generate_user_agent()
        if json:
            self.session.headers['Accept'] = ACCEPT_HEADERS['json']
KeyError
dataset/ETHPy150Open MediaMath/t1-python/terminalone/connection.py/Connection.__init__
6,935
def _showImage(commandForImage):
    imageDirectory = '/tmp/xcode_debug_images/'
    imageName = time.strftime("%Y-%m-%d-%H-%M-%S", time.gmtime()) + ".png"
    imagePath = imageDirectory + imageName
    try:
        os.makedirs(imageDirectory)
    except __HOLE__ as e:
        if e.errno == errno.EEXIST and os.path.isdir(imageDirectory):
            pass
        else:
            raise
    imageDataAddress = fb.evaluateObjectExpression('UIImagePNGRepresentation((id)' + commandForImage + ')')
    imageBytesStartAddress = fb.evaluateExpression('(void *)[(id)' + imageDataAddress + ' bytes]')
    imageBytesLength = fb.evaluateExpression('(NSUInteger)[(id)' + imageDataAddress + ' length]')
    address = int(imageBytesStartAddress, 16)
    length = int(imageBytesLength)
    if not (address or length):
        print 'Could not get image data.'
        return
    process = lldb.debugger.GetSelectedTarget().GetProcess()
    error = lldb.SBError()
    mem = process.ReadMemory(address, length, error)
    if error is not None and str(error) != 'success':
        print error
    else:
        imgFile = open(imagePath, 'wb')
        imgFile.write(mem)
        imgFile.close()
        os.system('open ' + imagePath)
OSError
dataset/ETHPy150Open facebook/chisel/commands/FBVisualizationCommands.py/_showImage
6,936
def read_montage(kind, ch_names=None, path=None, unit='m', transform=False):
    """Read a generic (built-in) montage from a file

    This function can be used to read electrode positions from a user
    specified file using the `kind` and `path` parameters. Alternatively,
    use only the `kind` parameter to load one of the built-in montages:

    ===================   =====================================================
    Kind                  description
    ===================   =====================================================
    standard_1005         Electrodes are named and positioned according to the
                          international 10-05 system.
    standard_1020         Electrodes are named and positioned according to the
                          international 10-20 system.
    standard_alphabetic   Electrodes are named with LETTER-NUMBER combinations
                          (A1, B2, F4, etc.)
    standard_postfixed    Electrodes are named according to the international
                          10-20 system using postfixes for intermediate
                          positions.
    standard_prefixed     Electrodes are named according to the international
                          10-20 system using prefixes for intermediate
                          positions.
    standard_primed       Electrodes are named according to the international
                          10-20 system using prime marks (' and '') for
                          intermediate positions.
    biosemi16             BioSemi cap with 16 electrodes
    biosemi32             BioSemi cap with 32 electrodes
    biosemi64             BioSemi cap with 64 electrodes
    biosemi128            BioSemi cap with 128 electrodes
    biosemi160            BioSemi cap with 160 electrodes
    biosemi256            BioSemi cap with 256 electrodes
    easycap-M10           Brainproducts EasyCap with electrodes named
                          according to the 10-05 system
    easycap-M1            Brainproduct EasyCap with numbered electrodes
    EGI_256               Geodesic Sensor Net with 256 channels
    GSN-HydroCel-32       HydroCel Geodesic Sensor Net with 32 electrodes
    GSN-HydroCel-64_1.0   HydroCel Geodesic Sensor Net with 64 electrodes
    GSN-HydroCel-65_1.0   HydroCel Geodesic Sensor Net with 64 electrodes + Cz
    GSN-HydroCel-128      HydroCel Geodesic Sensor Net with 128 electrodes
    GSN-HydroCel-129      HydroCel Geodesic Sensor Net with 128 electrodes + Cz
    GSN-HydroCel-256      HydroCel Geodesic Sensor Net with 256 electrodes
    GSN-HydroCel-257      HydroCel Geodesic Sensor Net with 256 electrodes + Cz
    ===================   =====================================================

    Parameters
    ----------
    kind : str
        The name of the montage file (e.g. kind='easycap-M10' for
        'easycap-M10.txt'). Files with extensions '.elc', '.txt', '.csd',
        '.elp', '.hpts', '.sfp' or '.loc' ('.locs' and '.eloc') are supported.
    ch_names : list of str | None
        If not all electrodes defined in the montage are present in the EEG
        data, use this parameter to select subset of electrode positions to
        load. If None (default), all defined electrode positions are returned.
    path : str | None
        The path of the folder containing the montage file. Defaults to the
        mne/channels/data/montages folder in your mne-python installation.
    unit : 'm' | 'cm' | 'mm'
        Unit of the input file. If not 'm' (default), coordinates will be
        rescaled to 'm'.
    transform : bool
        If True, points will be transformed to Neuromag space. The fidicuals,
        'nasion', 'lpa', 'rpa' must be specified in the montage file. Useful
        for points captured using Polhemus FastSCAN. Default is False.

    Returns
    -------
    montage : instance of Montage
        The montage.

    See Also
    --------
    read_dig_montage : To read subject-specific digitization information.

    Notes
    -----
    Built-in montages are not scaled or transformed by default.

    Montages can contain fiducial points in addition to electrode locations,
    e.g. ``biosemi-64`` contains 67 total channels.

    .. versionadded:: 0.9.0
    """
    if path is None:
        path = op.join(op.dirname(__file__), 'data', 'montages')
    if not op.isabs(kind):
        supported = ('.elc', '.txt', '.csd', '.sfp', '.elp', '.hpts', '.loc',
                     '.locs', '.eloc')
        montages = [op.splitext(f) for f in os.listdir(path)]
        montages = [m for m in montages if m[1] in supported and kind == m[0]]
        if len(montages) != 1:
            raise ValueError('Could not find the montage. Please provide the '
                             'full path.')
        kind, ext = montages[0]
        fname = op.join(path, kind + ext)
    else:
        kind, ext = op.splitext(kind)
        fname = op.join(path, kind + ext)

    if ext == '.sfp':
        # EGI geodesic
        dtype = np.dtype('S4, f8, f8, f8')
        data = np.loadtxt(fname, dtype=dtype)
        pos = np.c_[data['f1'], data['f2'], data['f3']]
        ch_names_ = data['f0'].astype(np.str)
    elif ext == '.elc':
        # 10-5 system
        ch_names_ = []
        pos = []
        with open(fname) as fid:
            for line in fid:
                if 'Positions\n' in line:
                    break
            pos = []
            for line in fid:
                if 'Labels\n' in line:
                    break
                pos.append(list(map(float, line.split())))
            for line in fid:
                if not line or not set(line) - set([' ']):
                    break
                ch_names_.append(line.strip(' ').strip('\n'))
        pos = np.array(pos)
    elif ext == '.txt':
        # easycap
        try:  # newer version
            data = np.genfromtxt(fname, dtype='str', skip_header=1)
        except TypeError:
            data = np.genfromtxt(fname, dtype='str', skiprows=1)
        ch_names_ = list(data[:, 0])
        theta, phi = data[:, 1].astype(float), data[:, 2].astype(float)
        x = 85. * np.cos(np.deg2rad(phi)) * np.sin(np.deg2rad(theta))
        y = 85. * np.sin(np.deg2rad(theta)) * np.sin(np.deg2rad(phi))
        z = 85. * np.cos(np.deg2rad(theta))
        pos = np.c_[x, y, z]
    elif ext == '.csd':
        # CSD toolbox
        dtype = [('label', 'S4'), ('theta', 'f8'), ('phi', 'f8'),
                 ('radius', 'f8'), ('x', 'f8'), ('y', 'f8'), ('z', 'f8'),
                 ('off_sph', 'f8')]
        try:  # newer version
            table = np.loadtxt(fname, skip_header=2, dtype=dtype)
        except __HOLE__:
            table = np.loadtxt(fname, skiprows=2, dtype=dtype)
        ch_names_ = table['label']
        theta = (2 * np.pi * table['theta']) / 360.
        phi = (2 * np.pi * table['phi']) / 360.
        pos = _sphere_to_cartesian(theta, phi, r=1.0)
        pos = np.asarray(pos).T
    elif ext == '.elp':
        # standard BESA spherical
        dtype = np.dtype('S8, S8, f8, f8, f8')
        try:
            data = np.loadtxt(fname, dtype=dtype, skip_header=1)
        except TypeError:
            data = np.loadtxt(fname, dtype=dtype, skiprows=1)
        az = data['f2']
        horiz = data['f3']
        radius = np.abs(az / 180.)
        angles = np.array([90. - h if a >= 0. else -90. - h
                           for h, a in zip(horiz, az)])
        sph_phi = (0.5 - radius) * 180.
        sph_theta = angles
        azimuth = sph_theta / 180.0 * np.pi
        elevation = sph_phi / 180.0 * np.pi
        r = 85.
        y, x, z = _sphere_to_cartesian(azimuth, elevation, r)
        pos = np.c_[x, y, z]
        ch_names_ = data['f1'].astype(np.str)
    elif ext == '.hpts':
        # MNE-C specified format for generic digitizer data
        dtype = [('type', 'S8'), ('name', 'S8'),
                 ('x', 'f8'), ('y', 'f8'), ('z', 'f8')]
        data = np.loadtxt(fname, dtype=dtype)
        pos = np.vstack((data['x'], data['y'], data['z'])).T
        ch_names_ = data['name'].astype(np.str)
    elif ext in ('.loc', '.locs', '.eloc'):
        ch_names_ = np.loadtxt(fname, dtype='S4',
                               usecols=[3]).astype(np.str).tolist()
        dtype = {'names': ('angle', 'radius'), 'formats': ('f4', 'f4')}
        angle, radius = np.loadtxt(fname, dtype=dtype, usecols=[1, 2],
                                   unpack=True)
        sph_phi, sph_theta = _topo_to_sphere(angle, radius)
        azimuth = sph_theta / 180.0 * np.pi
        elevation = sph_phi / 180.0 * np.pi
        r = np.ones((len(ch_names_), ))
        x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
        pos = np.c_[-y, x, z]
    else:
        raise ValueError('Currently the "%s" template is not supported.' %
                         kind)
    selection = np.arange(len(pos))
    if unit == 'mm':
        pos /= 1e3
    elif unit == 'cm':
        pos /= 1e2
    elif unit != 'm':
        raise ValueError("'unit' should be either 'm', 'cm', or 'mm'.")
    if transform:
        names_lower = [name.lower() for name in list(ch_names_)]
        if ext == '.hpts':
            fids = ('2', '1', '3')  # Alternate cardinal point names
        else:
            fids = ('nz', 'lpa', 'rpa')
        missing = [name for name in fids if name not in names_lower]
        if missing:
            raise ValueError("The points %s are missing, but are needed "
                             "to transform the points to the MNE coordinate "
                             "system. Either add the points, or read the "
                             "montage with transform=False. " % missing)
        nasion = pos[names_lower.index(fids[0])]
        lpa = pos[names_lower.index(fids[1])]
        rpa = pos[names_lower.index(fids[2])]
        neuromag_trans = get_ras_to_neuromag_trans(nasion, lpa, rpa)
        pos = apply_trans(neuromag_trans, pos)
    if ch_names is not None:
        sel, ch_names_ = zip(*[(i, e) for i, e in enumerate(ch_names_)
                               if e in ch_names])
        sel = list(sel)
        pos = pos[sel]
        selection = selection[sel]
    else:
        ch_names_ = list(ch_names_)
    kind = op.split(kind)[-1]
    return Montage(pos=pos, ch_names=ch_names_, kind=kind,
                   selection=selection)
TypeError
dataset/ETHPy150Open mne-tools/mne-python/mne/channels/montage.py/read_montage
6,937
def generate_run_target(self, target, outfile):
    runnerscript = [sys.executable, self.environment.get_build_command(),
                    '--internal', 'commandrunner']
    deps = []
    arg_strings = []
    for i in target.args:
        if isinstance(i, str):
            arg_strings.append(i)
        elif isinstance(i, (build.BuildTarget, build.CustomTarget)):
            relfname = self.get_target_filename(i)
            deps.append(relfname)
            arg_strings.append(os.path.join(self.environment.get_build_dir(), relfname))
        else:
            mlog.debug(str(i))
            raise MesonException('Unreachable code in generate_run_target.')
    elem = NinjaBuildElement(self.all_outputs, target.name, 'CUSTOM_COMMAND', deps)
    cmd = runnerscript + [self.environment.get_source_dir(),
                          self.environment.get_build_dir(), target.subdir]
    texe = target.command
    try:
        texe = texe.held_object
    except __HOLE__:
        pass
    if isinstance(texe, build.Executable):
        abs_exe = os.path.join(self.environment.get_build_dir(),
                               self.get_target_filename(texe))
        deps.append(self.get_target_filename(texe))
        if self.environment.is_cross_build() \
                and self.environment.cross_info.config['binaries'].get('exe_wrapper', None) is not None:
            cmd += [self.environment.cross_info.config['binaries']['exe_wrapper']]
        cmd.append(abs_exe)
    else:
        cmd.append(target.command)
    cmd += arg_strings
    elem.add_item('COMMAND', cmd)
    elem.add_item('description', 'Running external command %s.' % target.name)
    elem.add_item('pool', 'console')
    elem.write(outfile)
    self.processed_targets[target.name + target.type_suffix()] = True
AttributeError
dataset/ETHPy150Open mesonbuild/meson/mesonbuild/backend/ninjabackend.py/NinjaBackend.generate_run_target
6,938
def generate_dynamic_link_rules(self, outfile):
    ctypes = [(self.build.compilers, False)]
    if self.environment.is_cross_build():
        if self.environment.cross_info.need_cross_compiler():
            ctypes.append((self.build.cross_compilers, True))
        else:
            # Native compiler masquerades as the cross compiler.
            ctypes.append((self.build.compilers, True))
    else:
        ctypes.append((self.build.cross_compilers, True))
    for (complist, is_cross) in ctypes:
        for compiler in complist:
            langname = compiler.get_language()
            if langname == 'java' or langname == 'vala' or\
               langname == 'rust' or langname == 'cs':
                continue
            crstr = ''
            cross_args = []
            if is_cross:
                crstr = '_CROSS'
                try:
                    cross_args = self.environment.cross_info.config['properties'][langname + '_link_args']
                except __HOLE__:
                    pass
            rule = 'rule %s%s_LINKER\n' % (langname, crstr)
            if mesonlib.is_windows():
                command_template = ''' command = %s @$out.rsp
 rspfile = $out.rsp
 rspfile_content = %s $ARGS %s $in $LINK_ARGS $aliasing
'''
            else:
                command_template = ' command = %s %s $ARGS %s $in $LINK_ARGS $aliasing\n'
            command = command_template % \
                (' '.join(compiler.get_linker_exelist()),
                 ' '.join(cross_args),
                 ' '.join(compiler.get_linker_output_args('$out')))
            description = ' description = Linking target $out'
            outfile.write(rule)
            outfile.write(command)
            outfile.write(description)
            outfile.write('\n')
    scriptdir = self.environment.get_script_dir()
    outfile.write('\n')
    symrule = 'rule SHSYM\n'
    symcmd = ' command = "%s" "%s" %s %s %s %s $CROSS\n' % (ninja_quote(sys.executable),
                                                            self.environment.get_build_command(),
                                                            '--internal',
                                                            'symbolextractor',
                                                            '$in',
                                                            '$out')
    synstat = ' restat = 1\n'
    syndesc = ' description = Generating symbol file $out.\n'
    outfile.write(symrule)
    outfile.write(symcmd)
    outfile.write(synstat)
    outfile.write(syndesc)
    outfile.write('\n')
KeyError
dataset/ETHPy150Open mesonbuild/meson/mesonbuild/backend/ninjabackend.py/NinjaBackend.generate_dynamic_link_rules
6,939
def generate_compile_rule_for(self, langname, compiler, qstr, is_cross, outfile):
    if langname == 'java':
        if not is_cross:
            self.generate_java_compile_rule(compiler, outfile)
        return
    if langname == 'cs':
        if not is_cross:
            self.generate_cs_compile_rule(compiler, outfile)
        return
    if langname == 'vala':
        if not is_cross:
            self.generate_vala_compile_rules(compiler, outfile)
        return
    if langname == 'rust':
        if not is_cross:
            self.generate_rust_compile_rules(compiler, outfile)
        return
    if langname == 'swift':
        if not is_cross:
            self.generate_swift_compile_rules(compiler, outfile)
        return
    if langname == 'fortran':
        self.generate_fortran_dep_hack(outfile)
    if is_cross:
        crstr = '_CROSS'
    else:
        crstr = ''
    rule = 'rule %s%s_COMPILER\n' % (langname, crstr)
    depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
    quoted_depargs = []
    for d in depargs:
        if d != '$out' and d != '$in':
            d = qstr % d
        quoted_depargs.append(d)
    cross_args = []
    if is_cross:
        try:
            cross_args = self.environment.cross_info.config['properties'][langname + '_args']
        except __HOLE__:
            pass
    if mesonlib.is_windows():
        command_template = ''' command = %s @$out.rsp
 rspfile = $out.rsp
 rspfile_content = %s $ARGS %s %s %s $in
'''
    else:
        command_template = ' command = %s %s $ARGS %s %s %s $in\n'
    command = command_template % \
        (' '.join(compiler.get_exelist()),
         ' '.join(cross_args),
         ' '.join(quoted_depargs),
         ' '.join(compiler.get_output_args('$out')),
         ' '.join(compiler.get_compile_only_args()))
    description = ' description = Compiling %s object $out\n' % langname
    if compiler.get_id() == 'msvc':
        deps = ' deps = msvc\n'
    else:
        deps = ' deps = gcc\n'
        deps += ' depfile = $DEPFILE\n'
    outfile.write(rule)
    outfile.write(command)
    outfile.write(deps)
    outfile.write(description)
    outfile.write('\n')
KeyError
dataset/ETHPy150Open mesonbuild/meson/mesonbuild/backend/ninjabackend.py/NinjaBackend.generate_compile_rule_for
6,940
def generate_pch_rule_for(self, langname, compiler, qstr, is_cross, outfile):
    if langname != 'c' and langname != 'cpp':
        return
    if is_cross:
        crstr = '_CROSS'
    else:
        crstr = ''
    rule = 'rule %s%s_PCH\n' % (langname, crstr)
    depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
    cross_args = []
    if is_cross:
        try:
            cross_args = self.environment.cross_info.config['properties'][langname + '_args']
        except __HOLE__:
            pass
    quoted_depargs = []
    for d in depargs:
        if d != '$out' and d != '$in':
            d = qstr % d
        quoted_depargs.append(d)
    if compiler.get_id() == 'msvc':
        output = ''
    else:
        output = ' '.join(compiler.get_output_args('$out'))
    command = " command = %s %s $ARGS %s %s %s $in\n" % \
        (' '.join(compiler.get_exelist()),
         ' '.join(cross_args),
         ' '.join(quoted_depargs),
         output,
         ' '.join(compiler.get_compile_only_args()))
    description = ' description = Precompiling header %s\n' % '$in'
    if compiler.get_id() == 'msvc':
        deps = ' deps = msvc\n'
    else:
        deps = ' deps = gcc\n'
        deps += ' depfile = $DEPFILE\n'
    outfile.write(rule)
    outfile.write(command)
    outfile.write(deps)
    outfile.write(description)
    outfile.write('\n')
KeyError
dataset/ETHPy150Open mesonbuild/meson/mesonbuild/backend/ninjabackend.py/NinjaBackend.generate_pch_rule_for
6,941
def generate_shlib_aliases(self, target, outdir):
    basename = target.get_filename()
    aliases = target.get_aliaslist()
    for alias in aliases:
        aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias)
        try:
            os.remove(aliasfile)
        except Exception:
            pass
        try:
            os.symlink(basename, aliasfile)
        except __HOLE__:
            mlog.debug("Library versioning disabled because symlinks are not supported.")
        except OSError:
            mlog.debug("Library versioning disabled because we do not have symlink creation privileges.")
NotImplementedError
dataset/ETHPy150Open mesonbuild/meson/mesonbuild/backend/ninjabackend.py/NinjaBackend.generate_shlib_aliases
6,942
def get_style_by_name(name):
    if name in STYLE_MAP:
        mod, cls = STYLE_MAP[name].split('::')
        builtin = "yes"
    else:
        for found_name, style in find_plugin_styles():
            if name == found_name:
                return style
        # perhaps it got dropped into our styles package
        builtin = ""
        mod = name
        cls = name.title() + "Style"

    try:
        mod = __import__('pygments.styles.' + mod, None, None, [cls])
    except __HOLE__:
        raise ClassNotFound("Could not find style module %r" % mod +
                            (builtin and ", though it should be builtin") + ".")
    try:
        return getattr(mod, cls)
    except AttributeError:
        raise ClassNotFound("Could not find style class %r in style module." % cls)
ImportError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/Pygments-1.3.1/pygments/styles/__init__.py/get_style_by_name
6,943
def load_sync_steps(self):
    dep_path = Config().observer_dependency_graph
    logger.info('Loading model dependency graph from %s' % dep_path)
    try:
        # This contains dependencies between records, not sync steps
        self.model_dependency_graph = json.loads(open(dep_path).read())
    except Exception,e:
        raise e

    try:
        backend_path = Config().observer_pl_dependency_graph
        logger.info('Loading backend dependency graph from %s' % backend_path)
        # This contains dependencies between backend records
        self.backend_dependency_graph = json.loads(open(backend_path).read())
    except Exception,e:
        logger.info('Backend dependency graph not loaded')
        # We can work without a backend graph
        self.backend_dependency_graph = {}

    provides_dict = {}
    for s in self.sync_steps:
        self.step_lookup[s.__name__] = s
        for m in s.provides:
            try:
                provides_dict[m.__name__].append(s.__name__)
            except __HOLE__:
                provides_dict[m.__name__]=[s.__name__]

    step_graph = {}
    for k,v in self.model_dependency_graph.iteritems():
        try:
            for source in provides_dict[k]:
                for m in v:
                    try:
                        for dest in provides_dict[m]:
                            # no deps, pass
                            try:
                                if (dest not in step_graph[source]):
                                    step_graph[source].append(dest)
                            except:
                                step_graph[source]=[dest]
                    except KeyError:
                        pass
        except KeyError:
            pass
            # no dependencies, pass

    #import pdb
    #pdb.set_trace()
    if (self.backend_dependency_graph):
        backend_dict = {}
        for s in self.sync_steps:
            for m in s.serves:
                backend_dict[m]=s.__name__

        for k,v in backend_dependency_graph.iteritems():
            try:
                source = backend_dict[k]
                for m in v:
                    try:
                        dest = backend_dict[m]
                    except KeyError:
                        # no deps, pass
                        pass
                    step_graph[source]=dest
            except KeyError:
                pass
                # no dependencies, pass

    self.dependency_graph = step_graph
    self.deletion_dependency_graph = invert_graph(step_graph)

    self.ordered_steps = toposort(self.dependency_graph, map(lambda s:s.__name__,self.sync_steps))
    print "Order of steps=",self.ordered_steps
    self.load_run_times()
KeyError
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/ec2/event_loop.py/XOSObserver.load_sync_steps
6,944
def check_duration(self, step, duration):
    try:
        if (duration > step.deadline):
            logger.info('Sync step %s missed deadline, took %.2f seconds'%(step.name,duration))
    except __HOLE__:
        # S doesn't have a deadline
        pass
AttributeError
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/ec2/event_loop.py/XOSObserver.check_duration
6,945
def check_schedule(self, step, deletion):
    last_run_times = self.last_run_times if not deletion else self.last_deletion_run_times

    time_since_last_run = time.time() - last_run_times.get(step.__name__, 0)
    try:
        if (time_since_last_run < step.requested_interval):
            raise StepNotReady
    except __HOLE__:
        logger.info('Step %s does not have requested_interval set'%step.__name__)
        raise StepNotReady
AttributeError
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/ec2/event_loop.py/XOSObserver.check_schedule
6,946
def sync(self, S, deletion):
    step = self.step_lookup[S]
    start_time=time.time()

    dependency_graph = self.dependency_graph if not deletion else self.deletion_dependency_graph

    # Wait for step dependencies to be met
    try:
        deps = self.dependency_graph[S]
        has_deps = True
    except KeyError:
        has_deps = False

    if (has_deps):
        for d in deps:
            cond = self.step_conditions[d]
            cond.acquire()
            if (self.step_status[d] is STEP_STATUS_WORKING):
                cond.wait()
            cond.release()
        go = self.step_status[d] == STEP_STATUS_OK
    else:
        go = True

    if (not go):
        self.failed_steps.append(sync_step)
        my_status = STEP_STATUS_KO
    else:
        sync_step = step(driver=self.driver,error_map=self.error_mapper)
        sync_step.__name__ = step.__name__
        sync_step.dependencies = []
        try:
            mlist = sync_step.provides
            for m in mlist:
                sync_step.dependencies.extend(self.model_dependency_graph[m.__name__])
        except KeyError:
            pass
        sync_step.debug_mode = debug_mode

        should_run = False
        try:
            # Various checks that decide whether
            # this step runs or not
            self.check_class_dependency(sync_step, self.failed_steps) # dont run Slices if Sites failed
            self.check_schedule(sync_step, deletion) # dont run sync_network_routes if time since last run < 1 hour
            should_run = True
        except StepNotReady:
            logging.info('Step not ready: %s'%sync_step.__name__)
            self.failed_steps.append(sync_step)
            my_status = STEP_STATUS_KO
        except Exception,e:
            logging.error('%r',e)
            logger.log_exc("sync step failed: %r. Deletion: %r"%(sync_step,deletion))
            self.failed_steps.append(sync_step)
            my_status = STEP_STATUS_KO

        if (should_run):
            try:
                duration=time.time() - start_time

                logger.info('Executing step %s' % sync_step.__name__)

                failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)

                self.check_duration(sync_step, duration)

                if failed_objects:
                    self.failed_step_objects.update(failed_objects)

                my_status = STEP_STATUS_OK
                self.update_run_time(sync_step,deletion)
            except Exception,e:
                logging.error('Model step failed. This seems like a misconfiguration or bug: %r. This error will not be relayed to the user!',e)
                logger.log_exc(e)
                self.failed_steps.append(S)
                my_status = STEP_STATUS_KO
        else:
            my_status = STEP_STATUS_OK

    try:
        my_cond = self.step_conditions[S]
        my_cond.acquire()
        self.step_status[S]=my_status
        my_cond.notify_all()
        my_cond.release()
    except __HOLE__,e:
        logging.info('Step %r is a leaf')
        pass
KeyError
dataset/ETHPy150Open open-cloud/xos/xos/synchronizers/ec2/event_loop.py/XOSObserver.sync
6,947
def datasource(lat, lon, source_dir):
    """ Return a gdal datasource for an SRTM3 lat, lon corner.

        If it doesn't already exist locally in source_dir, grab a new one.
    """
    #
    # Create a URL
    #
    try:
        reg = region(lat, lon)
    except __HOLE__:
        # we're probably outside a known region
        return None

    fmt = 'http://dds.cr.usgs.gov/srtm/version2_1/SRTM3/%s/%s.hgt.zip'
    url = fmt % (reg, filename(lat, lon))

    #
    # Create a local filepath
    #
    s, host, path, p, q, f = urlparse(url)

    dem_dir = md5(url).hexdigest()[:3]
    dem_dir = join(source_dir, dem_dir)

    dem_path = join(dem_dir, basename(path)[:-4])
    dem_none = dem_path[:-4]+'.404'

    #
    # Check if the file exists locally
    #
    if exists(dem_path):
        return gdal.Open(dem_path, gdal.GA_ReadOnly)

    if exists(dem_none):
        return None

    if not exists(dem_dir):
        makedirs(dem_dir)
        chmod(dem_dir, 0777)

    assert isdir(dem_dir)

    #
    # Grab a fresh remote copy
    #
    print >> stderr, 'Retrieving', url, 'in DEM.SRTM3.datasource().'

    conn = HTTPConnection(host, 80)
    conn.request('GET', path)
    resp = conn.getresponse()

    if resp.status == 404:
        # we're probably outside the coverage area
        print >> open(dem_none, 'w'), url
        return None

    assert resp.status == 200, (resp.status, resp.read())

    try:
        #
        # Get the DEM out of the zip file
        #
        handle, zip_path = mkstemp(prefix='srtm3-', suffix='.zip')
        write(handle, resp.read())
        close(handle)

        zipfile = ZipFile(zip_path, 'r')

        #
        # Write the actual DEM
        #
        dem_file = open(dem_path, 'w')
        dem_file.write(zipfile.read(zipfile.namelist()[0]))
        dem_file.close()

        chmod(dem_path, 0666)

    finally:
        unlink(zip_path)

    #
    # The file better exist locally now
    #
    return gdal.Open(dem_path, gdal.GA_ReadOnly)
ValueError
dataset/ETHPy150Open migurski/DEM-Tools/Hillup/data/SRTM3.py/datasource
6,948
def mkdir_p(path):
    '''
    mkdir -p
    http://stackoverflow.com/a/600612/127816
    '''
    try:
        os.makedirs(path)
    except __HOLE__ as exc:  # Python >2.5
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
OSError
dataset/ETHPy150Open saltstack/salt/salt/netapi/rest_wsgi.py/mkdir_p
6,949
def get_json(environ):
    '''
    Return the request body as JSON
    '''
    content_type = environ.get('CONTENT_TYPE', '')
    if content_type != 'application/json':
        raise HTTPError(406, 'JSON required')

    try:
        return json.loads(read_body(environ))
    except __HOLE__ as exc:
        raise HTTPError(400, exc)
ValueError
dataset/ETHPy150Open saltstack/salt/salt/netapi/rest_wsgi.py/get_json
6,950
def application(environ, start_response):
    '''
    Process the request and return a JSON response. Catch errors and return
    the appropriate HTTP code.
    '''
    # Instantiate APIClient once for the whole app
    saltenviron(environ)

    # Call the dispatcher
    try:
        resp = list(dispatch(environ))
        code = 200
    except HTTPError as exc:
        code = exc.code
        resp = str(exc)
    except salt.exceptions.EauthAuthenticationError as exc:
        code = 401
        resp = str(exc)
    except Exception as exc:
        code = 500
        resp = str(exc)

    # Convert the response to JSON
    try:
        ret = json.dumps({'return': resp})
    except __HOLE__ as exc:
        code = 500
        ret = str(exc)

    # Return the response
    start_response(H[code], get_headers(ret, {
        'Content-Type': 'application/json',
    }))
    return (ret,)
TypeError
dataset/ETHPy150Open saltstack/salt/salt/netapi/rest_wsgi.py/application
6,951
def start():
    '''
    Start simple_server()
    '''
    from wsgiref.simple_server import make_server

    # When started outside of salt-api __opts__ will not be injected
    if '__opts__' not in globals():
        globals()['__opts__'] = get_opts()

    if __virtual__() is False:
        raise SystemExit(1)

    mod_opts = __opts__.get(__virtualname__, {})

    # pylint: disable=C0103
    httpd = make_server('localhost', mod_opts['port'], application)

    try:
        httpd.serve_forever()
    except __HOLE__:
        raise SystemExit(0)
KeyboardInterrupt
dataset/ETHPy150Open saltstack/salt/salt/netapi/rest_wsgi.py/start
6,952
def parse_packetized_elementary_stream(data):
    try:
        pes = bitstring.ConstBitStream(data[data.index(b'\x00\x00\x01'):])
    except __HOLE__:
        return

    while pes.pos < len(pes):
        packet = {}
        if pes.read(24) != '0x000001':
            break
        packet.update({
            'stream_id': pes.read(8),
            'packet_length': pes.read(16).uint,
        })
        if pes.peek(2) == '0b10':
            pes.read(2)
            packet.update({
                'scrambling_control': pes.read(2),
                'priority': pes.read(1).bool,
                'data_alignment_indicator': pes.read(1).bool,
                'copyright': pes.read(1).bool,
                'original_or_copy': pes.read(1).bool,
                'pts_dts_indicator': pes.read(2),
                'escr_flag': pes.read(1).bool,
                'es_rate_flag': pes.read(1).bool,
                'dsm_trick_mode_flag': pes.read(1).bool,
                'additional_copy_info_flag': pes.read(1).bool,
                'crc_flag': pes.read(1).bool,
                'extension_flag': pes.read(1).bool,
                'pes_header_length': pes.read(8).uint,
            })
            pes.read(8 * packet['pes_header_length'])
        remaining_length = packet['packet_length']
        if 'scrambling_control' in packet:
            remaining_length -= 3 + packet['pes_header_length']
        packet.update({
            'payload': pes.read(8 * remaining_length).bytes
        })
        yield packet
ValueError
dataset/ETHPy150Open schlarpc/SeriousCast/mpegutils.py/parse_packetized_elementary_stream
6,953
def linkage_tree(X, connectivity=None, n_components=None,
                 n_clusters=None, linkage='complete', affinity="euclidean",
                 return_distance=False):
    """Linkage agglomerative clustering based on a Feature matrix.

    The inertia matrix uses a Heapq-based representation.

    This is the structured version, that takes into account some topological
    structure between samples.

    Read more in the :ref:`User Guide <hierarchical_clustering>`.

    Parameters
    ----------
    X : array, shape (n_samples, n_features)
        feature matrix representing n_samples samples to be clustered

    connectivity : sparse matrix (optional).
        connectivity matrix. Defines for each sample the neighboring samples
        following a given structure of the data. The matrix is assumed to
        be symmetric and only the upper triangular half is used.
        Default is None, i.e, the Ward algorithm is unstructured.

    n_clusters : int (optional)
        Stop early the construction of the tree at n_clusters. This is
        useful to decrease computation time if the number of clusters is
        not small compared to the number of samples. In this case, the
        complete tree is not computed, thus the 'children' output is of
        limited use, and the 'parents' output should rather be used.
        This option is valid only when specifying a connectivity matrix.

    linkage : {"average", "complete"}, optional, default: "complete"
        Which linkage criteria to use. The linkage criterion determines which
        distance to use between sets of observation.
            - average uses the average of the distances of each observation of
              the two sets
            - complete or maximum linkage uses the maximum distances between
              all observations of the two sets.

    affinity : string or callable, optional, default: "euclidean".
        which metric to use. Can be "euclidean", "manhattan", or any
        distance know to paired distance (see metric.pairwise)

    return_distance : bool, default False
        whether or not to return the distances between the clusters.

    Returns
    -------
    children : 2D array, shape (n_nodes-1, 2)
        The children of each non-leaf node. Values less than `n_samples`
        correspond to leaves of the tree which are the original samples.
        A node `i` greater than or equal to `n_samples` is a non-leaf
        node and has children `children_[i - n_samples]`. Alternatively
        at the i-th iteration, children[i][0] and children[i][1] are
        merged to form node `n_samples + i`

    n_components : int
        The number of connected components in the graph.

    n_leaves : int
        The number of leaves in the tree.

    parents : 1D array, shape (n_nodes, ) or None
        The parent of each node. Only returned when a connectivity matrix
        is specified, elsewhere 'None' is returned.

    distances : ndarray, shape (n_nodes-1,)
        Returned when return_distance is set to True.

        distances[i] refers to the distance between children[i][0] and
        children[i][1] when they are merged.

    See also
    --------
    ward_tree : hierarchical clustering with ward linkage
    """
    X = np.asarray(X)
    if X.ndim == 1:
        X = np.reshape(X, (-1, 1))
    n_samples, n_features = X.shape

    linkage_choices = {'complete': _hierarchical.max_merge,
                       'average': _hierarchical.average_merge}
    try:
        join_func = linkage_choices[linkage]
    except __HOLE__:
        raise ValueError(
            'Unknown linkage option, linkage should be one '
            'of %s, but %s was given' % (linkage_choices.keys(), linkage))

    if connectivity is None:
        from scipy.cluster import hierarchy  # imports PIL

        if n_clusters is not None:
            warnings.warn('Partial build of the tree is implemented '
                          'only for structured clustering (i.e. with '
                          'explicit connectivity). The algorithm '
                          'will build the full tree and only '
                          'retain the lower branches required '
                          'for the specified number of clusters',
                          stacklevel=2)

        if affinity == 'precomputed':
            # for the linkage function of hierarchy to work on precomputed
            # data, provide as first argument an ndarray of the shape returned
            # by pdist: it is a flat array containing the upper triangular of
            # the distance matrix.
            i, j = np.triu_indices(X.shape[0], k=1)
            X = X[i, j]
        elif affinity == 'l2':
            # Translate to something understood by scipy
            affinity = 'euclidean'
        elif affinity in ('l1', 'manhattan'):
            affinity = 'cityblock'
        elif callable(affinity):
            X = affinity(X)
            i, j = np.triu_indices(X.shape[0], k=1)
            X = X[i, j]
        out = hierarchy.linkage(X, method=linkage, metric=affinity)
        children_ = out[:, :2].astype(np.int)

        if return_distance:
            distances = out[:, 2]
            return children_, 1, n_samples, None, distances
        return children_, 1, n_samples, None

    connectivity, n_components = _fix_connectivity(X, connectivity)

    connectivity = connectivity.tocoo()
    # Put the diagonal to zero
    diag_mask = (connectivity.row != connectivity.col)
    connectivity.row = connectivity.row[diag_mask]
    connectivity.col = connectivity.col[diag_mask]
    connectivity.data = connectivity.data[diag_mask]
    del diag_mask

    if affinity == 'precomputed':
        distances = X[connectivity.row, connectivity.col]
    else:
        # FIXME We compute all the distances, while we could have only computed
        # the "interesting" distances
        distances = paired_distances(X[connectivity.row],
                                     X[connectivity.col],
                                     metric=affinity)
    connectivity.data = distances

    if n_clusters is None:
        n_nodes = 2 * n_samples - 1
    else:
        assert n_clusters <= n_samples
        n_nodes = 2 * n_samples - n_clusters

    if return_distance:
        distances = np.empty(n_nodes - n_samples)
    # create inertia heap and connection matrix
    A = np.empty(n_nodes, dtype=object)
    inertia = list()

    # LIL seems to the best format to access the rows quickly,
    # without the numpy overhead of slicing CSR indices and data.
    connectivity = connectivity.tolil()
    # We are storing the graph in a list of IntFloatDict
    for ind, (data, row) in enumerate(zip(connectivity.data,
                                          connectivity.rows)):
        A[ind] = IntFloatDict(np.asarray(row, dtype=np.intp),
                              np.asarray(data, dtype=np.float64))
        # We keep only the upper triangular for the heap
        # Generator expressions are faster than arrays on the following
        inertia.extend(_hierarchical.WeightedEdge(d, ind, r)
                       for r, d in zip(row, data) if r < ind)
    del connectivity

    heapify(inertia)

    # prepare the main fields
    parent = np.arange(n_nodes, dtype=np.intp)
    used_node = np.ones(n_nodes, dtype=np.intp)
    children = []

    # recursive merge loop
    for k in xrange(n_samples, n_nodes):
        # identify the merge
        while True:
            edge = heappop(inertia)
            if used_node[edge.a] and used_node[edge.b]:
                break
        i = edge.a
        j = edge.b

        if return_distance:
            # store distances
            distances[k - n_samples] = edge.weight

        parent[i] = parent[j] = k
        children.append((i, j))
        # Keep track of the number of elements per cluster
        n_i = used_node[i]
        n_j = used_node[j]
        used_node[k] = n_i + n_j
        used_node[i] = used_node[j] = False

        # update the structure matrix A and the inertia matrix
        # a clever 'min', or 'max' operation between A[i] and A[j]
        coord_col = join_func(A[i], A[j], used_node, n_i, n_j)
        for l, d in coord_col:
            A[l].append(k, d)
            # Here we use the information from coord_col (containing the
            # distances) to update the heap
            heappush(inertia, _hierarchical.WeightedEdge(d, k, l))
        A[k] = coord_col
        # Clear A[i] and A[j] to save memory
        A[i] = A[j] = 0

    # Separate leaves in children (empty lists up to now)
    n_leaves = n_samples

    # # return numpy array for efficient caching
    children = np.array(children)[:, ::-1]

    if return_distance:
        return children, n_components, n_leaves, parent, distances
    return children, n_components, n_leaves, parent


# Matching names to tree-building strategies
KeyError
dataset/ETHPy150Open scikit-learn/scikit-learn/sklearn/cluster/hierarchical.py/linkage_tree
6,954
def run(flags, output=sys.stdout):
    try:
        if flags.structs:
            # which protocol to use
            if flags.protocol == 'binary':
                protocol = TBinaryProtocol
            elif flags.protocol == 'compact':
                protocol = TCompactProtocol
            elif flags.protocol == 'json':
                protocol = TJSONProtocol
            else:
                output.write('Unknown protocol: %s' % flags.protocol)
                output.write('Valid options for --protocol are: %s' % VALID_PROTOCOLS)
                sys.exit(1)
            thrift_file = ThriftStructFile(
                protocol,
                file_name=flags.file,
                read_values=not flags.skip_values,
                padding=flags.padding,
                debug=flags.debug
            )
        else:
            thrift_file = ThriftMessageFile(
                file_name=flags.file,
                finagle_thrift=flags.finagle_thrift,
                read_values=not flags.skip_values,
                padding=flags.padding,
                debug=flags.debug
            )
    except ThriftFile.Error as ex:
        output.write(ex.message)
        sys.exit(1)

    pp = pprint.PrettyPrinter(indent=4)
    holes = []
    total_msg_read = 0
    try:
        for msg, hole in thrift_file:
            output.write(pp.pformat(msg.as_dict) if flags.pretty else msg)
            output.write('\n')
            if hole:
                holes.append(hole)
            total_msg_read += 1
            if 0 < flags.max_messages <= total_msg_read:
                break
    except __HOLE__:
        pass

    what = 'structs' if flags.structs else 'msgs'
    if holes:
        output.write('Read %s: %d\nHoles: %d\n' % (what, total_msg_read, len(holes)))
        if flags.show_holes:
            for idx, hole in enumerate(holes, start=1):
                output.write('#%d: start=%d, size=%d' % (idx, hole[0], hole[1]))
    else:
        output.write('Read %s: %d\nNo bytes skipped' % (what, total_msg_read))
KeyboardInterrupt
dataset/ETHPy150Open pinterest/thrift-tools/thrift_tools/file_reader.py/run
6,955
def save(self, **kwargs):
    from reviewboard.reviews.models.review_request import ReviewRequest

    self.timestamp = timezone.now()

    super(BaseComment, self).save()

    try:
        # Update the review timestamp, but only if it's a draft.
        # Otherwise, resolving an issue will change the timestamp of
        # the review.
        review = self.get_review()

        if not review.public:
            review.timestamp = self.timestamp
            review.save()
        else:
            if (not self.is_reply() and self.issue_opened and
                self._loaded_issue_status != self.issue_status):
                # The user has toggled the issue status of this comment,
                # so update the issue counts for the review request.
                old_field = ReviewRequest.ISSUE_COUNTER_FIELDS[
                    self._loaded_issue_status]
                new_field = ReviewRequest.ISSUE_COUNTER_FIELDS[
                    self.issue_status]

                CounterField.increment_many(
                    self.get_review_request(),
                    {
                        old_field: -1,
                        new_field: 1,
                    })

            q = ReviewRequest.objects.filter(pk=review.review_request_id)
            q.update(last_review_activity_timestamp=self.timestamp)
    except __HOLE__:
        pass
ObjectDoesNotExist
dataset/ETHPy150Open reviewboard/reviewboard/reviewboard/reviews/models/base_comment.py/BaseComment.save
6,956
def __init__(self, distro, root_dir, instances, opts, group, prior_groups):
    super(YumDependencyHandler, self).__init__(distro, root_dir,
                                               instances, opts, group,
                                               prior_groups)
    # Various paths we will use while operating
    self.rpmbuild_dir = sh.joinpths(self.deps_dir, "rpmbuild")
    self.prebuild_dir = sh.joinpths(self.deps_dir, "prebuild")
    self.deps_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps")
    self.deps_src_repo_dir = sh.joinpths(self.deps_dir, "openstack-deps-sources")
    self.rpm_sources_dir = sh.joinpths(self.rpmbuild_dir, "SOURCES")
    self.anvil_repo_dir = sh.joinpths(self.root_dir, "repo")
    self.generated_srpms_filename = sh.joinpths(self.deps_dir, "generated-srpms-%s" % group)
    self.build_requires_filename = sh.joinpths(self.deps_dir, "build-requires-%s" % group)
    self.yum_satisfies_filename = sh.joinpths(self.deps_dir, "yum-satisfiable-%s" % group)
    self.rpm_build_requires_filename = sh.joinpths(self.deps_dir, "rpm-build-requires-%s" % group)
    # Executables we require to operate
    self.rpmbuild_executable = sh.which("rpmbuild")
    self.specprint_executable = sh.which('specprint', ["tools/"])
    # We inspect yum for packages, this helper allows us to do this.
    self.helper = yum_helper.Helper(self.log_dir, self.REPOS)
    self.envra_helper = envra_helper.Helper()
    # See if we are requested to run at a higher make parallelism level
    try:
        self.jobs = max(self.JOBS, int(self.opts.get('jobs')))
    except (__HOLE__, ValueError):
        self.jobs = self.JOBS
TypeError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler.__init__
6,957
def _fetch_epoch_mapping(self):
    epoch_map = self.distro.get_dependency_config("epoch_map", quiet=True)
    if not epoch_map:
        epoch_map = {}
    epoch_skips = self.distro.get_dependency_config("epoch_skips", quiet=True)
    if not epoch_skips:
        epoch_skips = _DEFAULT_SKIP_EPOCHS
    if not isinstance(epoch_skips, (list, tuple)):
        epoch_skips = [i.strip() for i in epoch_skips.split(",")]
    built_epochs = {}
    for name in self.python_names:
        if name in epoch_map:
            built_epochs[name] = str(epoch_map.pop(name))
        else:
            built_epochs[name] = str(self.OPENSTACK_EPOCH)
    # Ensure epochs set by a yum searching (that are not in the list of
    # epochs to provide) are correctly set when building dependent
    # packages...
    keep_names = set()
    try:
        yum_satisfies = sh.load_file(self.yum_satisfies_filename)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    else:
        for line in yum_satisfies.splitlines():
            raw_req_rpm = utils.parse_json(line)
            req = pip_helper.extract_requirement(raw_req_rpm['requirement'])
            if req.key in epoch_map:
                LOG.debug("Ensuring manually set epoch is retained for"
                          " requirement '%s' with epoch %s", req,
                          epoch_map[req.key])
                keep_names.add(req.key)
            else:
                rpm_info = raw_req_rpm['rpm']
                rpm_epoch = rpm_info.get('epoch')
                if rpm_epoch and str(rpm_epoch) not in epoch_skips:
                    LOG.debug("Adding in yum satisfiable package %s for"
                              " requirement '%s' with epoch %s from repo %s",
                              rpm_info['name'], req, rpm_epoch, rpm_info['repo'])
                    keep_names.add(req.key)
                    epoch_map[req.key] = str(rpm_epoch)
    # Exclude names from the epoch map that we never downloaded in the
    # first place or that we did not just set automatically (since these
    # are not useful and should not be set in the first place).
    try:
        _pip_reqs, downloaded_reqs = pip_helper.read_requirement_files([self.build_requires_filename])
    except __HOLE__ as e:
        if e.errno != errno.ENOENT:
            raise
    else:
        downloaded_names = set([req.key for req in downloaded_reqs])
        tmp_epoch_map = {}
        for (name, epoch) in six.iteritems(epoch_map):
            name = name.lower()
            if name in downloaded_names or name in keep_names:
                tmp_epoch_map[name] = str(epoch)
            else:
                LOG.debug("Discarding %s:%s from the epoch mapping since"
                          " it was not part of the downloaded (or automatically"
                          " included) build requirements", name, epoch)
        epoch_map = tmp_epoch_map
    epoch_map.update(built_epochs)
    return epoch_map
IOError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._fetch_epoch_mapping
6,958
def build_binary(self):

    def is_src_rpm(path):
        if not path:
            return False
        if not sh.isfile(path):
            return False
        if not path.lower().endswith('.src.rpm'):
            return False
        return True

    def list_src_rpms(path):
        path_files = []
        restricted = set()
        if sh.isdir(path):
            path_files = sh.listdir(path, filter_func=is_src_rpm)
            try:
                # Leave other groups files alone...
                restricted = set(_get_lines(self.generated_srpms_filename))
            except __HOLE__ as e:
                if e.errno != errno.ENOENT:
                    raise
        filtered = []
        for path in path_files:
            if path in restricted:
                filtered.append(path)
        path_files = filtered
        return sorted(path_files)

    def move_rpms(repo_name):
        repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
        search_dirs = [
            sh.joinpths(self.rpmbuild_dir, "RPMS"),
        ]
        for sub_dir in sh.listdir(self.rpmbuild_dir, dirs_only=True):
            search_dirs.append(sh.joinpths(sub_dir, "RPMS"))
        moved = []
        for source_dir in search_dirs:
            moved.extend(self._move_rpm_files(source_dir, repo_dir))
        return moved

    def build(repo_dir, repo_name, header_tpl, group, built_files):
        repo_files = []
        for srpm in list_src_rpms(repo_dir):
            if srpm not in built_files:
                repo_files.append(srpm)
        if not repo_files:
            return []
        utils.log_iterable(repo_files,
                           header=header_tpl % (len(repo_files),
                                                self.SRC_REPOS[repo_name], self.jobs),
                           logger=LOG)
        rpmbuild_flags = "--rebuild"
        if self.opts.get("usr_only", False):
            rpmbuild_flags += " --define 'usr_only 1'"
        if self.opts.get("overwrite_configs", False):
            rpmbuild_flags += " --define 'overwrite_configs 1'"
        with sh.remove_before(self.rpmbuild_dir):
            self._create_rpmbuild_subdirs()
            # This is needed so that make correctly identifies the right
            # files and the right *.mark files and so-on; instead of
            # grabbing all the files (including ones we don't want to
            # build just yet...)
            files_dirname = '%s-%s-build' % (repo_name, group)
            files_dir = sh.joinpths(self.deps_dir, files_dirname)
            sh.mkdirslist(files_dir)
            for srpm in repo_files:
                sh.copy(srpm, sh.joinpths(files_dir, sh.basename(srpm)))
            try:
                self.py2rpm_helper.build_all_binaries(repo_name,
                                                      files_dir,
                                                      rpmbuild_flags,
                                                      self.tracewriter,
                                                      self.jobs)
            finally:
                # If we made any rpms (even if a failure happened, make
                # sure that we move them to the right target repo).
                moved_rpms = move_rpms(repo_name)
                if len(moved_rpms) > 0:
                    self._create_repo(repo_name)
        return repo_files

    def pre_build():
        build_requirements = self.requirements.get("build-requires")
        if build_requirements:
            utils.log_iterable(build_requirements,
                               header="Installing build requirements",
                               logger=LOG)
            self.helper.transaction(install_pkgs=build_requirements,
                                    tracewriter=self.tracewriter)
        build_requirements = []
        try:
            build_requirements.extend(_get_lines(self.rpm_build_requires_filename))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
        built_files = []
        built_requirements = []
        for repo_name in self.REPOS:
            repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
            matched_paths = []
            available_paths = list_src_rpms(repo_dir)
            envra_path_details = self.envra_helper.explode(*available_paths)
            for (path, envra_detail) in zip(available_paths, envra_path_details):
                package_name = envra_detail.get('name')
                if package_name in build_requirements:
                    matched_paths.append(path)
                    built_requirements.append(package_name)
            if matched_paths:
                with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                    sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                    for path in matched_paths:
                        sh.copy(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                    built_files.extend(
                        build(prebuild_dir, repo_name,
                              'Prebuilding %s RPM packages from their'
                              ' SRPMs for repo %s using %s jobs',
                              "%s-prebuild" % self.group, built_files))
        leftover_requirements = set()
        for req in build_requirements:
            if req not in built_requirements:
                leftover_requirements.add(req)
        return (leftover_requirements, built_files)

    leftover_requirements, built_files = pre_build()
    if leftover_requirements:
        utils.log_iterable(sorted(leftover_requirements),
                           header="%s unsatisfied build requirements (these"
                                  " will need to be satisfied by existing"
                                  " repositories)" % len(leftover_requirements),
                           logger=LOG)
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        built_files.extend(
            build(repo_dir, repo_name,
                  'Building %s RPM packages from their SRPMs for repo %s'
                  ' using %s jobs', self.group, built_files))
IOError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler.build_binary
6,959
def _write_spec_file(self, instance, rpm_name, template_name, params):
    requires_what = params.get('requires', [])
    conflicts_what = params.get('conflicts', [])
    test_requires_what = params.get('test_requires', [])
    test_conflicts_what = params.get('test_conflicts', [])
    egg_info = getattr(instance, 'egg_info', None)
    if egg_info:

        def ei_names(key):
            try:
                requires_python = [str(req) for req in egg_info[key]]
            except __HOLE__:
                return [], []
            else:
                return self.py2rpm_helper.names_to_rpm_deps(requires_python)

        rpm_requires, rpm_conflicts = ei_names('dependencies')
        requires_what.extend(rpm_requires)
        conflicts_what.extend(rpm_conflicts)
        rpm_test_requires, rpm_test_conflicts = ei_names('test_dependencies')
        test_requires_what.extend(rpm_test_requires)
        test_conflicts_what.extend(rpm_test_conflicts)
    params["requires"] = requires_what
    params["conflicts"] = conflicts_what
    params["test_requires"] = test_requires_what
    params["test_conflicts"] = test_conflicts_what
    params["epoch"] = self.OPENSTACK_EPOCH
    params["part_fn"] = lambda filename: sh.joinpths(
        settings.TEMPLATE_DIR, self.SPEC_TEMPLATE_DIR, filename)
    parsed_version = pkg_resources.parse_version(params["version"])
    params.update(self._make_spec_functors(parsed_version))
    content = utils.load_template(self.SPEC_TEMPLATE_DIR, template_name)[1]
    spec_filename = sh.joinpths(self.rpmbuild_dir, "SPECS",
                                "%s.spec" % rpm_name)
    sh.write_file(spec_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    return spec_filename
KeyError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._write_spec_file
6,960
def _build_from_spec(self, instance, spec_filename, patches=None):
    pkg_dir = instance.get_option('app_dir')
    if sh.isfile(sh.joinpths(pkg_dir, "setup.py")):
        self._write_python_tarball(instance, pkg_dir, ENSURE_NOT_MISSING)
    else:
        self._write_git_tarball(instance, pkg_dir, spec_filename)
    self._copy_sources(instance)
    if patches:
        self._copy_patches(patches)
    cmdline = [self.specprint_executable]
    cmdline.extend(['-f', spec_filename])
    spec_details = json.loads(sh.execute(cmdline)[0])
    rpm_requires = []
    for k in ('requires', 'requirenevrs'):
        try:
            rpm_requires.extend(spec_details['headers'][k])
        except (__HOLE__, TypeError):
            pass
    if rpm_requires:
        buff = six.StringIO()
        buff.write("# %s\n" % instance.name)
        if rpm_requires:
            for req in rpm_requires:
                buff.write("%s\n" % req)
        buff.write("\n")
        sh.append_file(self.rpm_build_requires_filename, buff.getvalue())
    self._copy_startup_scripts(instance, spec_details)
    self._copy_systemd_scripts(instance, spec_details)
    cmdline = [
        self.rpmbuild_executable,
        "-bs",
        "--define", "_topdir %s" % self.rpmbuild_dir,
        spec_filename,
    ]
    out_filename = sh.joinpths(self.log_dir,
                               "rpmbuild-%s.log" % instance.name)
    sh.execute_save_output(cmdline, out_filename)
KeyError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._build_from_spec
6,961
def _find_template_and_rpm_name(self, instance, build_name):
    search_names = [(build_name, "%s.spec" % build_name)]
    try:
        egg_name = instance.egg_info['name']
    except __HOLE__:
        pass
    else:
        if any(s.endswith("client")
               for s in (instance.name, egg_name, build_name)):
            search_names.append([egg_name, "python-commonclient.spec"])
        search_names.extend([
            ("openstack-%s" % (egg_name), "openstack-%s.spec" % (egg_name)),
            (egg_name, "%s.spec" % (egg_name)),
        ])
    # Return the first that exists (if any from this list)
    for (rpm_name, template_name) in search_names:
        spec_filename = sh.joinpths(settings.TEMPLATE_DIR,
                                    self.SPEC_TEMPLATE_DIR,
                                    template_name)
        if sh.isfile(spec_filename):
            return (rpm_name, template_name)
    return (None, None)
AttributeError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._find_template_and_rpm_name
6,962
def _build_openstack_package(self, instance):
    params = self._package_parameters(instance)
    patches = instance.list_patches("package")
    params['patches'] = [sh.basename(fn) for fn in patches]
    build_name = instance.get_option('build_name', default_value=instance.name)
    (rpm_name, template_name) = self._find_template_and_rpm_name(instance,
                                                                 build_name)
    try:
        egg_name = instance.egg_info['name']
        params["version"] = instance.egg_info["version"]
    except __HOLE__:
        pass
    else:
        if any(s.endswith("client")
               for s in (instance.name, egg_name, build_name)):
            client_name = utils.strip_prefix_suffix(egg_name,
                                                    "python-", "client")
            if not client_name:
                msg = "Bad client package name %s" % (egg_name)
                raise excp.PackageException(msg)
            params["clientname"] = client_name
            params["apiname"] = instance.get_option(
                'api_name', default_value=client_name.title())
    if all((rpm_name, template_name)):
        spec_filename = self._write_spec_file(instance, rpm_name,
                                              template_name, params)
        self._build_from_spec(instance, spec_filename, patches)
    else:
        self.py2rpm_helper.build_srpm(source=instance.get_option("app_dir"),
                                      log_filename=instance.name,
                                      release=params.get("release"),
                                      with_tests=not params.get("no_tests"))
AttributeError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._build_openstack_package
6,963
def _get_rpm_names(self, from_deps=True, from_instances=True):
    desired_rpms = []
    py_reqs = set()
    if from_instances:
        inst_packages = list(self.requirements["requires"])
        for inst in self.instances:
            inst_packages.extend(inst.package_names())
            if sh.isdir(inst.get_option("app_dir")):
                try:
                    py_req = inst.egg_info['req']
                except __HOLE__:
                    pass
                else:
                    rpm_name, _ = self._find_template_and_rpm_name(
                        inst, inst.get_option('build_name',
                                              default_value=inst.name)
                    )
                    if rpm_name is not None:
                        desired_rpms.append((rpm_name, py_req))
                    else:
                        py_reqs.add(py_req)
        for rpm_name in inst_packages:
            desired_rpms.append((rpm_name, None))
    if from_deps:
        # This file should have all the requirements (including test ones)
        # that we need to install (and which should have been built as rpms
        # in the previous build stages).
        requires = sh.load_file(self.gathered_requires_filename).splitlines()
        for line in [line.strip() for line in requires if line.strip()]:
            py_reqs.add(pip_helper.extract_requirement(line))
    rpm_names = self.py2rpm_helper.names_to_rpm_names(
        [req.key for req in py_reqs])
    desired_rpms.extend((rpm_names[req.key], req) for req in py_reqs)

    def _format_name(rpm_name, py_req):
        full_name = str(rpm_name).strip()
        if py_req is not None:
            full_name += ','.join(''.join(x) for x in py_req.specs)
        return full_name

    return sorted(_format_name(rpm_name, py_req)
                  for rpm_name, py_req in desired_rpms)
AttributeError
dataset/ETHPy150Open openstack/anvil/anvil/packaging/yum.py/YumDependencyHandler._get_rpm_names
6,964
def test_enable_mode(net_connect, commands, expected_responses):
    '''
    Test entering enable mode

    Catch exception for devices that don't support enable
    '''
    try:
        net_connect.enable()
        enable_prompt = net_connect.find_prompt()
        assert enable_prompt == expected_responses['enable_prompt']
    except __HOLE__:
        assert True == True
AttributeError
dataset/ETHPy150Open ktbyers/netmiko/tests/test_netmiko_show.py/test_enable_mode
6,965
def unescape(self):
    r"""Unescape markup again into a unicode string.  This also resolves
    known HTML4 and XHTML entities:

    >>> Markup("Main &raquo; <em>About</em>").unescape()
    u'Main \xbb <em>About</em>'
    """
    from jinja2._markupsafe._constants import HTML_ENTITIES

    def handle_match(m):
        name = m.group(1)
        if name in HTML_ENTITIES:
            return unichr(HTML_ENTITIES[name])
        try:
            if name[:2] in ('#x', '#X'):
                return unichr(int(name[2:], 16))
            elif name.startswith('#'):
                return unichr(int(name[1:]))
        except __HOLE__:
            pass
        return u''

    return _entity_re.sub(handle_match, unicode(self))
ValueError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/jinja2-2.6/jinja2/_markupsafe/__init__.py/Markup.unescape
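The handler above relies on int() raising ValueError for malformed numeric character references. A Python 3 adaptation of just that branch (chr stands in for the original's unichr, and the regex is a simplified stand-in for _entity_re):

import re

_ref_re = re.compile(r'&(#[^;]+);')

def _resolve(match):
    name = match.group(1)
    try:
        if name[:2] in ('#x', '#X'):
            return chr(int(name[2:], 16))  # hexadecimal reference
        return chr(int(name[1:]))          # decimal reference
    except ValueError:
        return ''                          # malformed digits: drop the entity

print(_ref_re.sub(_resolve, 'a &#x2192; b &#junk; c'))  # 'a → b  c'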
6,966
def document_image(self):
    """ Processes an XTAF image including STFS files
        and embedded GPD and Account files
    """
    if self.filename == None:
        return
    self.output("Opening %s" % self.filename, self.errfd)
    x = partition.Partition(self.filename)
    self.print_xtaf(x)
    # Find STFS files
    self.output("Processing all files", self.errfd)
    for filename in x.allfiles:
        try:
            if xboxmagic.find_type(data=x.read_file(filename, size=0x10)) == "STFS":
                self.output("Processing STFS file %s" % filename, self.errfd)
                s = stfs.STFS(filename, fd=x.open_fd(filename))
                self.print_stfs(s)
                # Check to see if this is a gamertag STFS
                for stfsfile in s.allfiles:
                    try:
                        if stfsfile.endswith("Account"):
                            magic = xboxmagic.find_type(
                                data=s.read_file(s.allfiles[stfsfile], size=404))
                        elif stfsfile.upper().endswith(("PNG", "GPD")):
                            magic = xboxmagic.find_type(
                                data=s.read_file(s.allfiles[stfsfile], size=0x10))
                        else:
                            magic = 'Unknown'
                        # Process GPD files
                        if magic == 'XDBF':
                            self.output("Processing GPD File %s" % stfsfile, self.errfd)
                            # Maybe STFS needs an equivalent to Partition.open_fd(filename)
                            g = xdbf.XDBF(stfsfile,
                                          fd=StringIO(s.read_file(s.allfiles[stfsfile])))
                            self.print_xdbf(g)
                            if self.image_directory != None:
                                # Extract all the images
                                for gpdimage in g.images:
                                    with open("%s/%s-%x-%s" %
                                              (self.image_directory,
                                               os.path.basename(filename), gpdimage,
                                               stfsfile[1:].replace('/', '-')),
                                              'w') as fd:
                                        fd.write(g.images[gpdimage])
                        # Decrypt and print Account blob
                        if magic == 'Account':
                            self.output("Processing Account Blob", self.errfd)
                            a = account.Account(s.read_file(s.allfiles[stfsfile]))
                            self.print_account(a)
                        # Extract all the images
                        if magic == 'PNG' and self.image_directory != None:
                            self.output("Processing Image File %s" % stfsfile, self.errfd)
                            with open("%s/%s-%s.png" %
                                      (self.image_directory,
                                       os.path.basename(filename),
                                       stfsfile[1:].replace('/', '-')),
                                      'w') as fd:
                                fd.write(s.read_file(s.allfiles[stfsfile]))
                    except (IOError, OverflowError, __HOLE__) as e:
                        # GPD / Account error
                        self.output("GPD/Account Error: %s %s %s" %
                                    (stfsfile, type(e), e), self.errfd)
                        continue
        except (IOError, OverflowError, AssertionError) as e:
            # STFS Error
            self.output("STFS Error: %s %s %s" % (filename, type(e), e), self.errfd)
            continue
AssertionError
dataset/ETHPy150Open arkem/py360/report360.py/Report360.document_image
6,967
def jseval(script):
    """Run code in the JS interpreter and return output."""
    try:
        interpreter = Popen(['js'], stdin=PIPE, stdout=PIPE)
    except __HOLE__:
        return script
    result, errors = interpreter.communicate(script)
    if interpreter.poll() or errors:
        return script
    return result
OSError
dataset/ETHPy150Open JT5D/Alfred-Popclip-Sublime/Sublime Text 2/JsFormat/libs/jsbeautifier/unpackers/evalbased.py/jseval
6,968
@internationalizeDocstring
def errno(self, irc, msg, args, s):
    """<error number or code>

    Returns the number of an errno code, or the errno code of a number.
    """
    try:
        i = int(s)
        name = errno.errorcode[i]
    except __HOLE__:
        name = s.upper()
        try:
            i = getattr(errno, name)
        except AttributeError:
            irc.reply(_('I can\'t find the errno number for that code.'))
            return
    except KeyError:
        name = _('(unknown)')
    irc.reply(format(_('%s (#%i): %s'), name, i, os.strerror(i)))
ValueError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.errno
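A condensed, IRC-free version of the lookup above: int() failing with ValueError is exactly what flips the command from number-to-name mode into name-to-number mode (errno numbers are platform dependent; the ones shown are typical POSIX values):

import errno
import os

def describe(s):
    try:
        num = int(s)                                   # e.g. '13'
        name = errno.errorcode.get(num, '(unknown)')
    except ValueError:                                 # e.g. 'enoent'
        name = s.upper()
        num = getattr(errno, name)                     # AttributeError if bogus
    return '%s (#%d): %s' % (name, num, os.strerror(num))

print(describe('ENOENT'))   # ENOENT (#2): No such file or directory
print(describe('13'))       # EACCES (#13): Permission denied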
6,969
@internationalizeDocstring
def spell(self, irc, msg, args, word):
    """<word>

    Returns the result of passing <word> to aspell/ispell.  The results
    shown are sorted from best to worst in terms of being a likely match
    for the spelling of <word>.
    """
    # We are only checking the first word
    spellCmd = self.registryValue('spell.command')
    if not spellCmd:
        irc.error(_('The spell checking command is not configured. If one '
                    'is installed, reconfigure '
                    'supybot.plugins.Unix.spell.command appropriately.'),
                  Raise=True)
    spellLang = self.registryValue('spell.language') or 'en'
    if word and not word[0].isalpha():
        irc.error(_('<word> must begin with an alphabet character.'))
        return
    try:
        inst = subprocess.Popen([spellCmd, '-l', spellLang, '-a'],
                                close_fds=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE)
    except __HOLE__ as e:
        irc.error(e, Raise=True)
    ret = inst.poll()
    if ret is not None:
        s = inst.stderr.readline().decode('utf8')
        if not s:
            s = inst.stdout.readline().decode('utf8')
        s = s.rstrip('\r\n')
        s = s.lstrip('Error: ')
        irc.error(s, Raise=True)
    (out, err) = inst.communicate(word.encode())
    inst.wait()
    lines = [x.decode('utf8') for x in out.splitlines() if x]
    lines.pop(0)  # Banner
    if not lines:
        irc.error(_('No results found.'), Raise=True)
    line = lines.pop(0)
    line2 = ''
    if lines:
        line2 = lines.pop(0)
    # parse the output
    # aspell will sometimes list spelling suggestions after a '*' or '+'
    # line for complex words.
    if line[0] in '*+' and line2:
        line = line2
    if line[0] in '*+':
        resp = format(_('%q may be spelled correctly.'), word)
    elif line[0] == '#':
        resp = format(_('I could not find an alternate spelling for %q'), word)
    elif line[0] == '&':
        matches = line.split(':')[1].strip()
        resp = format(_('Possible spellings for %q: %L.'),
                      word, matches.split(', '))
    else:
        resp = _('Something unexpected was seen in the [ai]spell output.')
    irc.reply(resp)
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.spell
6,970
@internationalizeDocstring
def fortune(self, irc, msg, args):
    """takes no arguments

    Returns a fortune from the *nix fortune program.
    """
    channel = msg.args[0]
    fortuneCmd = self.registryValue('fortune.command')
    if fortuneCmd:
        args = [fortuneCmd]
        if self.registryValue('fortune.short', channel):
            args.append('-s')
        if self.registryValue('fortune.equal', channel):
            args.append('-e')
        if self.registryValue('fortune.offensive', channel):
            args.append('-a')
        args.extend(self.registryValue('fortune.files', channel))
        try:
            with open(os.devnull) as null:
                inst = subprocess.Popen(args,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        stdin=null)
        except __HOLE__ as e:
            irc.error(_('It seems the configured fortune command was '
                        'not available.'), Raise=True)
        (out, err) = inst.communicate()
        inst.wait()
        if minisix.PY3:
            lines = [i.decode('utf-8').rstrip() for i in out.splitlines()]
            lines = list(map(str, lines))
        else:
            lines = out.splitlines()
            lines = list(map(str.rstrip, lines))
        lines = filter(None, lines)
        irc.replies(lines, joiner=' ')
    else:
        irc.error(_('The fortune command is not configured. If fortune is '
                    'installed on this system, reconfigure the '
                    'supybot.plugins.Unix.fortune.command configuration '
                    'variable appropriately.'))
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.fortune
6,971
@internationalizeDocstring
def wtf(self, irc, msg, args, foo, something):
    """[is] <something>

    Returns wtf <something> is.  'wtf' is a *nix command that first
    appeared in NetBSD 1.5.  In most *nices, it's available in some
    sort of 'bsdgames' package.
    """
    wtfCmd = self.registryValue('wtf.command')
    if wtfCmd:
        something = something.rstrip('?')
        try:
            with open(os.devnull, 'r+') as null:
                inst = subprocess.Popen([wtfCmd, something],
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.STDOUT,
                                        stdin=null)
        except __HOLE__:
            irc.error(_('It seems the configured wtf command was not '
                        'available.'), Raise=True)
        (out, foo) = inst.communicate()
        inst.wait()
        if out:
            response = out.decode('utf8').splitlines()[0].strip()
            response = utils.str.normalizeWhitespace(response)
            irc.reply(response)
    else:
        irc.error(_('The wtf command is not configured. If it is installed '
                    'on this system, reconfigure the '
                    'supybot.plugins.Unix.wtf.command configuration '
                    'variable appropriately.'))
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.wtf
6,972
def _make_ping(command):
    def f(self, irc, msg, args, optlist, host):
        """[--c <count>] [--i <interval>] [--t <ttl>] [--W <timeout>] [--4|--6] <host or ip>

        Sends an ICMP echo request to the specified host.
        The arguments correspond with those listed in ping(8). --c is
        limited to 10 packets or less (default is 5). --i is limited to 5
        or less. --W is limited to 10 or less. --4 and --6 can be used
        if and only if the system has a unified ping command.
        """
        pingCmd = self.registryValue(registry.join([command, 'command']))
        if not pingCmd:
            irc.error('The ping command is not configured. If one '
                      'is installed, reconfigure '
                      'supybot.plugins.Unix.%s.command appropriately.'
                      % command, Raise=True)
        else:
            try:
                host = host.group(0)
            except __HOLE__:
                pass
            args = [pingCmd]
            for opt, val in optlist:
                if opt == 'c' and val > 10:
                    val = 10
                if opt == 'i' and val > 5:
                    val = 5
                if opt == 'W' and val > 10:
                    val = 10
                args.append('-%s' % opt)
                if opt not in ('4', '6'):
                    args.append(str(val))
            if '-c' not in args:
                args.append('-c')
                args.append('5')
            args.append(host)
            try:
                with open(os.devnull) as null:
                    inst = subprocess.Popen(args,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE,
                                            stdin=null)
            except OSError as e:
                irc.error('It seems the configured ping command was '
                          'not available (%s).' % e, Raise=True)
            result = inst.communicate()
            if result[1]:  # stderr
                irc.error(' '.join(result[1].decode('utf8').split()))
            else:
                response = result[0].decode('utf8').split("\n")
                if response[1]:
                    irc.reply(' '.join(response[1].split()[3:5]).split(':')[0] +
                              ': ' + ' '.join(response[-3:]))
                else:
                    irc.reply(' '.join(response[0].split()[1:3]) + ': ' +
                              ' '.join(response[-3:]))
    f.__name__ = command
    _hostExpr = re.compile(r'^[a-z0-9][a-z0-9\.-]*[a-z0-9]$', re.I)
    return thread(wrap(f, [getopts({'c': 'positiveInt', 'i': 'float',
                                    't': 'positiveInt', 'W': 'positiveInt',
                                    '4': '', '6': ''}),
                           first('ip', ('matches', _hostExpr,
                                        'Invalid hostname'))]))
AttributeError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix._make_ping
6,973
def sysuptime(self, irc, msg, args):
    """takes no arguments

    Returns the uptime from the system the bot is running on.
    """
    uptimeCmd = self.registryValue('sysuptime.command')
    if uptimeCmd:
        args = [uptimeCmd]
        try:
            with open(os.devnull) as null:
                inst = subprocess.Popen(args,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        stdin=null)
        except __HOLE__ as e:
            irc.error('It seems the configured uptime command was '
                      'not available.', Raise=True)
        (out, err) = inst.communicate()
        inst.wait()
        lines = out.splitlines()
        lines = [x.decode('utf8').rstrip() for x in lines]
        lines = filter(None, lines)
        irc.replies(lines, joiner=' ')
    else:
        irc.error('The uptime command is not configured. If uptime is '
                  'installed on this system, reconfigure the '
                  'supybot.plugins.Unix.sysuptime.command configuration '
                  'variable appropriately.')
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.sysuptime
6,974
def sysuname(self, irc, msg, args):
    """takes no arguments

    Returns the uname -a from the system the bot is running on.
    """
    unameCmd = self.registryValue('sysuname.command')
    if unameCmd:
        args = [unameCmd, '-a']
        try:
            with open(os.devnull) as null:
                inst = subprocess.Popen(args,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE,
                                        stdin=null)
        except __HOLE__ as e:
            irc.error('It seems the configured uptime command was '
                      'not available.', Raise=True)
        (out, err) = inst.communicate()
        inst.wait()
        lines = out.splitlines()
        lines = [x.decode('utf8').rstrip() for x in lines]
        lines = filter(None, lines)
        irc.replies(lines, joiner=' ')
    else:
        irc.error('The uname command is not configured. If uname is '
                  'installed on this system, reconfigure the '
                  'supybot.plugins.Unix.sysuname.command configuration '
                  'variable appropriately.')
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.sysuname
6,975
def call(self, irc, msg, args, text):
    """<command to call with any arguments>

    Calls any command available on the system, and returns its output.
    Requires owner capability.
    Note that being restricted to owner, this command does not do any
    sanity checking on input/output. So it is up to you to make sure
    you don't run anything that will spamify your channel or that
    will bring your machine to its knees.
    """
    self.log.info('Unix: running command "%s" for %s/%s', text,
                  msg.nick, irc.network)
    args = shlex.split(text)
    try:
        with open(os.devnull) as null:
            inst = subprocess.Popen(args,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    stdin=null)
    except __HOLE__ as e:
        irc.error('It seems the requested command was '
                  'not available (%s).' % e, Raise=True)
    result = inst.communicate()
    if result[1]:  # stderr
        irc.error(' '.join(result[1].decode('utf8').split()))
    if result[0]:  # stdout
        response = result[0].decode('utf8').splitlines()
        response = [l for l in response if l]
        irc.replies(response)
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.call
6,976
def shell(self, irc, msg, args, text):
    """<command to call with any arguments>

    Calls any command available on the system using the shell specified
    by the SHELL environment variable, and returns its output.
    Requires owner capability.
    Note that being restricted to owner, this command does not do any
    sanity checking on input/output. So it is up to you to make sure
    you don't run anything that will spamify your channel or that
    will bring your machine to its knees.
    """
    self.log.info('Unix: running command "%s" for %s/%s', text,
                  msg.nick, irc.network)
    try:
        with open(os.devnull) as null:
            inst = subprocess.Popen(text,
                                    shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    stdin=null)
    except __HOLE__ as e:
        irc.error('It seems the shell (%s) was not available (%s)' %
                  (os.getenv('SHELL'), e), Raise=True)
    result = inst.communicate()
    if result[1]:  # stderr
        irc.error(' '.join(result[1].decode('utf8').split()))
    if result[0]:  # stdout
        response = result[0].decode('utf8').splitlines()
        response = [l for l in response if l]
        irc.replies(response)
OSError
dataset/ETHPy150Open ProgVal/Limnoria/plugins/Unix/plugin.py/Unix.shell
6,977
@unittest.skipUnless(hasattr(os, 'tmpfile'), 'test needs os.tmpfile()')
def test_tmpfile(self):
    # As with test_tmpnam() below, the Windows implementation of tmpfile()
    # attempts to create a file in the root directory of the current drive.
    # On Vista and Server 2008, this test will always fail for normal users
    # as writing to the root directory requires elevated privileges.  With
    # XP and below, the semantics of tmpfile() are the same, but the user
    # running the test is more likely to have administrative privileges on
    # their account already.  If that's the case, then os.tmpfile() should
    # work.  In order to make this test as useful as possible, rather than
    # trying to detect Windows versions or whether or not the user has the
    # right permissions, just try and create a file in the root directory
    # and see if it raises a 'Permission denied' OSError.  If it does, then
    # test that a subsequent call to os.tmpfile() raises the same error. If
    # it doesn't, assume we're on XP or below and the user running the test
    # has administrative privileges, and proceed with the test as normal.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", "tmpfile", DeprecationWarning)

        if sys.platform == 'win32':
            name = '\\python_test_os_test_tmpfile.txt'
            if os.path.exists(name):
                os.remove(name)
            try:
                fp = open(name, 'w')
            except IOError, first:
                # open() failed, assert tmpfile() fails in the same way.
                # Although open() raises an IOError and os.tmpfile() raises an
                # OSError(), 'args' will be (13, 'Permission denied') in both
                # cases.
                try:
                    fp = os.tmpfile()
                except __HOLE__, second:
                    self.assertEqual(first.args, second.args)
                else:
                    self.fail("expected os.tmpfile() to raise OSError")
                return
            else:
                # open() worked, therefore, tmpfile() should work.  Close our
                # dummy file and proceed with the test as normal.
                fp.close()
                os.remove(name)

        fp = os.tmpfile()
        fp.write("foobar")
        fp.seek(0, 0)
        s = fp.read()
        fp.close()
        self.assertTrue(s == "foobar")
OSError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_os.py/TemporaryFileTests.test_tmpfile
6,978
@unittest.skipUnless(hasattr(os, 'stat'), 'test needs os.stat()')
def test_stat_attributes(self):
    import stat
    result = os.stat(self.fname)

    # Make sure direct access works
    self.assertEqual(result[stat.ST_SIZE], 3)
    self.assertEqual(result.st_size, 3)

    # Make sure all the attributes are there
    members = dir(result)
    for name in dir(stat):
        if name[:3] == 'ST_':
            attr = name.lower()
            if name.endswith("TIME"):
                def trunc(x):
                    return int(x)
            else:
                def trunc(x):
                    return x
            self.assertEqual(trunc(getattr(result, attr)),
                             result[getattr(stat, name)])
            self.assertIn(attr, members)

    try:
        result[200]
        self.fail("No exception raised")
    except IndexError:
        pass

    # Make sure that assignment fails
    try:
        result.st_mode = 1
        self.fail("No exception raised")
    except (AttributeError, TypeError):
        pass

    try:
        result.st_rdev = 1
        self.fail("No exception raised")
    except (AttributeError, __HOLE__):
        pass

    try:
        result.parrot = 1
        self.fail("No exception raised")
    except AttributeError:
        pass

    # Use the stat_result constructor with a too-short tuple.
    try:
        result2 = os.stat_result((10,))
        self.fail("No exception raised")
    except TypeError:
        pass

    # Use the constructor with a too-long tuple.
    try:
        result2 = os.stat_result((0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
    except TypeError:
        pass
TypeError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_os.py/StatAttributeTests.test_stat_attributes
6,979
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
    try:
        result = os.statvfs(self.fname)
    except OSError, e:
        # On AtheOS, glibc always returns ENOSYS
        if e.errno == errno.ENOSYS:
            self.skipTest('glibc always returns ENOSYS on AtheOS')

    # Make sure direct access works
    self.assertEqual(result.f_bfree, result[3])

    # Make sure all the attributes are there.
    members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
               'ffree', 'favail', 'flag', 'namemax')
    for value, member in enumerate(members):
        self.assertEqual(getattr(result, 'f_' + member), result[value])

    # Make sure that assignment really fails
    try:
        result.f_bfree = 1
        self.fail("No exception raised")
    except TypeError:
        pass

    try:
        result.parrot = 1
        self.fail("No exception raised")
    except AttributeError:
        pass

    # Use the constructor with a too-short tuple.
    try:
        result2 = os.statvfs_result((10,))
        self.fail("No exception raised")
    except TypeError:
        pass

    # Use the constructor with a too-long tuple.
    try:
        result2 = os.statvfs_result((0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
    except __HOLE__:
        pass
TypeError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_os.py/StatAttributeTests.test_statvfs_attributes
6,980
def check(self, f, *args):
    try:
        f(test_support.make_bad_fd(), *args)
    except __HOLE__ as e:
        self.assertEqual(e.errno, errno.EBADF)
    else:
        self.fail("%r didn't raise a OSError with a bad file descriptor"
                  % f)
OSError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_os.py/TestInvalidFD.check
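The helper above asserts that fd-taking os functions reject an invalid descriptor with errno.EBADF. A self-contained sketch that manufactures its own bad descriptor instead of relying on test_support.make_bad_fd (single-threaded use only, since a closed fd number could otherwise be reused):

import errno
import os

def check_bad_fd(func, *args):
    fd = os.open(os.devnull, os.O_RDONLY)
    os.close(fd)  # fd is now invalid
    try:
        func(fd, *args)
    except OSError as e:
        assert e.errno == errno.EBADF
    else:
        raise AssertionError('%r accepted a bad file descriptor' % func)

check_bad_fd(os.fstat)
check_bad_fd(os.read, 1)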
6,981
@unittest.skipUnless(hasattr(os, 'closerange'), 'test needs os.closerange()')
def test_closerange(self):
    fd = test_support.make_bad_fd()
    # Make sure none of the descriptors we are about to close are
    # currently valid (issue 6542).
    for i in range(10):
        try:
            os.fstat(fd + i)
        except __HOLE__:
            pass
        else:
            break
    if i < 2:
        raise unittest.SkipTest(
            "Unable to acquire a range of invalid file descriptors")
    self.assertEqual(os.closerange(fd, fd + i - 1), None)
OSError
dataset/ETHPy150Open francelabs/datafari/windows/python/Lib/test/test_os.py/TestInvalidFD.test_closerange
6,982
def process_view(self, request, callback, callback_args, callback_kwargs):
    if getattr(request, 'csrf_processing_done', False):
        return None

    # If the user doesn't have a CSRF cookie, generate one and store it in
    # the request, so it's available to the view.  We'll store it in a
    # cookie when we reach the response.
    try:
        # In case of cookies from untrusted sources, we strip anything
        # dangerous at this point, so that the cookie + token will have the
        # same, sanitized value.
        request.META["CSRF_COOKIE"] = _sanitize_token(
            request.COOKIES[settings.CSRF_COOKIE_NAME])
        cookie_is_new = False
    except KeyError:
        # No cookie, so create one.  This will be sent with the next
        # response.
        request.META["CSRF_COOKIE"] = _get_new_csrf_key()
        # Set a flag to allow us to fall back and allow the session id in
        # place of a CSRF cookie for this request only.
        cookie_is_new = True

    # Wait until request.META["CSRF_COOKIE"] has been manipulated before
    # bailing out, so that get_token still works
    if getattr(callback, 'csrf_exempt', False):
        return None

    if request.method == 'POST':
        if getattr(request, '_dont_enforce_csrf_checks', False):
            # Mechanism to turn off CSRF checks for test suite.  It comes
            # after the creation of CSRF cookies, so that everything else
            # continues to work exactly the same (e.g. cookies are sent
            # etc), but before any branches that call reject()
            return self._accept(request)

        if request.is_secure():
            # Suppose user visits http://example.com/
            # An active network attacker (man-in-the-middle, MITM) sends a
            # POST form which targets https://example.com/detonate-bomb/
            # and submits it via javascript.
            #
            # The attacker will need to provide a CSRF cookie and token,
            # but that is no problem for a MITM and the session independent
            # nonce we are using.  So the MITM can circumvent the CSRF
            # protection.  This is true for any HTTP connection, but anyone
            # using HTTPS expects better!  For this reason, for
            # https://example.com/ we need additional protection that
            # treats http://example.com/ as completely untrusted.  Under
            # HTTPS, Barth et al. found that the Referer header is missing
            # for same-domain requests in only about 0.2% of cases or less,
            # so we can use strict Referer checking.
            referer = request.META.get('HTTP_REFERER')
            if referer is None:
                logger.warning('Forbidden (%s): %s' % (REASON_NO_REFERER, request.path),
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_NO_REFERER)

            # Note that request.get_host() includes the port
            good_referer = 'https://%s/' % request.get_host()
            if not same_origin(referer, good_referer):
                reason = REASON_BAD_REFERER % (referer, good_referer)
                logger.warning('Forbidden (%s): %s' % (reason, request.path),
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, reason)

        # If the user didn't already have a CSRF cookie, then fall back to
        # the Django 1.1 method (hash of session ID), so a request is not
        # rejected if the form was sent to the user before upgrading to the
        # Django 1.2 method (session independent nonce)
        if cookie_is_new:
            try:
                session_id = request.COOKIES[settings.SESSION_COOKIE_NAME]
                csrf_token = _make_legacy_session_token(session_id)
            except __HOLE__:
                # No CSRF cookie and no session cookie. For POST requests,
                # we insist on a CSRF cookie, and in this way we can avoid
                # all CSRF attacks, including login CSRF.
                logger.warning('Forbidden (%s): %s' % (REASON_NO_COOKIE, request.path),
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_NO_COOKIE)
        else:
            csrf_token = request.META["CSRF_COOKIE"]

        # check incoming token
        request_csrf_token = request.POST.get('csrfmiddlewaretoken', '')
        if request_csrf_token == "":
            # Fall back to X-CSRFToken, to make things easier for AJAX
            request_csrf_token = request.META.get('HTTP_X_CSRFTOKEN', '')

        if not constant_time_compare(request_csrf_token, csrf_token):
            if cookie_is_new:
                # probably a problem setting the CSRF cookie
                logger.warning('Forbidden (%s): %s' % (REASON_NO_CSRF_COOKIE, request.path),
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_NO_CSRF_COOKIE)
            else:
                logger.warning('Forbidden (%s): %s' % (REASON_BAD_TOKEN, request.path),
                    extra={
                        'status_code': 403,
                        'request': request,
                    }
                )
                return self._reject(request, REASON_BAD_TOKEN)

    return self._accept(request)
KeyError
dataset/ETHPy150Open AppScale/appscale/AppServer/lib/django-1.3/django/middleware/csrf.py/CsrfViewMiddleware.process_view
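The masked KeyError above is the double cookie miss: no CSRF cookie and no session cookie to derive a legacy token from. A stripped-down sketch of that fallback chain (the cookie names and the token derivation here are illustrative stand-ins, not Django's real scheme, which also mixes in SECRET_KEY):

import hashlib

def _legacy_token(session_id):
    # Hypothetical stand-in for the Django 1.1 hash-of-session-id token.
    return hashlib.md5(('csrf' + session_id).encode()).hexdigest()

def resolve_csrf_token(cookies):
    try:
        return cookies['csrftoken']                     # 1.2+ style cookie
    except KeyError:
        try:
            return _legacy_token(cookies['sessionid'])  # 1.1 fallback
        except KeyError:
            return None                                 # reject the request

assert resolve_csrf_token({}) is None
assert resolve_csrf_token({'sessionid': 'abc'}) is not None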
6,983
def success(self, x):
    """Is a candidate solution at the global minimum"""
    val = self.fun(x)
    if np.isnan(val):
        return True
    try:
        assert_almost_equal(val, 0., 4)
        return True
    except __HOLE__:
        return False
    return False
AssertionError
dataset/ETHPy150Open scipy/scipy/benchmarks/benchmarks/go_benchmark_functions/go_funcs_D.py/Damavandi.success
6,984
def __init__(self, documentFactory=None):
    from xml.dom import XML_NAMESPACE
    self.documentFactory = documentFactory
    self.firstEvent = [None, None]
    self.lastEvent = self.firstEvent
    self.elementStack = []
    self.push = self.elementStack.append
    try:
        self.pop = self.elementStack.pop
    except __HOLE__:
        # use class' pop instead
        pass
    self._ns_contexts = [{XML_NAMESPACE: 'xml'}]  # contains uri -> prefix dicts
    self._current_context = self._ns_contexts[-1]
    self.pending_events = []
AttributeError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/xml/dom/pulldom.py/PullDOM.__init__
6,985
def parseString(string, parser=None):
    try:
        from cStringIO import StringIO
    except __HOLE__:
        from StringIO import StringIO

    bufsize = len(string)
    buf = StringIO(string)
    if not parser:
        parser = xml.sax.make_parser()
    return DOMEventStream(buf, parser, bufsize)
ImportError
dataset/ETHPy150Open azoft-dev-team/imagrium/env/Lib/xml/dom/pulldom.py/parseString
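The cStringIO/StringIO dance above is the classic optional-import pattern: try the fast module first and let ImportError route you to the fallback. A sketch that extends the chain one step so it also runs on Python 3, where both Python 2 modules are gone:

try:
    from cStringIO import StringIO      # fast C implementation (Python 2)
except ImportError:
    try:
        from StringIO import StringIO   # pure-Python fallback (Python 2)
    except ImportError:
        from io import StringIO         # Python 3

buf = StringIO('<doc/>')
print(buf.read())                       # <doc/>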
6,986
def __init__(self, *args, **kwargs):
    super(PaymentForm, self).__init__(*args, **kwargs)
    if self.provider.org_id:
        try:
            fingerprint_id = self.payment.attrs.fingerprint_session_id
        except __HOLE__:
            fingerprint_id = str(uuid4())
        self.fields['fingerprint'] = FingerprintInput(
            label=_('fingerprint'),
            org_id=self.provider.org_id,
            initial=fingerprint_id,
            merchant_id=self.provider.merchant_id,
            fingerprint_url=self.provider.fingerprint_url)
KeyError
dataset/ETHPy150Open mirumee/django-payments/payments/cybersource/forms.py/PaymentForm.__init__
6,987
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("--replay", help="Replay in 'real time'",
                        action="store_true")
    parser.add_argument("--current",
                        help="Use date adjusted from script start time.",
                        action="store_true")
    parser.add_argument("--incidents",
                        help="Whether to generate and publish (fake) " +
                        "traffic incidents. Requires a second PubSub topic " +
                        "to be specified.",
                        action="store_true")
    parser.add_argument("--random_delays",
                        help="Whether to randomly alter the data to " +
                        "sometimes introduce delays between log date and " +
                        "publish timestamp.",
                        action="store_true")
    parser.add_argument("--filename", help="input filename")
    parser.add_argument("--num_lines", type=int, default=0,
                        help="The number of lines to process. " +
                        "0 indicates all.")
    parser.add_argument("--topic", default=TRAFFIC_TOPIC,
                        help="The pubsub 'traffic' topic to publish to. " +
                        "Should already exist.")
    parser.add_argument("--incident_topic", default=INCIDENT_TOPIC,
                        help="The pubsub 'incident' topic to publish to. " +
                        "Only used if the --incidents flag is set. " +
                        "If so, should already exist.")
    args = parser.parse_args()

    pubsub_topic = args.topic
    print "Publishing to pubsub 'traffic' topic: %s" % pubsub_topic
    incidents = args.incidents
    random_delays = args.random_delays
    if incidents:
        incident_topic = args.incident_topic
        print "Publishing to pubsub 'incident' topic: %s" % incident_topic
    filename = args.filename
    print "filename: %s" % filename
    replay = args.replay
    print "replay mode: %s" % replay
    current = args.current
    print "current date mode: %s" % current
    num_lines = args.num_lines
    if num_lines:
        print "processing %s lines" % num_lines

    client = create_pubsub_client()
    dt = parse('01/01/2010 00:00:00')  # earliest date in the traffic files
    now = datetime.datetime.utcnow()
    # used if altering date to replay from start time
    diff = now - dt
    # used if running in 'replay' mode, reflecting pauses in the data
    prev_date = dt
    restart_time = now
    line_count = 0
    incident_count = 0
    print "processing %s" % filename

    # process the traffic data file
    with open(filename) as data_file:
        reader = csv.reader(data_file)
        for line in reader:
            line_count += 1
            if num_lines:  # if terminating after num_lines processed
                if line_count >= num_lines:
                    print "Have processed %s lines" % num_lines
                    break
            if (line_count % LINE_BATCHES) == 0:
                print "%s lines processed" % line_count
            ts = ""
            try:
                timestring = line[0]
                orig_date = parse(timestring)
                if current:  # if using --current flag
                    (line, ts) = process_current_mode(
                        orig_date, diff, line, replay, random_delays)
                else:  # not using --current flag
                    (line, ts) = process_noncurrent_mode(
                        orig_date, line, random_delays)
                if replay and orig_date != prev_date:
                    date_delta = orig_date - prev_date
                    print "date delta: %s" % date_delta.total_seconds()
                    current_time = datetime.datetime.utcnow()
                    timelapse = current_time - restart_time
                    print "timelapse: %s" % timelapse.total_seconds()
                    d2 = date_delta - timelapse
                    sleeptime = d2.total_seconds()
                    print "sleeping %s" % sleeptime
                    time.sleep(sleeptime)
                    restart_time = datetime.datetime.utcnow()
                    print "restart_time is set to: %s" % restart_time
                    prev_date = orig_date
                msg_attributes = {'timestamp': ts}
                publish(client, pubsub_topic, ",".join(line), msg_attributes)
                if incidents:  # if generating traffic 'incidents' as well
                    # randomly determine whether we'll generate an incident
                    # associated with this reading.
                    if random.random() < INCIDENT_THRESH:
                        print "Generating a traffic incident for %s." % line
                        # grab the timestring, station id, freeway, and
                        # direction of travel.  Then generate some 'incident'
                        # data and publish it to the incident topic.  Use the
                        # incident count as a simplistic id.
                        incident_count += 1
                        publish_random_incident(client, incident_topic,
                                                incident_count,
                                                line[0], line[1], line[2],
                                                line[3], msg_attributes)
            except __HOLE__, e:
                sys.stderr.write("---Error: %s for %s\n" % (e, line))
ValueError
dataset/ETHPy150Open GoogleCloudPlatform/cloud-pubsub-samples-python/gce-cmdline-publisher/traffic_pubsub_generator.py/main
6,988
def load_account(path):
    # Show a more descriptive message if the file doesn't exist.
    if not os.path.exists(path):
        logger.error("Couldn't find an account file at {}.".format(path))
        logger.error("Are you in the right directory? Did you register yet?")
        logger.error("Run 'manuale -h' for instructions.")
        raise ManualeError()
    try:
        with open(path, 'rb') as f:
            return deserialize_account(f.read())
    except (ValueError, __HOLE__) as e:
        logger.error("Couldn't read account file. Aborting.")
        raise ManualeError(e)
IOError
dataset/ETHPy150Open veeti/manuale/manuale/cli.py/load_account
6,989
def main():
    parser = argparse.ArgumentParser(
        description=DESCRIPTION,
        formatter_class=Formatter,
    )
    subparsers = parser.add_subparsers()

    # Server switch
    parser.add_argument('--server', '-s', help="The ACME server to use",
                        default=LETS_ENCRYPT_PRODUCTION)
    parser.add_argument('--account', '-a',
                        help="The account file to use or create",
                        default=DEFAULT_ACCOUNT_PATH)

    # Account creation
    register = subparsers.add_parser(
        'register',
        help="Create a new account and register",
        description=DESCRIPTION_REGISTER,
        formatter_class=Formatter,
    )
    register.add_argument('email', type=str, help="Account e-mail address")
    register.set_defaults(func=_register)

    # Domain verification
    authorize = subparsers.add_parser(
        'authorize',
        help="Verify domain ownership",
        description=DESCRIPTION_AUTHORIZE,
        formatter_class=Formatter,
    )
    authorize.add_argument('domain',
                           help="One or more domain names to authorize",
                           nargs='+')
    authorize.set_defaults(func=_authorize)

    # Certificate issuance
    issue = subparsers.add_parser(
        'issue',
        help="Request a new certificate",
        description=DESCRIPTION_ISSUE,
        formatter_class=Formatter,
    )
    issue.add_argument('domain',
                       help="One or more domain names to include in the certificate",
                       nargs='+')
    issue.add_argument('--key-size', '-b',
                       help="The key size to use for the certificate",
                       type=int, default=DEFAULT_CERT_KEY_SIZE)
    issue.add_argument('--key-file', '-k',
                       help="Existing key file to use for the certificate")
    issue.add_argument('--csr-file', help="Existing signing request to use")
    issue.add_argument('--output', '-o',
                       help="The output directory for created objects",
                       default='.')
    issue.set_defaults(func=_issue)

    # Certificate revocation
    revoke = subparsers.add_parser(
        'revoke',
        help="Revoke an issued certificate",
        description=DESCRIPTION_REVOKE,
        formatter_class=Formatter,
    )
    revoke.add_argument('certificate', help="The certificate file to revoke")
    revoke.set_defaults(func=_revoke)

    # Account info
    info = subparsers.add_parser(
        'info',
        help="Shows account information from the service",
        description=DESCRIPTION_INFO,
        formatter_class=Formatter,
    )
    info.set_defaults(func=_info)

    # Version
    version = subparsers.add_parser('version', help="Show the version number")
    version.set_defaults(func=lambda *args: logger.info(
        "manuale {}".format(manuale.__version__)))

    # Parse
    args = parser.parse_args()
    if not hasattr(args, 'func'):
        parser.print_help()
        sys.exit(0)

    # Set up logging
    root = logging.getLogger('manuale')
    root.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter("%(message)s"))
    root.addHandler(handler)

    # Let's encrypt
    try:
        args.func(args)
    except ManualeError as e:
        if str(e):
            logger.error(e)
        sys.exit(1)
    except __HOLE__:
        logger.error("")
        logger.error("Interrupted.")
        sys.exit(2)
    except Exception as e:
        logger.error("Oops! An unhandled error occurred. Please file a bug.")
        logger.exception(e)
        sys.exit(3)
KeyboardInterrupt
dataset/ETHPy150Open veeti/manuale/manuale/cli.py/main
6,990
def make_array(array_type):
    """
    Return the Structure representation of the given *array_type*
    (an instance of types.ArrayCompatible).

    Note this does not call __array_wrap__ in case a new array structure
    is being created (rather than populated).
    """
    real_array_type = array_type.as_array
    base = cgutils.create_struct_proxy(real_array_type)
    ndim = real_array_type.ndim

    class ArrayStruct(base):

        def _make_refs(self, ref):
            sig = signature(real_array_type, array_type)
            try:
                array_impl = self._context.get_function('__array__', sig)
            except __HOLE__:
                return super(ArrayStruct, self)._make_refs(ref)
            # Return a wrapped structure and its unwrapped reference
            datamodel = self._context.data_model_manager[array_type]
            be_type = self._get_be_type(datamodel)
            if ref is None:
                outer_ref = cgutils.alloca_once(self._builder, be_type,
                                                zfill=True)
            else:
                outer_ref = ref
            # NOTE: __array__ is called with a pointer and expects a pointer
            # in return!
            ref = array_impl(self._builder, (outer_ref,))
            return outer_ref, ref

        @property
        def shape(self):
            """
            Override .shape to inform LLVM that its elements are all positive.
            """
            builder = self._builder
            if ndim == 0:
                return base.__getattr__(self, "shape")
            # Unfortunately, we can't use llvm.assume as its presence can
            # seriously pessimize performance,
            # *and* the range metadata currently isn't improving anything
            # here, see https://llvm.org/bugs/show_bug.cgi?id=23848 !
            ptr = self._get_ptr_by_name("shape")
            dims = []
            for i in range(ndim):
                dimptr = cgutils.gep_inbounds(builder, ptr, 0, i)
                load = builder.load(dimptr)
                dims.append(load)
                mark_positive(builder, load)
            return cgutils.pack_array(builder, dims)

    return ArrayStruct
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/arrayobj.py/make_array
6,991
@lower_builtin('setitem', types.Buffer, types.Any, types.Any)
def setitem_array(context, builder, sig, args):
    """
    array[a] = scalar_or_array
    array[a,..,b] = scalar_or_array
    """
    aryty, idxty, valty = sig.args
    ary, idx, val = args

    if isinstance(idxty, types.BaseTuple):
        index_types = idxty.types
        indices = cgutils.unpack_tuple(builder, idx, count=len(idxty))
    else:
        index_types = (idxty,)
        indices = (idx,)

    ary = make_array(aryty)(context, builder, ary)

    # First try basic indexing to see if a single array location is denoted.
    index_types, indices = normalize_indices(context, builder,
                                             index_types, indices)
    try:
        dataptr, shapes, strides = \
            basic_indexing(context, builder, aryty, ary, index_types, indices)
    except __HOLE__:
        use_fancy_indexing = True
    else:
        use_fancy_indexing = bool(shapes)

    if use_fancy_indexing:
        # Index describes a non-trivial view => use generic slice assignment
        # (NOTE: this also handles scalar broadcasting)
        return fancy_setslice(context, builder, sig, args,
                              index_types, indices)

    # Store the source value at the given location
    val = context.cast(builder, val, valty, aryty.dtype)
    store_item(context, builder, aryty, val, dataptr)
NotImplementedError
dataset/ETHPy150Open numba/numba/numba/targets/arrayobj.py/setitem_array
6,992
def load_schema(self):
    """
    Loads the schema definition for this column family from
    Cassandra and updates comparator and validation classes if
    necessary.
    """
    ksdef = self.pool.execute('get_keyspace_description',
                              use_dict_for_col_metadata=True)
    try:
        self._cfdef = ksdef[self.column_family]
    except __HOLE__:
        nfe = NotFoundException()
        nfe.why = 'Column family %s not found.' % self.column_family
        raise nfe

    self.super = self._cfdef.column_type == 'Super'
    self._load_comparator_classes()
    self._load_validation_classes()
    self._load_key_class()
KeyError
dataset/ETHPy150Open pycassa/pycassa/pycassa/columnfamily.py/ColumnFamily.load_schema
6,993
def multiget(self, keys, columns=None, column_start="", column_finish="",
             column_reversed=False, column_count=100, include_timestamp=False,
             super_column=None, read_consistency_level=None, buffer_size=None,
             include_ttl=False):
    """
    Fetch multiple rows from a Cassandra server.

    `keys` should be a list of keys to fetch.

    `buffer_size` is the number of rows from the total list to fetch at a
    time.  If left as ``None``, the ColumnFamily's :attr:`buffer_size` will
    be used.

    All other parameters are the same as :meth:`get()`, except that a list
    of keys may be passed in.

    Results will be returned in the form:
    ``{key: {column_name: column_value}}``.  If an OrderedDict is used, the
    rows will have the same order as `keys`.
    """
    packed_keys = map(self._pack_key, keys)
    cp = self._column_parent(super_column)
    sp = self._slice_predicate(columns, column_start, column_finish,
                               column_reversed, column_count, super_column)
    consistency = read_consistency_level or self.read_consistency_level

    buffer_size = buffer_size or self.buffer_size
    offset = 0
    keymap = {}
    while offset < len(packed_keys):
        new_keymap = self.pool.execute(
            'multiget_slice', packed_keys[offset:offset + buffer_size],
            cp, sp, consistency)
        keymap.update(new_keymap)
        offset += buffer_size

    ret = self.dict_class()

    # Keep the order of keys
    for key in keys:
        ret[key] = None

    empty_keys = []
    for packed_key, columns in keymap.iteritems():
        unpacked_key = self._unpack_key(packed_key)
        if len(columns) > 0:
            ret[unpacked_key] = self._cosc_to_dict(columns, include_timestamp,
                                                   include_ttl)
        else:
            empty_keys.append(unpacked_key)

    for key in empty_keys:
        try:
            del ret[key]
        except __HOLE__:
            pass

    return ret
KeyError
dataset/ETHPy150Open pycassa/pycassa/pycassa/columnfamily.py/ColumnFamily.multiget
6,994
def _strerror(errcode):
    try:
        return _lib.libusb_strerror(errcode).decode('utf8')
    except __HOLE__:
        return _str_error_map[errcode]

# Data structures
AttributeError
dataset/ETHPy150Open walac/pyusb/usb/backend/libusb1.py/_strerror
6,995
def _setup_prototypes(lib):
    # void libusb_set_debug (libusb_context *ctx, int level)
    lib.libusb_set_debug.argtypes = [c_void_p, c_int]

    # int libusb_init (libusb_context **context)
    lib.libusb_init.argtypes = [POINTER(c_void_p)]

    # void libusb_exit (struct libusb_context *ctx)
    lib.libusb_exit.argtypes = [c_void_p]

    # ssize_t libusb_get_device_list (libusb_context *ctx,
    #                                 libusb_device ***list)
    lib.libusb_get_device_list.argtypes = [c_void_p,
                                           POINTER(POINTER(c_void_p))]

    # void libusb_free_device_list (libusb_device **list,
    #                               int unref_devices)
    lib.libusb_free_device_list.argtypes = [POINTER(c_void_p), c_int]

    # libusb_device *libusb_ref_device (libusb_device *dev)
    lib.libusb_ref_device.argtypes = [c_void_p]
    lib.libusb_ref_device.restype = c_void_p

    # void libusb_unref_device(libusb_device *dev)
    lib.libusb_unref_device.argtypes = [c_void_p]

    # int libusb_open(libusb_device *dev, libusb_device_handle **handle)
    lib.libusb_open.argtypes = [c_void_p, POINTER(_libusb_device_handle)]

    # void libusb_close(libusb_device_handle *dev_handle)
    lib.libusb_close.argtypes = [_libusb_device_handle]

    # int libusb_set_configuration(libusb_device_handle *dev,
    #                              int configuration)
    lib.libusb_set_configuration.argtypes = [_libusb_device_handle, c_int]

    # int libusb_get_configuration(libusb_device_handle *dev, int *config)
    lib.libusb_get_configuration.argtypes = [_libusb_device_handle,
                                             POINTER(c_int)]

    # int libusb_claim_interface(libusb_device_handle *dev,
    #                            int interface_number)
    lib.libusb_claim_interface.argtypes = [_libusb_device_handle, c_int]

    # int libusb_release_interface(libusb_device_handle *dev,
    #                              int interface_number)
    lib.libusb_release_interface.argtypes = [_libusb_device_handle, c_int]

    # int libusb_set_interface_alt_setting(libusb_device_handle *dev,
    #                                      int interface_number,
    #                                      int alternate_setting)
    lib.libusb_set_interface_alt_setting.argtypes = [_libusb_device_handle,
                                                     c_int, c_int]

    # int libusb_reset_device (libusb_device_handle *dev)
    lib.libusb_reset_device.argtypes = [_libusb_device_handle]

    # int libusb_kernel_driver_active(libusb_device_handle *dev,
    #                                 int interface)
    lib.libusb_kernel_driver_active.argtypes = [_libusb_device_handle, c_int]

    # int libusb_detach_kernel_driver(libusb_device_handle *dev,
    #                                 int interface)
    lib.libusb_detach_kernel_driver.argtypes = [_libusb_device_handle, c_int]

    # int libusb_attach_kernel_driver(libusb_device_handle *dev,
    #                                 int interface)
    lib.libusb_attach_kernel_driver.argtypes = [_libusb_device_handle, c_int]

    # int libusb_get_device_descriptor(
    #           libusb_device *dev,
    #           struct libusb_device_descriptor *desc
    #       )
    lib.libusb_get_device_descriptor.argtypes = [
        c_void_p, POINTER(_libusb_device_descriptor)]

    # int libusb_get_config_descriptor(
    #           libusb_device *dev,
    #           uint8_t config_index,
    #           struct libusb_config_descriptor **config
    #       )
    lib.libusb_get_config_descriptor.argtypes = [
        c_void_p, c_uint8, POINTER(POINTER(_libusb_config_descriptor))]

    # void libusb_free_config_descriptor(
    #           struct libusb_config_descriptor *config
    #       )
    lib.libusb_free_config_descriptor.argtypes = [
        POINTER(_libusb_config_descriptor)]

    # int libusb_get_string_descriptor_ascii(libusb_device_handle *dev,
    #                                        uint8_t desc_index,
    #                                        unsigned char *data,
    #                                        int length)
    lib.libusb_get_string_descriptor_ascii.argtypes = [
        _libusb_device_handle, c_uint8, POINTER(c_ubyte), c_int]

    # int libusb_control_transfer(libusb_device_handle *dev_handle,
    #                             uint8_t bmRequestType,
    #                             uint8_t bRequest,
    #                             uint16_t wValue,
    #                             uint16_t wIndex,
    #                             unsigned char *data,
    #                             uint16_t wLength,
    #                             unsigned int timeout)
    lib.libusb_control_transfer.argtypes = [
        _libusb_device_handle, c_uint8, c_uint8, c_uint16, c_uint16,
        POINTER(c_ubyte), c_uint16, c_uint]

    # int libusb_bulk_transfer(
    #           struct libusb_device_handle *dev_handle,
    #           unsigned char endpoint,
    #           unsigned char *data,
    #           int length,
    #           int *transferred,
    #           unsigned int timeout
    #       )
    lib.libusb_bulk_transfer.argtypes = [
        _libusb_device_handle, c_ubyte, POINTER(c_ubyte), c_int,
        POINTER(c_int), c_uint]

    # int libusb_interrupt_transfer(
    #           libusb_device_handle *dev_handle,
    #           unsigned char endpoint,
    #           unsigned char *data,
    #           int length,
    #           int *actual_length,
    #           unsigned int timeout
    #       );
    lib.libusb_interrupt_transfer.argtypes = [
        _libusb_device_handle, c_ubyte, POINTER(c_ubyte), c_int,
        POINTER(c_int), c_uint]

    # libusb_transfer* libusb_alloc_transfer(int iso_packets);
    lib.libusb_alloc_transfer.argtypes = [c_int]
    lib.libusb_alloc_transfer.restype = POINTER(_libusb_transfer)

    # void libusb_free_transfer(struct libusb_transfer *transfer)
    lib.libusb_free_transfer.argtypes = [POINTER(_libusb_transfer)]

    # int libusb_submit_transfer(struct libusb_transfer *transfer);
    lib.libusb_submit_transfer.argtypes = [POINTER(_libusb_transfer)]

    if hasattr(lib, 'libusb_strerror'):
        # const char *libusb_strerror(enum libusb_error errcode)
        lib.libusb_strerror.argtypes = [c_uint]
        lib.libusb_strerror.restype = c_char_p

    # int libusb_clear_halt(libusb_device_handle *dev, unsigned char endpoint)
    lib.libusb_clear_halt.argtypes = [_libusb_device_handle, c_ubyte]

    # void libusb_set_iso_packet_lengths(
    #           libusb_transfer* transfer,
    #           unsigned int length
    #       );
    def libusb_set_iso_packet_lengths(transfer_p, length):
        r"""This function is inline in the libusb.h file, so we must
        implement it.

        lib.libusb_set_iso_packet_lengths.argtypes = [
            POINTER(_libusb_transfer), c_int]
        """
        transfer = transfer_p.contents
        for iso_packet_desc in _get_iso_packet_list(transfer):
            iso_packet_desc.length = length
    lib.libusb_set_iso_packet_lengths = libusb_set_iso_packet_lengths

    # int libusb_get_max_iso_packet_size(libusb_device* dev,
    #                                    unsigned char endpoint);
    lib.libusb_get_max_iso_packet_size.argtypes = [c_void_p, c_ubyte]

    # void libusb_fill_iso_transfer(
    #           struct libusb_transfer* transfer,
    #           libusb_device_handle* dev_handle,
    #           unsigned char endpoint,
    #           unsigned char* buffer,
    #           int length,
    #           int num_iso_packets,
    #           libusb_transfer_cb_fn callback,
    #           void * user_data,
    #           unsigned int timeout
    #       );
    def libusb_fill_iso_transfer(_libusb_transfer_p, dev_handle, endpoint,
                                 buffer, length, num_iso_packets, callback,
                                 user_data, timeout):
        r"""This function is inline in the libusb.h file, so we must
        implement it.

        lib.libusb_fill_iso_transfer.argtypes = [
            _libusb_transfer, _libusb_device_handle, c_ubyte,
            POINTER(c_ubyte), c_int, c_int, _libusb_transfer_cb_fn_p,
            c_void_p, c_uint]
        """
        transfer = _libusb_transfer_p.contents
        transfer.dev_handle = dev_handle
        transfer.endpoint = endpoint
        transfer.type = _LIBUSB_TRANSFER_TYPE_ISOCHRONOUS
        transfer.timeout = timeout
        transfer.buffer = cast(buffer, c_void_p)
        transfer.length = length
        transfer.num_iso_packets = num_iso_packets
        transfer.user_data = user_data
        transfer.callback = callback
    lib.libusb_fill_iso_transfer = libusb_fill_iso_transfer

    # uint8_t libusb_get_bus_number(libusb_device *dev)
    lib.libusb_get_bus_number.argtypes = [c_void_p]
    lib.libusb_get_bus_number.restype = c_uint8

    # uint8_t libusb_get_device_address(libusb_device *dev)
    lib.libusb_get_device_address.argtypes = [c_void_p]
    lib.libusb_get_device_address.restype = c_uint8

    try:
        # uint8_t libusb_get_device_speed(libusb_device *dev)
        lib.libusb_get_device_speed.argtypes = [c_void_p]
        lib.libusb_get_device_speed.restype = c_uint8
    except AttributeError:
        pass

    try:
        # uint8_t libusb_get_port_number(libusb_device *dev)
        lib.libusb_get_port_number.argtypes = [c_void_p]
        lib.libusb_get_port_number.restype = c_uint8
    except AttributeError:
        pass

    try:
        # int libusb_get_port_numbers(libusb_device *dev,
        #                             uint8_t* port_numbers,
        #                             int port_numbers_len)
        lib.libusb_get_port_numbers.argtypes = [c_void_p,
                                                POINTER(c_uint8),
                                                c_int]
        lib.libusb_get_port_numbers.restype = c_int
    except __HOLE__:
        pass

    # int libusb_handle_events(libusb_context *ctx);
    lib.libusb_handle_events.argtypes = [c_void_p]

# check a libusb function call
AttributeError
dataset/ETHPy150Open walac/pyusb/usb/backend/libusb1.py/_setup_prototypes
6,996
@methodtrace(_logger)
def get_device_descriptor(self, dev):
    dev_desc = _libusb_device_descriptor()
    _check(self.lib.libusb_get_device_descriptor(dev.devid, byref(dev_desc)))
    dev_desc.bus = self.lib.libusb_get_bus_number(dev.devid)
    dev_desc.address = self.lib.libusb_get_device_address(dev.devid)

    # Only available in newer versions of libusb
    try:
        dev_desc.speed = self.lib.libusb_get_device_speed(dev.devid)
    except AttributeError:
        dev_desc.speed = None

    # Only available in newer versions of libusb
    try:
        dev_desc.port_number = self.lib.libusb_get_port_number(dev.devid)
    except __HOLE__:
        dev_desc.port_number = None

    # Only available in newer versions of libusb
    try:
        buff = (c_uint8 * 7)()  # USB 3.0 maximum depth is 7
        written = dev_desc.port_numbers = self.lib.libusb_get_port_numbers(
            dev.devid, buff, len(buff))
        if written > 0:
            dev_desc.port_numbers = tuple(buff[:written])
        else:
            dev_desc.port_numbers = None
    except AttributeError:
        dev_desc.port_numbers = None

    return dev_desc
AttributeError
dataset/ETHPy150Open walac/pyusb/usb/backend/libusb1.py/_LibUSB.get_device_descriptor
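Illustrative note (not part of the dataset): the descriptor code above repeats one probe-and-degrade step three times. A hypothetical helper (the name _maybe_call and the lib handle are inventions for illustration) reduces it to its essence:

def _maybe_call(lib, name, *args):
    # Return lib.<name>(*args), or None when this libusb build lacks the symbol.
    try:
        func = getattr(lib, name)  # raises AttributeError on older libusb
    except AttributeError:
        return None
    return func(*args)

# e.g. speed = _maybe_call(lib, 'libusb_get_device_speed', devid)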
6,997
def RetrieveURL(method,
                host_port,
                relative_url,
                user_info=None,
                body=None,
                extra_headers=[]):
    """Accesses a URL over HTTP and returns the results.

    Args:
        method: HTTP method to use, e.g., GET, POST
        host_port: Tuple (hostname, port) of the host to contact.
        relative_url: Relative URL to access on the remote host.
        user_info: If not None, send this user_info tuple in an HTTP Cookie
            header along with the request; otherwise, no header is included.
            The user_info tuple should be in the form (email, admin) where:
                email: The user's email address.
                admin: True if the user should be an admin; False otherwise.
            If email is empty, it will be as if the user is not logged in.
        body: Request body to write to the remote server. Should only be used
            with the POST method or any other method that expects a message
            body.
        extra_headers: List of (key, value) tuples for headers to send on the
            request.

    Returns:
        Tuple (status, content, headers) where:
            status: HTTP status code returned by the remote host,
                e.g. 404, 200, 500
            content: Data returned by the remote host.
            headers: Dictionary mapping header names to header values (both
                strings).

        If an exception is raised while accessing the remote host, both status
        and content will be set to None.
    """
    url_host = '%s:%d' % host_port
    logging.info('Connecting to %s', url_host)
    try:
        connection = httplib.HTTPConnection(url_host)
        logging.info('Sending request "%s %s"', method, relative_url)
        try:
            connection.putrequest(method, relative_url)

            if user_info is not None:
                email, admin = user_info
                auth_string = '%s=%s' % (
                    dev_appserver_login.COOKIE_NAME,
                    dev_appserver_login.CreateCookieData(email, admin))
                logging.info('Putting auth header: %s', auth_string)
                connection.putheader('Cookie', auth_string)

            if body is not None:
                connection.putheader('Content-length', len(body))

            for key, value in extra_headers:
                logging.info('Putting header: %s = %s', str(key), str(value))
                connection.putheader(str(key), str(value))
            connection.endheaders()

            if body is not None:
                connection.send(body)

            response = connection.getresponse()
            status = response.status
            content = response.read()
            headers = dict(response.getheaders())
            logging.info('Received response %s with content:\n%s',
                         status, content)
            return status, content, headers
        finally:
            connection.close()
    except (__HOLE__, httplib.HTTPException, socket.error), e:
        logging.error('Encountered exception accessing HTTP server: %s', e)
        raise e
IOError
dataset/ETHPy150Open CollabQ/CollabQ/appengine_django/tests/integration_test.py/RetrieveURL
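Illustrative note (not part of the dataset): the shape of the error handling above, reduced to a minimal hypothetical helper in the same Python 2 register. Transport failures surface as IOError/socket.error and protocol violations as httplib.HTTPException, so all three are caught together:

import logging
import socket
import httplib  # Python 2, matching the record above

def fetch(host, port, path='/'):
    connection = httplib.HTTPConnection('%s:%d' % (host, port))
    try:
        try:
            connection.request('GET', path)
            response = connection.getresponse()
            return response.status, response.read()
        finally:
            connection.close()
    except (IOError, httplib.HTTPException, socket.error), e:
        logging.error('HTTP request failed: %s', e)
        raise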
6,998
def swig_import_helper():
    from os.path import dirname
    import imp
    fp = None
    try:
        fp, pathname, description = imp.find_module('_pyrecast',
                                                    [dirname(__file__)])
    except __HOLE__:
        import _pyrecast
        return _pyrecast
    if fp is not None:
        try:
            _mod = imp.load_module('_pyrecast', fp, pathname, description)
        finally:
            fp.close()
        return _mod
ImportError
dataset/ETHPy150Open sccn/SNAP/src/framework/navigation/pyrecast.py/swig_import_helper
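Illustrative note (not part of the dataset): the SWIG loader above generalized into a hypothetical helper (load_ext is an invented name). imp.find_module raises ImportError when the compiled extension is not beside the wrapper, and a plain import of whatever sys.path provides is the fallback:

import imp
from os.path import dirname

def load_ext(name, anchor_file):
    # Prefer the compiled extension that sits beside anchor_file.
    try:
        fp, pathname, description = imp.find_module(name,
                                                    [dirname(anchor_file)])
    except ImportError:
        return __import__(name)  # fall back to the regular import machinery
    try:
        return imp.load_module(name, fp, pathname, description)
    finally:
        if fp:
            fp.close()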
6,999
def SetField(self, tag, value, count=None):
    """ Set TIFF field value with tag.

    tag can be numeric constant TIFFTAG_<tagname> or a string
    containing <tagname>.
    """
    if isinstance(tag, str):
        tag = eval('TIFFTAG_' + tag.upper())
    t = tifftags.get(tag)
    if t is None:
        print 'Warning: no tag %r defined' % (tag,)
        return
    data_type, convert = t
    if data_type == ctypes.c_float:
        data_type = ctypes.c_double

    if tag == TIFFTAG_COLORMAP:
        # ColorMap passes 3 values, each a c_uint16 pointer
        try:
            r_arr, g_arr, b_arr = value
        except (TypeError, __HOLE__):
            print "Error: TIFFTAG_COLORMAP expects 3 uint16* arrays as a list/tuple of lists"
            r_arr, g_arr, b_arr = None, None, None
        if r_arr is None:
            return

        bps = self.GetField("BitsPerSample")
        if bps is None:
            print "Warning: BitsPerSample is required to set ColorMap, assuming 8 bps..."
            bps = 8
        num_cmap_elems = 1 << bps
        data_type = data_type * num_cmap_elems
        r_ptr = data_type(*r_arr)
        g_ptr = data_type(*g_arr)
        b_ptr = data_type(*b_arr)
        libtiff.TIFFSetField.argtypes = libtiff.TIFFSetField.argtypes[:2] + \
            [ctypes.POINTER(data_type)] * 3
        r = libtiff.TIFFSetField(self, tag, r_ptr, g_ptr, b_ptr)
    else:
        if issubclass(data_type, (ctypes.Array, tuple, list)):
            data = data_type(*value)
        elif issubclass(data_type, ctypes._Pointer):  # does not include c_char_p
            # Convert to the base type; ctypes will take care of actually
            # sending it by reference.
            base_type = data_type._type_
            if isinstance(value, collections.Iterable):
                data = base_type(*value)
            else:
                data = base_type(value)
        else:
            data = data_type(value)

        # TODO: for most of the tags, count is len(value), so it shouldn't
        # be needed
        if count is None:
            libtiff.TIFFSetField.argtypes = libtiff.TIFFSetField.argtypes[:2] + \
                [data_type]
            r = libtiff.TIFFSetField(self, tag, data)
        else:
            libtiff.TIFFSetField.argtypes = libtiff.TIFFSetField.argtypes[:2] + \
                [ctypes.c_uint, data_type]
            r = libtiff.TIFFSetField(self, tag, count, data)
    return r
ValueError
dataset/ETHPy150Open pearu/pylibtiff/libtiff/libtiff_ctypes.py/TIFF.SetField
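Illustrative note (not part of the dataset): the hole above guards a tuple unpack, which raises TypeError for non-iterables and ValueError on a length mismatch. A minimal, hypothetical sketch (split_rgb is an invented name):

def split_rgb(value):
    # Expect exactly three channel arrays; reject anything else gracefully.
    try:
        r, g, b = value
    except (TypeError, ValueError):
        return None  # not iterable, or not exactly three items
    return r, g, b

# split_rgb([1, 2])           -> None (ValueError: wrong length)
# split_rgb(42)               -> None (TypeError: not iterable)
# split_rgb(('r', 'g', 'b'))  -> ('r', 'g', 'b')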