Columns:
    Unnamed: 0   int64 row index, values 0 to 10k
    function     string, lengths 79 to 138k characters
    label        string, 20 classes
    info         string, lengths 42 to 261 characters

Rows follow in column order: the row index, the masked function text, its label, and its info path.
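Each row pairs a Python function in which one raised exception type has been replaced by the placeholder __HOLE__, a label naming that exception class, and an info path pointing back to the source file in the ETHPy150Open corpus. As a minimal sketch of reading a row back (the CSV file name and the pandas-based loading are assumptions for illustration, not part of this dump):

    import pandas as pd

    # Hypothetical export of the table shown below; adjust the path to your copy.
    df = pd.read_csv("exception_masking.csv")

    row = df.iloc[0]                                              # e.g. the SPI._get_max_speed row
    restored = row["function"].replace("__HOLE__", row["label"])  # fill the mask back in

    print(row["label"])      # OSError
    print(row["info"])       # dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_max_speed
    print(restored[:80])     # start of the function text with the exception type restored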
4,100
def _get_max_speed(self): # Get max speed buf = array.array('I', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MAX_SPEED_HZ, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI max speed: " + e.strerror) return buf[0]
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_max_speed
4,101
def _set_max_speed(self, max_speed): if not isinstance(max_speed, int) and not isinstance(max_speed, float): raise TypeError("Invalid max_speed type, should be integer or float.") # Set max speed buf = array.array('I', [int(max_speed)]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MAX_SPEED_HZ, buf, False) except __HOLE__ as e: raise SPIError(e.errno, "Setting SPI max speed: " + e.strerror)
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._set_max_speed
4,102
def _get_bit_order(self): # Get mode buf = array.array('B', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI mode: " + e.strerror) if (buf[0] & SPI._SPI_LSB_FIRST) > 0: return "lsb" return "msb"
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_bit_order
4,103
def _set_bit_order(self, bit_order): if not isinstance(bit_order, str): raise TypeError("Invalid bit_order type, should be string.") elif bit_order.lower() not in ["msb", "lsb"]: raise ValueError("Invalid bit_order, can be \"msb\" or \"lsb\".") # Read-modify-write mode, because the mode contains bits for other settings # Get mode buf = array.array('B', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI mode: " + e.strerror) bit_order = bit_order.lower() buf[0] = (buf[0] & ~SPI._SPI_LSB_FIRST) | (SPI._SPI_LSB_FIRST if bit_order == "lsb" else 0) # Set mode try: fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MODE, buf, False) except OSError as e: raise SPIError(e.errno, "Setting SPI mode: " + e.strerror)
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._set_bit_order
4,104
def _get_bits_per_word(self): # Get bits per word buf = array.array('B', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_BITS_PER_WORD, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI bits per word: " + e.strerror) return buf[0]
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_bits_per_word
4,105
def _set_bits_per_word(self, bits_per_word): if not isinstance(bits_per_word, int): raise TypeError("Invalid bits_per_word type, should be integer.") if bits_per_word < 0 or bits_per_word > 255: raise ValueError("Invalid bits_per_word, must be 0-255.") # Set bits per word buf = array.array('B', [bits_per_word]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_BITS_PER_WORD, buf, False) except __HOLE__ as e: raise SPIError(e.errno, "Setting SPI bits per word: " + e.strerror)
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._set_bits_per_word
4,106
def _get_extra_flags(self): # Get mode buf = array.array('B', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI mode: " + e.strerror) return buf[0] & ~(SPI._SPI_LSB_FIRST | SPI._SPI_CPHA | SPI._SPI_CPOL)
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._get_extra_flags
4,107
def _set_extra_flags(self, extra_flags): if not isinstance(extra_flags, int): raise TypeError("Invalid extra_flags type, should be integer.") if extra_flags < 0 or extra_flags > 255: raise ValueError("Invalid extra_flags, must be 0-255.") # Read-modify-write mode, because the mode contains bits for other settings # Get mode buf = array.array('B', [0]) try: fcntl.ioctl(self._fd, SPI._SPI_IOC_RD_MODE, buf, True) except __HOLE__ as e: raise SPIError(e.errno, "Getting SPI mode: " + e.strerror) buf[0] = (buf[0] & (SPI._SPI_LSB_FIRST | SPI._SPI_CPHA | SPI._SPI_CPOL)) | extra_flags # Set mode try: fcntl.ioctl(self._fd, SPI._SPI_IOC_WR_MODE, buf, False) except OSError as e: raise SPIError(e.errno, "Setting SPI mode: " + e.strerror)
OSError
dataset/ETHPy150Open vsergeev/python-periphery/periphery/spi.py/SPI._set_extra_flags
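Rows 4,100 to 4,107 above are paired private getters and setters from python-periphery's SPI class, which exposes them through public attributes such as max_speed, bit_order, bits_per_word and extra_flags. The sketch below shows only the general getter/setter-to-property pattern with a stand-in class; it is not the library's actual implementation:

    class FakeDevice(object):
        def __init__(self):
            self._max_speed = 1000000        # pretend hardware state

        def _get_max_speed(self):
            return self._max_speed

        def _set_max_speed(self, max_speed):
            if not isinstance(max_speed, (int, float)):
                raise TypeError("Invalid max_speed type, should be integer or float.")
            self._max_speed = int(max_speed)

        # Expose the getter/setter pair as one attribute.
        max_speed = property(_get_max_speed, _set_max_speed)

    dev = FakeDevice()
    dev.max_speed = 500000.0    # routed through _set_max_speed
    print(dev.max_speed)        # 500000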
4,108
def info(name): ''' Return information about a group CLI Example: .. code-block:: bash salt '*' group.info foo ''' try: grinfo = grp.getgrnam(name) except __HOLE__: return {} else: return {'name': grinfo.gr_name, 'passwd': grinfo.gr_passwd, 'gid': grinfo.gr_gid, 'members': grinfo.gr_mem}
KeyError
dataset/ETHPy150Open saltstack/salt/salt/modules/pw_group.py/info
4,109
def Convert2Num(text): """converts text to python type in order Int, hex, Float, Complex ValueError if can't """ #convert to number if possible try: value = int(text, 10) return value except ValueError as ex: pass try: value = int(text, 16) return value except ValueError as ex: pass try: value = float(text) return value except __HOLE__ as ex: pass try: value = complex(text) return value except ValueError as ex: pass raise ValueError("Expected Number got '{0}'".format(text)) # return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2Num
4,110
def Convert2CoordNum(text): """converts text to python type in order FracDeg, Int, hex, Float, Complex ValueError if can't """ #convert to FracDeg Coord if possible dm = REO_LatLonNE.findall(text) #returns list of tuples of groups [(deg,min)] if dm: deg = float(dm[0][0]) min_ = float(dm[0][1]) return (deg + min_/60.0) dm = REO_LatLonSW.findall(text) #returns list of tuples of groups [(deg,min)] if dm: deg = float(dm[0][0]) min_ = float(dm[0][1]) return (-(deg + min_/60.0)) try: return (Convert2Num(text)) except __HOLE__: raise ValueError("Expected CoordNum got '{0}'".format(text))
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2CoordNum
4,111
def Convert2BoolCoordNum(text): """converts text to python type in order None, Boolean, Int, Float, Complex ValueError if can't """ #convert to None if possible if text.lower() == 'none': return None #convert to boolean if possible if text.lower() in ['true', 'yes']: return (True) if text.lower() in ['false', 'no']: return (False) try: return (Convert2CoordNum(text)) except __HOLE__: raise ValueError("Expected PathBoolCoordNum got '{0}'".format(text)) return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2BoolCoordNum
4,112
def Convert2StrBoolCoordNum(text): """converts text to python type in order Boolean, Int, Float, complex or double quoted string ValueError if can't """ if REO_Quoted.match(text): #text is double quoted string return text.strip('"') #strip off quotes if REO_QuotedSingle.match(text): #text is single quoted string return text.strip("'") #strip off quotes try: return (Convert2BoolCoordNum(text)) except __HOLE__: raise ValueError("Expected StrBoolNum got '{0}'".format(text)) return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2StrBoolCoordNum
4,113
def Convert2PathCoordNum(text): """converts text to python type in order Boolean, Int, Float, Complex ValueError if can't """ #convert to path string if possible if REO_PathNode.match(text): return (text) try: return (Convert2CoordNum(text)) except __HOLE__: raise ValueError("Expected BoolCoordNum got '{0}'".format(text)) return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2PathCoordNum
4,114
def Convert2BoolPathCoordNum(text): """converts text to python type in order Boolean, Int, Float, Complex ValueError if can't """ #convert to None if possible if text.lower() == 'none': return None #convert to boolean if possible if text.lower() in ['true', 'yes']: return (True) if text.lower() in ['false', 'no']: return (False) try: return (Convert2PathCoordNum(text)) except __HOLE__: raise ValueError("Expected PathBoolCoordNum got '{0}'".format(text)) return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2BoolPathCoordNum
4,115
def Convert2StrBoolPathCoordNum(text): """converts text to python type in order Boolean, Int, Float, complex or double quoted string ValueError if can't """ if REO_Quoted.match(text): #text is double quoted string return text.strip('"') #strip off quotes if REO_QuotedSingle.match(text): #text is single quoted string return text.strip("'") #strip off quotes try: return (Convert2BoolPathCoordNum(text)) except __HOLE__: raise ValueError("Expected StrBoolNum got '{0}'".format(text)) return None
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Convert2StrBoolPathCoordNum
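Rows 4,109 to 4,115 above form a chain of converters, each delegating to the previous one and widening the accepted forms (number, coordinate, path, boolean, quoted string). The standalone re-sketch below illustrates only the try/except fall-through idea behind Convert2Num, not the ioflo code itself:

    def convert_to_num(text):
        # Try decimal int, hex int, float, then complex; raise ValueError otherwise.
        for caster in (lambda t: int(t, 10), lambda t: int(t, 16), float, complex):
            try:
                return caster(text)
            except ValueError:
                pass
        raise ValueError("Expected Number got '{0}'".format(text))

    print(convert_to_num("42"))     # 42
    print(convert_to_num("1A"))     # 26 (falls through to the hex parse)
    print(convert_to_num("2.5"))    # 2.5
    print(convert_to_num("3+4j"))   # (3+4j)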
4,116
def build(self, fileName='', mode=None, metas=None, preloads=None, behaviors=None): """ Allows building from multiple files. Essentially files list is stack of files fileName is name of first file. Load commands in any files push (append) file onto files until file completed loaded and then popped off Each house's store is inited with the meta data in metas """ #overwrite default if truthy argument if fileName: self.fileName = fileName if mode: self.mode.extend[mode] if metas: self.metas.extend[metas] if preloads: self.preloads.extend[preloads] if behaviors: self.behaviors.extend[behaviors] if self.behaviors: #import behavior package/module for behavior in self.behaviors: mod = importlib.import_module(behavior) housing.House.Clear() #clear house registry housing.ClearRegistries() #clear all the other registries lineView = '' try: #IOError self.fileName = os.path.abspath(self.fileName) self.currentFile = open(self.fileName,"r") self.currentCount = 0 try: #ResolveError while self.currentFile: line = self.currentFile.readline() #empty if end of file self.currentCount += 1 #inc line counter while (line): saveLines = [] saveLineViews = [] while line.endswith('\\\n'): #continuation line = line.rstrip() saveLineViews.append("%04d %s" % (self.currentCount, line)) saveLines.append(line.rstrip('\\').strip()) line = self.currentFile.readline() #empty if end of file self.currentCount += 1 #inc line counter line = line.rstrip() saveLineViews.append("%04d %s" % (self.currentCount, line)) saveLines.append(line) lineView = "\n".join(saveLineViews) line = " ".join(saveLines) console.concise(lineView + '\n') line = line.strip() #strips white space both ends chunks = REO_Chunks.findall(line) # removes trailing comments tokens = [] for chunk in chunks: if chunk[0] == '#': #throw out whole line as comment break else: tokens.append(chunk) if (not tokens): #empty line or comment only line = self.currentFile.readline() #empty if end of file self.currentCount += 1 #inc line counter continue #above guarantees at least 1 token self.currentHuman = ' '.join(tokens) try: #ParseError ParseWarning if not self.dispatch(tokens): console.terse("Script Parsing stopped at line {0} in file {1}\n".format( self.currentCount, self.currentFile.name)) console.terse(lineView + '\n') return False except excepting.ParseError as ex: console.terse("\n{0}\n\n".format(ex)) console.terse("Script line {0} in file {1}\n".format( self.currentCount, self.currentFile.name)) console.terse(lineView + '\n') raise #dispatch evals commands. 
self.currentFile may be changed by load command line = self.currentFile.readline() #empty if end of file self.currentCount += 1 #inc line counter self.currentFile.close() if self.files: self.currentFile = self.files.pop() self.currentCount = self.counts.pop() console.terse("Resume loading from file {0}.\n".format(self.currentFile.name)) else: self.currentFile = None #building done so now resolve links and collect actives inactives for house in self.houses: house.orderTaskables() house.resolve() if console._verbosity >= console.Wordage.concise: house.showAllTaskers() #show framework hierarchiy for framer in house.framers: framer.showHierarchy() #show hierarchy of each house's store console.concise( "\nData Store for {0}\n".format(house.name)) house.store.expose(valued=(console._verbosity >= console.Wordage.terse)) return True except excepting.ResolveError as ex: console.terse("{0}\n".format(ex)) return False except __HOLE__ as ex: console.terse("Error opening mission file {0}\n".format(ex)) return False finally: for f in self.files: if not f.closed: f.close()
IOError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.build
4,117
def buildLoad(self, command, tokens, index): """ load filepathname """ try: name = tokens[index] index +=1 self.files.append(self.currentFile) #push currentFile self.counts.append(self.currentCount) #push current line ct cwd = os.getcwd() #save current working directory os.chdir(os.path.split(self.currentFile.name)[0]) # set cwd to current file name = os.path.abspath(os.path.expanduser(name)) # resolve name if relpath to cwd os.chdir(cwd) #restore old cwd self.currentFile = open(name,"r") self.currentCount = 0 console.terse("Loading from file {0}.\n".format(self.currentFile.name)) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True #House specific builders
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildLoad
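The Builder.build* rows from 4,117 onward share one token-consumption shape: read tokens[index] inside a try block, convert IndexError into a "Not enough tokens" ParseError, and reject leftovers with an "Unused tokens" check. A self-contained sketch of just that shape (ParseError here is a local stand-in for ioflo's excepting.ParseError):

    class ParseError(Exception):
        pass

    def build_verb(command, tokens, index):
        # Consume one required token; fail on missing or leftover tokens.
        try:
            name = tokens[index]
            index += 1
        except IndexError:
            raise ParseError("Building verb '%s'. Not enough tokens." % (command,))
        if index != len(tokens):
            raise ParseError("Building verb '%s'. Unused tokens." % (command,))
        return name

    print(build_verb("house", ["house", "dreams"], 1))   # 'dreams'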
4,118
def buildHouse(self, command, tokens, index): """Create a new house and make it the current one house dreams """ try: name = tokens[index] index +=1 self.verifyName(name, command, tokens, index) self.currentHouse = housing.House(name = name) #also creates .store self.houses.append(self.currentHouse) self.currentStore = self.currentHouse.store console.terse(" Created House '{0}'. Assigning registries and " "creating instances ...\n".format(name)) self.currentHouse.assignRegistries() console.profuse(" Clearing current Framer, Frame, Log etc.\n") #changed store so need to make new frameworks and frames self.currentFramer = None #current framer self.currentFrame = None #current frame self.currentLogger = None #current logger self.currentLog = None #current log #meta data in metas is list of triples of (name, path, data) for name, path, data in self.metas: self.currentHouse.metas[name] = self.initPathToData(path, data) # set .meta.house to house.name self.currentHouse.metas['house'] = self.initPathToData('.meta.house', odict(value=self.currentHouse.name)) for path, data in self.preloads: self.initPathToData(path, data) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) msg = " Built House '{0}' with meta:\n".format(self.currentHouse.name) for name, share in self.currentHouse.metas.items(): msg += " {0}: {1!r}\n".format(name, share) console.terse(msg) msg = " Built House '{0}' with preload:\n".format(self.currentHouse.name) for path, data in self.preloads: share = self.currentHouse.store.fetch(path) msg += " {0}: {1!r}\n".format(path, share) console.terse(msg) return True # Convenince Functions
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildHouse
4,119
def buildInit(self, command, tokens, index): """Initialize share in current store init destination to data destination: absolute path data: direct init destination from source destination: [(value, fields) in] absolute [(value, fields) in] path source: [(value, fields) in] absolute [(value, fields) in] path """ if not self.currentStore: msg = "ParseError: Building verb '%s'. No current store" % (command) raise excepting.ParseError(msg, tokens, index) try: destinationFields, index = self.parseFields(tokens, index) destinationPath, index = self.parsePath(tokens, index) if self.currentStore.fetchShare(destinationPath) is None: console.terse(" Warning: Init of non-preexistent share {0} ..." " creating anyway\n".format(destinationPath)) destination = self.currentStore.create(destinationPath) connective = tokens[index] index += 1 if connective in ['to', 'with']: if destinationFields: #fields not allowed so error msg = "ParseError: Building verb '%s'. Unexpected fields '%s in' clause " %\ (command, destinationFields) raise excepting.ParseError(msg, tokens, index) data, index = self.parseDirect(tokens, index) #prevent init value and non value fields in same share self.verifyShareFields(destination, data.keys(), tokens, index) destination.update(data) console.profuse(" Inited share {0} to data = {1}\n".format(destination.name, data)) elif connective in ['by', 'from']: sourceFields, index = self.parseFields(tokens, index) sourcePath, index = self.parsePath(tokens, index) source = self.currentStore.fetchShare(sourcePath) if source is None: msg = "ParseError: Building verb '%s'. Nonexistent source share '%s'" %\ (command, sourcePath) raise excepting.ParseError(msg, tokens, index) sourceFields, destinationFields = self.prepareSrcDstFields(source, sourceFields, destination, destinationFields, tokens, index) data = odict() for sf, df in izip(sourceFields, destinationFields): data[df] = source[sf] destination.update(data) msg = " Inited share {0} from source {1} with data = {2}\n".format( destination.name, source.name, data) console.profuse(msg) else: msg = "ParseError: Building verb '%s'. Unexpected connective '%s'" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildInit
4,120
def buildServer(self, command, tokens, index): """create server tasker in current house server has to have name so can ask stop server name [at period] [be scheduled] [rx shost:sport] [tx dhost:dport] [in order] [to prefix] [per data] [for source] scheduled: (active, inactive, slave) rx: (host:port, :port, host:, host, :) tx: (host:port, :port, host:, host, :) order: (front, mid, back) prefix filepath data: direct source: [(value, fields) in] indirect """ if not self.currentHouse: msg = "ParseError: Building verb '%s'. No current house" % (command) raise excepting.ParseError(msg, tokens, index) if not self.currentStore: msg = "ParseError: Building verb '%s'. No current store" % (command) raise excepting.ParseError(msg, tokens, index) try: parms = {} init = {} name = '' connective = None period = 0.0 prefix = './' schedule = ACTIVE #globaling.py order = MID #globaling.py rxa = '' txa = '' sha = ('', 54321) #empty host means any interface on local host dha = ('localhost', 54321) name = tokens[index] index +=1 while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'at': period = abs(Convert2Num(tokens[index])) index +=1 elif connective == 'to': prefix = tokens[index] index +=1 elif connective == 'be': option = tokens[index] index +=1 if option not in ['active', 'inactive', 'slave']: msg = "ParseError: Building verb '%s'. Bad server scheduled option got %s" % \ (command, option) raise excepting.ParseError(msg, tokens, index) schedule = ScheduleValues[option] #replace text with value elif connective == 'in': order = tokens[index] index +=1 if order not in OrderValues: msg = "ParseError: Building verb '%s'. Bad order option got %s" % \ (command, order) raise excepting.ParseError(msg, tokens, index) order = OrderValues[order] #convert to order value elif connective == 'rx': rxa = tokens[index] index += 1 elif connective == 'tx': txa = tokens[index] index += 1 elif connective == 'per': data, index = self.parseDirect(tokens, index) init.update(data) elif connective == 'for': srcFields, index = self.parseFields(tokens, index) srcPath, index = self.parsePath(tokens, index) if self.currentStore.fetchShare(srcPath) is None: console.terse(" Warning: Init 'with' non-existent share {0}" " ... creating anyway".format(srcPath)) src = self.currentStore.create(srcPath) #assumes src share inited before this line parsed for field in srcFields: init[field] = src[field] else: msg = "ParseError: Building verb '%s'. Bad connective got %s" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." 
% (command,) raise excepting.ParseError(msg, tokens, index) prefix += '/' + self.currentHouse.name #extra slashes are ignored if rxa: if ':' in rxa: host, port = rxa.split(':') sha = (host, int(port)) else: sha = (rxa, sha[1]) if txa: if ':' in txa: host, port = txa.split(':') dha = (host, int(port)) else: dha = (txa, dha[1]) server = serving.Server(name=name, store = self.currentStore,) kw = dict(period=period, schedule=schedule, sha=sha, dha=dha, prefix=prefix,) kw.update(init) server.reinit(**kw) self.currentHouse.taskers.append(server) if schedule == SLAVE: self.currentHouse.slaves.append(server) else: #taskable active or inactive if order == FRONT: self.currentHouse.fronts.append(server) elif order == BACK: self.currentHouse.backs.append(server) else: self.currentHouse.mids.append(server) msg = " Created server named {0} at period {2:0.4f} be {3}\n".format( server.name, name, server.period, ScheduleNames[server.schedule]) console.profuse(msg) return True #Logger specific builders
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildServer
4,121
def buildLogger(self, command, tokens, index): """create logger in current house logger logname [to prefix] [at period] [be scheduled] [flush interval] scheduled: (active, inactive, slave) logger basic at 0.125 logger basic """ if not self.currentHouse: msg = "ParseError: Building verb '{0}'. No current house.".format( command, index, tokens) raise excepting.ParseError(msg, tokens, index) if not self.currentStore: msg = "ParseError: Building verb '{0}'. No current store.".format( command, index, tokens) raise excepting.ParseError(msg, tokens, index) try: name = tokens[index] index +=1 period = 0.0 #default schedule = ACTIVE #globaling.py order = MID #globaling.py interval = 30.0 prefix = './' while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'at': period = abs(Convert2Num(tokens[index])) index +=1 elif connective == 'to': prefix = tokens[index] index +=1 elif connective == 'be': option = tokens[index] index +=1 if option not in ['active', 'inactive', 'slave']: msg = "Error building %s. Bad logger scheduled option got %s." %\ (command, option) raise excepting.ParseError(msg, tokens, index) schedule = ScheduleValues[option] #replace text with value elif connective == 'in': order = tokens[index] index +=1 if order not in OrderValues: msg = "Error building %s. Bad order got %s." %\ (command, order) raise excepting.ParseError(msg, tokens, index) order = OrderValues[order] #convert to order value elif connective == 'flush': interval = max(1.0,abs(Convert2Num(tokens[index]))) index +=1 else: msg = "Error building %s. Bad connective got %s." %\ (command, connective) raise excepting.ParseError(msg, tokens, index) prefix += '/' + self.currentHouse.name #extra slashes are ignored if name in logging.Logger.Names: msg = "Error building %s. Task %s already exists." %\ (command, name) raise excepting.ParseError(msg, tokens, index) logger = logging.Logger(name = name, store = self.currentStore, period = period, flushPeriod = interval, prefix = prefix) logger.schedule = schedule self.currentHouse.taskers.append(logger) if schedule == SLAVE: self.currentHouse.slaves.append(logger) else: #taskable active or inactive if order == FRONT: self.currentHouse.fronts.append(logger) elif order == BACK: self.currentHouse.backs.append(logger) else: self.currentHouse.mids.append(logger) self.currentLogger = logger console.profuse(" Created logger named {0} at period {1:0.4f} be {2}\n".format( logger.name, logger.period, ScheduleNames[logger.schedule])) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildLogger
4,122
def buildLog(self, command, tokens, index): """create log in current logger log name [to fileName] [as (text, binary)] [on rule] rule: (once, never, always, update, change) default fileName is log's name default type is text default rule is update for manual logging use tally command with rule once or never log autopilot (text, binary, console) to './logs/' on (never, once, update, change, always) """ if not self.currentLogger: msg = "Error building %s. No current logger." % (command,) raise excepting.ParseError(msg, tokens, index) if not self.currentStore: msg = "Error building %s. No current store." % (command,) raise excepting.ParseError(msg, tokens, index) try: kind = 'text' fileName = '' rule = NEVER name = tokens[index] index +=1 while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'as': kind = tokens[index] index +=1 if kind not in ['text', 'binary']: msg = "Error building %s. Bad kind = %s." %\ (command, kind) raise excepting.ParseError(msg, tokens, index) elif connective == 'to': fileName = tokens[index] index +=1 elif connective == 'on': rule = tokens[index].capitalize() index +=1 if rule not in LogRuleValues: msg = "Error building %s. Bad rule = %s." %\ (command, rule) raise excepting.ParseError(msg, tokens, index) rule = LogRuleValues[rule] else: msg = "Error building %s. Bad connective got %s." %\ (command, connective) raise excepting.ParseError(msg, tokens, index) if name in logging.Log.Names: msg = "Error building %s. Log %s already exists." %\ (command, name) raise excepting.ParseError(msg, tokens, index) log = logging.Log(name = name, store = self.currentStore, kind = kind, fileName = fileName, rule = rule) self.currentLogger.addLog(log) self.currentLog = log console.profuse(" Created log named {0} kind {1} file {2} rule {3}\n".format( name, kind, fileName, LogRuleNames[rule])) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildLog
4,123
def buildLoggee(self, command, tokens, index): """add loggee(s) to current log loggee tag sharepath tag sharepath ... """ if not self.currentLog: msg = "Error building %s. No current log." % (command,) raise excepting.ParseError(msg, tokens, index) if not self.currentStore: msg = "Error building %s. No current store." % (command,) raise excepting.ParseError(msg, tokens, index) try: while index < len(tokens): tag = tokens[index] index +=1 path = tokens[index] #share path index +=1 share = self.currentStore.create(path) #create so no errors at runtime if not isinstance(share, storing.Share): #verify path ends in share not node msg = "Error building %s. Loggee path %s not Share." % (command, path) raise excepting.ParseError(msg, tokens, index) if tag in self.currentLog.loggees: msg = "Error building %s. Loggee %s already exists in Log %s." %\ (command, tag, self.currentLog.name) raise excepting.ParseError(msg, tokens, index) self.currentLog.addLoggee(tag = tag, loggee = share) console.profuse(" Added loggee {0} with tag {1} loggees {2}\n".format( share.name, tag, self.currentLog.loggees)) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True #Framework specific builders
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildLoggee
4,124
def buildFramer(self, command, tokens, index): """Create a new framer and make it the current one framework framername [be (active, inactive, aux, slave)] [at period] [first frame] [via (main, mine, inode)] framework framername be active at 0.0 framework framername """ if not self.currentHouse: msg = "Error building %s. No current house." % (command,) raise excepting.ParseError(msg, tokens, index) if not self.currentStore: msg = "Error building %s. No current store." % (command,) raise excepting.ParseError(msg, tokens, index) try: name = tokens[index] index +=1 self.verifyName(name, command, tokens, index) schedule = INACTIVE #globaling.py order = MID #globaling.py period = 0.0 frame = '' inode = '' while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'at': period = max(0.0, Convert2Num(tokens[index])) index +=1 elif connective == 'be': option = tokens[index] index +=1 if option not in ScheduleValues: msg = "Error building %s. Bad scheduled option got %s." %\ (command, option) raise excepting.ParseError(msg, tokens, index) schedule = ScheduleValues[option] #replace text with value elif connective == 'in': order = tokens[index] index +=1 if order not in OrderValues: msg = "Error building %s. Bad order got %s." %\ (command, order,) raise excepting.ParseError(msg, tokens, index) order = OrderValues[order] #convert to order value elif connective == 'first': frame = tokens[index] index +=1 self.verifyName(frame, command, tokens, index) elif connective == 'via': inode, index = self.parseIndirect(tokens, index, node=True) else: msg = "Error building %s. Bad connective got %s." %\ (command, connective) raise excepting.ParseError(msg, tokens, index) if name in framing.Framer.Names: msg = "Error building %s. Framer or Task %s already exists." %\ (command, name) raise excepting.ParseError(msg, tokens, index) else: framer = framing.Framer(name = name, store = self.currentStore, period = period) framer.schedule = schedule framer.first = frame #need to resolve later framer.inode = inode self.currentHouse.taskers.append(framer) self.currentHouse.framers.append(framer) if schedule == SLAVE: self.currentHouse.slaves.append(framer) elif schedule == AUX: self.currentHouse.auxes.append(framer) elif schedule == MOOT: self.currentHouse.moots.append(framer) else: #taskable active or inactive if order == FRONT: self.currentHouse.fronts.append(framer) elif order == BACK: self.currentHouse.backs.append(framer) else: self.currentHouse.mids.append(framer) self.currentFramer = framer self.currentFramer.assignFrameRegistry() self.currentFrame = None #changed current Framer so no current Frame console.profuse(" Created Framer named '{0}' at period {1:0.4f} be {2} first {3}\n".format( framer.name, framer.period, ScheduleNames[framer.schedule], framer.first)) console.profuse(" Added Framer '{0}' to House '{1}', Assigned frame registry\n".format( framer.name, self.currentHouse.name)) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildFramer
4,125
def buildFirst(self, command, tokens, index): """set first (starting) frame for current framer first framename """ if not self.currentFramer: msg = "Error building %s. No current framer." % (command,) raise excepting.ParseError(msg, tokens, index) try: name = tokens[index] index +=1 self.verifyName(name, command, tokens, index) self.currentFramer.first = name #need to resolve later console.profuse(" Assigned first frame {0} for framework {1}\n".format( name, self.currentFramer.name)) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) return True #Frame specific builders
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildFirst
4,126
def buildFrame(self, command, tokens, index): """Create frame and attach to over frame if indicated frame frameName frame frameName overName the frameName next is reserved """ if not self.currentStore: msg = "Error building %s. No current store." % (command,) raise excepting.ParseError(msg, tokens, index) if not self.currentFramer: msg = "Error building %s. No current framer." % (command,) raise excepting.ParseError(msg, tokens, index) try: name = tokens[index] index +=1 self.verifyName(name, command, tokens, index) over = None while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'in': over = tokens[index] index +=1 else: msg = "Error building %s. Bad connective got %s." % (command, connective) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if name in ReservedFrameNames: msg = "Error building %s in Framer %s. Frame name %s reserved." %\ (command, self.currentFramer.name, name) raise excepting.ParseError(msg, tokens, index) elif name in framing.Frame.Names: #could use Registry Retrieve function msg = "Error building %s in Framer %s. Frame %s already exists." %\ (command, self.currentFramer.name, name) raise excepting.ParseError(msg, tokens, index) else: frame = framing.Frame(name = name, store = self.currentStore, framer = self.currentFramer.name) if over: frame.over = over #need to resolve later #if previous frame did not have explicit next frame then use this new frame # ad next lexically if self.currentFrame and not self.currentFrame.next_: self.currentFrame.next_ = frame.name #default first frame is first lexical frame if not assigned otherwise #so if startFrame is none then we must be first lexical frame if not self.currentFramer.first: #frame.framer.first: self.currentFramer.first = frame.name #frame.framer.first = frame self.currentFrame = frame self.currentContext = NATIVE console.profuse(" Created frame {0} with over {1}\n".format(frame.name, over)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildFrame
4,127
def buildOver(self, command, tokens, index): """Makes frame the over frame of the current frame over frame """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: over = tokens[index] index +=1 self.verifyName(over, command, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) self.currentFrame.over = over #need to resolve and attach later console.profuse(" Assigned over {0} to frame {1}\n".format( over,self.currentFrame.name)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildOver
4,128
def buildUnder(self, command, tokens, index): """Makes frame the primary under frame of the current frame under frame """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: under = tokens[index] index +=1 self.verifyName(under, command, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) unders = self.currentFrame.unders if not unders: #empty so just append unders.append(under) elif under != unders[0]: #not already primary while under in unders: #remove under (in case multiple copies shouldnt be) unders.remove(under) if isinstance(unders[0], framing.Frame): #should not be but if valid don't overwrite unders.insert(0, under) else: #just name so overwrite unders[0] = under else: #under == unders[0] already so do nothing pass console.profuse(" Assigned primary under {0} for frame {1}\n".format( under,self.currentFrame.name)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildUnder
4,129
def buildNext(self, command, tokens, index): """Explicitly assign next frame for timeouts and as target of go next next frameName next blank frameName means use lexically next allows override if multiple next commands to default of lexical """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: if index < len(tokens): #next frame optional next_ = tokens[index] index += 1 self.verifyName(next_, command, tokens, index) else: next_ = None except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) self.currentFrame.next_ = next_ console.profuse(" Assigned next frame {0} for frame {1}\n".format( next_, self.currentFrame.name)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildNext
4,130
def buildAux(self, command, tokens, index): """Parse 'aux' command for simple, cloned, or conditional aux of forms Simple Auxiliary: aux framername Cloned Auxiliary: aux framername as (mine, clonedauxname) [via (main, mine, inode)] Simple Conditional Auxiliary: aux framername if [not] need aux framername if [not] need [and [not] need ...] Cloned Conditional Auxiliary: aux framername as (mine, clonedauxname) [via (main, mine, inode)] if [not] need aux framername as (mine, clonedauxname) [via (main, mine, inode)] if [not] need [and [not] need ...] """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: needs = [] aux = None #original connective = None clone = None inode = '' aux = tokens[index] index +=1 #eat token self.verifyName(aux, command, tokens, index) while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'as': clone = tokens[index] index += 1 self.verifyName(clone, command, tokens, index) elif connective == 'via': inode, index = self.parseIndirect(tokens, index, node=True) elif connective == 'if': while (index < len(tokens)): act, index = self.makeNeed(tokens, index) if not act: return False # something wrong do not know what needs.append(act) if index < len(tokens): connective = tokens[index] if connective not in ['and']: msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token else: msg = ("Error building {0}. Invalid connective" " '{1}'.".format(command, connective)) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if clone: data = odict(original=aux, clone=clone, schedule=AUX, human=self.currentHuman, count=self.currentCount, inode=inode) if clone == 'mine': # insular clone may not be referenced aux = data # create clone when resolve aux can wait until then else: # named clone create clone when resolve framer.moots may be referenced self.currentFramer.moots.append(data) # need to resolve early aux = clone # assign aux to clone name as original aux is to be cloned # named clones must be resolved before any frames get resolved # and are added to the framer.names so they can be referenced if needs: #conditional auxiliary suspender preact human = ' '.join(tokens) #recreate transition command string for debugging #resolve aux link later parms = dict(needs = needs, main = 'me', aux = aux, human = human) act = acting.Act( actor='Suspender', registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) self.currentFrame.addPreact(act) console.profuse(" Added suspender preact, '{0}', with aux" " {1} needs:\n".format(command, aux)) for need in needs: console.profuse(" {0} with parms = {1}\n".format(need.actor, need.parms)) else: # Simple auxiliary self.currentFrame.addAux(aux) #need to resolve later console.profuse(" Added aux framer {0}\n".format(aux)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildAux
4,131
def buildRear(self, command, tokens, index): """ Parse 'rear' verb Two Forms: only first form is currently supported rear original [as mine] [be aux] in frame framename framename cannot be me or in outline of me rear original as clonename be schedule schedule cannot be aux clonename cannot be mine """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: original = None connective = None clone = 'mine' # default is insular clone schedule = 'aux' # default schedule is aux frame = 'me' # default frame is current original = tokens[index] index +=1 # eat token self.verifyName(original, command, tokens, index) while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'as': clone = tokens[index] index += 1 self.verifyName(clone, command, tokens, index) elif connective == 'be': schedule = tokens[index] index += 1 elif connective == 'in': #optional in frame or in framer clause place = tokens[index] #need to resolve index += 1 # eat token if place != 'frame': msg = ("ParseError: Building verb '{0}'. Invalid " " '{1}' clause. Expected 'frame' got " "'{2}'".format(command, connective, place)) raise excepting.ParseError(msg, tokens, index) if index < len(tokens): frame = tokens[index] index += 1 else: msg = ("Error building {0}. Invalid connective" " '{1}'.".format(command, connective)) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building {0}. Not enough tokens.".format(command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building {0}. Unused tokens.".format(command,) raise excepting.ParseError(msg, tokens, index) # only allow schedule of aux for now if schedule not in ScheduleValues or schedule not in ['aux']: msg = "Error building {0}. Bad scheduled option got '{1}'.".format(command, schedule) raise excepting.ParseError(msg, tokens, index) schedule = ScheduleValues[schedule] #replace text with value # when clone is insular and schedule is aux then frame cannot be # current frames outline. This is validated in the actor resolve if schedule == AUX: if clone != 'mine': msg = ("Error building {0}. Only insular clonename of" " 'mine' allowed. Got '{1}'.".format(command, clone)) raise excepting.ParseError(msg, tokens, index) if frame == 'me': msg = ("Error building {0}. Frame clause required.".format(command, clone)) raise excepting.ParseError(msg, tokens, index) parms = dict(original=original, clone=clone, schedule=schedule, frame=frame) actorName = 'Rearer' if actorName not in acting.Actor.Registry: msg = "Error building '{0}'. No actor named '{1}'.".format(command, actorName) raise excepting.ParseError(msg, tokens, index) act = acting.Act(actor=actorName, registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) context = self.currentContext if context == NATIVE: context = ENTER # what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildRear
4,132
def buildRaze(self, command, tokens, index): """ Parse 'raze' verb raze (all, last, first) [in frame [(me, framename)]] """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: connective = None who = None # default is insular clone frame = 'me' # default frame is current who = tokens[index] index +=1 # eat token if who not in ['all', 'first', 'last']: msg = ("ParseError: Building verb '{0}'. Invalid target of" " raze. Expected one of ['all', 'first', 'last'] but got " "'{2}'".format(command, connective, who)) raise excepting.ParseError(msg, tokens, index) while index < len(tokens): #options connective = tokens[index] index += 1 if connective == 'in': #optional in frame or in framer clause place = tokens[index] #need to resolve index += 1 # eat token if place != 'frame': msg = ("ParseError: Building verb '{0}'. Invalid " " '{1}' clause. Expected 'frame' got " "'{2}'".format(command, connective, place)) raise excepting.ParseError(msg, tokens, index) if index < len(tokens): frame = tokens[index] index += 1 else: msg = ("Error building {0}. Invalid connective" " '{1}'.".format(command, connective)) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building {0}. Not enough tokens.".format(command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building {0}. Unused tokens.".format(command,) raise excepting.ParseError(msg, tokens, index) parms = dict(who=who, frame=frame) actorName = 'Razer' if actorName not in acting.Actor.Registry: msg = "Error building '{0}'. No actor named '{1}'.".format(command, actorName) raise excepting.ParseError(msg, tokens, index) act = acting.Act(actor=actorName, registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) context = self.currentContext if context == NATIVE: context = EXIT # what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildRaze
4,133
def buildDone(self, command, tokens, index): """ Creates complete action that indicates tasker(s) completed by setting .done state to True native context is enter done tasker [tasker ...] done [me] tasker: (taskername, me) """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: kind = 'Done' taskers = [] while index < len(tokens): tasker = tokens[index] index +=1 self.verifyName(tasker, command, tokens, index) taskers.append(tasker) #resolve later if not taskers: taskers.append('me') except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) actorName = 'Complete' + kind.capitalize() if actorName not in completing.Complete.Registry: msg = "Error building complete %s. No actor named %s." %\ (kind, actorName) raise excepting.ParseError(msg, tokens, index) parms = {} parms['taskers'] = taskers #resolve later act = acting.Act(actor=actorName, registrar=completing.Complete, parms=parms, human=self.currentHuman, count=self.currentCount) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Created done complete {0} with {1}\n".format(act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildDone
4,134
def buildTimeout(self, command, tokens, index): """creates implicit transition to next on elapsed >= value timeout 5.0 """ self.verifyCurrentContext(tokens, index) try: value = abs(Convert2Num(tokens[index])) #convert text to number if valid format index +=1 if isinstance(value, str): msg = "Error building %s. invalid timeout %s." %\ (command, value) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) # build need act for transact need = self.makeImplicitDirectFramerNeed( name="elapsed", comparison='>=', goal=float(value), tolerance=0) needs = [] needs.append(need) # build transact human = ' '.join(tokens) #recreate transition command string for debugging far = 'next' #resolve far link later parms = dict(needs = needs, near = 'me', far = far, human = human) act = acting.Act(actor='Transiter', registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) self.currentFrame.addPreact(act) #add transact as preact console.profuse(" Added timeout transition preact, '{0}', with far {1} needs:\n".format( command, far)) for act in needs: console.profuse(" {0} with parms = {1}\n".format(act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildTimeout
4,135
def buildRepeat(self, command, tokens, index): """creates implicit transition to next on recurred >= value repeat 2 go next if recurred >= 2 """ self.verifyCurrentContext(tokens, index) try: value = abs(Convert2Num(tokens[index])) #convert text to number if valid format index +=1 if isinstance(value, str): msg = "Error building %s. invalid repeat %s." %\ (command, value) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) # build need act for transact need = self.makeImplicitDirectFramerNeed( name="recurred", comparison='>=', goal=int(value), tolerance=0) needs = [] needs.append(need) # build transact human = ' '.join(tokens) #recreate transition command string for debugging far = 'next' #resolve far link later parms = dict(needs = needs, near = 'me', far = far, human = human) act = acting.Act( actor='Transiter', registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) self.currentFrame.addPreact(act) #add transact as preact console.profuse(" Added repeat transition preact, '{0}', with far {1} needs:\n".format( command, far)) for act in needs: console.profuse(" {0} with parms = {1}\n".format(act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildRepeat
4,136
def buildPrint(self, command, tokens, index): """prints a string consisting of space separated tokens print message print hello world """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: message = ' '.join(tokens[1:]) except __HOLE__: message = '' parms = dict(message = message) act = acting.Act( actor='Printer', registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildPrint
4,137
def buildPut(self, command, tokens, index): """Build put command to put data into share put data into destination data: direct destination: [(value, fields) in] indirect """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: srcData, index = self.parseDirect(tokens, index) connective = tokens[index] index += 1 if connective != 'into': msg = "ParseError: Building verb '%s'. Unexpected connective '%s'" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) dstFields, index = self.parseFields(tokens, index) dstPath, index = self.parseIndirect(tokens, index) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) actorName = 'Poke' + 'Direct' #capitalize second word if actorName not in poking.Poke.Registry: msg = "ParseError: Can't find actor named '%s'" % (actorName) raise excepting.ParseError(msg, tokens, index) parms = {} parms['sourceData'] = srcData # this is dict parms['destination'] = dstPath # this is a share path parms['destinationFields'] = dstFields # this is a list act = acting.Act( actor=actorName, registrar=poking.Poke, parms=parms, human=self.currentHuman, count=self.currentCount) msg = " Created Actor {0} parms: data = {1} destination = {2} fields = {3} ".format( actorName, srcData, dstPath, dstFields) console.profuse(msg) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildPut
4,138
def buildInc(self, command, tokens, index): """Build inc command to inc share by data or from source inc destination by data inc destination from source destination: [(value, field) in] indirect data: directone source: [(value, field) in] indirect """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: dstFields, index = self.parseFields(tokens, index) dstPath, index = self.parseIndirect(tokens, index) connective = tokens[index] index += 1 if connective in ['to', 'with']: srcData, index = self.parseDirect(tokens, index) for field, value in srcData.items(): if isinstance(value, str): msg = "ParseError: Building verb '%s'. " % (command) msg += "Data value = '%s' in field '%s' not a number" %\ (value, field) raise excepting.ParseError(msg, tokens, index) act = self.makeIncDirect(dstPath, dstFields, srcData) elif connective in ['by', 'from']: srcFields, index = self.parseFields(tokens, index) srcPath, index = self.parseIndirect(tokens, index) act = self.makeIncIndirect(dstPath, dstFields, srcPath, srcFields) else: msg = "ParseError: Building verb '%s'. Unexpected connective '%s'" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildInc
4,139
def buildCopy(self, command, tokens, index): """Build copy command to copy from one share to another copy source into destination source: [(value, fields) in] indirect destination: [(value, fields) in] indirect """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: srcFields, index = self.parseFields(tokens, index) srcPath, index = self.parseIndirect(tokens, index) connective = tokens[index] index += 1 if connective != 'into': msg = "ParseError: Building verb '%s'. Unexpected connective '%s'" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) dstFields, index = self.parseFields(tokens, index) dstPath, index = self.parseIndirect(tokens, index) except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) actorName = 'Poke' + 'Indirect' #capitalize second word if actorName not in poking.Poke.Registry: msg = "ParseError: Can't find actor named '%s'" % (actorName) raise excepting.ParseError(msg, tokens, index) parms = {} parms['source'] = srcPath #this is string parms['sourceFields'] = srcFields #this is a list parms['destination'] = dstPath #this is a string parms['destinationFields'] = dstFields #this is a list act = acting.Act( actor=actorName, registrar=poking.Poke, parms=parms, human=self.currentHuman, count=self.currentCount) msg = " Created Actor {0} parms: ".format(actorName) for key, value in parms.items(): msg += " {0} = {1}".format(key, value) console.profuse("{0}\n".format(msg)) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildCopy
4,140
def buildSet(self, command, tokens, index): """Build set command to generate goal actions set goal to data set goal from source goal: elapsed recurred [(value, fields) in] absolute [(value, fields) in] relativegoal data: direct source: indirect """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: kind = tokens[index] if kind in ['elapsed', 'recurred']: #simple implicit framer relative goals, direct and indirect, index +=1 #eat token act, index = self.makeFramerGoal(kind, tokens, index) else: #basic goals #goal is destination dst dstFields, index = self.parseFields(tokens, index) dstPath, index = self.parseIndirect(tokens, index) #required connective connective = tokens[index] index += 1 if connective in ['to', 'with']: #data direct srcData, index = self.parseDirect(tokens, index) act = self.makeGoalDirect(dstPath, dstFields, srcData) elif connective in ['by', 'from']: #source indirect srcFields, index = self.parseFields(tokens, index) srcPath, index = self.parseIndirect(tokens, index) act = self.makeGoalIndirect(dstPath, dstFields, srcPath, srcFields) else: msg = "ParseError: Building verb '%s'. Unexpected connective '%s'" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) if not act: return False except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildSet
4,141
def buildGo(self, command, tokens, index): """Parse 'go' command transition with transition conditions of forms Transitions: go far go far if [not] need go far if [not] need [and [not] need ...] Far: next me frame """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: needs = [] far = None connective = None far = tokens[index] #get target index +=1 #eat token self.verifyName(far, command, tokens, index) if index < len(tokens): #check for optional if connective connective = tokens[index] if connective not in ['if']: #invalid connective msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token while (index < len(tokens)): act, index = self.makeNeed(tokens, index) if not act: return False #something wrong do not know what needs.append(act) if index < len(tokens): connective = tokens[index] if connective not in ['and']: msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command, ) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if not needs and connective: #if but no needs msg = "ParseError: Building verb '%s'. Connective %s but missing need(s)" %\ (command, connective) raise excepting.ParseError(msg, tokens, index) # build transact human = ' '.join(tokens) #recreate transition command string for debugging #resolve far link later parms = dict(needs = needs, near = 'me', far = far, human = human) act = acting.Act( actor='Transiter', registrar=acting.Actor, parms=parms, human=self.currentHuman, count=self.currentCount) self.currentFrame.addPreact(act) console.profuse(" Added transition preact, '{0}', with far {1} needs:\n".format( command, far)) for act in needs: console.profuse(" {0} with parms = {1}\n".format(act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildGo
4,142
def buildLet(self, command, tokens, index): """Parse 'let' command benter action with entry conditions of forms Before Enter: let [me] if [not] need let [me] if [not] need [and [not] need ...] Far: next me frame """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: needs = [] connective = None connective = tokens[index] #get me or if if connective not in ['me', 'if']: #invalid connective msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token if connective == 'me': connective = tokens[index] #check for if connective if connective not in ['if']: #invalid connective msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token while (index < len(tokens)): act, index = self.makeNeed(tokens, index) if not act: return False # something wrong do know what needs.append(act) if index < len(tokens): connective = tokens[index] if connective not in ['and']: msg = "ParseError: Building verb '%s'. Bad connective '%s'" % \ (command, connective) raise excepting.ParseError(msg, tokens, index) index += 1 #otherwise eat token except __HOLE__: msg = "ParseError: Building verb '%s'. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "ParseError: Building verb '%s'. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if not needs: # no needs msg = "ParseError: Building verb '%s'. Missing need(s)" %\ (command) raise excepting.ParseError(msg, tokens, index) # build beact for act in needs: self.currentFrame.addBeact(act) console.profuse(" Added beact, '{0}', with needs:\n".format(command)) for act in needs: console.profuse(" {0} with {1}\n".format(act.actor, act.parms)) return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildLet
4,143
def buildDo(self, command, tokens, index):
    """
       do kind [part ...] [as name [part ...]] [at context] [via inode]
          [to data][by source] [with data] [from source]
          [per data] [for source] [cum data] [qua source]

       deed: name [part ...]
       kind: name [part ...]
       context: (native, benter, enter, recur, exit, precur, renter, rexit)
       inode: indirect
       data: direct
       source: [(value, fields) in] indirect

       do controller pid depth   --> controllerPIDDepth
       do arbiter switch heading --> arbiterSwitchHeading
       do controller pid depth with foobar 1
       do controller pid depth from value in .max.depth
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        kind = ""  # deed class key in registry
        name = ""  #specific name of deed instance
        inode = None
        parts = []
        parms = odict()
        inits = odict()
        ioinits = odict()
        prerefs = odict([('inits', odict()),
                         ('ioinits', odict()),
                         ('parms', odict())])
        connective = None
        context = self.currentContext

        while index < len(tokens):
            if (tokens[index] in ['as', 'at', 'via', 'to', 'by', 'with',
                                  'from', 'per', 'for', 'cum', 'qua']):  # end of parts
                break
            parts.append(tokens[index])
            index += 1  #eat token

        if parts:
            kind = "".join([part.capitalize() for part in parts])  #camel case

        while index < len(tokens):  #options
            connective = tokens[index]
            index += 1

            if connective == 'as':
                parts = []
                while index < len(tokens):  # kind parts end when connective
                    if tokens[index] in ['as', 'at', 'to', 'by', 'with',
                                         'from', 'per', 'for']:  # end of parts
                        break
                    parts.append(tokens[index])
                    index += 1  #eat token
                name = "".join([part.capitalize() for part in parts])  #camel case
                if not name:
                    msg = "ParseError: Building verb '%s'. Missing name for connective 'as'" % (command)
                    raise excepting.ParseError(msg, tokens, index)

            elif connective in ['at']:
                context = tokens[index]
                index += 1
                if context not in ActionContextValues:
                    msg = ("ParseError: Building verb '{0}'. Invalid context"
                           " '{1} for connective 'as'".format(command, context))
                    raise excepting.ParseError(msg, tokens, index)
                context = ActionContextValues[context]

            elif connective in ['via']:
                inode, index = self.parseIndirect(tokens, index, node=True)

            elif connective in ['to', 'with']:
                data, index = self.parseDirect(tokens, index)
                parms.update(data)

            elif connective in ['by', 'from']:
                srcFields, index = self.parseFields(tokens, index)
                srcPath, index = self.parseIndirect(tokens, index)
                prerefs['parms'][srcPath] = srcFields

            elif connective in ['per']:
                data, index = self.parseDirect(tokens, index)
                ioinits.update(data)

            elif connective in ['for']:
                srcFields, index = self.parseFields(tokens, index)
                srcPath, index = self.parseIndirect(tokens, index)
                prerefs['ioinits'][srcPath] = srcFields

            elif connective in ['cum']:
                data, index = self.parseDirect(tokens, index)
                inits.update(data)

            elif connective in ['qua']:
                srcFields, index = self.parseFields(tokens, index)
                srcPath, index = self.parseIndirect(tokens, index)
                prerefs['inits'][srcPath] = srcFields

            else:
                msg = ("Error building {0}. Invalid connective"
                       " '{1}'.".format(command, connective))
                raise excepting.ParseError(msg, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if not kind:
        msg = "ParseError: Building verb '%s'. Missing kind for Doer." %\
            (command)
        raise excepting.ParseError(msg, tokens, index)

    if kind not in doing.Doer.Registry:  # class registration not exist
        msg = "ParseError: Building verb '%s'. No Deed of kind '%s' in registry" %\
            (command, kind)
        raise excepting.ParseError(msg, tokens, index)

    if inode:
        ioinits.update(inode=inode)  # via argument takes precedence over others

    if name:
        inits['name'] = name

    act = acting.Act(actor=kind,
                     registrar=doing.Doer,
                     inits=inits,
                     ioinits=ioinits,
                     parms=parms,
                     prerefs=prerefs,
                     human=self.currentHuman,
                     count=self.currentCount)

    #context = self.currentContext
    if context == NATIVE:
        context = RECUR  #what is native for this command

    if not self.currentFrame.addByContext(act, context):
        msg = "Error building %s. Bad context '%s'." % (command, context)
        raise excepting.ParseError(msg, tokens, index)

    console.profuse("     Added {0} '{1}' with parms '{2}'\n".format(
        ActionContextNames[context], act.actor, act.parms))

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildDo
4,144
def buildBid(self, command, tokens, index): """ bid control tasker [tasker ...] [at period] bid control [me] [at period] bid control all [at period] control: (stop, start, run, abort, ready) tasker: (tasker, me, all) period: number indirectOne indirectOne: sharepath [of relative] (field, value) in sharepath [of relative] """ self.verifyCurrentContext(tokens, index) #currentStore, currentFramer, currentFrame exist try: period = None # no period provided sourcePath = None sourceField = None parms = odict([('taskers', []), ('period', None), ('sources', odict())]) control = tokens[index] index +=1 if control not in ['start', 'run', 'stop', 'abort', 'ready']: msg = "Error building {0}. Bad control = {1}.".format(command, control) raise excepting.ParseError(msg, tokens, index) taskers = [] while index < len(tokens): if (tokens[index] in ['at']): break # end of taskers so do not eat yet tasker = tokens[index] index +=1 self.verifyName(tasker, command, tokens, index) taskers.append(tasker) #resolve later if not taskers: taskers.append('me') while index < len(tokens): # at option connective = tokens[index] index += 1 if connective in ['at']: # parse period direct or indirect try: #parse direct period = max(0.0, Convert2Num(tokens[index])) # period is number index += 1 # eat token except __HOLE__: # parse indirect sourceField, index = self.parseField(tokens, index) sourcePath, index = self.parseIndirect(tokens, index) else: msg = ("Error building {0}. Invalid connective" " '{1}'.".format(command, connective)) raise excepting.ParseError(msg, tokens, index) actorName = 'Want' + control.capitalize() if actorName not in wanting.Want.Registry: msg = "Error building %s. No actor named %s." % (command, actorName) raise excepting.ParseError(msg, tokens, index) parms['taskers'] = taskers #resolve later parms['period'] = period parms['source'] = sourcePath parms['sourceField'] = sourceField act = acting.Act( actor=actorName, registrar=wanting.Want, parms=parms, human=self.currentHuman, count=self.currentCount) except IndexError: msg = "Error building %s. Not enough tokens." % (command,) raise excepting.ParseError(msg, tokens, index) if index != len(tokens): msg = "Error building %s. Unused tokens." % (command,) raise excepting.ParseError(msg, tokens, index) context = self.currentContext if context == NATIVE: context = ENTER #what is native for this command if not self.currentFrame.addByContext(act, context): msg = "Error building %s. Bad context '%s'." % (command, context) raise excepting.ParseError(msg, tokens, index) console.profuse(" Added {0} want '{1}' with parms '{2}'\n".format( ActionContextNames[context], act.actor, act.parms)) return True
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildBid
4,145
def buildReady(self, command, tokens, index):
    """
       ready taskName
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        tasker = tokens[index]
        index +=1
        self.verifyName(tasker, command, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    native = BENTER
    self.makeFiat(tasker, 'ready', native, command, tokens, index)

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildReady
4,146
def buildStart(self, command, tokens, index):
    """
       start taskName
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        tasker = tokens[index]
        index +=1
        self.verifyName(tasker, command, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    native = ENTER
    self.makeFiat(tasker, 'start', native, command, tokens, index)

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildStart
4,147
def buildStop(self, command, tokens, index):
    """
       stop taskName
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        tasker = tokens[index]
        index +=1
        self.verifyName(tasker, command, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    native = EXIT
    self.makeFiat(tasker, 'stop', native, command, tokens, index)

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildStop
4,148
def buildRun(self, command, tokens, index):
    """
       run taskName
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        tasker = tokens[index]
        index +=1
        self.verifyName(tasker, command, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    native = RECUR
    self.makeFiat(tasker, 'run', native, command, tokens, index)

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildRun
4,149
def buildAbort(self, command, tokens, index):
    """
       abort taskName
    """
    self.verifyCurrentContext(tokens, index)  #currentStore, currentFramer, currentFrame exist

    try:
        tasker = tokens[index]
        index +=1
        self.verifyName(tasker, command, tokens, index)

    except __HOLE__:
        msg = "Error building %s. Not enough tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    if index != len(tokens):
        msg = "Error building %s. Unused tokens." % (command,)
        raise excepting.ParseError(msg, tokens, index)

    native = ENTER
    self.makeFiat(tasker, 'abort', native, command, tokens, index)

    return True
IndexError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.buildAbort
4,150
def parseNeedGoal(self, statePath, stateField, tokens, index):
    """Parse required goal
       method must be wrapped in appropriate try excepts
    """
    goalPath = None  #default
    goalField = None  #default
    direct = False
    goal = tokens[index]  #parse required goal

    try:
        goal = Convert2StrBoolCoordNum(tokens[index])  #goal is quoted string, boolean, or number
        index += 1  #eat token
        direct = True
    except __HOLE__:  #means text is not (quoted string, bool, or number) so indirect
        goalField, index = self.parseField(tokens, index)
        goalPath, index = self.parseIndirect(tokens, index)

    return (direct, goal, goalPath, goalField, index)
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.parseNeedGoal
4,151
def parseFramerNeedGoal(self, statePath, stateField, tokens, index): """ Parse required goal for special framer need such as elapsed or recurred method must be wrapped in appropriate try excepts """ goalPath = None #default goalField = None #default direct = False goal = tokens[index] #parse required goal try: goal = Convert2StrBoolCoordNum(tokens[index]) #goal is quoted string, boolean, or number index += 1 #eat token direct = True except __HOLE__: #means text is not (quoted string, bool, or number) so indirect if goal == 'goal': #means goal inferred by relative statePath index += 1 #eat token #now create goal path as inferred from state path #check if statePath can be interpreted as framer state relative chunks = statePath.strip('.').split('.') try: if ((chunks[0] == 'framer') and (chunks[2] == 'state')): #framer relative chunks[2] = 'goal' # .framer.me.state becomes .framer.me.goal else: msg = "ParseError: Goal = 'goal' without framer state path '%s'" %\ (statePath) raise excepting.ParseError(msg, tokens, index) except IndexError: msg = "ParseError: Goal = 'goal' without framer state path '%s'" %\ (statePath) raise excepting.ParseError(msg, tokens, index) goalPath = ".".join(chunks) goalField = stateField #goal field is the same as the given state field else: #not 'goal' so parse as indirect #is 'field in' clause present goalField, index = self.parseField(tokens, index) goalPath, index = self.parseIndirect(tokens, index) return (direct, goal, goalPath, goalField, index)
ValueError
dataset/ETHPy150Open ioflo/ioflo/ioflo/base/building.py/Builder.parseFramerNeedGoal
4,152
def load_special(self, cnf, typ, metadata_construction=False):
    for arg in SPEC[typ]:
        try:
            self.setattr(typ, arg, cnf[arg])
        except __HOLE__:
            pass

    self.context = typ
    self.load_complex(cnf, typ, metadata_construction=metadata_construction)
    self.context = self.def_context
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.load_special
4,153
def load_complex(self, cnf, typ="", metadata_construction=False): try: self.setattr(typ, "policy", Policy(cnf["policy"])) except KeyError: pass # for srv, spec in cnf["service"].items(): # try: # self.setattr(srv, "policy", # Policy(cnf["service"][srv]["policy"])) # except KeyError: # pass try: try: acs = ac_factory(cnf["attribute_map_dir"]) except __HOLE__: acs = ac_factory() if not acs: raise ConfigurationError( "No attribute converters, something is wrong!!") _acs = self.getattr("attribute_converters", typ) if _acs: _acs.extend(acs) else: self.setattr(typ, "attribute_converters", acs) except KeyError: pass if not metadata_construction: try: self.setattr(typ, "metadata", self.load_metadata(cnf["metadata"])) except KeyError: pass
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.load_complex
4,154
def unicode_convert(self, item):
    try:
        return unicode(item, "utf-8")
    except __HOLE__:
        _uc = self.unicode_convert
        if isinstance(item, dict):
            return dict([(key, _uc(val)) for key, val in item.items()])
        elif isinstance(item, list):
            return [_uc(v) for v in item]
        elif isinstance(item, tuple):
            return tuple([_uc(v) for v in item])
        else:
            return item
TypeError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.unicode_convert
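A rough Python 3 analogue (the record above is Python 2 and uses unicode()) of why the masked handler is plausibly a TypeError: trying to decode a container instead of a byte string raises TypeError, which is what lets the method fall through to per-element recursion. The mapping below is made up.

# Hypothetical illustration: str(x, "utf-8") only accepts bytes-like input.
item = {"key": b"value"}                 # a dict cannot be decoded directly
try:
    text = str(item, "utf-8")            # TypeError: need a bytes-like object
except TypeError:
    decoded = {k: str(v, "utf-8") for k, v in item.items()}
print(decoded)                           # {'key': 'value'}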
4,155
def load(self, cnf, metadata_construction=False): """ The base load method, loads the configuration :param cnf: The configuration as a dictionary :param metadata_construction: Is this only to be able to construct metadata. If so some things can be left out. :return: The Configuration instance """ _uc = self.unicode_convert for arg in COMMON_ARGS: if arg == "virtual_organization": if "virtual_organization" in cnf: for key, val in cnf["virtual_organization"].items(): self.vorg[key] = VirtualOrg(None, key, val) continue elif arg == "extension_schemas": # List of filename of modules representing the schemas if "extension_schemas" in cnf: for mod_file in cnf["extension_schemas"]: _mod = self._load(mod_file) self.extension_schema[_mod.NAMESPACE] = _mod try: setattr(self, arg, _uc(cnf[arg])) except __HOLE__: pass except TypeError: # Something that can't be a string setattr(self, arg, cnf[arg]) if "service" in cnf: for typ in ["aa", "idp", "sp", "pdp", "aq"]: try: self.load_special( cnf["service"][typ], typ, metadata_construction=metadata_construction) self.serves.append(typ) except KeyError: pass if "extensions" in cnf: self.do_extensions(cnf["extensions"]) self.load_complex(cnf, metadata_construction=metadata_construction) self.context = self.def_context return self
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.load
4,156
def endpoint(self, service, binding=None, context=None): """ Goes through the list of endpoint specifications for the given type of service and returns a list of endpoint that matches the given binding. If no binding is given all endpoints available for that service will be returned. :param service: The service the endpoint should support :param binding: The expected binding :return: All the endpoints that matches the given restrictions """ spec = [] unspec = [] endps = self.getattr("endpoints", context) if endps and service in endps: for endpspec in endps[service]: try: endp, bind = endpspec if binding is None or bind == binding: spec.append(endp) except __HOLE__: unspec.append(endpspec) if spec: return spec else: return unspec
ValueError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.endpoint
4,157
def log_handler(self): try: _logconf = self.logger except __HOLE__: return None handler = None for htyp in LOG_HANDLER: if htyp in _logconf: if htyp == "syslog": args = _logconf[htyp] if "socktype" in args: import socket if args["socktype"] == "dgram": args["socktype"] = socket.SOCK_DGRAM elif args["socktype"] == "stream": args["socktype"] = socket.SOCK_STREAM else: raise ConfigurationError("Unknown socktype!") try: handler = LOG_HANDLER[htyp](**args) except TypeError: # difference between 2.6 and 2.7 del args["socktype"] handler = LOG_HANDLER[htyp](**args) else: handler = LOG_HANDLER[htyp](**_logconf[htyp]) break if handler is None: # default if rotating logger handler = LOG_HANDLER["rotating"]() if "format" in _logconf: formatter = logging.Formatter(_logconf["format"]) else: formatter = logging.Formatter(LOG_FORMAT) handler.setFormatter(formatter) return handler
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.log_handler
4,158
def setup_logger(self):
    if root_logger.level != logging.NOTSET:  # Someone got there before me
        return root_logger

    _logconf = self.logger
    if _logconf is None:
        return root_logger

    try:
        root_logger.setLevel(LOG_LEVEL[_logconf["loglevel"].lower()])
    except __HOLE__:  # reasonable default
        root_logger.setLevel(logging.INFO)

    root_logger.addHandler(self.log_handler())
    root_logger.info("Logging started")
    return root_logger
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/Config.setup_logger
4,159
def vo_conf(self, vo_name):
    try:
        return self.virtual_organization[vo_name]
    except __HOLE__:
        return None
KeyError
dataset/ETHPy150Open cloudera/hue/desktop/core/ext-py/pysaml2-2.4.0/src/saml2/config.py/SPConfig.vo_conf
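A minimal sketch of the lookup pattern the record above labels: reading an absent key from a plain dict raises KeyError, and the try/except converts that into a None result (dict.get(vo_name) would be the shorter equivalent). The mapping below is invented for illustration.

# Hypothetical illustration with a made-up virtual-organization mapping.
virtual_organization = {"vo-a": {"nameid_format": "persistent"}}
try:
    conf = virtual_organization["vo-b"]   # absent key -> KeyError
except KeyError:
    conf = None
print(conf)                               # None, same behaviour as the guarded lookup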
4,160
@reflection.cache def get_columns(self, connection, table_name, schema=None, **kw): """ kw arguments can be: oracle_resolve_synonyms dblink """ resolve_synonyms = kw.get('oracle_resolve_synonyms', False) dblink = kw.get('dblink', '') info_cache = kw.get('info_cache') (table_name, schema, dblink, synonym) = \ self._prepare_reflection_args(connection, table_name, schema, resolve_synonyms, dblink, info_cache=info_cache) columns = [] if self._supports_char_length: char_length_col = 'char_length' else: char_length_col = 'data_length' params = {"table_name": table_name} text = "SELECT column_name, data_type, %(char_length_col)s, "\ "data_precision, data_scale, "\ "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "\ "WHERE table_name = :table_name" if schema is not None: params['owner'] = schema text += " AND owner = :owner " text += " ORDER BY column_id" text = text % {'dblink': dblink, 'char_length_col': char_length_col} c = connection.execute(sql.text(text), **params) for row in c: (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \ (self.normalize_name(row[0]), row[0], row[1], row[ 2], row[3], row[4], row[5] == 'Y', row[6]) if coltype == 'NUMBER': coltype = NUMBER(precision, scale) elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'): coltype = self.ischema_names.get(coltype)(length) elif 'WITH TIME ZONE' in coltype: coltype = TIMESTAMP(timezone=True) else: coltype = re.sub(r'\(\d+\)', '', coltype) try: coltype = self.ischema_names[coltype] except __HOLE__: util.warn("Did not recognize type '%s' of column '%s'" % (coltype, colname)) coltype = sqltypes.NULLTYPE cdict = { 'name': colname, 'type': coltype, 'nullable': nullable, 'default': default, 'autoincrement': default is None } if orig_colname.lower() == orig_colname: cdict['quote'] = True columns.append(cdict) return columns
KeyError
dataset/ETHPy150Open RoseOu/flasky/venv/lib/python2.7/site-packages/sqlalchemy/dialects/oracle/base.py/OracleDialect.get_columns
4,161
def _cohn_kanade_orig(datadir, im_shape, na_val=-1): """Creates dataset (pair of X and y) from Cohn-Kanade image data (CK+)""" images = [] landmarks = [] labels = [] n = 0 for name in os.listdir(os.path.join(datadir, 'images')): n += 1 print('processed %d' % n) impath = os.path.join(datadir, 'images', name) lmname = name.replace('.png', '_landmarks.txt') lmpath = os.path.join(datadir, 'landmarks', lmname) labelname = name.replace('.png', '_emotion.txt') labelpath = os.path.join(datadir, 'labels', labelname) try: im = imread(impath) except __HOLE__: continue im = standartize(im, im_shape) images.append(im.flatten()) landmarks.append(read_landmarks(lmpath).flatten()) # processing labels if os.path.exists(labelpath): labels.append(read_label(labelpath)) else: labels.append(-1) return np.vstack(images), np.array(landmarks), np.array(labels)
IOError
dataset/ETHPy150Open dfdx/masque/masque/datasets.py/_cohn_kanade_orig
4,162
def get_revision_metadata(changeset, metadata_property_map=None, repository_uri=None, encoding="utf-8"): """ Return dictionary of metadatas defined in metadata_property_map. Uses slow solution (git log query per property) to avoid "delimiter inside result" problem. """ # it looks like git is displaying time in en_US locale even if, i.e. cs_CZ # is set, which is bad when using with %a or %b...so hack it setlocale(LC_ALL, USED_GIT_PARSING_LOCALE) metadata = { "repository_uri" : repository_uri or get_repository_uri() } metadata_property_map = metadata_property_map or { "%h" : {'name' : "hash_abbrev"}, "%H" : {'name' : "hash"}, "%aN" : {'name' : "author_name"}, "%ae" : {'name' : "author_email"}, "%ad" : {'name' : "author_date", 'filter' : filter_parse_date}, "%cN" : {'name' : "commiter_name"}, "%ce" : {'name' : "commiter_email"}, "%cd" : {'name' : "commiter_date", 'filter' : filter_parse_date}, "%s" : {'name' : "subject"}, } for property in metadata_property_map: if 'filter' in metadata_property_map[property]: filter = metadata_property_map[property]['filter'] else: filter = None try: metadata[metadata_property_map[property]['name']] = get_revision_metadata_property(changeset, property, filter) except (CalledProcessError, __HOLE__): metadata[metadata_property_map[property]['name']] = "[failed to retrieve]" log.error("Error when parsing metadata: %s" % traceback.format_exc()) resetlocale() return metadata
ValueError
dataset/ETHPy150Open ella/citools/citools/git.py/get_revision_metadata
4,163
def __init__(self, qs, request): self.qs = qs self.request = request self.page_size = request.session.get('page_size', self.available_page_sizes[0]) # overwrite with new value, if it is correct if 'page_size' in request.GET: try: ps = int(request.GET['page_size']) if ps in self.available_page_sizes: self.page_size = ps request.session['page_size'] = ps except exceptions.ValueError: pass try: self.page_number = int(request.GET.get('page', '1')) except __HOLE__: self.page_number = 1 self.paginator = Paginator(self.qs, self.page_size) try: self.page = self.paginator.page(self.page_number) except (EmptyPage, InvalidPage): self.page = self.paginator.page(self.paginator.num_pages) # calculate pages self.pages = [] add_none = True for page in self.paginator.page_range: if (page < 3 or self.paginator.num_pages - page < 3 or abs(page - self.page.number) < 3): add_none = True self.pages.append(page) elif add_none: self.pages.append(None) add_none = False # create get_vars get_vars = request.GET.copy() if 'page' in get_vars: del get_vars['page'] if len(get_vars.keys()) > 0: self.get_vars = "&%s" % get_vars.urlencode() else: self.get_vars = '' if 'page_size' in get_vars: del get_vars['page_size'] self.hidden_inputs = get_vars.items()
ValueError
dataset/ETHPy150Open dndtools/dndtools/dndtools/dnd/dnd_paginator.py/DndPaginator.__init__
4,164
def _bulk_register(watch_states, notifier, cb, details_filter=None):
    """Bulk registers a callback associated with many states."""
    registered = []
    try:
        for state in watch_states:
            if not notifier.is_registered(state, cb,
                                          details_filter=details_filter):
                notifier.register(state, cb,
                                  details_filter=details_filter)
                registered.append((state, cb))
    except __HOLE__:
        with excutils.save_and_reraise_exception():
            _bulk_deregister(notifier, registered,
                             details_filter=details_filter)
    else:
        return registered
ValueError
dataset/ETHPy150Open openstack/taskflow/taskflow/listeners/base.py/_bulk_register
4,165
def main(): parser = argparse.ArgumentParser('pupa', description='pupa CLI') parser.add_argument('--debug', nargs='?', const='pdb', default=None, help='drop into pdb (or set =ipdb =pudb)') parser.add_argument('--loglevel', default='INFO', help=('set log level. options are: ' 'DEBUG|INFO|WARNING|ERROR|CRITICAL ' '(default is INFO)')) subparsers = parser.add_subparsers(dest='subcommand') # configure Django before model imports if os.environ.get("DJANGO_SETTINGS_MODULE") is None: os.environ['DJANGO_SETTINGS_MODULE'] = 'pupa.settings' subcommands = {} for mod in COMMAND_MODULES: try: cmd = importlib.import_module(mod).Command(subparsers) subcommands[cmd.name] = cmd except __HOLE__ as e: logger.error('exception "%s" prevented loading of %s module', e, mod) # process args args, other = parser.parse_known_args() # set log level from command line handler_level = getattr(logging, args.loglevel.upper(), 'INFO') settings.LOGGING['handlers']['default']['level'] = handler_level logging.config.dictConfig(settings.LOGGING) # turn debug on if args.debug: _debugger = importlib.import_module(args.debug) # turn on PDB-on-error mode # stolen from http://stackoverflow.com/questions/1237379/ # if this causes problems in interactive mode check that page def _tb_info(type, value, tb): traceback.print_exception(type, value, tb) _debugger.pm() sys.excepthook = _tb_info if not args.subcommand: parser.print_help() else: try: subcommands[args.subcommand].handle(args, other) except CommandError as e: logger.critical(str(e))
ImportError
dataset/ETHPy150Open opencivicdata/pupa/pupa/cli/__main__.py/main
4,166
def Paginate(self, query, default_limit):
    """Returns a list of entities limited to limit, with a next_page cursor."""
    try:
        limit = int(self.request.get('limit', default_limit))
    except __HOLE__:
        limit = default_limit
    if limit not in QUERY_LIMITS:
        limit = default_limit

    cursor = self.request.get('page', '')
    if cursor:
        query.with_cursor(cursor)

    entities = list(query.fetch(limit))

    if len(entities) == limit:
        next_page = query.cursor()
    else:
        next_page = None

    self._page = {
        'limit': limit,
        'next_page': next_page,
        'results_count': len(entities),
    }
    return entities
ValueError
dataset/ETHPy150Open google/simian/src/simian/mac/admin/__init__.py/AdminHandler.Paginate
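A small sketch of the masked case above: int() on a non-numeric query parameter raises ValueError, so the handler falls back to the default page size. The values below are invented.

# Hypothetical illustration: user-supplied 'limit' is not a number.
default_limit = 25
raw_limit = "abc"          # e.g. ?limit=abc in the query string
try:
    limit = int(raw_limit)
except ValueError:
    limit = default_limit
print(limit)               # 25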
4,167
def rescale(self, units):
    '''
    Return a copy of the AnalogSignal(Array) converted to the specified
    units
    '''
    to_dims = pq.quantity.validate_dimensionality(units)
    if self.dimensionality == to_dims:
        to_u = self.units
        signal = np.array(self)
    else:
        to_u = pq.Quantity(1.0, to_dims)
        from_u = pq.Quantity(1.0, self.dimensionality)
        try:
            cf = pq.quantity.get_conversion_factor(from_u, to_u)
        except __HOLE__:
            raise ValueError('Unable to convert between units of "%s" \
and "%s"' % (from_u._dimensionality, to_u._dimensionality))
        signal = cf * self.magnitude
    new = self.__class__(signal=signal, units=to_u,
                         sampling_rate=self.sampling_rate)
    new._copy_data_complement(self)
    new.annotations.update(self.annotations)
    return new
AssertionError
dataset/ETHPy150Open NeuralEnsemble/python-neo/neo/core/analogsignal.py/BaseAnalogSignal.rescale
4,168
def test_composite_attr_happy(self):
    obj = FakeResource.existing(**{'attr3': '3'})

    try:
        self.assertEqual('3', obj.third)
    except __HOLE__:
        self.fail("third was not found as expected")
AttributeError
dataset/ETHPy150Open openstack/python-openstacksdk/openstack/tests/unit/test_resource.py/ResourceTests.test_composite_attr_happy
4,169
def test_composite_attr_fallback(self):
    obj = FakeResource.existing(**{'attr_three': '3'})

    try:
        self.assertEqual('3', obj.third)
    except __HOLE__:
        self.fail("third was not found in fallback as expected")
AttributeError
dataset/ETHPy150Open openstack/python-openstacksdk/openstack/tests/unit/test_resource.py/ResourceTests.test_composite_attr_fallback
4,170
def _test_resource_serialization(self, session_method, resource_method): attr_type = resource.Resource class Test(resource.Resource): allow_create = True attr = resource.prop("attr", type=attr_type) the_id = 123 sot = Test() sot.attr = resource.Resource({"id": the_id}) self.assertEqual(attr_type, type(sot.attr)) def fake_call(*args, **kwargs): attrs = kwargs["json"] try: json.dumps(attrs) except __HOLE__ as e: self.fail("Unable to serialize _attrs: %s" % e) resp = mock.Mock() resp.json = mock.Mock(return_value=attrs) return resp session = mock.Mock() setattr(session, session_method, mock.Mock(side_effect=fake_call)) if resource_method == "create_by_id": session.create_by_id(session, sot._attrs) elif resource_method == "update_by_id": session.update_by_id(session, None, sot._attrs)
TypeError
dataset/ETHPy150Open openstack/python-openstacksdk/openstack/tests/unit/test_resource.py/ResourceMapping._test_resource_serialization
4,171
def get_access_token(self, target_id, scope, grant_type):
    """
    :param target_id:
    :param scope:
    :param grant_type:
    :return:
    """
    # No default, either there is an explicit policy or there is not
    try:
        lifetime = self.token_policy['access_token'][target_id][grant_type]
    except __HOLE__:
        raise NotAllowed(
            'Access token for grant_type {} for target_id {} not allowed')

    sid = rndstr(32)
    return self.token_factory(sid, target_id=target_id, scope=scope,
                              grant_type=grant_type, lifetime=lifetime)
KeyError
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/token_handler.py/TokenHandler.get_access_token
4,172
def refresh_access_token(self, target_id, token, grant_type, **kwargs): """ :param target_id: Who gave me this token :param token: The refresh_token :param grant_type: Which grant type the token is connected to :param kwargs: Extra key word arguments :return: New access_token """ # Check that the token is an refresh token info = self.refresh_token_factory.get_info(token) # Make sure the token should is usable by the client to get a # refresh token try: if target_id != info["azr"]: raise NotAllowed("{} can't use this token".format(target_id)) except KeyError: if target_id not in info['aud']: raise NotAllowed("{} can't use this token".format(target_id)) if self.token_factory.is_valid(info): try: lifetime = self.token_policy['access_token'][target_id][ grant_type] except __HOLE__: raise NotAllowed( 'Issue access token for grant_type {} for target_id {} not allowed') else: sid = self.token_factory.db[info['jti']] try: _aud = kwargs['aud'] except KeyError: _aud = info['aud'] return self.token_factory( sid, target_id=target_id, lifetime=lifetime, aud=_aud)
KeyError
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/token_handler.py/TokenHandler.refresh_access_token
4,173
def get_refresh_token(self, target_id, grant_type, sid):
    try:
        lifetime = self.token_policy['refresh_token'][target_id][grant_type]
    except __HOLE__:
        raise NotAllowed(
            'Issue access token for grant_type {} for target_id {} not allowed')
    else:
        return self.refresh_token_factory(
            sid, target_id=target_id, lifetime=lifetime)
KeyError
dataset/ETHPy150Open rohe/pyoidc/src/oic/utils/token_handler.py/TokenHandler.get_refresh_token
4,174
def follow_messages(so, stdout, stderr):
    try:
        resp = follow_events(so["basedir"], "messages", catchup=True)
        if resp.status != 200:
            print >>stderr, "Error:", resp.status, resp.reason
            return 1
        # httplib is not really built to read a stream of lines
        while True:
            fieldname, value = get_field(resp)
            if fieldname == "data":
                data = json.loads(value)
                if data["type"] == "inbound-messages":
                    stdout.write(render_message(data["new_value"]))
                    stdout.flush()
    except (__HOLE__, EOFError):
        return 0
KeyboardInterrupt
dataset/ETHPy150Open warner/petmail/petmail/scripts/messages.py/follow_messages
4,175
def get_filing_forms_w_sections(self):
    """
    Returns a list of tuples, each containing a FilingForm object and
    list of FilingFormSection objects, if specific sections of the
    filing form are relevant to the model.
    """
    from calaccess_raw.annotations import FilingForm
    forms_dict = {}
    for i in self.FILING_FORMS:
        if isinstance(i, FilingForm):
            try:
                forms_dict[i]
            except KeyError:
                forms_dict[i] = []
        else:
            try:
                forms_dict[i.form].append(i)
            except __HOLE__:
                forms_dict[i.form] = [i]
    return sorted(forms_dict.items(), key=lambda x: x[0].id)
KeyError
dataset/ETHPy150Open california-civic-data-coalition/django-calaccess-raw-data/calaccess_raw/models/base.py/CalAccessBaseModel.get_filing_forms_w_sections
4,176
def _run_pyroma(data):  # pragma: no cover
    """Run pyroma (used to perform checks before releasing a new version).
    """
    import sys
    from zest.releaser.utils import ask
    if not ask("Run pyroma on the package before uploading?"):
        return
    try:
        from pyroma import run
        result = run(data['tagdir'])
        if result != 10:
            if not ask("Continue?"):
                sys.exit(1)
    except __HOLE__:
        if not ask("pyroma not available. Continue?"):
            sys.exit(1)
ImportError
dataset/ETHPy150Open LabPy/lantz/lantz/__init__.py/_run_pyroma
4,177
@staticmethod
def are_concurrent(*lines):
    """Is a sequence of linear entities concurrent?

    Two or more linear entities are concurrent if they all
    intersect at a single point.

    Parameters
    ==========

    lines : a sequence of linear entities.

    Returns
    =======

    True : if the set of linear entities are concurrent,
    False : otherwise.

    Notes
    =====

    Simply take the first two lines and find their intersection.
    If there is no intersection, then the first two lines were
    parallel and had no intersection so concurrency is impossible
    amongst the whole set. Otherwise, check to see if the
    intersection point of the first two lines is a member on
    the rest of the lines. If so, the lines are concurrent.

    See Also
    ========

    sympy.geometry.util.intersection

    Examples
    ========

    >>> from sympy import Point3D, Line3D
    >>> p1, p2 = Point3D(0, 0, 0), Point3D(3, 5, 2)
    >>> p3, p4 = Point3D(-2, -2, -2), Point3D(0, 2, 1)
    >>> l1, l2, l3 = Line3D(p1, p2), Line3D(p1, p3), Line3D(p1, p4)
    >>> Line3D.are_concurrent(l1, l2, l3)
    True
    >>> l4 = Line3D(p2, p3)
    >>> Line3D.are_concurrent(l2, l3, l4)
    False

    """
    # Concurrency requires intersection at a single point; One linear
    # entity cannot be concurrent.
    if len(lines) <= 1:
        return False

    try:
        # Get the intersection (if parallel)
        p = lines[0].intersection(lines[1])
        if len(p) == 0:
            return False

        # Make sure the intersection is on every linear entity
        for line in lines[2:]:
            if p[0] not in line:
                return False
        return True
    except __HOLE__:
        return False
AttributeError
dataset/ETHPy150Open sympy/sympy/sympy/geometry/line3d.py/LinearEntity3D.are_concurrent
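A hedged sketch of the masked branch above: calling .intersection on something that is not a linear entity raises AttributeError, which the method treats as "not concurrent". The objects below are stand-ins, not sympy types.

# Hypothetical illustration: a plain tuple has no .intersection method.
entities = [(0, 0, 0), (3, 5, 2)]       # not Line3D objects
try:
    p = entities[0].intersection(entities[1])
except AttributeError:
    p = False                           # mirrors the 'return False' in the record
print(p)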
4,178
def event_choices(events):
    """ Get the possible events from settings """
    if events is None:
        msg = "Please add some events in settings.WEBHOOK_EVENTS."
        raise ImproperlyConfigured(msg)
    try:
        choices = [(x, x) for x in events]
    except __HOLE__:
        """ Not a valid iterator, so we raise an exception """
        msg = "settings.WEBHOOK_EVENTS must be an iterable object."
        raise ImproperlyConfigured(msg)
    return choices
TypeError
dataset/ETHPy150Open pydanny/dj-webhooks/djwebhooks/models.py/event_choices
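A minimal sketch of why the masked handler above is plausibly a TypeError: iterating over a non-iterable settings value raises TypeError, which the function reports as a configuration error. The value below is made up.

# Hypothetical illustration: WEBHOOK_EVENTS set to a non-iterable value.
events = 42
try:
    choices = [(x, x) for x in events]   # int is not iterable -> TypeError
except TypeError:
    choices = None
    print("settings.WEBHOOK_EVENTS must be an iterable object.")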
4,179
def handle_noargs(self, **options):
    from django.db import connection, transaction, models
    from django.conf import settings
    from django.core.management.sql import custom_sql_for_model, emit_post_sync_signal

    verbosity = int(options.get('verbosity', 1))
    interactive = options.get('interactive')
    show_traceback = options.get('traceback', False)

    self.style = no_style()

    # Import the 'management' module within each installed app, to register
    # dispatcher events.
    for app_name in settings.INSTALLED_APPS:
        try:
            import_module('.management', app_name)
        except __HOLE__, exc:
            # This is slightly hackish. We want to ignore ImportErrors
            # if the "management" module itself is missing -- but we don't
            # want to ignore the exception if the management module exists
            # but raises an ImportError for some reason. The only way we
            # can do this is to check the text of the exception. Note that
            # we're a bit broad in how we check the text, because different
            # Python implementations may not use the same text.
            # CPython uses the text "No module named management"
            # PyPy uses "No module named myproject.myapp.management"
            msg = exc.args[0]
            if not msg.startswith('No module named') or 'management' not in msg:
                raise

    cursor = connection.cursor()

    # Get a list of already installed *models* so that references work right.
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    created_models = set()
    pending_references = {}

    # Create the tables for each model
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        model_list = models.get_models(app)
        for model in model_list:
            # Create the model's database table, if it doesn't already exist.
            if verbosity >= 2:
                print "Processing %s.%s model" % (app_name, model._meta.object_name)
            if connection.introspection.table_name_converter(model._meta.db_table) in tables:
                continue
            sql, references = connection.creation.sql_create_model(model, self.style, seen_models)
            seen_models.add(model)
            created_models.add(model)
            for refto, refs in references.items():
                pending_references.setdefault(refto, []).extend(refs)
                if refto in seen_models:
                    sql.extend(connection.creation.sql_for_pending_references(refto, self.style, pending_references))
            sql.extend(connection.creation.sql_for_pending_references(model, self.style, pending_references))
            if verbosity >= 1 and sql:
                print "Creating table %s" % model._meta.db_table
            for statement in sql:
                cursor.execute(statement)
            tables.append(connection.introspection.table_name_converter(model._meta.db_table))

    # Create the m2m tables. This must be done after all tables have been created
    # to ensure that all referred tables will exist.
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        model_list = models.get_models(app)
        for model in model_list:
            if model in created_models:
                sql = connection.creation.sql_for_many_to_many(model, self.style)
                if sql:
                    if verbosity >= 2:
                        print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
                    for statement in sql:
                        cursor.execute(statement)

    transaction.commit_unless_managed()

    # Send the post_syncdb signal, so individual apps can do whatever they need
    # to do at this point.
    emit_post_sync_signal(created_models, verbosity, interactive)

    # The connection may have been closed by a syncdb handler.
    cursor = connection.cursor()

    # Install custom SQL for the app (but only if this
    # is a model we've just created)
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        for model in models.get_models(app):
            if model in created_models:
                custom_sql = custom_sql_for_model(model, self.style)
                if custom_sql:
                    if verbosity >= 1:
                        print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
                    try:
                        for sql in custom_sql:
                            cursor.execute(sql)
                    except Exception, e:
                        sys.stderr.write("Failed to install custom SQL for %s.%s model: %s\n" % \
                                            (app_name, model._meta.object_name, e))
                        if show_traceback:
                            import traceback
                            traceback.print_exc()
                        transaction.rollback_unless_managed()
                    else:
                        transaction.commit_unless_managed()
                else:
                    if verbosity >= 2:
                        print "No custom SQL for %s.%s model" % (app_name, model._meta.object_name)

    # Install SQL indicies for all newly created models
    for app in models.get_apps():
        app_name = app.__name__.split('.')[-2]
        for model in models.get_models(app):
            if model in created_models:
                index_sql = connection.creation.sql_indexes_for_model(model, self.style)
                if index_sql:
                    if verbosity >= 1:
                        print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
                    try:
                        for sql in index_sql:
                            cursor.execute(sql)
                    except Exception, e:
                        sys.stderr.write("Failed to install index for %s.%s model: %s\n" % \
                                            (app_name, model._meta.object_name, e))
                        transaction.rollback_unless_managed()
                    else:
                        transaction.commit_unless_managed()

    # Install the 'initial_data' fixture, using format discovery
    from django.core.management import call_command
    call_command('loaddata', 'initial_data', verbosity=verbosity)
ImportError
dataset/ETHPy150Open CollabQ/CollabQ/vendor/django/core/management/commands/syncdb.py/Command.handle_noargs
4,180
def command_start(args):
    procfile_path = _procfile_path(args.app_root, _choose_procfile(args))
    procfile = _procfile(procfile_path)
    concurrency = _parse_concurrency(args.concurrency)
    env = _read_env(args.app_root, args.env)
    quiet = _parse_quiet(args.quiet)
    port = _choose_port(args, env)

    if args.processes:
        processes = compat.OrderedDict()
        for name in args.processes:
            try:
                processes[name] = procfile.processes[name]
            except __HOLE__:
                raise CommandError("Process type '{0}' does not exist in Procfile".format(name))
    else:
        processes = procfile.processes

    manager = Manager()

    for p in environ.expand_processes(processes,
                                      concurrency=concurrency,
                                      env=env,
                                      quiet=quiet,
                                      port=port):
        e = os.environ.copy()
        e.update(p.env)
        manager.add_process(p.name, p.cmd, quiet=p.quiet, env=e)

    manager.loop()

    sys.exit(manager.returncode)
KeyError
dataset/ETHPy150Open nickstenning/honcho/honcho/command.py/command_start
4,181
def _procfile(filename):
    try:
        with open(filename) as f:
            content = f.read()
    except IOError:
        raise CommandError('Procfile does not exist or is not a file')

    try:
        procfile = environ.parse_procfile(content)
    except __HOLE__ as e:
        raise CommandError(str(e))

    return procfile
AssertionError
dataset/ETHPy150Open nickstenning/honcho/honcho/command.py/_procfile
4,182
def _read_env(app_root, env):
    files = [e.strip() for e in env.split(',')]
    content = []
    for envfile in files:
        try:
            with open(os.path.join(app_root, envfile)) as f:
                content.append(f.read())
        except __HOLE__:
            pass

    return environ.parse('\n'.join(content))
IOError
dataset/ETHPy150Open nickstenning/honcho/honcho/command.py/_read_env
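A short sketch of the masked branch: open() on a missing env file raises IOError (an alias of OSError on Python 3), and the loop simply skips that file. The filenames below are illustrative only.

# Hypothetical illustration with made-up filenames.
content = []
for envfile in [".env", ".env.local"]:
    try:
        with open(envfile) as f:
            content.append(f.read())
    except IOError:          # file absent or unreadable -> skip it
        pass
print(len(content))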
4,183
def _mkdir(path):
    if os.path.exists(path):
        return
    try:
        os.makedirs(path)
    except __HOLE__ as e:
        log.error("Could not create export directory")
        raise CommandError(e)
OSError
dataset/ETHPy150Open nickstenning/honcho/honcho/command.py/_mkdir
4,184
def _write_file(path, content):
    _mkdir(os.path.dirname(path))
    try:
        with open(path, 'w') as fp:
            fp.write(content)
    except __HOLE__ as e:
        log.error("Could not write to export file")
        raise CommandError(e)
IOError
dataset/ETHPy150Open nickstenning/honcho/honcho/command.py/_write_file
4,185
def handle(self):
    """Runs through the SMTP session, receiving commands, calling handlers,
    and sending responses.

    :raises: :class:`~slimta.smtp.ConnectionLost` or unhandled exceptions.

    """
    if self.tls and self.tls_immediately:
        if not self._encrypt_session():
            tls_failure.send(self.io, flush=True)
            return

    command, arg = b'BANNER_', None
    while True:
        try:
            try:
                if command:
                    self._handle_command(command, arg)
                else:
                    unknown_command.send(self.io)
            except __HOLE__:
                self._call_custom_handler('CLOSE')
                break
            except ConnectionLost:
                raise
            except UnicodeDecodeError:
                bad_arguments.send(self.io)
                raise
            except Exception:
                unhandled_error.send(self.io)
                raise
            finally:
                self.io.flush_send()

            command, arg = self._recv_command()
        except Timeout:
            timed_out.send(self.io)
            self.io.flush_send()
            raise ConnectionLost()
StopIteration
dataset/ETHPy150Open slimta/python-slimta/slimta/smtp/server.py/Server.handle
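A hedged sketch (not slimta's actual API; the function and command names are invented) of the control flow the masked handler implements: a lower layer raises StopIteration to signal "stop the command loop", and the caller catches it to break out cleanly.

# Hypothetical illustration of using StopIteration as a 'close session' signal.
def handle_command(command):
    if command == "QUIT":
        raise StopIteration()    # ask the loop to shut down

for command in ["EHLO", "NOOP", "QUIT", "MAIL"]:
    try:
        handle_command(command)
    except StopIteration:
        print("closing connection")
        break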
4,186
def _command_AUTH(self, arg): if 'AUTH' not in self.extensions: unknown_command.send(self.io) return if not self.ehlo_as or self.authed or self.have_mailfrom: bad_sequence.send(self.io) return auth = self.extensions.getparam('AUTH') try: result = auth.server_attempt(arg) except __HOLE__: bad_arguments.send(self.io) return except ServerAuthError as e: e.reply.send(self.io) return reply = Reply('235', '2.7.0 Authentication successful') self._call_custom_handler('AUTH', reply, result) reply.send(self.io) self._check_close_code(reply) if reply.code == '235': self.authed = True
ValueError
dataset/ETHPy150Open slimta/python-slimta/slimta/smtp/server.py/Server._command_AUTH
4,187
def _command_MAIL(self, arg): match = from_pattern.match(arg) if not match: bad_arguments.send(self.io) return start = match.end(0) end = find_outside_quotes(arg, b'>', start) if end == -1: bad_arguments.send(self.io) return address = arg[start:end].decode('utf-8') if not self.ehlo_as: bad_sequence.send(self.io) return if self.have_mailfrom: bad_sequence.send(self.io) return params = self._gather_params(arg[end+1:]) if b'SIZE' in params: try: size = int(params[b'SIZE']) except __HOLE__: bad_arguments.send(self.io) return max_size = self.extensions.getparam('SIZE', filter=int) if max_size is not None: if size > max_size: m = '5.3.4 Message size exceeds {0} limit'.format(max_size) Reply('552', m).send(self.io) return else: unknown_parameter.send(self.io) return reply = Reply('250', '2.1.0 Sender <{0}> Ok'.format(address)) self._call_custom_handler('MAIL', reply, address, params) reply.send(self.io) self._check_close_code(reply) self.have_mailfrom = self.have_mailfrom or (reply.code == '250')
ValueError
dataset/ETHPy150Open slimta/python-slimta/slimta/smtp/server.py/Server._command_MAIL
4,188
def synopsis(filename, cache={}): """Get the one-line summary out of a module file.""" mtime = os.stat(filename).st_mtime lastupdate, result = cache.get(filename, (0, None)) if lastupdate < mtime: info = inspect.getmoduleinfo(filename) try: file = open(filename) except __HOLE__: # module can't be opened, so skip it return None if info and 'b' in info[2]: # binary modules have to be imported try: module = imp.load_module('__temp__', file, filename, info[1:]) except: return None result = (module.__doc__ or '').splitlines()[0] del sys.modules['__temp__'] else: # text modules can be directly examined result = source_synopsis(file) file.close() cache[filename] = (mtime, result) return result
IOError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/synopsis
4,189
def safeimport(path, forceload=0, cache={}): """Import a module; handle errors; return None if the module isn't found. If the module *is* found but an exception occurs, it's wrapped in an ErrorDuringImport exception and reraised. Unlike __import__, if a package path is specified, the module at the end of the path is returned, not the package at the beginning. If the optional 'forceload' argument is 1, we reload the module from disk (unless it's a dynamic extension).""" try: # If forceload is 1 and the module has been previously loaded from # disk, we always have to reload the module. Checking the file's # mtime isn't good enough (e.g. the module could contain a class # that inherits from another module that has changed). if forceload and path in sys.modules: if path not in sys.builtin_module_names: # Avoid simply calling reload() because it leaves names in # the currently loaded module lying around if they're not # defined in the new source file. Instead, remove the # module from sys.modules and re-import. Also remove any # submodules because they won't appear in the newly loaded # module's namespace if they're already in sys.modules. subs = [m for m in sys.modules if m.startswith(path + '.')] for key in [path] + subs: # Prevent garbage collection. cache[key] = sys.modules[key] del sys.modules[key] module = __import__(path) except: # Did the error occur before or after the module was found? (exc, value, tb) = info = sys.exc_info() if path in sys.modules: # An error occurred while executing the imported module. raise ErrorDuringImport(sys.modules[path].__file__, info) elif exc is SyntaxError: # A SyntaxError occurred before we could execute the module. raise ErrorDuringImport(value.filename, info) elif exc is ImportError and \ split(lower(str(value)))[:2] == ['no', 'module']: # The module was not found. return None else: # Some other error occurred during the importing process. raise ErrorDuringImport(path, sys.exc_info()) for part in split(path, '.')[1:]: try: module = getattr(module, part) except __HOLE__: return None return module # ---------------------------------------------------- formatter base class
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/safeimport
4,190
def document(self, object, name=None, *args): """Generate documentation for an object.""" args = (object, name) + args # 'try' clause is to attempt to handle the possibility that inspect # identifies something in a way that pydoc itself has issues handling; # think 'super' and how it is a descriptor (which raises the exception # by lacking a __name__ attribute) and an instance. if inspect.isgetsetdescriptor(object): return self.docdata(*args) if inspect.ismemberdescriptor(object): return self.docdata(*args) try: if inspect.ismodule(object): return self.docmodule(*args) if inspect.isclass(object): return self.docclass(*args) if inspect.isroutine(object): return self.docroutine(*args) except __HOLE__: pass if isinstance(object, property): return self.docproperty(*args) return self.docother(*args)
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/Doc.document
4,191
def getdocloc(self, object): """Return the location of module docs or None""" try: file = inspect.getabsfile(object) except __HOLE__: file = '(built-in)' docloc = os.environ.get("PYTHONDOCS", "http://docs.python.org/library") basedir = os.path.join(sys.exec_prefix, "lib", "python"+sys.version[0:3]) if (isinstance(object, type(os)) and (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', 'marshal', 'posix', 'signal', 'sys', 'thread', 'zipimport') or (file.startswith(basedir) and not file.startswith(os.path.join(basedir, 'site-packages'))))): if docloc.startswith("http://"): docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__) else: docloc = os.path.join(docloc, object.__name__ + ".html") else: docloc = None return docloc # -------------------------------------------- HTML documentation generator
TypeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/Doc.getdocloc
4,192
def docmodule(self, object, name=None, mod=None, *ignored):
    """Produce HTML documentation for a module object."""
    name = object.__name__ # ignore the passed-in name
    try:
        all = object.__all__
    except AttributeError:
        all = None
    parts = split(name, '.')
    links = []
    for i in range(len(parts)-1):
        links.append(
            '<a href="%s.html"><font color="#ffffff">%s</font></a>' %
            (join(parts[:i+1], '.'), parts[i]))
    linkedname = join(links + parts[-1:], '.')
    head = '<big><big><strong>%s</strong></big></big>' % linkedname
    try:
        path = inspect.getabsfile(object)
        url = path
        if sys.platform == 'win32':
            import nturl2path
            url = nturl2path.pathname2url(path)
        filelink = '<a href="file:%s">%s</a>' % (url, path)
    except __HOLE__:
        filelink = '(built-in)'
    info = []
    if hasattr(object, '__version__'):
        version = str(object.__version__)
        if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
            version = strip(version[11:-1])
        info.append('version %s' % self.escape(version))
    if hasattr(object, '__date__'):
        info.append(self.escape(str(object.__date__)))
    if info:
        head = head + ' (%s)' % join(info, ', ')
    docloc = self.getdocloc(object)
    if docloc is not None:
        docloc = '<br><a href="%(docloc)s">Module Docs</a>' % locals()
    else:
        docloc = ''
    result = self.heading(
        head, '#ffffff', '#7799ee',
        '<a href=".">index</a><br>' + filelink + docloc)

    modules = inspect.getmembers(object, inspect.ismodule)

    classes, cdict = [], {}
    for key, value in inspect.getmembers(object, inspect.isclass):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None or
            (inspect.getmodule(value) or object) is object):
            if visiblename(key, all):
                classes.append((key, value))
                cdict[key] = cdict[value] = '#' + key
    for key, value in classes:
        for base in value.__bases__:
            key, modname = base.__name__, base.__module__
            module = sys.modules.get(modname)
            if modname != name and module and hasattr(module, key):
                if getattr(module, key) is base:
                    if not key in cdict:
                        cdict[key] = cdict[base] = modname + '.html#' + key
    funcs, fdict = [], {}
    for key, value in inspect.getmembers(object, inspect.isroutine):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None or
            inspect.isbuiltin(value) or inspect.getmodule(value) is object):
            if visiblename(key, all):
                funcs.append((key, value))
                fdict[key] = '#-' + key
                if inspect.isfunction(value): fdict[value] = fdict[key]
    data = []
    for key, value in inspect.getmembers(object, isdata):
        if visiblename(key, all):
            data.append((key, value))

    doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
    doc = doc and '<tt>%s</tt>' % doc
    result = result + '<p>%s</p>\n' % doc

    if hasattr(object, '__path__'):
        modpkgs = []
        for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
            modpkgs.append((modname, name, ispkg, 0))
        modpkgs.sort()
        contents = self.multicolumn(modpkgs, self.modpkglink)
        result = result + self.bigsection(
            'Package Contents', '#ffffff', '#aa55cc', contents)
    elif modules:
        contents = self.multicolumn(
            modules, lambda key_value, s=self: s.modulelink(key_value[1]))
        result = result + self.bigsection(
            'Modules', '#ffffff', '#aa55cc', contents)

    if classes:
        classlist = map(lambda key_value: key_value[1], classes)
        contents = [
            self.formattree(inspect.getclasstree(classlist, 1), name)]
        for key, value in classes:
            contents.append(self.document(value, key, name, fdict, cdict))
        result = result + self.bigsection(
            'Classes', '#ffffff', '#ee77aa', join(contents))
    if funcs:
        contents = []
        for key, value in funcs:
            contents.append(self.document(value, key, name, fdict, cdict))
        result = result + self.bigsection(
            'Functions', '#ffffff', '#eeaa77', join(contents))
    if data:
        contents = []
        for key, value in data:
            contents.append(self.document(value, key))
        result = result + self.bigsection(
            'Data', '#ffffff', '#55aa55', join(contents, '<br>\n'))
    if hasattr(object, '__author__'):
        contents = self.markup(str(object.__author__), self.preformat)
        result = result + self.bigsection(
            'Author', '#ffffff', '#7799ee', contents)
    if hasattr(object, '__credits__'):
        contents = self.markup(str(object.__credits__), self.preformat)
        result = result + self.bigsection(
            'Credits', '#ffffff', '#7799ee', contents)

    return result
TypeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/HTMLDoc.docmodule
4,193
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
             *ignored):
    """Produce HTML documentation for a class object."""
    realname = object.__name__
    name = name or realname
    bases = object.__bases__

    contents = []
    push = contents.append

    # Cute little class to pump out a horizontal rule between sections.
    class HorizontalRule:
        def __init__(self):
            self.needone = 0
        def maybe(self):
            if self.needone:
                push('<hr>\n')
            self.needone = 1
    hr = HorizontalRule()

    # List the mro, if non-trivial.
    mro = deque(inspect.getmro(object))
    if len(mro) > 2:
        hr.maybe()
        push('<dl><dt>Method resolution order:</dt>\n')
        for base in mro:
            push('<dd>%s</dd>\n' % self.classlink(base,
                                                  object.__module__))
        push('</dl>\n')

    def spill(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                push(self.document(getattr(object, name), name, mod,
                                   funcs, classes, mdict, object))
                push('\n')
        return attrs

    def spilldescriptors(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                push(self._docdescriptor(name, value, mod))
        return attrs

    def spilldata(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                base = self.docother(getattr(object, name), name, mod)
                if (hasattr(value, '__call__') or
                        inspect.isdatadescriptor(value)):
                    doc = getattr(value, "__doc__", None)
                else:
                    doc = None
                if doc is None:
                    push('<dl><dt>%s</dl>\n' % base)
                else:
                    doc = self.markup(getdoc(value), self.preformat,
                                      funcs, classes, mdict)
                    doc = '<dd><tt>%s</tt>' % doc
                    push('<dl><dt>%s%s</dl>\n' % (base, doc))
                push('\n')
        return attrs

    attrs = filter(lambda data: visiblename(data[0]),
                   classify_class_attrs(object))
    mdict = {}
    for key, kind, homecls, value in attrs:
        mdict[key] = anchor = '#' + name + '-' + key
        value = getattr(object, key)
        try:
            # The value may not be hashable (e.g., a data attr with
            # a dict or list value).
            mdict[value] = anchor
        except __HOLE__:
            pass

    while attrs:
        if mro:
            thisclass = mro.popleft()
        else:
            thisclass = attrs[0][2]
        attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

        if thisclass is __builtin__.object:
            attrs = inherited
            continue
        elif thisclass is object:
            tag = 'defined here'
        else:
            tag = 'inherited from %s' % self.classlink(thisclass,
                                                       object.__module__)
        tag += ':<br>\n'

        # Sort attrs by name.
        try:
            attrs.sort(key=lambda t: t[0])
        except TypeError:
            attrs.sort(lambda t1, t2: cmp(t1[0], t2[0]))    # 2.3 compat

        # Pump out the attrs, segregated by kind.
        attrs = spill('Methods %s' % tag, attrs,
                      lambda t: t[1] == 'method')
        attrs = spill('Class methods %s' % tag, attrs,
                      lambda t: t[1] == 'class method')
        attrs = spill('Static methods %s' % tag, attrs,
                      lambda t: t[1] == 'static method')
        attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
                                 lambda t: t[1] == 'data descriptor')
        attrs = spilldata('Data and other attributes %s' % tag, attrs,
                          lambda t: t[1] == 'data')
        assert attrs == []
        attrs = inherited

    contents = ''.join(contents)

    if name == realname:
        title = '<a name="%s">class <strong>%s</strong></a>' % (
            name, realname)
    else:
        title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
            name, name, realname)
    if bases:
        parents = []
        for base in bases:
            parents.append(self.classlink(base, object.__module__))
        title = title + '(%s)' % join(parents, ', ')
    doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
    doc = doc and '<tt>%s<br>&nbsp;</tt>' % doc

    return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
TypeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/HTMLDoc.docclass
4,194
def docmodule(self, object, name=None, mod=None):
    """Produce text documentation for a given module object."""
    name = object.__name__ # ignore the passed-in name
    synop, desc = splitdoc(getdoc(object))
    result = self.section('NAME', name + (synop and ' - ' + synop))

    try:
        all = object.__all__
    except AttributeError:
        all = None

    try:
        file = inspect.getabsfile(object)
    except __HOLE__:
        file = '(built-in)'
    result = result + self.section('FILE', file)

    docloc = self.getdocloc(object)
    if docloc is not None:
        result = result + self.section('MODULE DOCS', docloc)

    if desc:
        result = result + self.section('DESCRIPTION', desc)

    classes = []
    for key, value in inspect.getmembers(object, inspect.isclass):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None
            or (inspect.getmodule(value) or object) is object):
            if visiblename(key, all):
                classes.append((key, value))
    funcs = []
    for key, value in inspect.getmembers(object, inspect.isroutine):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None or
            inspect.isbuiltin(value) or inspect.getmodule(value) is object):
            if visiblename(key, all):
                funcs.append((key, value))
    data = []
    for key, value in inspect.getmembers(object, isdata):
        if visiblename(key, all):
            data.append((key, value))

    modpkgs = []
    modpkgs_names = set()
    if hasattr(object, '__path__'):
        for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
            modpkgs_names.add(modname)
            if ispkg:
                modpkgs.append(modname + ' (package)')
            else:
                modpkgs.append(modname)

        modpkgs.sort()
        result = result + self.section(
            'PACKAGE CONTENTS', join(modpkgs, '\n'))

    # Detect submodules as sometimes created by C extensions
    submodules = []
    for key, value in inspect.getmembers(object, inspect.ismodule):
        if value.__name__.startswith(name + '.') and key not in modpkgs_names:
            submodules.append(key)
    if submodules:
        submodules.sort()
        result = result + self.section(
            'SUBMODULES', join(submodules, '\n'))

    if classes:
        classlist = map(lambda key_value: key_value[1], classes)
        contents = [self.formattree(
            inspect.getclasstree(classlist, 1), name)]
        for key, value in classes:
            contents.append(self.document(value, key, name))
        result = result + self.section('CLASSES', join(contents, '\n'))

    if funcs:
        contents = []
        for key, value in funcs:
            contents.append(self.document(value, key, name))
        result = result + self.section('FUNCTIONS', join(contents, '\n'))

    if data:
        contents = []
        for key, value in data:
            contents.append(self.docother(value, key, name, maxlen=70))
        result = result + self.section('DATA', join(contents, '\n'))

    if hasattr(object, '__version__'):
        version = str(object.__version__)
        if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
            version = strip(version[11:-1])
        result = result + self.section('VERSION', version)
    if hasattr(object, '__date__'):
        result = result + self.section('DATE', str(object.__date__))
    if hasattr(object, '__author__'):
        result = result + self.section('AUTHOR', str(object.__author__))
    if hasattr(object, '__credits__'):
        result = result + self.section('CREDITS', str(object.__credits__))
    return result
TypeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/TextDoc.docmodule
4,195
def pipepager(text, cmd):
    """Page through text by feeding it to another program."""
    pipe = os.popen(cmd, 'w')
    try:
        pipe.write(text)
        pipe.close()
    except __HOLE__:
        pass # Ignore broken pipes caused by quitting the pager program.
IOError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/pipepager
4,196
def ttypager(text):
    """Page through text on a text terminal."""
    lines = split(plain(text), '\n')
    try:
        import tty
        fd = sys.stdin.fileno()
        old = tty.tcgetattr(fd)
        tty.setcbreak(fd)
        getchar = lambda: sys.stdin.read(1)
    except (ImportError, __HOLE__):
        tty = None
        getchar = lambda: sys.stdin.readline()[:-1][:1]

    try:
        r = inc = os.environ.get('LINES', 25) - 1
        sys.stdout.write(join(lines[:inc], '\n') + '\n')
        while lines[r:]:
            sys.stdout.write('-- more --')
            sys.stdout.flush()
            c = getchar()

            if c in ('q', 'Q'):
                sys.stdout.write('\r          \r')
                break
            elif c in ('\r', '\n'):
                sys.stdout.write('\r          \r' + lines[r] + '\n')
                r = r + 1
                continue
            if c in ('b', 'B', '\x1b'):
                r = r - inc - inc
                if r < 0: r = 0
            sys.stdout.write('\n' + join(lines[r:r+inc], '\n') + '\n')
            r = r + inc

    finally:
        if tty:
            tty.tcsetattr(fd, tty.TCSAFLUSH, old)
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/ttypager
4,197
def locate(path, forceload=0):
    """Locate an object by name or dotted path, importing as necessary."""
    parts = [part for part in split(path, '.') if part]
    module, n = None, 0
    while n < len(parts):
        nextmodule = safeimport(join(parts[:n+1], '.'), forceload)
        if nextmodule: module, n = nextmodule, n + 1
        else: break
    if module:
        object = module
        for part in parts[n:]:
            try:
                object = getattr(object, part)
            except __HOLE__:
                return None
        return object
    else:
        if hasattr(__builtin__, path):
            return getattr(__builtin__, path)

# --------------------------------------- interactive interpreter interface
AttributeError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/locate
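A minimal sketch of the same dotted-path resolution: import as deep as the path allows, then walk the remaining parts with getattr (resolve_dotted is an invented name; unlike locate, builtins are not handled here):

import importlib

def resolve_dotted(path):
    parts = [p for p in path.split('.') if p]
    module, n = None, 0
    while n < len(parts):
        try:
            module = importlib.import_module('.'.join(parts[:n + 1]))
            n += 1
        except ImportError:
            break
    obj = module
    for part in parts[n:]:
        try:
            obj = getattr(obj, part)
        except AttributeError:
            return None
    return obj

print(resolve_dotted('os.path.join'))   # <function join ...>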
4,198
def doc(thing, title='Python Library Documentation: %s', forceload=0):
    """Display text documentation, given an object or a path to an object."""
    try:
        pager(render_doc(thing, title, forceload))
    except (__HOLE__, ErrorDuringImport), value:
        print value
ImportError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/doc
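doc() pages the output of render_doc; the renderer can also be called directly to inspect the generated text (the module and title used below are arbitrary examples):

import pydoc

text = pydoc.render_doc('textwrap', 'Documentation for %s')
print(text.splitlines()[0])   # e.g. 'Documentation for module textwrap'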
4,199
def writedoc(thing, forceload=0):
    """Write HTML documentation to a file in the current directory."""
    try:
        object, name = resolve(thing, forceload)
        page = html.page(describe(object), html.document(object, name))
        file = open(name + '.html', 'w')
        file.write(page)
        file.close()
        print 'wrote', name + '.html'
    except (__HOLE__, ErrorDuringImport), value:
        print value
ImportError
dataset/ETHPy150Open deanhiller/databus/webapp/play1.3.x/python/Lib/pydoc.py/writedoc
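To exercise this function as shipped with the standard library, call it through the pydoc module; this writes textwrap.html into the current working directory (the module choice is arbitrary):

import pydoc

pydoc.writedoc('textwrap')   # prints "wrote textwrap.html"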