Columns: _id (string, 2-7 chars), title (string, 1-88 chars), partition (string, 3 classes), text (string, 75-19.8k chars), language (string, 1 class), meta_information (dict)
q276000
Meter.extractHolidayDate
test
def extractHolidayDate(self, setting_holiday): """ Read a single holiday date from meter buffer. Args: setting_holiday (int): Holiday from 0-19 or in range(Extents.Holidays) Returns: tuple: Holiday tuple, elements are strings. =============== ====================== Holiday Holiday 0-19 as string Day Day 1-31 as string Month Month 1-12 as string =============== ====================== """ ret = namedtuple("result", ["Holiday", "Month", "Day"]) setting_holiday += 1 ret.Holiday = str(setting_holiday) if (setting_holiday < 1) or (setting_holiday > Extents.Holidays): ekm_log("Out of bounds: holiday " + str(setting_holiday)) ret.Holiday = ret.Month = ret.Day = str(0) return ret idxday = "Holiday_" + str(setting_holiday) + "_Day" idxmon = "Holiday_" + str(setting_holiday) + "_Mon" if idxmon not in self.m_hldy: ret.Holiday = ret.Month = ret.Day = str(0) return ret if idxday not in self.m_hldy: ret.Holiday = ret.Month = ret.Day = str(0) return ret ret.Day = self.m_hldy[idxday][MeterData.StringValue] ret.Month = self.m_hldy[idxmon][MeterData.StringValue] return ret
python
{ "resource": "" }
q276001
Meter.readSettings
test
def readSettings(self): """Recommended call to read all meter settings at once. Returns: bool: True if all subsequent serial calls completed with ACK. """ success = (self.readHolidayDates() and self.readMonthTariffs(ReadMonths.kWh) and self.readMonthTariffs(ReadMonths.kWhReverse) and self.readSchedules(ReadSchedules.Schedules_1_To_4) and self.readSchedules(ReadSchedules.Schedules_5_To_6)) return success
python
{ "resource": "" }
q276002
Meter.writeCmdMsg
test
def writeCmdMsg(self, msg): """ Internal method to set the command result string. Args: msg (str): Message built during command. """ ekm_log("(writeCmdMsg | " + self.getContext() + ") " + msg) self.m_command_msg = msg
python
{ "resource": "" }
q276003
Meter.serialCmdPwdAuth
test
def serialCmdPwdAuth(self, password_str): """ Password step of set commands This method is normally called within another serial command, so it does not issue a termination string. Any default password is set in the caller parameter list, never here. Args: password_str (str): Required password. Returns: bool: True on completion and ACK. """ result = False try: req_start = "0150310228" + binascii.hexlify(password_str) + "2903" req_crc = self.calc_crc16(req_start[2:].decode("hex")) req_str = req_start + req_crc self.m_serial_port.write(req_str.decode("hex")) if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06": ekm_log("Password accepted (" + self.getContext() + ")") result = True else: ekm_log("Password call failure no 06(" + self.getContext() + ")") except: ekm_log("Password call failure by exception(" + self.getContext() + ")") ekm_log(traceback.format_exc(sys.exc_info())) return result
python
{ "resource": "" }
q276004
V3Meter.updateObservers
test
def updateObservers(self): """ Fire update method in all attached observers in order of attachment. """ for observer in self.m_observers: try: observer.update(self.m_req) except: ekm_log(traceback.format_exc(sys.exc_info()))
python
{ "resource": "" }
q276005
V4Meter.initLcdLookup
test
def initLcdLookup(self): """ Initialize lookup table for string input of LCD fields """ self.m_lcd_lookup["kWh_Tot"] = LCDItems.kWh_Tot self.m_lcd_lookup["Rev_kWh_Tot"] = LCDItems.Rev_kWh_Tot self.m_lcd_lookup["RMS_Volts_Ln_1"] = LCDItems.RMS_Volts_Ln_1 self.m_lcd_lookup["RMS_Volts_Ln_2"] = LCDItems.RMS_Volts_Ln_2 self.m_lcd_lookup["RMS_Volts_Ln_3"] = LCDItems.RMS_Volts_Ln_3 self.m_lcd_lookup["Amps_Ln_1"] = LCDItems.Amps_Ln_1 self.m_lcd_lookup["Amps_Ln_2"] = LCDItems.Amps_Ln_2 self.m_lcd_lookup["Amps_Ln_3"] = LCDItems.Amps_Ln_3 self.m_lcd_lookup["RMS_Watts_Ln_1"] = LCDItems.RMS_Watts_Ln_1 self.m_lcd_lookup["RMS_Watts_Ln_2"] = LCDItems.RMS_Watts_Ln_2 self.m_lcd_lookup["RMS_Watts_Ln_3"] = LCDItems.RMS_Watts_Ln_3 self.m_lcd_lookup["RMS_Watts_Tot"] = LCDItems.RMS_Watts_Tot self.m_lcd_lookup["Power_Factor_Ln_1"] = LCDItems.Power_Factor_Ln_1 self.m_lcd_lookup["Power_Factor_Ln_2"] = LCDItems.Power_Factor_Ln_2 self.m_lcd_lookup["Power_Factor_Ln_3"] = LCDItems.Power_Factor_Ln_3 self.m_lcd_lookup["kWh_Tariff_1"] = LCDItems.kWh_Tariff_1 self.m_lcd_lookup["kWh_Tariff_2"] = LCDItems.kWh_Tariff_2 self.m_lcd_lookup["kWh_Tariff_3"] = LCDItems.kWh_Tariff_3 self.m_lcd_lookup["kWh_Tariff_4"] = LCDItems.kWh_Tariff_4 self.m_lcd_lookup["Rev_kWh_Tariff_1"] = LCDItems.Rev_kWh_Tariff_1 self.m_lcd_lookup["Rev_kWh_Tariff_2"] = LCDItems.Rev_kWh_Tariff_2 self.m_lcd_lookup["Rev_kWh_Tariff_3"] = LCDItems.Rev_kWh_Tariff_3 self.m_lcd_lookup["Rev_kWh_Tariff_4"] = LCDItems.Rev_kWh_Tariff_4 self.m_lcd_lookup["Reactive_Pwr_Ln_1"] = LCDItems.Reactive_Pwr_Ln_1 self.m_lcd_lookup["Reactive_Pwr_Ln_2"] = LCDItems.Reactive_Pwr_Ln_2 self.m_lcd_lookup["Reactive_Pwr_Ln_3"] = LCDItems.Reactive_Pwr_Ln_3 self.m_lcd_lookup["Reactive_Pwr_Tot"] = LCDItems.Reactive_Pwr_Tot self.m_lcd_lookup["Line_Freq"] = LCDItems.Line_Freq self.m_lcd_lookup["Pulse_Cnt_1"] = LCDItems.Pulse_Cnt_1 self.m_lcd_lookup["Pulse_Cnt_2"] = LCDItems.Pulse_Cnt_2 self.m_lcd_lookup["Pulse_Cnt_3"] = LCDItems.Pulse_Cnt_3 self.m_lcd_lookup["kWh_Ln_1"] = LCDItems.kWh_Ln_1 self.m_lcd_lookup["Rev_kWh_Ln_1"] = LCDItems.Rev_kWh_Ln_1 self.m_lcd_lookup["kWh_Ln_2"] = LCDItems.kWh_Ln_2 self.m_lcd_lookup["Rev_kWh_Ln_2"] = LCDItems.Rev_kWh_Ln_2 self.m_lcd_lookup["kWh_Ln_3"] = LCDItems.kWh_Ln_3 self.m_lcd_lookup["Rev_kWh_Ln_3"] = LCDItems.Rev_kWh_Ln_3 self.m_lcd_lookup["Reactive_Energy_Tot"] = LCDItems.Reactive_Energy_Tot self.m_lcd_lookup["Max_Demand_Rst"] = LCDItems.Max_Demand_Rst self.m_lcd_lookup["Rev_kWh_Rst"] = LCDItems.Rev_kWh_Rst self.m_lcd_lookup["State_Inputs"] = LCDItems.State_Inputs self.m_lcd_lookup["Max_Demand"] = LCDItems.Max_Demand
python
{ "resource": "" }
q276006
V4Meter.request
test
def request(self, send_terminator = False): """ Combined A and B read for V4 meter. Args: send_terminator (bool): Send termination string at end of read. Returns: bool: True on completion. """ try: retA = self.requestA() retB = self.requestB() if retA and retB: self.makeAB() self.calculateFields() self.updateObservers() return True except: ekm_log(traceback.format_exc(sys.exc_info())) return False
python
{ "resource": "" }
q276007
V4Meter.requestA
test
def requestA(self): """Issue an A read on V4 meter. Returns: bool: True if CRC match at end of call. """ work_context = self.getContext() self.setContext("request[v4A]") self.m_serial_port.write("2f3f".decode("hex") + self.m_meter_address + "3030210d0a".decode("hex")) self.m_raw_read_a = self.m_serial_port.getResponse(self.getContext()) unpacked_read_a = self.unpackStruct(self.m_raw_read_a, self.m_blk_a) self.convertData(unpacked_read_a, self.m_blk_a) self.m_kwh_precision = int(self.m_blk_a[Field.kWh_Scale][MeterData.NativeValue]) self.m_a_crc = self.crcMeterRead(self.m_raw_read_a, self.m_blk_a) self.setContext(work_context) return self.m_a_crc
python
{ "resource": "" }
q276008
V4Meter.requestB
test
def requestB(self): """ Issue a B read on V4 meter. Returns: bool: True if CRC match at end of call. """ work_context = self.getContext() self.setContext("request[v4B]") self.m_serial_port.write("2f3f".decode("hex") + self.m_meter_address + "3031210d0a".decode("hex")) self.m_raw_read_b = self.m_serial_port.getResponse(self.getContext()) unpacked_read_b = self.unpackStruct(self.m_raw_read_b, self.m_blk_b) self.convertData(unpacked_read_b, self.m_blk_b, self.m_kwh_precision) self.m_b_crc = self.crcMeterRead(self.m_raw_read_b, self.m_blk_b) self.setContext(work_context) return self.m_b_crc
python
{ "resource": "" }
q276009
V4Meter.makeAB
test
def makeAB(self): """ Munge A and B reads into single serial block with only unique fields.""" for fld in self.m_blk_a: compare_fld = fld.upper() if not "RESERVED" in compare_fld and not "CRC" in compare_fld: self.m_req[fld] = self.m_blk_a[fld] for fld in self.m_blk_b: compare_fld = fld.upper() if not "RESERVED" in compare_fld and not "CRC" in compare_fld: self.m_req[fld] = self.m_blk_b[fld] pass
python
{ "resource": "" }
q276010
V4Meter.calculateFields
test
def calculateFields(self): """Write calculated fields for read buffer.""" pf1 = self.m_blk_b[Field.Cos_Theta_Ln_1][MeterData.StringValue] pf2 = self.m_blk_b[Field.Cos_Theta_Ln_2][MeterData.StringValue] pf3 = self.m_blk_b[Field.Cos_Theta_Ln_3][MeterData.StringValue] pf1_int = self.calcPF(pf1) pf2_int = self.calcPF(pf2) pf3_int = self.calcPF(pf3) self.m_blk_b[Field.Power_Factor_Ln_1][MeterData.StringValue] = str(pf1_int) self.m_blk_b[Field.Power_Factor_Ln_2][MeterData.StringValue] = str(pf2_int) self.m_blk_b[Field.Power_Factor_Ln_3][MeterData.StringValue] = str(pf3_int) self.m_blk_b[Field.Power_Factor_Ln_1][MeterData.NativeValue] = pf1_int self.m_blk_b[Field.Power_Factor_Ln_2][MeterData.NativeValue] = pf2_int self.m_blk_b[Field.Power_Factor_Ln_3][MeterData.NativeValue] = pf3_int rms_watts_1 = self.m_blk_b[Field.RMS_Watts_Ln_1][MeterData.NativeValue] rms_watts_2 = self.m_blk_b[Field.RMS_Watts_Ln_2][MeterData.NativeValue] rms_watts_3 = self.m_blk_b[Field.RMS_Watts_Ln_3][MeterData.NativeValue] sign_rms_watts_1 = 1 sign_rms_watts_2 = 1 sign_rms_watts_3 = 1 direction_byte = self.m_blk_a[Field.State_Watts_Dir][MeterData.NativeValue] if direction_byte == DirectionFlag.ForwardForwardForward: # all good pass if direction_byte == DirectionFlag.ForwardForwardReverse: sign_rms_watts_3 = -1 pass if direction_byte == DirectionFlag.ForwardReverseForward: sign_rms_watts_2 = -1 pass if direction_byte == DirectionFlag.ReverseForwardForward: sign_rms_watts_1 = -1 pass if direction_byte == DirectionFlag.ForwardReverseReverse: sign_rms_watts_2 = -1 sign_rms_watts_3 = -1 pass if direction_byte == DirectionFlag.ReverseForwardReverse: sign_rms_watts_1 = -1 sign_rms_watts_3 = -1 pass if direction_byte == DirectionFlag.ReverseReverseForward: sign_rms_watts_1 = -1 sign_rms_watts_2 = -1 pass if direction_byte == DirectionFlag.ReverseReverseReverse: sign_rms_watts_1 = -1 sign_rms_watts_2 = -1 sign_rms_watts_3 = -1 pass net_watts_1 = rms_watts_1 * sign_rms_watts_1 net_watts_2 = rms_watts_2 * sign_rms_watts_2 net_watts_3 = rms_watts_3 * sign_rms_watts_3 net_watts_tot = net_watts_1 + net_watts_2 + net_watts_3 self.m_blk_b[Field.Net_Calc_Watts_Ln_1][MeterData.NativeValue] = net_watts_1 self.m_blk_b[Field.Net_Calc_Watts_Ln_2][MeterData.NativeValue] = net_watts_2 self.m_blk_b[Field.Net_Calc_Watts_Ln_3][MeterData.NativeValue] = net_watts_3 self.m_blk_b[Field.Net_Calc_Watts_Tot][MeterData.NativeValue] = net_watts_tot self.m_blk_b[Field.Net_Calc_Watts_Ln_1][MeterData.StringValue] = str(net_watts_1) self.m_blk_b[Field.Net_Calc_Watts_Ln_2][MeterData.StringValue] = str(net_watts_2) self.m_blk_b[Field.Net_Calc_Watts_Ln_3][MeterData.StringValue] = str(net_watts_3) self.m_blk_b[Field.Net_Calc_Watts_Tot][MeterData.StringValue] = str(net_watts_tot) pass
python
{ "resource": "" }
q276011
V4Meter.setLCDCmd
test
def setLCDCmd(self, display_list, password="00000000"): """ Single call wrapper for LCD set. Wraps :func:`~ekmmeters.V4Meter.setLcd` and associated init and add methods. Args: display_list (list): List composed of :class:`~ekmmeters.LCDItems` password (str): Optional password. Returns: bool: Passthrough from :func:`~ekmmeters.V4Meter.setLcd` """ result = False try: self.initLcd() item_cnt = len(display_list) if (item_cnt > 40) or (item_cnt <= 0): ekm_log("LCD item list must have between 1 and 40 items") return False for display_item in display_list: self.addLcdItem(int(display_item)) result = self.setLCD(password) except: ekm_log(traceback.format_exc(sys.exc_info())) return result
python
{ "resource": "" }
q276012
V4Meter.setRelay
test
def setRelay(self, seconds, relay, status, password="00000000"): """Serial call to set relay. Args: seconds (int): Seconds to hold, zero is hold forever. See :class:`~ekmmeters.RelayInterval`. relay (int): Selected relay, see :class:`~ekmmeters.Relay`. status (int): Status to set, see :class:`~ekmmeters.RelayState` password (str): Optional password Returns: bool: True on completion and ACK. """ result = False self.setContext("setRelay") try: self.clearCmdMsg() if len(password) != 8: self.writeCmdMsg("Invalid password length.") self.setContext("") return result if seconds < 0 or seconds > 9999: self.writeCmdMsg("Relay duration must be between 0 and 9999.") self.setContext("") return result if not self.requestA(): self.writeCmdMsg("Bad read CRC on setting") else: if not self.serialCmdPwdAuth(password): self.writeCmdMsg("Password failure") else: req_str = "" req_str = ("01573102303038" + binascii.hexlify(str(relay)).zfill(2) + "28" + binascii.hexlify(str(status)).zfill(2) + binascii.hexlify(str(seconds).zfill(4)) + "2903") req_str += self.calc_crc16(req_str[2:].decode("hex")) self.m_serial_port.write(req_str.decode("hex")) if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06": self.writeCmdMsg("Success: 06 returned.") result = True self.serialPostEnd() except: ekm_log(traceback.format_exc(sys.exc_info())) self.setContext("") return result
python
{ "resource": "" }
q276013
V4Meter.serialPostEnd
test
def serialPostEnd(self): """ Send termination string to implicit current meter.""" ekm_log("Termination string sent (" + self.m_context + ")") try: self.m_serial_port.write("0142300375".decode("hex")) except: ekm_log(traceback.format_exc(sys.exc_info())) pass
python
{ "resource": "" }
q276014
V4Meter.setPulseInputRatio
test
def setPulseInputRatio(self, line_in, new_cnst, password="00000000"): """Serial call to set pulse input ratio on a line. Args: line_in (int): Member of :class:`~ekmmeters.Pulse` new_cnst (int): New pulse input ratio password (str): Optional password Returns: bool: True on completion and ACK. """ result = False self.setContext("setPulseInputRatio") try: if not self.requestA(): self.writeCmdMsg("Bad read CRC on setting") else: if not self.serialCmdPwdAuth(password): self.writeCmdMsg("Password failure") else: req_const = binascii.hexlify(str(new_cnst).zfill(4)) line_const = binascii.hexlify(str(line_in - 1)) req_str = "01573102303041" + line_const + "28" + req_const + "2903" req_str += self.calc_crc16(req_str[2:].decode("hex")) self.m_serial_port.write(req_str.decode("hex")) if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06": self.writeCmdMsg("Success: 06 returned.") result = True self.serialPostEnd() except: ekm_log(traceback.format_exc(sys.exc_info())) self.setContext("") return result
python
{ "resource": "" }
q276015
V4Meter.setZeroResettableKWH
test
def setZeroResettableKWH(self, password="00000000"): """ Serial call to zero resettable kWh registers. Args: password (str): Optional password. Returns: bool: True on completion and ACK. """ result = False self.setContext("setZeroResettableKWH") try: if not self.requestA(): self.writeCmdMsg("Bad read CRC on setting") else: if not self.serialCmdPwdAuth(password): self.writeCmdMsg("Password failure") else: req_str = "0157310230304433282903" req_str += self.calc_crc16(req_str[2:].decode("hex")) self.m_serial_port.write(req_str.decode("hex")) if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06": self.writeCmdMsg("Success: 06 returned.") result = True self.serialPostEnd() except: ekm_log(traceback.format_exc(sys.exc_info())) self.setContext("") return result
python
{ "resource": "" }
q276016
V4Meter.setLCD
test
def setLCD(self, password="00000000"): """ Serial call to set LCD using meter object buffer. Used with :func:`~ekmmeters.V4Meter.addLcdItem`. Args: password (str): Optional password Returns: bool: True on completion and ACK. """ result = False self.setContext("setLCD") try: self.clearCmdMsg() if len(password) != 8: self.writeCmdMsg("Invalid password length.") self.setContext("") return result if not self.request(): self.writeCmdMsg("Bad read CRC on setting") else: if not self.serialCmdPwdAuth(password): self.writeCmdMsg("Password failure") else: req_table = "" fill_len = 40 - len(self.m_lcd_items) for lcdid in self.m_lcd_items: append_val = binascii.hexlify(str(lcdid).zfill(2)) req_table += append_val for i in range(0, fill_len): append_val = binascii.hexlify(str(0).zfill(2)) req_table += append_val req_str = "015731023030443228" + req_table + "2903" req_str += self.calc_crc16(req_str[2:].decode("hex")) self.m_serial_port.write(req_str.decode("hex")) if self.m_serial_port.getResponse(self.getContext()).encode("hex") == "06": self.writeCmdMsg("Success: 06 returned.") result = True self.serialPostEnd() except: ekm_log(traceback.format_exc(sys.exc_info())) self.setContext("") return result
python
{ "resource": "" }
q276017
iterate_fields
test
def iterate_fields(fields, schema): """Recursively iterate over all DictField sub-fields. :param fields: Field instance (e.g. input) :type fields: dict :param schema: Schema instance (e.g. input_schema) :type schema: dict """ schema_dict = {val['name']: val for val in schema} for field_id, properties in fields.iteritems(): if 'group' in schema_dict[field_id]: for _field_schema, _fields in iterate_fields(properties, schema_dict[field_id]['group']): yield (_field_schema, _fields) else: yield (schema_dict[field_id], fields)
python
{ "resource": "" }
q276018
iterate_schema
test
def iterate_schema(fields, schema, path=None): """Recursively iterate over all schema sub-fields. :param fields: Field instance (e.g. input) :type fields: dict :param schema: Schema instance (e.g. input_schema) :type schema: dict :param path: Field path :type path: string """ for field_schema in schema: name = field_schema['name'] if 'group' in field_schema: for rvals in iterate_schema(fields[name] if name in fields else {}, field_schema['group'], None if path is None else '{}.{}'.format(path, name)): yield rvals else: if path is None: yield (field_schema, fields) else: yield (field_schema, fields, '{}.{}'.format(path, name))
python
{ "resource": "" }
q276019
paragraphs
test
def paragraphs(quantity=2, separator='\n\n', wrap_start='', wrap_end='', html=False, sentences_quantity=3, as_list=False): """Random paragraphs.""" if html: wrap_start = '<p>' wrap_end = '</p>' separator = '\n\n' result = [] for i in xrange(0, quantity): result.append(wrap_start + sentences(sentences_quantity) + wrap_end) if as_list: return result else: return separator.join(result)
python
{ "resource": "" }
q276020
text
test
def text(length=None, at_least=10, at_most=15, lowercase=True, uppercase=True, digits=True, spaces=True, punctuation=False): """ Random text. If `length` is present the text will be exactly this chars long. Else the text will be something between `at_least` and `at_most` chars long. """ base_string = '' if lowercase: base_string += string.ascii_lowercase if uppercase: base_string += string.ascii_uppercase if digits: base_string += string.digits if spaces: base_string += ' ' if punctuation: base_string += string.punctuation if len(base_string) == 0: return '' if not length: length = random.randint(at_least, at_most) result = '' for i in xrange(0, length): result += random.choice(base_string) return result
python
{ "resource": "" }
q276021
FormatterMixin.statistics
test
def statistics(self, elapsed, result): """ Return output for the combined time and result summary statistics. """ return "\n".join((self.timing(elapsed), self.result_summary(result)))
python
{ "resource": "" }
q276022
Colored.color
test
def color(self, color, text): """ Color some text in the given ANSI color. """ return "{escape}{text}{reset}".format( escape=self.ANSI[color], text=text, reset=self.ANSI["reset"], )
python
{ "resource": "" }
q276023
DotsFormatter.show
test
def show(self, text): """ Write the text to the stream and flush immediately. """ self.stream.write(text) self.stream.flush()
python
{ "resource": "" }
q276024
DotsFormatter.result_summary
test
def result_summary(self, result): """ Return a summary of the results. """ return "{} examples, {} errors, {} failures\n".format( result.testsRun, len(result.errors), len(result.failures), )
python
{ "resource": "" }
q276025
parse
test
def parse(argv=None): """ Parse some arguments using the parser. """ if argv is None: argv = sys.argv[1:] # Evade http://bugs.python.org/issue9253 if not argv or argv[0] not in {"run", "transform"}: argv = ["run"] + argv arguments = _clean(_parser.parse_args(argv)) return arguments
python
{ "resource": "" }
q276026
setup
test
def setup(config): """ Setup the environment for an example run. """ formatter = config.Formatter() if config.verbose: formatter = result.Verbose(formatter) if config.color: formatter = result.Colored(formatter) current_result = result.ExampleResult(formatter) ivoire.current_result = ivoire._manager.result = current_result
python
{ "resource": "" }
q276027
run
test
def run(config): """ Time to run. """ setup(config) if config.exitfirst: ivoire.current_result.failfast = True ivoire.current_result.startTestRun() for spec in config.specs: try: load_by_name(spec) except Exception: ivoire.current_result.addError( _ExampleNotRunning(), sys.exc_info() ) ivoire.current_result.stopTestRun() sys.exit(not ivoire.current_result.wasSuccessful())
python
{ "resource": "" }
q276028
transform
test
def transform(config): """ Run in transform mode. """ if transform_possible: ExampleLoader.register() args, sys.argv[1:] = sys.argv[1:], config.args try: return runpy.run_path(config.runner, run_name="__main__") finally: sys.argv[1:] = args
python
{ "resource": "" }
q276029
ExampleTransformer.transform_describe
test
def transform_describe(self, node, describes, context_variable): """ Transform a describe node into a ``TestCase``. ``node`` is the node object. ``describes`` is the name of the object being described. ``context_variable`` is the name bound in the context manager (usually "it"). """ body = self.transform_describe_body(node.body, context_variable) return ast.ClassDef( name="Test" + describes.title(), bases=[ast.Name(id="TestCase", ctx=ast.Load())], keywords=[], starargs=None, kwargs=None, body=list(body), decorator_list=[], )
python
{ "resource": "" }
q276030
ExampleTransformer.transform_describe_body
test
def transform_describe_body(self, body, group_var): """ Transform the body of an ``ExampleGroup``. ``body`` is the body. ``group_var`` is the name bound to the example group in the context manager (usually "it"). """ for node in body: withitem, = node.items context_expr = withitem.context_expr name = context_expr.args[0].s context_var = withitem.optional_vars.id yield self.transform_example(node, name, context_var, group_var)
python
{ "resource": "" }
q276031
ExampleTransformer.transform_example
test
def transform_example(self, node, name, context_variable, group_variable): """ Transform an example node into a test method. Returns the unchanged node if it wasn't an ``Example``. ``node`` is the node object. ``name`` is the name of the example being described. ``context_variable`` is the name bound in the context manager (usually "test"). ``group_variable`` is the name bound in the surrounding example group's context manager (usually "it"). """ test_name = "_".join(["test", group_variable] + name.split()) body = self.transform_example_body(node.body, context_variable) return ast.FunctionDef( name=test_name, args=self.takes_only_self(), body=list(body), decorator_list=[], )
python
{ "resource": "" }
q276032
ExampleTransformer.transform_example_body
test
def transform_example_body(self, body, context_variable): """ Transform the body of an ``Example`` into the body of a method. Replaces instances of ``context_variable`` to refer to ``self``. ``body`` is the body. ``context_variable`` is the name bound in the surrounding context manager to the example (usually "test"). """ for node in body: for child in ast.walk(node): if isinstance(child, ast.Name): if child.id == context_variable: child.id = "self" yield node
python
{ "resource": "" }
q276033
ExampleTransformer.takes_only_self
test
def takes_only_self(self): """ Return an argument list node that takes only ``self``. """ return ast.arguments( args=[ast.arg(arg="self")], defaults=[], kw_defaults=[], kwonlyargs=[], )
python
{ "resource": "" }
q276034
ExampleLoader.register
test
def register(cls): """ Register the path hook. """ cls._finder = FileFinder.path_hook((cls, [cls.suffix])) sys.path_hooks.append(cls._finder)
python
{ "resource": "" }
q276035
ExampleLoader.source_to_code
test
def source_to_code(self, source_bytes, source_path): """ Transform the source code, then return the code object. """ node = ast.parse(source_bytes) transformed = ExampleTransformer().transform(node) return compile(transformed, source_path, "exec", dont_inherit=True)
python
{ "resource": "" }
q276036
apply_argument_parser
test
def apply_argument_parser(argumentsParser, options=None): """ Apply the argument parser. """ if options is not None: args = argumentsParser.parse_args(options) else: args = argumentsParser.parse_args() return args
python
{ "resource": "" }
q276037
load_by_name
test
def load_by_name(name): """ Load a spec from either a file path or a fully qualified name. """ if os.path.exists(name): load_from_path(name) else: __import__(name)
python
{ "resource": "" }
q276038
load_from_path
test
def load_from_path(path): """ Load a spec from a given path, discovering specs if a directory is given. """ if os.path.isdir(path): paths = discover(path) else: paths = [path] for path in paths: name = os.path.basename(os.path.splitext(path)[0]) imp.load_source(name, path)
python
{ "resource": "" }
q276039
discover
test
def discover(path, filter_specs=filter_specs): """ Discover all of the specs recursively inside ``path``. Successively yields the (full) relative paths to each spec. """ for dirpath, _, filenames in os.walk(path): for spec in filter_specs(filenames): yield os.path.join(dirpath, spec)
python
{ "resource": "" }
q276040
checker
test
def checker(location, receiver): """Construct a function that checks a directory for process configuration The function checks for additions or removals of JSON process configuration files and calls the appropriate receiver methods. :param location: string, the directory to monitor :param receiver: IEventReceiver :returns: a function with no parameters """ path = filepath.FilePath(location) files = set() filesContents = {} def _check(path): currentFiles = set(fname for fname in os.listdir(location) if not fname.endswith('.new')) removed = files - currentFiles added = currentFiles - files for fname in added: contents = path.child(fname).getContent() filesContents[fname] = contents receiver.add(fname, contents) for fname in removed: receiver.remove(fname) same = currentFiles & files for fname in same: newContents = path.child(fname).getContent() oldContents = filesContents[fname] if newContents == oldContents: continue receiver.remove(fname) filesContents[fname] = newContents receiver.add(fname, newContents) files.clear() files.update(currentFiles) return functools.partial(_check, path)
python
{ "resource": "" }
q276041
messages
test
def messages(location, receiver): """Construct a function that checks a directory for messages The function checks for new messages and calls the appropriate method on the receiver. Sent messages are deleted. :param location: string, the directory to monitor :param receiver: IEventReceiver :returns: a function with no parameters """ path = filepath.FilePath(location) def _check(path): messageFiles = path.globChildren('*') for message in messageFiles: if message.basename().endswith('.new'): continue receiver.message(message.getContent()) message.remove() return functools.partial(_check, path)
python
{ "resource": "" }
q276042
add
test
def add(places, name, cmd, args, env=None, uid=None, gid=None, extras=None, env_inherit=None): """Add a process. :param places: a Places instance :param name: string, the logical name of the process :param cmd: string, executable :param args: list of strings, command-line arguments :param env: dictionary mapping strings to strings (will be environment in subprocess) :param uid: integer, uid to run the new process as :param gid: integer, gid to run the new process as :param extras: a dictionary with additional parameters :param env_inherit: a list of environment variables to inherit :returns: None """ args = [cmd]+args config = filepath.FilePath(places.config) fle = config.child(name) details = dict(args=args) if env is not None: newEnv = {} for thing in env: name, value = thing.split('=', 1) newEnv[name] = value details['env'] = newEnv if uid is not None: details['uid'] = uid if gid is not None: details['gid'] = gid if env_inherit is not None: details['env_inherit'] = env_inherit if extras is not None: details.update(extras) content = _dumps(details) fle.setContent(content)
python
{ "resource": "" }
q276043
remove
test
def remove(places, name): """Remove a process :param places: a Places instance :param name: string, the logical name of the process :returns: None """ config = filepath.FilePath(places.config) fle = config.child(name) fle.remove()
python
{ "resource": "" }
q276044
restart
test
def restart(places, name): """Restart a process :param places: a Places instance :param name: string, the logical name of the process :returns: None """ content = _dumps(dict(type='RESTART', name=name)) _addMessage(places, content)
python
{ "resource": "" }
q276045
call
test
def call(results): """Call results.func on the attributes of results :param results: dictionary-like object :returns: None """ results = vars(results) places = Places(config=results.pop('config'), messages=results.pop('messages')) func = results.pop('func') func(places, **results)
python
{ "resource": "" }
q276046
get
test
def get(config, messages, freq, pidDir=None, reactor=None): """Return a service which monitors processes based on directory contents Construct and return a service that, when started, will run processes based on the contents of the 'config' directory, restarting them if file contents change and stopping them if the file is removed. It also listens for restart and restart-all messages on the 'messages' directory. :param config: string, location of configuration directory :param messages: string, location of messages directory :param freq: number, frequency to check for new messages and configuration updates :param pidDir: {twisted.python.filepath.FilePath} or None, location to keep pid files :param reactor: something implementing the interfaces {twisted.internet.interfaces.IReactorTime} and {twisted.internet.interfaces.IReactorProcess} and :returns: service, {twisted.application.interfaces.IService} """ ret = taservice.MultiService() args = () if reactor is not None: args = reactor, procmon = procmonlib.ProcessMonitor(*args) if pidDir is not None: protocols = TransportDirectoryDict(pidDir) procmon.protocols = protocols procmon.setName('procmon') receiver = process_events.Receiver(procmon) confcheck = directory_monitor.checker(config, receiver) confserv = internet.TimerService(freq, confcheck) confserv.setServiceParent(ret) messagecheck = directory_monitor.messages(messages, receiver) messageserv = internet.TimerService(freq, messagecheck) messageserv.setServiceParent(ret) procmon.setServiceParent(ret) return ret
python
{ "resource": "" }
q276047
makeService
test
def makeService(opt): """Return a service based on parsed command-line options :param opt: dict-like object. Relevant keys are config, messages, pid, frequency, threshold, killtime, minrestartdelay and maxrestartdelay :returns: service, {twisted.application.interfaces.IService} """ ret = get(config=opt['config'], messages=opt['messages'], pidDir=opt['pid'], freq=opt['frequency']) pm = ret.getServiceNamed("procmon") pm.threshold = opt["threshold"] pm.killTime = opt["killtime"] pm.minRestartDelay = opt["minrestartdelay"] pm.maxRestartDelay = opt["maxrestartdelay"] return ret
python
{ "resource": "" }
q276048
Nodelist.refresh_session
test
def refresh_session(self, node_id=None): """ Adds or refreshes a particular node in the nodelist, attributing the current time with the node_id. :param string node_id: optional, the connection id of the node whose session should be refreshed """ if not node_id: node_id = self.conn.id self.conn.client.hset(self.nodelist_key, node_id, int(time.time() * 1000.))
python
{ "resource": "" }
q276049
Nodelist.remove_expired_nodes
test
def remove_expired_nodes(self, node_ids=None): """ Removes all expired nodes from the nodelist. If a set of node_ids is passed in, those ids are checked to ensure they haven't been refreshed prior to a lock being acquired. Should only be run with a lock. :param list node_ids: optional, a list of node_ids to remove. They will be verified to ensure they haven't been refreshed. """ nodes = self.find_expired_nodes(node_ids) if nodes: self.conn.client.hdel(self.nodelist_key, *nodes)
python
{ "resource": "" }
q276050
Nodelist.remove_node
test
def remove_node(self, node_id=None): """ Removes a particular node from the nodelist. :param string node_id: optional, the process id of the node to remove """ if not node_id: node_id = self.conn.id self.conn.client.hdel(self.nodelist_key, node_id)
python
{ "resource": "" }
q276051
Nodelist.get_last_updated
test
def get_last_updated(self, node_id=None): """ Returns the time a particular node has been last refreshed. :param string node_id: optional, the connection id of the node to retrieve :rtype: int :returns: Returns a unix timestamp if it exists, otherwise None """ if not node_id: node_id = self.conn.id dt = self.conn.client.hget(self.nodelist_key, node_id) return int(dt) if dt else None
python
{ "resource": "" }
q276052
Nodelist.get_all_nodes
test
def get_all_nodes(self): """ Returns all nodes in the hash with the time they were last refreshed as a dictionary. :rtype: dict(string, int) :returns: A dictionary of strings and corresponding timestamps """ nodes = self.conn.client.hgetall(self.nodelist_key) return {node_id: int(dt) for (node_id, dt) in nodes.items()}
python
{ "resource": "" }
q276053
Reference.refresh_session
test
def refresh_session(self): """ Update the session for this node. Specifically; lock on the reflist, then update the time this node acquired the reference. This method should only be called while the reference is locked. """ expired_nodes = self.nodelist.find_expired_nodes() if expired_nodes: self.nodelist.remove_expired_nodes(expired_nodes) self.nodelist.refresh_session()
python
{ "resource": "" }
q276054
Reference.increment_times_modified
test
def increment_times_modified(self): """ Increments the number of times this resource has been modified by all processes. """ rc = self.conn.client.incr(self.times_modified_key) self.conn.client.pexpire(self.times_modified_key, phonon.s_to_ms(TTL))
python
{ "resource": "" }
q276055
Reference.dereference
test
def dereference(self, callback=None, args=None, kwargs=None): """ This method should only be called while the reference is locked. Decrements the reference count for the resource. If this process holds the only reference at the time we finish dereferencing it; True is returned. Operating on the resource after it has been dereferenced is undefined behavior. Dereference queries the value stored in the backend, if any, iff (if and only if) this instance is the last reference to that resource. e.g. self.count() == 0 :param function callback: A function to execute iff it's determined this process holds the only reference to the resource. When there is a failure communicating with the backend in the cleanup step the callback function will be called an additional time for that failure and each subsequent one thereafter. Ensure your callback handles this properly. :param tuple args: Positional arguments to pass your callback. :param dict kwargs: keyword arguments to pass your callback. :returns: Whether or not there are no more references among all processes. True if this was the last reference. False otherwise. :rtype: bool """ if args is None: args = tuple() if kwargs is None: kwargs = {} client = self.conn.client should_execute = False if self.force_expiry: should_execute = True if not should_execute: self.nodelist.remove_node(self.conn.id) self.nodelist.remove_expired_nodes() updated_refcount = client.incr(self.refcount_key, -1) should_execute = (updated_refcount <= 0) # When we force expiry this will be -1 try: if callable(callback) and should_execute: callback(*args, **kwargs) finally: if should_execute: client.delete(self.resource_key, self.nodelist.nodelist_key, self.times_modified_key, self.refcount_key) self.conn.remove_from_registry(self.resource_key) return should_execute
python
{ "resource": "" }
q276056
delimit
test
def delimit(values, delimiter=', '): "Returns a list of tokens interleaved with the delimiter." toks = [] if not values: return toks if not isinstance(delimiter, (list, tuple)): delimiter = [delimiter] last = len(values) - 1 for i, value in enumerate(values): toks.append(value) if i < last: toks.extend(delimiter) return toks
python
{ "resource": "" }
q276057
check
test
def check(path, start, now): """Check which processes need to be restarted :param path: a twisted.python.filepath.FilePath with configurations :param start: when the checker started running :param now: current time :returns: list of strings """ return [child.basename() for child in path.children() if _isbad(child, start, now)]
python
{ "resource": "" }
q276058
Status.merge
test
def merge(self, status: 'Status[Input, Output]') -> 'Status[Input, Output]': """Merge the failure message from another status into this one. Whichever status represents parsing that has gone the farthest is retained. If both statuses have gone the same distance, then the expected values from both are retained. Args: status: The status to merge into this one. Returns: This ``Status`` which may have ``farthest`` and ``expected`` updated accordingly. """ if status is None or status.farthest is None: # No new message; simply return unchanged pass elif self.farthest is None: # No current message to compare to; use the message from status self.farthest = status.farthest self.expected = status.expected elif status.farthest.position < self.farthest.position: # New message is not farther; keep current message pass elif status.farthest.position > self.farthest.position: # New message is farther than current message; replace with new message self.farthest = status.farthest self.expected = status.expected else: # New message and current message are equally far; merge messages self.expected = status.expected + self.expected return self
python
{ "resource": "" }
q276059
exists
test
def exists(value): "Query to test if a value exists." if not isinstance(value, Token): raise TypeError('value must be a token') if not hasattr(value, 'identifier'): raise TypeError('value must support an identifier') if not value.identifier: value = value.__class__(**value.__dict__) value.identifier = 'v' ident = Identifier(value.identifier) return Query([ OptionalMatch(value), Return(Predicate(ident, 'IS NOT NULL')), Limit(1), ])
python
{ "resource": "" }
q276060
get
test
def get(value): "Query to get the value." if not isinstance(value, Token): raise TypeError('value must be a token') if not hasattr(value, 'identifier'): raise TypeError('value must support an identifier') if not value.identifier: value = value.__class__(**value.__dict__) value.identifier = 'v' ident = Identifier(value.identifier) return Query([ Match(value), Return(ident) ])
python
{ "resource": "" }
q276061
constant
test
def constant(x: A) -> Callable[..., A]: """Produce a function that always returns a supplied value. Args: x: Any object. Returns: A function that accepts any number of positional and keyword arguments, discards them, and returns ``x``. """ def constanted(*args, **kwargs): return x return constanted
python
{ "resource": "" }
q276062
splat
test
def splat(f: Callable[..., A]) -> Callable[[Iterable], A]: """Convert a function taking multiple arguments into a function taking a single iterable argument. Args: f: Any function Returns: A function that accepts a single iterable argument. Each element of this iterable argument is passed as an argument to ``f``. Example: $ def f(a, b, c): $ return a + b + c $ $ f(1, 2, 3) # 6 $ g = splat(f) $ g([1, 2, 3]) # 6 """ def splatted(args): return f(*args) return splatted
python
{ "resource": "" }
q276063
unsplat
test
def unsplat(f: Callable[[Iterable], A]) -> Callable[..., A]: """Convert a function taking a single iterable argument into a function taking multiple arguments. Args: f: Any function taking a single iterable argument Returns: A function that accepts multiple arguments. Each argument of this function is passed as an element of an iterable to ``f``. Example: $ def f(a): $ return a[0] + a[1] + a[2] $ $ f([1, 2, 3]) # 6 $ g = unsplat(f) $ g(1, 2, 3) # 6 """ def unsplatted(*args): return f(args) return unsplatted
python
{ "resource": "" }
q276064
runProcess
test
def runProcess(args, timeout, grace, reactor): """Run a process, return a deferred that fires when it is done :param args: Process arguments :param timeout: Time before terminating process :param grace: Time before killing process after terminating it :param reactor: IReactorProcess and IReactorTime :returns: deferred that fires with success when the process ends, or fails if there was a problem spawning/terminating the process """ deferred = defer.Deferred() protocol = ProcessProtocol(deferred) process = reactor.spawnProcess(protocol, args[0], args, env=os.environ) def _logEnded(err): err.trap(tierror.ProcessDone, tierror.ProcessTerminated) print(err.value) deferred.addErrback(_logEnded) def _cancelTermination(dummy): for termination in terminations: if termination.active(): termination.cancel() deferred.addCallback(_cancelTermination) terminations = [] terminations.append(reactor.callLater(timeout, process.signalProcess, "TERM")) terminations.append(reactor.callLater(timeout+grace, process.signalProcess, "KILL")) return deferred
python
{ "resource": "" }
q276065
makeService
test
def makeService(opts): """Make scheduler service :param opts: dict-like object. keys: frequency, args, timeout, grace """ ser = tainternet.TimerService(opts['frequency'], runProcess, opts['args'], opts['timeout'], opts['grace'], tireactor) ret = service.MultiService() ser.setName('scheduler') ser.setServiceParent(ret) heart.maybeAddHeart(ret) return ret
python
{ "resource": "" }
q276066
completely_parse_reader
test
def completely_parse_reader(parser: Parser[Input, Output], reader: Reader[Input]) -> Result[Output]: """Consume reader and return Success only on complete consumption. This is a helper function for ``parse`` methods, which return ``Success`` when the input is completely consumed and ``Failure`` with an appropriate message otherwise. Args: parser: The parser doing the consuming reader: The input being consumed Returns: A parsing ``Result`` """ result = (parser << eof).consume(reader) if isinstance(result, Continue): return Success(result.value) else: used = set() unique_expected = [] for expected_lambda in result.expected: expected = expected_lambda() if expected not in used: used.add(expected) unique_expected.append(expected) return Failure(result.farthest.expected_error(' or '.join(unique_expected)))
python
{ "resource": "" }
q276067
lit
test
def lit(literal: Sequence[Input], *literals: Sequence[Sequence[Input]]) -> Parser: """Match a literal sequence. In the `TextParsers`` context, this matches the literal string provided. In the ``GeneralParsers`` context, this matches a sequence of input. If multiple literals are provided, they are treated as alternatives. e.g. ``lit('+', '-')`` is the same as ``lit('+') | lit('-')``. Args: literal: A literal to match *literals: Alternative literals to match Returns: A ``LiteralParser`` in the ``GeneralContext``, a ``LiteralStringParser`` in the ``TextParsers`` context, and an ``AlternativeParser`` if multiple arguments are provided. """ if len(literals) > 0: return AlternativeParser(options.handle_literal(literal), *map(options.handle_literal, literals)) else: return options.handle_literal(literal)
python
{ "resource": "" }
q276068
opt
test
def opt(parser: Union[Parser, Sequence[Input]]) -> OptionalParser: """Optionally match a parser. An ``OptionalParser`` attempts to match ``parser``. If it succeeds, it returns a list of length one with the value returned by the parser as the only element. If it fails, it returns an empty list. Args: parser: Parser or literal """ if isinstance(parser, str): parser = lit(parser) return OptionalParser(parser)
python
{ "resource": "" }
q276069
rep1
test
def rep1(parser: Union[Parser, Sequence[Input]]) -> RepeatedOnceParser: """Match a parser one or more times repeatedly. This matches ``parser`` multiple times in a row. If it matches as least once, it returns a list of values from each time ``parser`` matched. If it does not match ``parser`` at all, it fails. Args: parser: Parser or literal """ if isinstance(parser, str): parser = lit(parser) return RepeatedOnceParser(parser)
python
{ "resource": "" }
q276070
rep
test
def rep(parser: Union[Parser, Sequence[Input]]) -> RepeatedParser: """Match a parser zero or more times repeatedly. This matches ``parser`` multiple times in a row. A list is returned containing the value from each match. If there are no matches, an empty list is returned. Args: parser: Parser or literal """ if isinstance(parser, str): parser = lit(parser) return RepeatedParser(parser)
python
{ "resource": "" }
q276071
rep1sep
test
def rep1sep(parser: Union[Parser, Sequence[Input]], separator: Union[Parser, Sequence[Input]]) \ -> RepeatedOnceSeparatedParser: """Match a parser one or more times separated by another parser. This matches repeated sequences of ``parser`` separated by ``separator``. If there is at least one match, a list containing the values of the ``parser`` matches is returned. The values from ``separator`` are discarded. If it does not match ``parser`` at all, it fails. Args: parser: Parser or literal separator: Parser or literal """ if isinstance(parser, str): parser = lit(parser) if isinstance(separator, str): separator = lit(separator) return RepeatedOnceSeparatedParser(parser, separator)
python
{ "resource": "" }
q276072
repsep
test
def repsep(parser: Union[Parser, Sequence[Input]], separator: Union[Parser, Sequence[Input]]) \ -> RepeatedSeparatedParser: """Match a parser zero or more times separated by another parser. This matches repeated sequences of ``parser`` separated by ``separator``. A list is returned containing the value from each match of ``parser``. The values from ``separator`` are discarded. If there are no matches, an empty list is returned. Args: parser: Parser or literal separator: Parser or literal """ if isinstance(parser, str): parser = lit(parser) if isinstance(separator, str): separator = lit(separator) return RepeatedSeparatedParser(parser, separator)
python
{ "resource": "" }
q276073
check
test
def check(settings, states, location): """Check all processes""" children = {child.basename(): child for child in location.children()} last = set(states) current = set(children) gone = last - current added = current - last for name in gone: states[name].close() del states[name] for name in added: states[name] = State(location=children[name], settings=settings) return [name for name, state in six.iteritems(states) if state.check()]
python
{ "resource": "" }
q276074
State.close
test
def close(self): """Discard data and cancel all calls. Instance cannot be reused after closing. """ if self.closed: raise ValueError("Cannot close a closed state") if self.call is not None: self.call.cancel() self.closed = True
python
{ "resource": "" }
q276075
State.check
test
def check(self): """Check the state of HTTP""" if self.closed: raise ValueError("Cannot check a closed state") self._maybeReset() if self.url is None: return False return self._maybeCheck()
python
{ "resource": "" }
q276076
maybeAddHeart
test
def maybeAddHeart(master): """Add a heart to a service collection Add a heart to a service.IServiceCollector if the heart is not None. :params master: a service.IServiceCollector """ heartSer = makeService() if heartSer is None: return heartSer.setName('heart') heartSer.setServiceParent(master)
python
{ "resource": "" }
q276077
wrapHeart
test
def wrapHeart(service): """Wrap a service in a MultiService with a heart""" master = taservice.MultiService() service.setServiceParent(master) maybeAddHeart(master) return master
python
{ "resource": "" }
q276078
freeze_from_checkpoint
test
def freeze_from_checkpoint(input_checkpoint, output_file_path, output_node_names): """Freeze and shrink the graph based on a checkpoint and the output node names.""" check_input_checkpoint(input_checkpoint) output_node_names = output_node_names_string_as_list(output_node_names) with tf.Session() as sess: restore_from_checkpoint(sess, input_checkpoint) freeze_graph.freeze_graph_with_def_protos(input_graph_def=sess.graph_def, input_saver_def=None, input_checkpoint=input_checkpoint, output_node_names=','.join(output_node_names), restore_op_name='save/restore_all', filename_tensor_name='save/Const:0', output_graph=output_file_path, clear_devices=True, initializer_nodes='')
python
{ "resource": "" }
q276079
freeze
test
def freeze(sess, output_file_path, output_node_names): """Freeze and shrink the graph based on a session and the output node names.""" with TemporaryDirectory() as temp_dir_name: checkpoint_path = os.path.join(temp_dir_name, 'model.ckpt') tf.train.Saver().save(sess, checkpoint_path) freeze_from_checkpoint(checkpoint_path, output_file_path, output_node_names)
python
{ "resource": "" }
q276080
save_graph_only
test
def save_graph_only(sess, output_file_path, output_node_names, as_text=False): """Save a small version of the graph based on a session and the output node names.""" for node in sess.graph_def.node: node.device = '' graph_def = graph_util.extract_sub_graph(sess.graph_def, output_node_names) output_dir, output_filename = os.path.split(output_file_path) graph_io.write_graph(graph_def, output_dir, output_filename, as_text=as_text)
python
{ "resource": "" }
q276081
save_graph_only_from_checkpoint
test
def save_graph_only_from_checkpoint(input_checkpoint, output_file_path, output_node_names, as_text=False): """Save a small version of the graph based on a checkpoint and the output node names.""" check_input_checkpoint(input_checkpoint) output_node_names = output_node_names_string_as_list(output_node_names) with tf.Session() as sess: restore_from_checkpoint(sess, input_checkpoint) save_graph_only(sess, output_file_path, output_node_names, as_text=as_text)
python
{ "resource": "" }
q276082
save_weights_from_checkpoint
test
def save_weights_from_checkpoint(input_checkpoint, output_path, conv_var_names=None, conv_transpose_var_names=None): """Save the weights of the trainable variables given a checkpoint, each one in a different file in output_path.""" check_input_checkpoint(input_checkpoint) with tf.Session() as sess: restore_from_checkpoint(sess, input_checkpoint) save_weights(sess, output_path, conv_var_names=conv_var_names, conv_transpose_var_names=conv_transpose_var_names)
python
{ "resource": "" }
q276083
restore_from_checkpoint
test
def restore_from_checkpoint(sess, input_checkpoint): """Return a TensorFlow saver from a checkpoint containing the metagraph.""" saver = tf.train.import_meta_graph('{}.meta'.format(input_checkpoint)) saver.restore(sess, input_checkpoint) return saver
python
{ "resource": "" }
q276084
BaseNode.parse
test
def parse(cls, parser, token): """ Parse the tag, instantiate the class. :type parser: django.template.base.Parser :type token: django.template.base.Token """ tag_name, args, kwargs = parse_token_kwargs( parser, token, allowed_kwargs=cls.allowed_kwargs, compile_args=cls.compile_args, compile_kwargs=cls.compile_kwargs ) cls.validate_args(tag_name, *args, **kwargs) if cls.end_tag_name: kwargs['nodelist'] = parser.parse((cls.end_tag_name,)) parser.delete_first_token() return cls(tag_name, *args, **kwargs)
python
{ "resource": "" }
q276085
BaseNode.render_tag
test
def render_tag(self, context, *tag_args, **tag_kwargs): """ Render the tag, with all arguments resolved to their actual values. """ raise NotImplementedError("{0}.render_tag() is not implemented!".format(self.__class__.__name__))
python
{ "resource": "" }
q276086
BaseNode.validate_args
test
def validate_args(cls, tag_name, *args, **kwargs): """ Validate the syntax of the template tag. """ if cls.min_args is not None and len(args) < cls.min_args: if cls.min_args == 1: raise TemplateSyntaxError("'{0}' tag requires at least {1} argument".format(tag_name, cls.min_args)) else: raise TemplateSyntaxError("'{0}' tag requires at least {1} arguments".format(tag_name, cls.min_args)) if cls.max_args is not None and len(args) > cls.max_args: if cls.max_args == 0: if cls.allowed_kwargs: raise TemplateSyntaxError("'{0}' tag only allows keyword arguments, for example {1}=\"...\".".format(tag_name, cls.allowed_kwargs[0])) else: raise TemplateSyntaxError("'{0}' tag doesn't support any arguments".format(tag_name)) elif cls.max_args == 1: raise TemplateSyntaxError("'{0}' tag only allows {1} argument.".format(tag_name, cls.max_args)) else: raise TemplateSyntaxError("'{0}' tag only allows {1} arguments.".format(tag_name, cls.max_args))
python
{ "resource": "" }
q276087
BaseInclusionNode.get_context_data
test
def get_context_data(self, parent_context, *tag_args, **tag_kwargs): """ Return the context data for the included template. """ raise NotImplementedError("{0}.get_context_data() is not implemented.".format(self.__class__.__name__))
python
{ "resource": "" }
q276088
BaseAssignmentOrInclusionNode.parse
test
def parse(cls, parser, token): """ Parse the "as var" syntax. """ bits, as_var = parse_as_var(parser, token) tag_name, args, kwargs = parse_token_kwargs(parser, bits, ('template',) + cls.allowed_kwargs, compile_args=cls.compile_args, compile_kwargs=cls.compile_kwargs) # Pass through standard chain cls.validate_args(tag_name, *args) return cls(tag_name, as_var, *args, **kwargs)
python
{ "resource": "" }
q276089
BaseAssignmentOrInclusionNode.get_context_data
test
def get_context_data(self, parent_context, *tag_args, **tag_kwargs): """ Return the context data for the inclusion tag. Returns ``{'value': self.get_value(parent_context, *tag_args, **tag_kwargs)}`` by default. """ if 'template' not in self.allowed_kwargs: # The overwritten get_value() doesn't have to take care of our customly inserted tag parameters, # It can safely assume passing **tag_kwargs to another function. tag_kwargs.pop('template', None) return { self.context_value_name: self.get_value(parent_context, *tag_args, **tag_kwargs) }
python
{ "resource": "" }
q276090
caffe_to_tensorflow_session
test
def caffe_to_tensorflow_session(caffe_def_path, caffemodel_path, inputs, graph_name='Graph', conversion_out_dir_path=None, use_padding_same=False): """Create a TensorFlow Session from a Caffe model.""" try: # noinspection PyUnresolvedReferences from caffeflow import convert except ImportError: raise Exception("caffeflow package needs to be installed to freeze Caffe models. Check out the README file.") with (dummy_context_mgr(conversion_out_dir_path) or util.TemporaryDirectory()) as dir_path: params_values_output_path = os.path.join(dir_path, 'params_values.npy') network_output_path = os.path.join(dir_path, 'network.py') convert.convert(caffe_def_path, caffemodel_path, params_values_output_path, network_output_path, False, use_padding_same=use_padding_same) network_module = imp.load_source('module.name', network_output_path) network_class = getattr(network_module, graph_name) network = network_class(inputs) sess = tf.Session() network.load(params_values_output_path, sess) return sess
python
{ "resource": "" }
q276091
freeze
test
def freeze(caffe_def_path, caffemodel_path, inputs, output_file_path, output_node_names, graph_name='Graph', conversion_out_dir_path=None, checkpoint_out_path=None, use_padding_same=False): """Freeze and shrink the graph based on a Caffe model, the input tensors and the output node names.""" with caffe_to_tensorflow_session(caffe_def_path, caffemodel_path, inputs, graph_name=graph_name, conversion_out_dir_path=conversion_out_dir_path, use_padding_same=use_padding_same) as sess: saver = tf.train.Saver() with (dummy_context_mgr(checkpoint_out_path) or util.TemporaryDirectory()) as temp_dir_path: checkpoint_path = checkpoint_out_path or os.path.join(temp_dir_path, 'pose.ckpt') saver.save(sess, checkpoint_path) output_node_names = util.output_node_names_string_as_list(output_node_names) tf_freeze.freeze_from_checkpoint(checkpoint_path, output_file_path, output_node_names)
python
{ "resource": "" }
q276092
save_graph_only
test
def save_graph_only(caffe_def_path, caffemodel_path, inputs, output_file_path, output_node_names, graph_name='Graph', use_padding_same=False): """Save a small version of the graph based on a Caffe model, the input tensors and the output node names.""" with caffe_to_tensorflow_session(caffe_def_path, caffemodel_path, inputs, graph_name=graph_name, use_padding_same=use_padding_same) as sess: tf_freeze.save_graph_only(sess, output_file_path, output_node_names)
python
{ "resource": "" }
q276093
make_rows
test
def make_rows(num_columns, seq): """ Make a sequence into rows of num_columns columns. >>> tuple(make_rows(2, [1, 2, 3, 4, 5])) ((1, 4), (2, 5), (3, None)) >>> tuple(make_rows(3, [1, 2, 3, 4, 5])) ((1, 3, 5), (2, 4, None)) """ # calculate the minimum number of rows necessary to fit the list in # num_columns Columns num_rows, partial = divmod(len(seq), num_columns) if partial: num_rows += 1 # break the seq into num_columns of length num_rows try: result = more_itertools.grouper(seq, num_rows) except TypeError: # more_itertools before 6.x result = more_itertools.grouper(num_rows, seq) # result is now a list of columns... transpose it to return a list # of rows return zip(*result)
python
{ "resource": "" }
q276094
grouper_nofill_str
test
def grouper_nofill_str(n, iterable): """ Take a sequence and break it up into chunks of the specified size. The last chunk may be smaller than size. This works very similar to grouper_nofill, except it works with strings as well. >>> tuple(grouper_nofill_str(3, 'foobarbaz')) ('foo', 'bar', 'baz') You can still use it on non-strings too if you like. >>> tuple(grouper_nofill_str(42, [])) () >>> tuple(grouper_nofill_str(3, list(range(10)))) ([0, 1, 2], [3, 4, 5], [6, 7, 8], [9]) """ res = more_itertools.chunked(iterable, n) if isinstance(iterable, six.string_types): res = (''.join(item) for item in res) return res
python
{ "resource": "" }
q276095
every_other
test
def every_other(iterable): """ Yield every other item from the iterable >>> ' '.join(every_other('abcdefg')) 'a c e g' """ items = iter(iterable) while True: try: yield next(items) next(items) except StopIteration: return
python
{ "resource": "" }
q276096
remove_duplicates
test
def remove_duplicates(iterable, key=None): """ Given an iterable with items that may come in as sequential duplicates, remove those duplicates. Unlike unique_justseen, this function does not remove triplicates. >>> ' '.join(remove_duplicates('abcaabbccaaabbbcccbcbc')) 'a b c a b c a a b b c c b c b c' >>> ' '.join(remove_duplicates('aaaabbbbb')) 'a a b b b' """ return itertools.chain.from_iterable(six.moves.map( every_other, six.moves.map( operator.itemgetter(1), itertools.groupby(iterable, key) )))
python
{ "resource": "" }
q276097
peek
test
def peek(iterable): """ Get the next value from an iterable, but also return an iterable that will subsequently return that value and the rest of the original iterable. >>> l = iter([1,2,3]) >>> val, l = peek(l) >>> val 1 >>> list(l) [1, 2, 3] """ peeker, original = itertools.tee(iterable) return next(peeker), original
python
{ "resource": "" }
q276098
takewhile_peek
test
def takewhile_peek(predicate, iterable): """ Like takewhile, but takes a peekable iterable and doesn't consume the non-matching item. >>> items = Peekable(range(10)) >>> is_small = lambda n: n < 4 >>> small_items = takewhile_peek(is_small, items) >>> list(small_items) [0, 1, 2, 3] >>> list(items) [4, 5, 6, 7, 8, 9] >>> empty = takewhile_peek(is_small, Peekable([])) >>> list(empty) [] >>> items = Peekable([3]) >>> small_items = takewhile_peek(is_small, items) >>> list(small_items) [3] >>> list(items) [] >>> items = Peekable([4]) >>> small_items = takewhile_peek(is_small, items) >>> list(small_items) [] >>> list(items) [4] """ while True: try: if not predicate(iterable.peek()): break yield next(iterable) except StopIteration: break
python
{ "resource": "" }
q276099
partition_items
test
def partition_items(count, bin_size): """ Given the total number of items, determine the number of items that can be added to each bin with a limit on the bin size. So if you want to partition 11 items into groups of 3, you'll want three of three and one of two. >>> partition_items(11, 3) [3, 3, 3, 2] But if you only have ten items, you'll have two groups of three and two of two. >>> partition_items(10, 3) [3, 3, 2, 2] """ num_bins = int(math.ceil(count / float(bin_size))) bins = [0] * num_bins for i in range(count): bins[i % num_bins] += 1 return bins
python
{ "resource": "" }