Dataset schema:
    function  : string (length 11 to 56k)
    repo_name : string (length 5 to 60)
    features  : sequence
def __init__(self, fitID, itemIDs):
    wx.Command.__init__(self, True, 'Remove Cargos')
    self.internalHistory = InternalCommandHistory()
    self.fitID = fitID
    self.itemIDs = itemIDs
DarkFenX/Pyfa
[ 1401, 374, 1401, 265, 1370894453 ]
def cloudfare(url, quality, nyaa_c):
    web = BrowseUrl(url, quality, nyaa_c)
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def __init__(self, tmp):
    self.hdr = 'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:45.0) Gecko/20100101 Firefox/45.0'
    self.tmp_dir = tmp
    self.cookie_file = os.path.join(tmp, 'nyaa.txt')
    if not os.path.exists(self.cookie_file):
        f = open(self.cookie_file, 'w')
        f.close()
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def getOptions(self):
    criteria = ['Date', 'Seeders', 'Leechers', 'Downloads', 'History', 'LocalStreaming']
    return criteria
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def ccurlN(self, url):
    content = ccurl(url + '#-b#' + self.cookie_file)
    #print(content)
    if 'checking_browser' in content:
        if os.path.exists(self.cookie_file):
            os.remove(self.cookie_file)
        cloudfare(url, '', self.cookie_file)
        content = ccurl(url + '#-b#' + self.cookie_file)
    return content
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
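The ccurlN helper above encodes a fetch, detect-challenge, retry pattern: if the response is a Cloudflare interstitial, it discards the stale cookie jar, re-solves the challenge, and fetches again. A minimal standalone sketch of the same idea using requests; the 'checking_browser' marker is taken from the row, while the solve_challenge hook is a hypothetical stand-in for the cloudfare helper.

    # Fetch, detect a challenge page, reset state, retry once.
    # solve_challenge() is a hypothetical callback (e.g. a browser-based solver).
    import os
    import requests

    def fetch_with_retry(url, cookie_file, solve_challenge):
        session = requests.Session()
        content = session.get(url).text
        if 'checking_browser' in content:      # challenge page detected
            if os.path.exists(cookie_file):
                os.remove(cookie_file)         # drop the stale cookie jar
            solve_challenge(url, cookie_file)  # re-establish valid cookies
            content = session.get(url).text    # retry with fresh state
        return content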
def process_page(self, url):
    content = self.ccurlN(url)
    soup = BeautifulSoup(content, 'lxml')
    #print(soup.prettify())
    unit_element = soup.findAll('td', {'colspan': '2'})
    #print(unit_element[0])
    s = []
    for i in unit_element:
        try:
            element = i.findAll('a')
            for index in element:
                et = index['href']
                if '#comment' not in et:
                    elem = index
                    j = elem['title']
                    try:
                        k = elem['href'].split('/')[-1]
                    except:
                        k = 'Download Not Available'
                    break
            td = i.findNext('td', {'class': 'text-center'})
            sz = td.findNext('td', {'class': 'text-center'})
            dt = sz.findNext('td', {'class': 'text-center'})
            se = dt.findNext('td', {'class': 'text-center'})
            le = se.findNext('td', {'class': 'text-center'})
            down = le.findNext('td', {'class': 'text-center'})
            try:
                tmp = j.replace('_', ' ') + ' id=' + k + '|Size=' + sz.text + '|Seeds=' + se.text + '|Leechers=' + le.text + '|Total Downloads=' + down.text
            except:
                tmp = 'Not Available'
            print(tmp)
            s.append(tmp)
        except Exception as e:
            print(e, '--98---')
    return s  # implied by search() below, which consumes this function's return value
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def search(self, name):
    strname = str(name)
    print(strname)
    url = "https://nyaa.si/?f=0&c=1_2&s=seeders&o=desc&q=" + str(strname)
    m = self.process_page(url)
    return m
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def getCompleteList(self, opt, genre_num, ui, tmp_dir, hist_folder):
    global tmp_working_dir
    instr = "Press . or > for next page -1"
    tmp_working_dir = tmp_dir
    if opt == 'Date':
        url = 'https://nyaa.si/?c=1_2'
    elif opt == 'Seeders':
        url = 'https://nyaa.si/?c=1_2&s=seeders&o=desc'
    elif opt == 'Leechers':
        url = 'https://nyaa.si/?c=1_2&s=leechers&o=desc'
    elif opt == 'Downloads':
        url = 'https://nyaa.si/?c=1_2&s=downloads&o=desc'
    print(opt, url)
    m = self.process_page(url)
    m.append(instr)
    return m
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
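The if/elif chain in getCompleteList maps a sort option onto a nyaa.si listing URL; note it leaves url unbound for options outside these four ('History', 'LocalStreaming'). A table-driven sketch of the same mapping, using only the four URLs from the row, with a fallback to the date-sorted listing (the fallback is an editorial choice, not in the original):

    # Option -> URL mapping as a lookup table instead of an if/elif chain.
    SORT_URLS = {
        'Date': 'https://nyaa.si/?c=1_2',
        'Seeders': 'https://nyaa.si/?c=1_2&s=seeders&o=desc',
        'Leechers': 'https://nyaa.si/?c=1_2&s=leechers&o=desc',
        'Downloads': 'https://nyaa.si/?c=1_2&s=downloads&o=desc',
    }

    def listing_url(opt):
        # Unknown options fall back to the date-sorted listing.
        return SORT_URLS.get(opt, SORT_URLS['Date'])

    print(listing_url('Seeders'))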
def getEpnList(self, name, opt, depth_list, extra_info, siteName, category):
    if extra_info == '-1':
        arr = []
        return (arr, 'Instructions', 'No.jpg', False, depth_list)
    else:
        print(extra_info)
        name_id = (re.search('id=[^|]*', extra_info).group()).split('=')[1]
        url = "https://nyaa.si/download/" + name_id + '.torrent'
        print(url)
        summary = ""
abhishek-archlinux/AnimeWatch
[ 22, 1, 22, 2, 1457414237 ]
def _load_dynamic_env_data(bld):
    bldnode = bld.bldnode.make_node('modules/ChibiOS')
    tmp_str = bldnode.find_node('include_dirs').read()
    tmp_str = tmp_str.replace(';\n', '')
    tmp_str = tmp_str.replace('-I', '')  # remove existing -I flags
    # split, coping with separator
    idirs = re.split('; ', tmp_str)
    # create unique list, coping with relative paths
    idirs2 = []
    for d in idirs:
        if d.startswith('../'):
            # relative paths from the make build are relative to BUILDROOT
            d = os.path.join(bld.env.BUILDROOT, d)
        d = os.path.normpath(d)
        if not d in idirs2:
            idirs2.append(d)
    _dynamic_env_data['include_dirs'] = idirs2
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
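The interesting part of _load_dynamic_env_data is the path cleanup: strip -I flags, resolve make-relative paths against a build root, normalize, and deduplicate while preserving order. A self-contained sketch of that logic with made-up sample input:

    # Standalone version of the include-path cleanup above.
    import os
    import re

    def clean_include_dirs(raw, buildroot):
        raw = raw.replace(';\n', '').replace('-I', '')
        dirs = []
        for d in re.split('; ', raw):
            if d.startswith('../'):
                d = os.path.join(buildroot, d)  # make paths are BUILDROOT-relative
            d = os.path.normpath(d)
            if d not in dirs:                   # order-preserving dedup
                dirs.append(d)
        return dirs

    print(clean_include_dirs('-I../inc; -I../inc; -Ios/hal', '/tmp/build'))
    # ['/tmp/inc', 'os/hal']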
def ch_dynamic_env(self):
    # The generated files from configuration possibly don't exist if it's just
    # a list command (TODO: figure out a better way to address that).
    if self.bld.cmd == 'list':
        return
    if not _dynamic_env_data:
        _load_dynamic_env_data(self.bld)
    self.use += ' ch'
    self.env.append_value('INCLUDES', _dynamic_env_data['include_dirs'])
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def run(self):
    upload_tools = self.env.get_flat('UPLOAD_TOOLS')
    src = self.inputs[0]
    return self.exec_command("python '{}/px_uploader.py' '{}'".format(upload_tools, src))
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "Uploading"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "apj_tool"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "Generating"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "apj_gen"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "Generating"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def keyword(self): return "Generating"
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def chibios_firmware(self):
    self.link_task.always_run = True
    link_output = self.link_task.outputs[0]
    bin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.bin').name)
    apj_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.apj').name)
    generate_bin_task = self.create_task('generate_bin', src=link_output, tgt=bin_target)
    generate_bin_task.set_run_after(self.link_task)
    generate_apj_task = self.create_task('generate_apj', src=bin_target, tgt=apj_target)
    generate_apj_task.set_run_after(generate_bin_task)
    if self.env.BUILD_ABIN:
        abin_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.abin').name)
        abin_task = self.create_task('build_abin', src=link_output, tgt=abin_target)
        abin_task.set_run_after(generate_apj_task)
    bootloader_bin = self.bld.srcnode.make_node("Tools/bootloaders/%s_bl.bin" % self.env.BOARD)
    if os.path.exists(bootloader_bin.abspath()) and self.bld.env.HAVE_INTEL_HEX:
        hex_target = self.bld.bldnode.find_or_declare('bin/' + link_output.change_ext('.hex').name)
        hex_task = self.create_task('build_intel_hex', src=[bin_target, bootloader_bin], tgt=hex_target)
        hex_task.set_run_after(generate_bin_task)
    if self.env.DEFAULT_PARAMETERS:
        default_params_task = self.create_task('set_default_parameters', src=link_output)
        default_params_task.set_run_after(self.link_task)
        generate_bin_task.set_run_after(default_params_task)
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def setup_can_build(cfg):
    '''enable CAN build. By doing this here we can auto-enable CAN in
    the build based on the presence of CAN pins in hwdef.dat'''
    env = cfg.env
    env.AP_LIBRARIES += [
        'AP_UAVCAN',
        'modules/uavcan/libuavcan/src/**/*.cpp',
        'modules/uavcan/libuavcan_drivers/stm32/driver/src/*.cpp'
        ]
    env.CFLAGS += ['-DUAVCAN_STM32_CHIBIOS=1', '-DUAVCAN_STM32_NUM_IFACES=2']
    env.CXXFLAGS += [
        '-Wno-error=cast-align',
        '-DUAVCAN_STM32_CHIBIOS=1',
        '-DUAVCAN_STM32_NUM_IFACES=2'
        ]
    env.DEFINES += [
        'UAVCAN_CPP_VERSION=UAVCAN_CPP03',
        'UAVCAN_NO_ASSERTIONS=1',
        'UAVCAN_NULLPTR=nullptr'
        ]
    env.INCLUDES += [
        cfg.srcnode.find_dir('modules/uavcan/libuavcan/include').abspath(),
        cfg.srcnode.find_dir('modules/uavcan/libuavcan_drivers/stm32/driver/include').abspath()
        ]
    cfg.get_board().with_uavcan = True
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def configure(cfg):
    cfg.find_program('make', var='MAKE')
    #cfg.objcopy = cfg.find_program('%s-%s'%(cfg.env.TOOLCHAIN,'objcopy'), var='OBJCOPY', mandatory=True)
    cfg.find_program('arm-none-eabi-objcopy', var='OBJCOPY')
    env = cfg.env
    bldnode = cfg.bldnode.make_node(cfg.variant)

    def srcpath(path):
        return cfg.srcnode.make_node(path).abspath()

    def bldpath(path):
        return bldnode.make_node(path).abspath()

    env.AP_PROGRAM_FEATURES += ['ch_ap_program']

    kw = env.AP_LIBRARIES_OBJECTS_KW
    kw['features'] = Utils.to_list(kw.get('features', [])) + ['ch_ap_library']

    env.CH_ROOT = srcpath('modules/ChibiOS')
    env.AP_HAL_ROOT = srcpath('libraries/AP_HAL_ChibiOS')
    env.BUILDDIR = bldpath('modules/ChibiOS')
    env.BUILDROOT = bldpath('')
    env.SRCROOT = srcpath('')
    env.PT_DIR = srcpath('Tools/ardupilotwaf/chibios/image')
    env.UPLOAD_TOOLS = srcpath('Tools/ardupilotwaf')
    env.CHIBIOS_SCRIPTS = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts')
    env.TOOLS_SCRIPTS = srcpath('Tools/scripts')
    env.APJ_TOOL = srcpath('Tools/scripts/apj_tool.py')
    env.SERIAL_PORT = srcpath('/dev/serial/by-id/*_STLink*')

    # relative paths to pass to make, relative to directory that make is run from
    env.CH_ROOT_REL = os.path.relpath(env.CH_ROOT, env.BUILDROOT)
    env.AP_HAL_REL = os.path.relpath(env.AP_HAL_ROOT, env.BUILDROOT)
    env.BUILDDIR_REL = os.path.relpath(env.BUILDDIR, env.BUILDROOT)

    mk_custom = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/chibios_board.mk' % env.BOARD)
    mk_common = srcpath('libraries/AP_HAL_ChibiOS/hwdef/common/chibios_board.mk')
    # see if there is a board specific make file
    if os.path.exists(mk_custom):
        env.BOARD_MK = mk_custom
    else:
        env.BOARD_MK = mk_common

    if cfg.options.default_parameters:
        cfg.msg('Default parameters', cfg.options.default_parameters, color='YELLOW')
        env.DEFAULT_PARAMETERS = srcpath(cfg.options.default_parameters)

    # we need to run chibios_hwdef.py at configure stage to generate the ldscript.ld
    # that is needed by the remaining configure checks
    import subprocess

    if env.BOOTLOADER:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef-bl.dat' % env.BOARD)
        env.BOOTLOADER_OPTION = "--bootloader"
    else:
        env.HWDEF = srcpath('libraries/AP_HAL_ChibiOS/hwdef/%s/hwdef.dat' % env.BOARD)
        env.BOOTLOADER_OPTION = ""
    hwdef_script = srcpath('libraries/AP_HAL_ChibiOS/hwdef/scripts/chibios_hwdef.py')
    hwdef_out = env.BUILDROOT
    if not os.path.exists(hwdef_out):
        os.mkdir(hwdef_out)
    try:
        cmd = "python '{0}' -D '{1}' '{2}' {3}".format(hwdef_script, hwdef_out, env.HWDEF, env.BOOTLOADER_OPTION)
        ret = subprocess.call(cmd, shell=True)
    except Exception:
        cfg.fatal("Failed to process hwdef.dat")
    if ret != 0:
        cfg.fatal("Failed to process hwdef.dat ret=%d" % ret)

    load_env_vars(cfg.env)
    if env.HAL_WITH_UAVCAN:
        setup_can_build(cfg)
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def build(bld):
    bld(
        # build hwdef.h from hwdef.dat. This is needed after a waf clean
        source=bld.path.ant_glob(bld.env.HWDEF),
        rule="python '${AP_HAL_ROOT}/hwdef/scripts/chibios_hwdef.py' -D '${BUILDROOT}' '%s' %s" % (bld.env.HWDEF, bld.env.BOOTLOADER_OPTION),
        group='dynamic_sources',
        target=[bld.bldnode.find_or_declare('hwdef.h'),
                bld.bldnode.find_or_declare('ldscript.ld')]
    )
yonahbox/ardupilot
[ 4, 2, 4, 1, 1485155198 ]
def proxyConfig(host, port):
    r = requests.get("http://{0}:{1}/config".format(host, port))
    return r.json()
FaradayRF/Faraday-Software
[ 43, 19, 43, 53, 1473224429 ]
def initializeFaradayConfig():
    '''
    Initialize Faraday radio configuration file from faraday_config.sample.ini

    :return: None, exits program
    '''
    faradayHelper.initializeConfig(faradayTruthFile, faradayFile)
    sys.exit(0)
FaradayRF/Faraday-Software
[ 43, 19, 43, 53, 1473224429 ]
def displayConfig(faradayConfigPath):
    '''
    Prints out the Faraday Configuration file

    :param faradayConfigPath: path to faraday configuration file
    :return: None
    '''
    with open(faradayConfigPath, 'r') as configFile:
        print configFile.read()
    sys.exit(0)
FaradayRF/Faraday-Software
[ 43, 19, 43, 53, 1473224429 ]
def configureDeviceConfiguration(args, faradayConfigPath):
    '''
    Configure device configuration file from command line

    :param args: argparse arguments
    :return: None
    '''
    config = ConfigParser.RawConfigParser()
    config.read(deviceConfigPath)
    fconfig = ConfigParser.RawConfigParser()
    fconfig.read(faradayConfigPath)

    # Obtain proxy configuration
    # TODO: Not hardcode
    proxyConfiguration = proxyConfig("127.0.0.1", 8000)

    # Only works for UNIT0 at this time
    config.set('DEVICES', 'CALLSIGN', proxyConfiguration["UNIT0"].get("callsign"))
    config.set('DEVICES', 'NODEID', proxyConfiguration["UNIT0"].get("nodeid"))

    # Faraday radio configuration
    if args.callsign is not None:
        fconfig.set('BASIC', 'CALLSIGN', args.callsign)
    if args.nodeid is not None:
        fconfig.set('BASIC', 'ID', args.nodeid)

    # Obtain configboot bitmask options
    if args.redledtxon:
        fconfig.set('BASIC', 'REDLEDTX', 1)
    if args.redledtxoff:
        fconfig.set('BASIC', 'REDLEDTX', 0)
    if args.greenledrxon:
        fconfig.set('BASIC', 'GREENLEDRX', 1)
    if args.greenledrxoff:
        fconfig.set('BASIC', 'GREENLEDRX', 0)
    if args.unitconfigured:
        fconfig.set('BASIC', 'UNITCONFIGURED', 1)
    if args.unitunconfigured:
        fconfig.set('BASIC', 'UNITCONFIGURED', 0)

    # Create configuration boot bitmask integer
    bootmask = [0] * 8
    redledtx = fconfig.get('BASIC', 'REDLEDTX')
    greenledrx = fconfig.get('BASIC', 'GREENLEDRX')
    unitconfigured = fconfig.get('BASIC', 'UNITCONFIGURED')
    bootmask[5] = greenledrx
    bootmask[6] = redledtx
    bootmask[7] = unitconfigured
    configbootbitmask = eightBitListToInt(bootmask)
    fconfig.set('BASIC', 'CONFIGBOOTBITMASK', configbootbitmask)

    # Detect and set GPIO P3 settings, create bitmask
    if args.gpiop3on >= 0 and args.gpiop3on <= 7:
        if args.gpiop3on is not None:
            fconfig.set('BASIC', 'GPIO_P3_' + str(args.gpiop3on), 1)
    if args.gpiop3off >= 0 and args.gpiop3off <= 7:
        if args.gpiop3off is not None:
            fconfig.set('BASIC', 'GPIO_P3_' + str(args.gpiop3off), 0)
    gpiomask = [0] * 8
    if not args.gpiop3clear:
        gpio0 = fconfig.get('BASIC', 'GPIO_P3_0')
        gpio1 = fconfig.get('BASIC', 'GPIO_P3_1')
        gpio2 = fconfig.get('BASIC', 'GPIO_P3_2')
        gpio3 = fconfig.get('BASIC', 'GPIO_P3_3')
        gpio4 = fconfig.get('BASIC', 'GPIO_P3_4')
        gpio5 = fconfig.get('BASIC', 'GPIO_P3_5')
        gpio6 = fconfig.get('BASIC', 'GPIO_P3_6')
        gpio7 = fconfig.get('BASIC', 'GPIO_P3_7')
        gpiomask = [gpio7, gpio6, gpio5, gpio4, gpio3, gpio2, gpio1, gpio0]
    if args.gpiop3clear:
        fconfig.set('BASIC', 'GPIO_P3_0', 0)
        fconfig.set('BASIC', 'GPIO_P3_1', 0)
        fconfig.set('BASIC', 'GPIO_P3_2', 0)
        fconfig.set('BASIC', 'GPIO_P3_3', 0)
        fconfig.set('BASIC', 'GPIO_P3_4', 0)
        fconfig.set('BASIC', 'GPIO_P3_5', 0)
        fconfig.set('BASIC', 'GPIO_P3_6', 0)
        fconfig.set('BASIC', 'GPIO_P3_7', 0)
    gpiop3bitmask = eightBitListToInt(gpiomask)
    fconfig.set('BASIC', 'GPIO_P3', gpiop3bitmask)

    # Detect and set GPIO P4 settings, create bitmask
    if args.gpiop4on >= 0 and args.gpiop4on <= 7:
        if args.gpiop4on is not None:
            fconfig.set('BASIC', 'GPIO_P4_' + str(args.gpiop4on), 1)
    if args.gpiop4off >= 0 and args.gpiop4off <= 7:
        if args.gpiop4off is not None:
            fconfig.set('BASIC', 'GPIO_P4_' + str(args.gpiop4off), 0)
    gpiomask = [0] * 8
    if not args.gpiop4clear:
        gpio0 = fconfig.get('BASIC', 'GPIO_P4_0')
        gpio1 = fconfig.get('BASIC', 'GPIO_P4_1')
        gpio2 = fconfig.get('BASIC', 'GPIO_P4_2')
        gpio3 = fconfig.get('BASIC', 'GPIO_P4_3')
        gpio4 = fconfig.get('BASIC', 'GPIO_P4_4')
        gpio5 = fconfig.get('BASIC', 'GPIO_P4_5')
        gpio6 = fconfig.get('BASIC', 'GPIO_P4_6')
        gpio7 = fconfig.get('BASIC', 'GPIO_P4_7')
        gpiomask = [gpio7, gpio6, gpio5, gpio4, gpio3, gpio2, gpio1, gpio0]
    if args.gpiop4clear:
        fconfig.set('BASIC', 'GPIO_P4_0', 0)
        fconfig.set('BASIC', 'GPIO_P4_1', 0)
        fconfig.set('BASIC', 'GPIO_P4_2', 0)
        fconfig.set('BASIC', 'GPIO_P4_3', 0)
        fconfig.set('BASIC', 'GPIO_P4_4', 0)
        fconfig.set('BASIC', 'GPIO_P4_5', 0)
        fconfig.set('BASIC', 'GPIO_P4_6', 0)
        fconfig.set('BASIC', 'GPIO_P4_7', 0)
    gpiop4bitmask = eightBitListToInt(gpiomask)
    fconfig.set('BASIC', 'GPIO_P4', gpiop4bitmask)

    # Detect and set GPIO P5 settings, create bitmask
    if args.gpiop5on >= 0 and args.gpiop5on <= 7:
        if args.gpiop5on is not None:
            fconfig.set('BASIC', 'GPIO_P5_' + str(args.gpiop5on), 1)
    if args.gpiop5off >= 0 and args.gpiop5off <= 7:
        if args.gpiop5off is not None:
            fconfig.set('BASIC', 'GPIO_P5_' + str(args.gpiop5off), 0)
    gpiomask = [0] * 8
    if not args.gpiop5clear:
        gpio0 = fconfig.get('BASIC', 'GPIO_P5_0')
        gpio1 = fconfig.get('BASIC', 'GPIO_P5_1')
        gpio2 = fconfig.get('BASIC', 'GPIO_P5_2')
        gpio3 = fconfig.get('BASIC', 'GPIO_P5_3')
        gpio4 = fconfig.get('BASIC', 'GPIO_P5_4')
        gpio5 = fconfig.get('BASIC', 'GPIO_P5_5')
        gpio6 = fconfig.get('BASIC', 'GPIO_P5_6')
        gpio7 = fconfig.get('BASIC', 'GPIO_P5_7')
        gpiomask = [gpio7, gpio6, gpio5, gpio4, gpio3, gpio2, gpio1, gpio0]
    if args.gpiop5clear:
        fconfig.set('BASIC', 'GPIO_P5_0', 0)
        fconfig.set('BASIC', 'GPIO_P5_1', 0)
        fconfig.set('BASIC', 'GPIO_P5_2', 0)
        fconfig.set('BASIC', 'GPIO_P5_3', 0)
        fconfig.set('BASIC', 'GPIO_P5_4', 0)
        fconfig.set('BASIC', 'GPIO_P5_5', 0)
        fconfig.set('BASIC', 'GPIO_P5_6', 0)
        fconfig.set('BASIC', 'GPIO_P5_7', 0)
    gpiop5bitmask = eightBitListToInt(gpiomask)
    fconfig.set('BASIC', 'GPIO_P5', gpiop5bitmask)

    if args.bootfrequency is not None:
        fconfig.set('RF', 'boot_frequency_mhz', args.bootfrequency)
    if args.bootrfpower is not None:
        fconfig.set('RF', 'boot_rf_power', args.bootrfpower)
    if args.latitude is not None:
        fconfig.set('GPS', 'default_latitude', args.latitude)
    if args.longitude is not None:
        fconfig.set('GPS', 'default_longitude', args.longitude)
    if args.latitudedir is not None:
        fconfig.set('GPS', 'default_latitude_direction', args.latitudedir)
    if args.longitudedir is not None:
        fconfig.set('GPS', 'default_longitude_direction', args.longitudedir)
    if args.altitude is not None:
        fconfig.set('GPS', 'default_altitude', args.altitude)
    if args.gpsbooton:
        fconfig.set('GPS', 'gps_boot_bit', 1)
    if args.gpsbootoff:
        fconfig.set('GPS', 'gps_boot_bit', 0)
    if args.gpsenabled:
        fconfig.set('GPS', 'gps_present_bit', 1)
    if args.gpsdisabled:
        fconfig.set('GPS', 'gps_present_bit', 0)
    if args.uarttelemetryenabled:
        fconfig.set('TELEMETRY', 'uart_telemetry_boot_bit', 1)
    if args.uarttelemetrydisabled:
        fconfig.set('TELEMETRY', 'uart_telemetry_boot_bit', 0)
    if args.rftelemetryenabled:
        fconfig.set('TELEMETRY', 'rf_telemetry_boot_bit', 1)
    if args.rftelemetrydisabled:
        fconfig.set('TELEMETRY', 'rf_telemetry_boot_bit', 0)
    if args.uartinterval is not None and args.uartinterval > 0:
        fconfig.set('TELEMETRY', 'telemetry_default_uart_interval', args.uartinterval)
    if args.rfinterval is not None and args.rfinterval > 0:
        fconfig.set('TELEMETRY', 'telemetry_default_rf_interval', args.rfinterval)

    # Save device configuration
    with open(deviceConfigPath, 'wb') as configfile:
        config.write(configfile)

    # Save Faraday configuration
    with open(faradayConfigPath, 'wb') as configfile:
        fconfig.write(configfile)
FaradayRF/Faraday-Software
[ 43, 19, 43, 53, 1473224429 ]
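The bitmask assembly above leans on a helper, eightBitListToInt, that does not appear in this row. A plausible minimal reconstruction, assuming the list is ordered most-significant bit first (consistent with gpiomask = [gpio7, ..., gpio0] above) and that values may be ints or the strings ConfigParser returns:

    # Hypothetical reconstruction of the eightBitListToInt helper used above.
    def eightBitListToInt(bit_list):
        value = 0
        for bit in bit_list:
            value = (value << 1) | (int(bit) & 1)  # MSB-first fold
        return value

    print(eightBitListToInt([0, 0, 0, 0, 0, 1, 1, 1]))  # -> 7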
def unitconfig():
    """
    This function is called when the RESTful API GET or POST call is made to the '/' of the operating port. Querying a
    GET will command the local and queried unit's device configuration in Flash memory and return the information as a
    JSON dictionary. Issuing a POST will cause the local .INI file configuration to be loaded into the respective units
    Flash memory device configuration.
    """
    if request.method == "POST":
        try:
            print "test POST"
            # Obtain URL parameters (for local unit device callsign/ID assignment)
            callsign = request.args.get("callsign", "%")
            nodeid = request.args.get("nodeid", "%")

            # Obtain configuration values
            config = ConfigParser.RawConfigParser()
            config.read(deviceConfigPath)
            hostname = config.get("PROXY", "HOST")

            # Read Faraday device configuration file
            faradayConfig = ConfigParser.RawConfigParser()
            faradayConfig.read(faradayConfigPath)

            # Create dictionaries of each config section
            device_basic_dict = dict()
            device_basic_dict['CONFIGBOOTBITMASK'] = faradayConfig.get("BASIC", 'CONFIGBOOTBITMASK')
            device_basic_dict['CALLSIGN'] = faradayConfig.get("BASIC", 'CALLSIGN')
            device_basic_dict['ID'] = faradayConfig.get("BASIC", 'ID')
            device_basic_dict['GPIO_P3'] = faradayConfig.get("BASIC", 'GPIO_P3')
            device_basic_dict['GPIO_P4'] = faradayConfig.get("BASIC", 'GPIO_P4')
            device_basic_dict['GPIO_P5'] = faradayConfig.get("BASIC", 'GPIO_P5')

            device_rf_dict = dict()
            device_rf_dict['BOOT_FREQUENCY_MHZ'] = faradayConfig.get("RF", 'BOOT_FREQUENCY_MHZ')
            device_rf_dict['BOOT_RF_POWER'] = faradayConfig.get("RF", 'BOOT_RF_POWER')

            device_gps_dict = dict()
            device_gps_dict['DEFAULT_LATITUDE'] = faradayConfig.get("GPS", 'DEFAULT_LATITUDE')
            device_gps_dict['DEFAULT_LATITUDE_DIRECTION'] = faradayConfig.get("GPS", 'DEFAULT_LATITUDE_DIRECTION')
            device_gps_dict['DEFAULT_LONGITUDE'] = faradayConfig.get("GPS", 'DEFAULT_LONGITUDE')
            device_gps_dict['DEFAULT_LONGITUDE_DIRECTION'] = faradayConfig.get("GPS", 'DEFAULT_LONGITUDE_DIRECTION')
            device_gps_dict['DEFAULT_ALTITUDE'] = faradayConfig.get("GPS", 'DEFAULT_ALTITUDE')
            device_gps_dict['DEFAULT_ALTITUDE_UNITS'] = faradayConfig.get("GPS", 'DEFAULT_ALTITUDE_UNITS')
            device_gps_dict['GPS_BOOT_BIT'] = faradayConfig.get("GPS", 'GPS_BOOT_BIT')
            device_gps_dict['GPS_PRESENT_BIT'] = faradayConfig.get("GPS", 'GPS_PRESENT_BIT')

            device_telemetry_dict = dict()
            device_telemetry_dict['UART_TELEMETRY_BOOT_BIT'] = faradayConfig.get("TELEMETRY", 'UART_TELEMETRY_BOOT_BIT')
            device_telemetry_dict['RF_TELEMETRY_BOOT_BIT'] = faradayConfig.get("TELEMETRY", 'RF_TELEMETRY_BOOT_BIT')
            device_telemetry_dict['TELEMETRY_DEFAULT_UART_INTERVAL'] = faradayConfig.get("TELEMETRY", 'TELEMETRY_DEFAULT_UART_INTERVAL')
            device_telemetry_dict['TELEMETRY_DEFAULT_RF_INTERVAL'] = faradayConfig.get("TELEMETRY", 'TELEMETRY_DEFAULT_RF_INTERVAL')

            # Create device configuration module object to use for programming packet creation
            device_config_object = deviceconfig.DeviceConfigClass()

            # Update the device configuration object with the fields obtained from the INI configuration files loaded
            config_bitmask = device_config_object.create_bitmask_configuration(int(device_basic_dict['CONFIGBOOTBITMASK']))
            status_basic = device_config_object.update_basic(
                config_bitmask, str(device_basic_dict['CALLSIGN']),
                int(device_basic_dict['ID']), int(device_basic_dict['GPIO_P3']),
                int(device_basic_dict['GPIO_P4']), int(device_basic_dict['GPIO_P5']))
            status_rf = device_config_object.update_rf(
                float(device_rf_dict['BOOT_FREQUENCY_MHZ']),
                int(device_rf_dict['BOOT_RF_POWER']))
            status_gps = device_config_object.update_gps(
                device_config_object.update_bitmask_gps_boot(int(device_gps_dict['GPS_PRESENT_BIT']),
                                                             int(device_gps_dict['GPS_BOOT_BIT'])),
                device_gps_dict['DEFAULT_LATITUDE'], device_gps_dict['DEFAULT_LATITUDE_DIRECTION'],
                device_gps_dict['DEFAULT_LONGITUDE'], device_gps_dict['DEFAULT_LONGITUDE_DIRECTION'],
                device_gps_dict['DEFAULT_ALTITUDE'], device_gps_dict['DEFAULT_ALTITUDE_UNITS'])
            status_telem = device_config_object.update_telemetry(
                device_config_object.update_bitmask_telemetry_boot(
                    int(device_telemetry_dict['RF_TELEMETRY_BOOT_BIT']),
                    int(device_telemetry_dict['UART_TELEMETRY_BOOT_BIT'])),
                int(device_telemetry_dict['TELEMETRY_DEFAULT_UART_INTERVAL']),
                int(device_telemetry_dict['TELEMETRY_DEFAULT_RF_INTERVAL']))

            if (status_basic and status_gps and status_rf and status_telem):
                # Create the raw device configuration packet to send to unit
                device_config_packet = device_config_object.create_config_packet()

                # Transmit device configuration to local unit as supplied by the function arguments
                proxy.POST(hostname, str(callsign), int(nodeid), UART_PORT_APP_COMMAND,
                           faradayCmd.CommandLocal(faradayCmd.CMD_DEVICECONFIG, device_config_packet))
                return '', 204  # nothing to return but successful transmission
            else:
                logger.error('Failed to create configuration packet!')
                return 'Failed to create configuration packet!', 400

        except ValueError as e:
            logger.error("ValueError: " + str(e))
            return json.dumps({"error": str(e)}), 400
        except IndexError as e:
            logger.error("IndexError: " + str(e))
            return json.dumps({"error": str(e)}), 400
        except KeyError as e:
            logger.error("KeyError: " + str(e))
            return json.dumps({"error": str(e)}), 400

    else:  # If a GET command
        """
        Provides a RESTful interface to device-configuration at URL '/'
        """
        try:
            # Obtain URL parameters
            callsign = request.args.get("callsign", "%")
            nodeid = request.args.get("nodeid", "%")

            # Obtain configuration values
            config = ConfigParser.RawConfigParser()
            config.read(deviceConfigPath)
            hostname = config.get("PROXY", "HOST")

            callsign = str(callsign).upper()
            nodeid = str(nodeid)

            # Flush all old data from recieve buffer of local unit
            proxy.FlushRxPort(callsign, nodeid, proxy.CMD_UART_PORT)

            proxy.POST(hostname, str(callsign), int(nodeid), UART_PORT_APP_COMMAND,
                       faradayCmd.CommandLocalSendReadDeviceConfig())

            # Wait enough time for Faraday to respond to commanded memory read.
            time.sleep(2)

            try:
                # Retrieve the next device configuration read packet to arrive
                data = proxy.GETWait(hostname, str(callsign), str(nodeid), proxy.CMD_UART_PORT, 2)

                # Create device configuration module object
                device_config_object = deviceconfig.DeviceConfigClass()

                # Decode BASE64 JSON data packet
                data = proxy.DecodeRawPacket(data[0]["data"])  # Get first item
                data = device_config_object.extract_config_packet(data)

                # Parse device configuration into dictionary
                parsed_config_dict = device_config_object.parse_config_packet(data)

                # Encode dictionary data for safe network transit
                pickled_parsed_config_dict = json.dumps(parsed_config_dict)
                pickled_parsed_config_dict_b64 = base64.b64encode(pickled_parsed_config_dict)

            except ValueError as e:
                print e
            except IndexError as e:
                print e
            except KeyError as e:
                print e
            except StandardError as e:
                print e

        except ValueError as e:
            logger.error("ValueError: " + str(e))
            return json.dumps({"error": str(e)}), 400
        except IndexError as e:
            logger.error("IndexError: " + str(e))
            return json.dumps({"error": str(e)}), 400
        except KeyError as e:
            logger.error("KeyError: " + str(e))
            return json.dumps({"error": str(e)}), 400

        return json.dumps({"data": pickled_parsed_config_dict_b64}, indent=1), 200, \
            {'Content-Type': 'application/json'}
FaradayRF/Faraday-Software
[ 43, 19, 43, 53, 1473224429 ]
def read(relpath):
    """ Return string containing the contents of the file at *relpath* relative to this file.
    """
    cwd = os.path.dirname(__file__)
    abspath = os.path.join(cwd, os.path.normpath(relpath))
    with open(abspath) as f:
        return f.read()
libcrack/iker
[ 18, 5, 18, 1, 1436373960 ]
def setUp(self):
    super().setUp()
    self.student = UserFactory.create()
    self.request = RequestFactory().request()
    self.request.session = {}
    self.request.user = self.student
    MessageMiddleware().process_request(self.request)
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_message_escaping(self, message, expected_message_html):
    """
    Verifies that a user message is escaped correctly.
    """
    PageLevelMessages.register_user_message(self.request, UserMessageType.INFO, message)
    messages = list(PageLevelMessages.user_messages(self.request))
    assert len(messages) == 1
    assert messages[0].message_html == expected_message_html
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_message_icon(self, message_type, expected_css_class, expected_icon_class):
    """
    Verifies that a user message returns the correct CSS and icon classes.
    """
    PageLevelMessages.register_user_message(self.request, message_type, TEST_MESSAGE)
    messages = list(PageLevelMessages.user_messages(self.request))
    assert len(messages) == 1
    assert messages[0].css_class == expected_css_class
    assert messages[0].icon_class == expected_icon_class
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_message_type(self, register_message_function, expected_message_type):
    """
    Verifies that each user message function returns the correct type.
    """
    register_message_function(self.request, TEST_MESSAGE)
    messages = list(PageLevelMessages.user_messages(self.request))
    assert len(messages) == 1
    assert messages[0].type == expected_message_type
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_global_message_off_by_default(self):
    """Verifies feature toggle."""
    with self.settings(
            GLOBAL_NOTICE_ENABLED=False,
            GLOBAL_NOTICE_MESSAGE="I <3 HTML-escaping",
            GLOBAL_NOTICE_TYPE='WARNING'
    ):
        # Missing when feature disabled
        assert self.global_message_count() == 0
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def test_global_message_error_isolation(self):
    """Verifies that any setting errors don't break the page, or other messages."""
    with self.settings(
            GLOBAL_NOTICE_ENABLED=True,
            GLOBAL_NOTICE_MESSAGE=ThrowingMarkup(),  # force an error
            GLOBAL_NOTICE_TYPE='invalid'
    ):
        PageLevelMessages.register_user_message(self.request, UserMessageType.WARNING, "something else")

        # Doesn't throw, or even interfere with other messages,
        # when given invalid settings
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always')
            messages = list(PageLevelMessages.user_messages(self.request))
        assert len(w) == 1
        assert str(w[0].message) == "Could not register global notice: Exception('Some random error')"
        assert len(messages) == 1
        assert "something else" in messages[0].message_html
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def awakeFromNib(self): self.color_index = alphaIndex
albertz/music-player
[ 483, 61, 483, 16, 1345772141 ]
def setClearColor_(self, sender):
    self.color_index = sender.tag()
    self.setNeedsDisplay_(True)
albertz/music-player
[ 483, 61, 483, 16, 1345772141 ]
def testConstants(self):
    self.assertEqual(NSXMLNodeOptionsNone, 0)
    self.assertEqual(NSXMLNodeIsCDATA, 1 << 0)
    self.assertEqual(NSXMLNodeExpandEmptyElement, 1 << 1)
    self.assertEqual(NSXMLNodeCompactEmptyElement, 1 << 2)
    self.assertEqual(NSXMLNodeUseSingleQuotes, 1 << 3)
    self.assertEqual(NSXMLNodeUseDoubleQuotes, 1 << 4)
    self.assertEqual(NSXMLDocumentTidyHTML, 1 << 9)
    self.assertEqual(NSXMLDocumentTidyXML, 1 << 10)
    self.assertEqual(NSXMLDocumentValidate, 1 << 13)
    self.assertEqual(NSXMLNodeLoadExternalEntitiesAlways, 1 << 14)
    self.assertEqual(NSXMLNodeLoadExternalEntitiesSameOriginOnly, 1 << 15)
    self.assertEqual(NSXMLNodeLoadExternalEntitiesNever, 1 << 19)
    self.assertEqual(NSXMLDocumentXInclude, 1 << 16)
    self.assertEqual(NSXMLNodePrettyPrint, 1 << 17)
    self.assertEqual(NSXMLDocumentIncludeContentTypeDeclaration, 1 << 18)
    self.assertEqual(NSXMLNodePreserveNamespaceOrder, 1 << 20)
    self.assertEqual(NSXMLNodePreserveAttributeOrder, 1 << 21)
    self.assertEqual(NSXMLNodePreserveEntities, 1 << 22)
    self.assertEqual(NSXMLNodePreservePrefixes, 1 << 23)
    self.assertEqual(NSXMLNodePreserveCDATA, 1 << 24)
    self.assertEqual(NSXMLNodePreserveWhitespace, 1 << 25)
    self.assertEqual(NSXMLNodePreserveDTD, 1 << 26)
    self.assertEqual(NSXMLNodePreserveCharacterReferences, 1 << 27)
    self.assertEqual(NSXMLNodePreserveEmptyElements,
                     (NSXMLNodeExpandEmptyElement | NSXMLNodeCompactEmptyElement))
    self.assertEqual(NSXMLNodePreserveQuotes,
                     (NSXMLNodeUseSingleQuotes | NSXMLNodeUseDoubleQuotes))
    self.assertEqual(NSXMLNodePreserveAll & 0xFFFFFFFF, 0xFFFFFFFF & (
        NSXMLNodePreserveNamespaceOrder |
        NSXMLNodePreserveAttributeOrder |
        NSXMLNodePreserveEntities |
        NSXMLNodePreservePrefixes |
        NSXMLNodePreserveCDATA |
        NSXMLNodePreserveEmptyElements |
        NSXMLNodePreserveQuotes |
        NSXMLNodePreserveWhitespace |
        NSXMLNodePreserveDTD |
        NSXMLNodePreserveCharacterReferences |
        0xFFF00000))
albertz/music-player
[ 483, 61, 483, 16, 1345772141 ]
def SetExpectationImplementation(impl):
    global Expectation
    assert issubclass(impl, BaseExpectation)
    Expectation = impl
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def SetBuildStatsImplementation(impl):
    global BuildStats
    assert issubclass(impl, BaseBuildStats)
    BuildStats = impl
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __init__(self, test, tags, expected_results, bug=None):
    self.test = test
    self.tags = frozenset(tags)
    self.bug = bug or ''
    if isinstance(expected_results, str):
        self.expected_results = frozenset([expected_results])
    else:
        self.expected_results = frozenset(expected_results)
    # We're going to be making a lot of comparisons, and fnmatch is *much*
    # slower (~40x from rough testing) than a straight comparison, so only use
    # it if necessary.
    if '*' in test:
        self._comp = self._CompareWildcard
    else:
        self._comp = self._CompareNonWildcard
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
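The constructor above picks a comparison strategy once, at construction time, so the per-result hot path avoids fnmatch unless the pattern actually contains a wildcard. A minimal standalone sketch of the same choose-once dispatch; the class and method names here are illustrative, not the Chromium API:

    # Bind the comparator at construction so each later match is a single
    # attribute call, falling back to fnmatch only for patterns that need it.
    import fnmatch

    class TestMatcher(object):
        def __init__(self, pattern):
            self.pattern = pattern
            self._comp = (self._match_wildcard if '*' in pattern
                          else self._match_exact)

        def _match_wildcard(self, name):
            return fnmatch.fnmatch(name, self.pattern)

        def _match_exact(self, name):
            return name == self.pattern

        def matches(self, name):
            return self._comp(name)

    print(TestMatcher('suite/*_test').matches('suite/foo_test'))  # True
    print(TestMatcher('exact').matches('exact'))                  # True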
def __ne__(self, other): return not self.__eq__(other)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def _CompareWildcard(self, result_test_name): return fnmatch.fnmatch(result_test_name, self.test)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def AppliesToResult(self, result):
    """Checks whether this expectation should have applied to |result|.

    An expectation applies to a result if the test names match (including
    wildcard expansion) and the expectation's tags are a subset of the
    result's tags.

    Args:
      result: A Result instance to check against.

    Returns:
      True if |self| applies to |result|, otherwise False.
    """
    assert isinstance(result, BaseResult)
    return (self._comp(result.test) and self.tags <= result.tags)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __init__(self, test, tags, actual_result, step, build_id):
    """
    Args:
      test: A string containing the name of the test.
      tags: An iterable containing the typ tags for the result.
      actual_result: The actual result of the test as a string.
      step: A string containing the name of the step on the builder.
      build_id: A string containing the Buildbucket ID for the build this
          result came from.
    """
    self.test = test
    self.tags = frozenset(tags)
    self.actual_result = actual_result
    self.step = step
    self.build_id = build_id
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __ne__(self, other): return not self.__eq__(other)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __init__(self):
    self.passed_builds = 0
    self.total_builds = 0
    self.failure_links = frozenset()
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def failed_builds(self): return self.total_builds - self.passed_builds
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def did_fully_pass(self): return self.passed_builds == self.total_builds
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def did_never_pass(self): return self.failed_builds == self.total_builds
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def AddFailedBuild(self, build_id):
    self.total_builds += 1
    build_link = BuildLinkFromBuildId(build_id)
    self.failure_links = frozenset([build_link]) | self.failure_links
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
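Taken together, the BuildStats pieces in the rows above tally per-step pass/fail counts and accumulate failure links as an immutable frozenset. A compact usage sketch; AddPassedBuild is a hypothetical counterpart (only AddFailedBuild appears in these rows), and the link format comes from BuildLinkFromBuildId further below:

    # Minimal BuildStats-style tally, illustrating the pieces above.
    class BuildStats(object):
        def __init__(self):
            self.passed_builds = 0
            self.total_builds = 0
            self.failure_links = frozenset()

        @property
        def failed_builds(self):
            return self.total_builds - self.passed_builds

        @property
        def did_fully_pass(self):
            return self.passed_builds == self.total_builds

        def AddPassedBuild(self):  # hypothetical counterpart to AddFailedBuild
            self.passed_builds += 1
            self.total_builds += 1

        def AddFailedBuild(self, build_id):
            self.total_builds += 1
            link = 'http://ci.chromium.org/b/%s' % build_id
            self.failure_links = frozenset([link]) | self.failure_links

    stats = BuildStats()
    stats.AddPassedBuild()
    stats.AddFailedBuild('8123')
    print(stats.failed_builds, stats.did_fully_pass)  # 1 False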
def NeverNeededExpectation(self, expectation):  # pylint:disable=unused-argument
    """Returns whether the results tallied in |self| never needed |expectation|.

    Args:
      expectation: An Expectation object that |stats| is located under.

    Returns:
      True if all the results tallied in |self| would have passed without
      |expectation| being present. Otherwise, False.
    """
    return self.did_fully_pass
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __eq__(self, other):
    return (isinstance(other, BuildStats)
            and self.passed_builds == other.passed_builds
            and self.total_builds == other.total_builds
            and self.failure_links == other.failure_links)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def BuildLinkFromBuildId(build_id): return 'http://ci.chromium.org/b/%s' % build_id
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __init__(self, *args, **kwargs):  # pylint:disable=super-init-not-called
    self.update(*args, **kwargs)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def setdefault(self, key, value=None):
    if key not in self:
        self[key] = value
    return self[key]
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def IterToValueType(self, value_type):
    """Recursively iterates over contents until |value_type| is found.

    Used to get rid of nested loops, instead using a single loop that
    automatically iterates through all the contents at a certain depth.

    Args:
      value_type: The type to recurse to and then iterate over. For example,
          "BuilderStepMap" would result in iterating over the BuilderStepMap
          values, meaning that the returned generator would create tuples in
          the form (test_name, expectation, builder_map).

    Returns:
      A generator that yields tuples. The length and content of the tuples
      will vary depending on |value_type|. For example, using
      "BuilderStepMap" would result in tuples of the form (test_name,
      expectation, builder_map), while "BuildStats" would result in
      (test_name, expectation, builder_name, step_name, build_stats).
    """
    if self._value_type() == value_type:
        for k, v in self.items():
            yield k, v
    else:
        for k, v in self.items():
            for nested_value in v.IterToValueType(value_type):
                yield (k, ) + nested_value
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
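A self-contained sketch of that recursion over a toy two-level typed map: each level declares the type of its values, and one generator walks down to the requested depth while accumulating the keys gathered along the way. The classes here are illustrative stand-ins for the Chromium map hierarchy:

    # IterToValueType-style flattening over a two-level dict hierarchy.
    class LeafMap(dict):
        @staticmethod
        def _value_type():
            return int

        def IterToValueType(self, value_type):
            if self._value_type() == value_type:
                for k, v in self.items():
                    yield k, v

    class OuterMap(dict):
        @staticmethod
        def _value_type():
            return LeafMap

        def IterToValueType(self, value_type):
            if self._value_type() == value_type:
                for k, v in self.items():
                    yield k, v
            else:
                for k, v in self.items():
                    for nested in v.IterToValueType(value_type):
                        yield (k,) + nested  # prepend this level's key

    m = OuterMap(a=LeafMap(x=1, y=2), b=LeafMap(z=3))
    print(sorted(m.IterToValueType(int)))
    # [('a', 'x', 1), ('a', 'y', 2), ('b', 'z', 3)]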
def __setitem__(self, key, value):
    assert IsStringType(key)
    assert isinstance(value, ExpectationBuilderMap)
    super(BaseTestExpectationMap, self).__setitem__(key, value)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def IterBuilderStepMaps(self):
    """Iterates over all BuilderStepMaps contained in the map.

    Returns:
      A generator yielding tuples in the form (expectation_file (str),
      expectation (Expectation), builder_map (BuilderStepMap))
    """
    return self.IterToValueType(BuilderStepMap)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def _AddGroupedResults(self, grouped_results, builder, expectation_files):
    """Adds all results in |grouped_results| to |self|.

    Args:
      grouped_results: A dict mapping test name (str) to a list of
          data_types.Result objects for that test.
      builder: A string containing the name of the builder |grouped_results|
          came from.
      expectation_files: An iterable of expectation file names that these
          results could possibly apply to. If None, then expectations from
          all known expectation files will be used.

    Returns:
      A set of data_types.Result objects that had at least one matching
      expectation.
    """
    matched_results = set()
    for test_name, result_list in grouped_results.items():
        for ef, expectation_map in self.items():
            if expectation_files is not None and ef not in expectation_files:
                continue
            for expectation, builder_map in expectation_map.items():
                if not expectation.MaybeAppliesToTest(test_name):
                    continue
                for r in result_list:
                    if expectation.AppliesToResult(r):
                        matched_results.add(r)
                        step_map = builder_map.setdefault(builder, StepBuildStatsMap())
                        stats = step_map.setdefault(r.step, BuildStats())
                        self._AddSingleResult(r, stats)
    return matched_results
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def SplitByStaleness(self):
    """Separates stored data based on expectation staleness.

    Returns:
      Three TestExpectationMaps (stale_dict, semi_stale_dict, active_dict).
      All three combined contain the information of |self|. |stale_dict|
      contains entries for expectations that are no longer being helpful,
      |semi_stale_dict| contains entries for expectations that might be
      removable or modifiable, but have at least one failed test run.
      |active_dict| contains entries for expectations that are preventing
      failures on all builders they're active on, and thus shouldn't be
      removed.
    """
    stale_dict = TestExpectationMap()
    semi_stale_dict = TestExpectationMap()
    active_dict = TestExpectationMap()

    # This initially looks like a good target for using
    # TestExpectationMap's iterators since there are many nested loops.
    # However, we need to reset state in different loops, and the alternative
    # of keeping all the state outside the loop and resetting under certain
    # conditions ends up being less readable than just using nested loops.
    for expectation_file, expectation_map in self.items():
        for expectation, builder_map in expectation_map.items():
            # A temporary map to hold data so we can later determine whether
            # an expectation is stale, semi-stale, or active.
            tmp_map = {
                FULL_PASS: BuilderStepMap(),
                NEVER_PASS: BuilderStepMap(),
                PARTIAL_PASS: BuilderStepMap(),
            }
            split_stats_map = builder_map.SplitBuildStatsByPass(expectation)
            for builder_name, (fully_passed, never_passed,
                               partially_passed) in split_stats_map.items():
                if fully_passed:
                    tmp_map[FULL_PASS][builder_name] = fully_passed
                if never_passed:
                    tmp_map[NEVER_PASS][builder_name] = never_passed
                if partially_passed:
                    tmp_map[PARTIAL_PASS][builder_name] = partially_passed

            def _CopyPassesIntoBuilderMap(builder_map, pass_types):
                for pt in pass_types:
                    for builder, steps in tmp_map[pt].items():
                        builder_map.setdefault(builder, StepBuildStatsMap()).update(steps)

            # Handle the case of a stale expectation.
            if not (tmp_map[NEVER_PASS] or tmp_map[PARTIAL_PASS]):
                builder_map = stale_dict.setdefault(
                    expectation_file,
                    ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
                _CopyPassesIntoBuilderMap(builder_map, [FULL_PASS])
            # Handle the case of an active expectation.
            elif not tmp_map[FULL_PASS]:
                builder_map = active_dict.setdefault(
                    expectation_file,
                    ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
                _CopyPassesIntoBuilderMap(builder_map, [NEVER_PASS, PARTIAL_PASS])
            # Handle the case of a semi-stale expectation.
            else:
                # TODO(crbug.com/998329): Sort by pass percentage so it's
                # easier to find problematic builders without highlighting.
                builder_map = semi_stale_dict.setdefault(
                    expectation_file,
                    ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
                _CopyPassesIntoBuilderMap(builder_map,
                                          [FULL_PASS, PARTIAL_PASS, NEVER_PASS])
    return stale_dict, semi_stale_dict, active_dict
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __setitem__(self, key, value):
    assert isinstance(key, BaseExpectation)
    assert isinstance(value, self._value_type())
    super(ExpectationBuilderMap, self).__setitem__(key, value)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __setitem__(self, key, value):
    assert IsStringType(key)
    assert isinstance(value, self._value_type())
    super(BuilderStepMap, self).__setitem__(key, value)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def SplitBuildStatsByPass(self, expectation):
    """Splits the underlying BuildStats data by passing-ness.

    Args:
      expectation: The Expectation that this BuilderStepMap is located under.

    Returns:
      A dict mapping builder name to a tuple (fully_passed, never_passed,
      partially_passed). Each *_passed is a StepBuildStatsMap containing data
      for the steps that either fully passed on all builds, never passed on
      any builds, or passed some of the time.
    """
    retval = {}
    for builder_name, step_map in self.items():
        fully_passed = StepBuildStatsMap()
        never_passed = StepBuildStatsMap()
        partially_passed = StepBuildStatsMap()
        for step_name, stats in step_map.items():
            if stats.NeverNeededExpectation(expectation):
                assert step_name not in fully_passed
                fully_passed[step_name] = stats
            elif stats.AlwaysNeededExpectation(expectation):
                assert step_name not in never_passed
                never_passed[step_name] = stats
            else:
                assert step_name not in partially_passed
                partially_passed[step_name] = stats
        retval[builder_name] = (fully_passed, never_passed, partially_passed)
    return retval
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def __setitem__(self, key, value):
    assert IsStringType(key)
    assert isinstance(value, self._value_type())
    super(StepBuildStatsMap, self).__setitem__(key, value)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
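The three __setitem__ overrides above all apply the same guard: a dict subclass that type-checks keys and values on insertion, so a wrong type fails loudly at the write site instead of far away at a read. A generic sketch of the pattern; TypedMap and StepStatsMap are illustrative, not part of the Chromium code:

    # Generic type-checked dict: validate key/value types on every []-assignment.
    class TypedMap(dict):
        key_type = str
        value_type = object

        def __setitem__(self, key, value):
            assert isinstance(key, self.key_type), 'bad key: %r' % (key,)
            assert isinstance(value, self.value_type), 'bad value: %r' % (value,)
            super(TypedMap, self).__setitem__(key, value)

    class StepStatsMap(TypedMap):
        value_type = int  # stand-in for BuildStats

    m = StepStatsMap()
    m['browser_tests'] = 3  # fine
    try:
        m[42] = 3           # wrong key type -> AssertionError
    except AssertionError as e:
        print(e)

Note that this only guards []-assignment; dict.update bypasses __setitem__, which is presumably why the map classes above also define their own update/setdefault behavior.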
def IsStringType(s): return isinstance(s, six.string_types)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def do_latest(): print(_FILE_VERSION)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
def main():
    ap = argparse.ArgumentParser()
    sub = ap.add_subparsers()

    latest = sub.add_parser("latest")
    latest.set_defaults(func=lambda _opts: do_latest())

    download = sub.add_parser("get_url")
    download.set_defaults(
        func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))

    opts = ap.parse_args()
    opts.func(opts)
nwjs/chromium.src
[ 136, 133, 136, 45, 1453904223 ]
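main() above wires each subcommand to a handler through set_defaults(func=...), so dispatch is just opts.func(opts). A self-contained sketch of the same pattern; the command names mirror the row, while the handler bodies and the fallback version are stand-ins:

    # argparse sub-command dispatch via set_defaults(func=...).
    import argparse
    import os

    def do_latest():
        print('1.0.0')  # stand-in for the real version constant

    def get_download_url(version):
        print('https://example.invalid/pkg-%s.tar.gz' % version)  # illustrative URL

    def main(argv=None):
        ap = argparse.ArgumentParser()
        sub = ap.add_subparsers(required=True, dest='command')

        latest = sub.add_parser('latest')
        latest.set_defaults(func=lambda _opts: do_latest())

        download = sub.add_parser('get_url')
        download.set_defaults(
            func=lambda _opts: get_download_url(os.environ.get('_3PP_VERSION', '1.0.0')))

        opts = ap.parse_args(argv)
        opts.func(opts)  # dispatch to whichever handler the subcommand bound

    main(['latest'])  # prints 1.0.0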
def __init__(self, base, obj, msg=None):
    if not msg:
        try:
            base.class_mapper(type(obj))
            name = _safe_cls_name(type(obj))
            msg = (
                "Class %r is mapped, but this instance lacks "
                "instrumentation. This occurs when the instance "
                "is created before sqlalchemy.orm.mapper(%s) "
                "was called." % (name, name)
            )
        except UnmappedClassError:
            msg = _default_unmapped(type(obj))
            if isinstance(obj, type):
                msg += (
                    "; was a class (%s) supplied where an instance was "
                    "required?" % _safe_cls_name(obj)
                )
    UnmappedError.__init__(self, msg)
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def __init__(self, cls, msg=None):
    if not msg:
        msg = _default_unmapped(cls)
    UnmappedError.__init__(self, msg)
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def __init__(self, base, state, msg=None):
    if not msg:
        msg = (
            "Instance '%s' has been deleted, or its "
            "row is otherwise not present." % base.state_str(state)
        )
    sa_exc.InvalidRequestError.__init__(self, msg)
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def __init__( self, applied_to_property_type, requesting_property, applies_to, actual_strategy_type, strategy_key,
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def _safe_cls_name(cls):
    try:
        cls_name = ".".join((cls.__module__, cls.__name__))
    except AttributeError:
        cls_name = getattr(cls, "__name__", None)
        if cls_name is None:
            cls_name = repr(cls)
    return cls_name
gltn/stdm
[ 26, 29, 26, 55, 1401777923 ]
def edit(self, spec, prefix):
    # libquadmath is only available on x86_64 and ppc64le
    # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85440
    if self.spec.target.family not in ['x86_64', 'ppc64le']:
        comps = join_path('amrex', 'Tools', 'GNUMake', 'comps')
        maks = [
            join_path(comps, 'gnu.mak'),
            join_path(comps, 'llvm.mak'),
        ]
        for mak in maks:
            filter_file('-lquadmath', '', mak)

    # Set all available makefile options to values we want
    makefile = FileFilter('GNUmakefile')
    makefile.filter(
        r'^AMREX_HOME\s*\?=.*',
        'AMREX_HOME = {0}'.format('./amrex')
    )
    makefile.filter(
        r'^PRECISION\s*=.*',
        'PRECISION = {0}'.format(spec.variants['prec'].value)
    )
    makefile.filter(
        r'^DIM\s*=.*',
        'DIM = {0}'.format(spec.variants['dims'].value)
    )
    makefile.filter(
        r'^PROFILE\s*=.*',
        'PROFILE = {0}'.format(spec.variants['profiling'].value).upper()
    )
    makefile.filter(
        r'^TRACE_PROFILE\s*=.*',
        'TRACE_PROFILE = {0}'.format(spec.variants['profiling'].value).upper()
    )
    makefile.filter(
        r'^COMM_PROFILE\s*=.*',
        'COMM_PROFILE = {0}'.format(spec.variants['profiling'].value).upper()
    )
    makefile.filter(
        r'^COMP\s*=.*',
        'COMP = {0}'.format(self.compiler.name)
    )
    makefile.filter(
        r'^DEBUG\s*=.*',
        'DEBUG = {0}'.format(spec.variants['debug'].value).upper()
    )
    makefile.filter(
        r'^USE_ARRAYVIEW\s*=.*',
        'USE_ARRAY_VIEW = FALSE'
    )
    makefile.filter(
        r'^USE_MPI\s*=.*',
        'USE_MPI = {0}'.format(spec.variants['mpi'].value).upper()
    )
    makefile.filter(
        r'^USE_CXX11\s*=.*',
        'USE_CXX11 = TRUE'
    )
    makefile.filter(
        r'^USE_VOLRENDER\s*=.*',
        'USE_VOLRENDER = FALSE'
    )
    makefile.filter(
        r'^USE_PARALLELVOLRENDER\s*=.*',
        'USE_PARALLELVOLRENDER = FALSE'
    )
    makefile.filter(
        r'^USE_PROFPARSER\s*=.*',
        'USE_PROFPARSER = {0}'.format(spec.variants['profiling'].value).upper()
    )

    # A bit risky here deleting all /usr and /opt X
    # library default search paths in makefile
    makefile.filter(
        r'^.*\b(usr|opt)\b.*$',
        '# Spack removed INCLUDE_LOCATIONS and LIBRARY_LOCATIONS'
    )

    # Read GNUmakefile into array
    with open('GNUmakefile', 'r') as file:
        contents = file.readlines()

    # Edit GNUmakefile includes and libraries to point to Spack
    # dependencies.
    # The safest bet is to put the LIBRARY_LOCATIONS and
    # INCLUDE_LOCATIONS at the beginning of the makefile.
    line_offset = 0
    count = 0
    for lib in ['libsm', 'libice', 'libxpm', 'libx11',
                'libxt', 'libxext', 'motif']:
        contents.insert(
            line_offset + count,
            'LIBRARY_LOCATIONS += {0}\n'.format(spec[lib].prefix.lib)
        )
        contents.insert(
            line_offset + count + 1,
            'INCLUDE_LOCATIONS += {0}\n'.format(spec[lib].prefix.include)
        )
        count += 1

    # Write GNUmakefile
    with open('GNUmakefile', 'w') as file:
        file.writelines(contents)
LLNL/spack
[ 3244, 1839, 3244, 2847, 1389172932 ]
def step_fn(x): return x * 2
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, device):
    """Creates a `OneDeviceStrategy`.

    Args:
      device: Device string identifier for the device on which the variables
        should be placed. See class docs for more details on how the device
        is used. Examples: "/cpu:0", "/gpu:0", "/device:CPU:0",
        "/device:GPU:0"
    """
    super(OneDeviceStrategy, self).__init__(OneDeviceExtended(self, device))
    distribute_lib.distribution_strategy_gauge.get_cell("V2").set(
        "OneDeviceStrategy")
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def distribute_datasets_from_function(
        self,
        dataset_fn,  # pylint: disable=useless-super-delegation
        options=None):
    """Distributes `tf.data.Dataset` instances created by calls to `dataset_fn`.

    `dataset_fn` will be called once for each worker in the strategy. In this
    case, we only have one worker and one device so `dataset_fn` is called
    once.

    The `dataset_fn` should take an `tf.distribute.InputContext` instance
    where information about batching and input replication can be accessed:

    ```
    def dataset_fn(input_context):
      batch_size = input_context.get_per_replica_batch_size(global_batch_size)
      d = tf.data.Dataset.from_tensors([[1.]]).repeat().batch(batch_size)
      return d.shard(
          input_context.num_input_pipelines, input_context.input_pipeline_id)

    inputs = strategy.distribute_datasets_from_function(dataset_fn)

    for batch in inputs:
      replica_results = strategy.run(replica_fn, args=(batch,))
    ```

    IMPORTANT: The `tf.data.Dataset` returned by `dataset_fn` should have a
    per-replica batch size, unlike `experimental_distribute_dataset`, which
    uses the global batch size. This may be computed using
    `input_context.get_per_replica_batch_size`.

    Args:
      dataset_fn: A function taking a `tf.distribute.InputContext` instance
        and returning a `tf.data.Dataset`.
      options: `tf.distribute.InputOptions` used to control options on how
        this dataset is distributed.

    Returns:
      A "distributed `Dataset`", which the caller can iterate over like
      regular datasets.
    """
    return super(OneDeviceStrategy,
                 self).distribute_datasets_from_function(dataset_fn, options)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def run(self, fn, args=(), kwargs=None, options=None):  # pylint: disable=useless-super-delegation
    """Run `fn` on each replica, with the given arguments.

    In `OneDeviceStrategy`, `fn` is simply called within a device scope for
    the given device, with the provided arguments.

    Args:
      fn: The function to run. The output must be a `tf.nest` of `Tensor`s.
      args: (Optional) Positional arguments to `fn`.
      kwargs: (Optional) Keyword arguments to `fn`.
      options: (Optional) An instance of `tf.distribute.RunOptions` specifying
        the options to run `fn`.

    Returns:
      Return value from running `fn`.
    """
    return super(OneDeviceStrategy, self).run(fn, args, kwargs, options)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def scope(self):  # pylint: disable=useless-super-delegation
    """Returns a context manager selecting this Strategy as current.

    Inside a `with strategy.scope():` code block, this thread will use a
    variable creator set by `strategy`, and will enter its "cross-replica
    context".

    In `OneDeviceStrategy`, all variables created inside `strategy.scope()`
    will be on `device` specified at strategy construction time.
    See example in the docs for this class.

    Returns:
      A context manager to use for creating variables with this strategy.
    """
    return super(OneDeviceStrategy, self).scope()
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, device):
    super(OneDeviceStrategyV1, self).__init__(OneDeviceExtended(self, device))
    distribute_lib.distribution_strategy_gauge.get_cell("V1").set(
        "OneDeviceStrategy")
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, container_strategy, device):
    super(OneDeviceExtended, self).__init__(container_strategy)
    self._device = device_util.resolve(device)
    self._input_device = device_util.get_host_for_device(self._device)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _input_workers(self): return self._input_workers_with_options()
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _validate_colocate_with_variable(self, colocate_with_variable): distribute_utils.validate_colocate(colocate_with_variable, self)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _make_input_fn_iterator(
        self,
        input_fn,
        replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
    return input_lib_v1.InputFunctionIterator(input_fn, self._input_workers,
                                              [distribute_lib.InputContext()],
                                              self._container_strategy())
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _broadcast_to(self, tensor, destinations):
    del destinations
    return tensor
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _distribute_datasets_from_function(self, dataset_fn, options):
    if (options and options.experimental_replication_mode ==
            distribute_lib.InputReplicationMode.PER_REPLICA):
        raise NotImplementedError(
            "InputReplicationMode.PER_REPLICA "
            "is only supported in "
            "`experimental_distribute_datasets_from_function` "
            "of tf.distribute.MirroredStrategy")
    return input_util.get_distributed_datasets_from_function(
        dataset_fn,
        self._input_workers_with_options(options),
        [distribute_lib.InputContext()],
        self._container_strategy(),
        options=options)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _experimental_run_steps_on_iterator(self, fn, iterator, iterations,
                                        initial_loop_values=None):
    if initial_loop_values is None:
        initial_loop_values = {}
    initial_loop_values = nest.flatten(initial_loop_values)

    ctx = input_lib.MultiStepContext()

    def body(i, *args):
        """A wrapper around `fn` to create the while loop body."""
        del args
        fn_result = fn(ctx, iterator.get_next())
        flat_last_step_outputs = nest.flatten(ctx.last_step_outputs)
        with ops.control_dependencies([fn_result]):
            return [i + 1] + flat_last_step_outputs

    # We capture the control_flow_context at this point, before we run `fn`
    # inside a while_loop. This is useful in cases where we might need to exit
    # these contexts and get back to the outer context to do some things, for
    # e.g. create an op which should be evaluated only once at the end of the
    # loop on the host. One such usage is in creating metrics' value op.
    self._outer_control_flow_context = (
        ops.get_default_graph()._get_control_flow_context())  # pylint: disable=protected-access

    # TODO(priyag): Use max_iterations instead of an explicit counter.
    cond = lambda i, *args: i < iterations
    i = constant_op.constant(0)
    loop_result = control_flow_ops.while_loop(
        cond, body, [i] + initial_loop_values, name="",
        parallel_iterations=1, back_prop=False, swap_memory=False,
        return_same_structure=True)
    del self._outer_control_flow_context

    ctx.run_op = control_flow_ops.group(loop_result)

    # Convert the last_step_outputs from a list to the original dict structure
    # of last_step_outputs.
    last_step_tensor_outputs = loop_result[1:]
    last_step_tensor_outputs_dict = nest.pack_sequence_as(
        ctx.last_step_outputs, last_step_tensor_outputs)

    ctx._set_last_step_outputs(last_step_tensor_outputs_dict)  # pylint: disable=protected-access
    return ctx
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _reduce_to(self, reduce_op, value, destinations, options):
    del reduce_op, destinations, options
    return value
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _update(self, var, fn, args, kwargs, group):
    # The implementations of _update() and _update_non_slot() are identical
    # except _update() passes `var` as the first argument to `fn()`.
    return self._update_non_slot(var, fn, (var,) + tuple(args), kwargs, group)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def read_var(self, replica_local_var):
    """Read the aggregate value of a replica-local variable."""
    return array_ops.identity(replica_local_var)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def value_container(self, value): return value
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _num_replicas_in_sync(self): return 1
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def worker_devices(self): return (self._device,)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def parameter_devices(self): return (self._device,)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def experimental_should_init(self): return True
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def experimental_between_graph(self): return False
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def should_checkpoint(self): return True
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def should_save_summary(self): return True
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _global_batch_size(self):
    """Global and per-replica batching are equivalent for OneDeviceStrategy."""
    return True
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def _support_per_replica_values(self): return False
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]
def __init__(self, strategy): distribute_lib.ReplicaContext.__init__( self, strategy, replica_id_in_sync_group=0)
tensorflow/tensorflow
[ 171949, 87931, 171949, 2300, 1446859160 ]