function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
def ctime(self): return self._ctime
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def ctime(self, value): self._ctime = self._validate_integer("ctime", value)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def generation(self): return self._generation
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def generation(self, value): self._generation = self._validate_integer("generation", value)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def sequence(self): return self._sequence
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def sequence(self, value): self._sequence = self._validate_integer("sequence", value)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def _validate_integer(cls, property, value):
    if not isinstance(value, six.integer_types):
        raise AssertionError(
            "Invalid value for metadata property {!r}: {!r}".format(
                property, value))
    return value
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def validate_description(cls, desc):
    desc = str(desc)
    # We cannot fail when the description is too long, since we must
    # support older engine that may send such values, or old disks
    # with long description.
    if len(desc) > sc.DESCRIPTION_SIZE:
        cls.log.warning("Description is too long, truncating to %d bytes",
                        sc.DESCRIPTION_SIZE)
        desc = desc[:sc.DESCRIPTION_SIZE]
    return desc
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def __getitem__(self, item):
    try:
        value = getattr(self, self._fieldmap[item])
    except AttributeError:
        raise KeyError(item)
    # Some fields need to be converted to string
    if item in (sc.CAPACITY, sc.CTIME):
        value = str(value)
    return value
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def get(self, item, default=None):
    try:
        return self[item]
    except KeyError:
        return default
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testProcessDeviceParams(self):
    deviceXML = hostdev._process_device_params(
        libvirtconnection.get().nodeDeviceLookupByName(
            hostdevlib.ADDITIONAL_DEVICE).XMLDesc()
    )
    self.assertEqual(hostdevlib.ADDITIONAL_DEVICE_PROCESSED, deviceXML)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testProcessDeviceParamsInvalidEncoding(self):
    deviceXML = hostdev._process_device_params(
        libvirtconnection.get().nodeDeviceLookupByName(
            hostdevlib.COMPUTER_DEVICE).XMLDesc()
    )
    self.assertEqual(hostdevlib.COMPUTER_DEVICE_PROCESSED, deviceXML)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testProcessSRIOV_VFDeviceParams(self):
    deviceXML = hostdev._process_device_params(
        libvirtconnection.get().nodeDeviceLookupByName(
            hostdevlib.SRIOV_VF).XMLDesc()
    )
    self.assertEqual(hostdevlib.SRIOV_VF_PROCESSED, deviceXML)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testProcessMdevDeviceParams(self):
    deviceXML = hostdev._process_device_params(
        libvirtconnection.get().nodeDeviceLookupByName(
            hostdevlib.MDEV_DEVICE).XMLDesc()
    )
    self.assertEqual(hostdevlib.MDEV_DEVICE_PROCESSED, deviceXML)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testListByCaps(self, caps):
    devices = hostdev.list_by_caps(caps)
    for cap in caps:
        self.assertTrue(set(hostdevlib.DEVICES_BY_CAPS[cap].keys()).
                        issubset(set(devices.keys())))
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def test_device_name_from_address(self, addr_type, addr, name):
    # we need to make sure we scan all the devices (hence caps=None)
    hostdev.list_by_caps()
    self.assertEqual(
        hostdev.device_name_from_address(addr_type, addr), name
    )
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def test_3k_storage_devices(self):
    with hostdevlib.Connection.use_hostdev_tree():
        self.assertEqual(
            len(hostdev.list_by_caps()),
            len(libvirtconnection.get().listAllDevices())
        )
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def setUp(self):
    self.conf = {
        'vmName': 'testVm',
        'vmId': '9ffe28b6-6134-4b1e-8804-1185f49c436f',
        'smp': '8', 'maxVCpus': '160',
        'memSize': '1024', 'memGuaranteedSize': '512'}
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def testCreateSRIOVVF(self):
    dev_spec = {'type': hwclass.NIC, 'device': 'hostdev',
                'hostdev': hostdevlib.SRIOV_VF,
                'macAddr': 'ff:ff:ff:ff:ff:ff',
                'specParams': {'vlanid': 3},
                'bootOrder': '9'}
    device = network.Interface(self.log, **dev_spec)
    self.assertXMLEqual(
        xmlutils.tostring(device.getXML()),
        hostdevlib.DEVICE_XML[hostdevlib.SRIOV_VF] % ('',))
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def setUp(self):
    def make_device(name):
        mdev_types = [
            hostdevlib.FakeMdevType('incompatible-1', 2),
            hostdevlib.FakeMdevType('8q', 1),
            hostdevlib.FakeMdevType('4q', 2),
            hostdevlib.FakeMdevType('incompatible-2', 2),
        ]
        return hostdevlib.FakeMdevDevice(name=name, vendor='0x10de',
                                         mdev_types=mdev_types)

    self.devices = [make_device(name) for name in ('card-1', 'card-2',)]
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def test_vgpu_placement(self, mdev_specs, mdev_placement, instances):
    with MonkeyPatchScope([
        (hostdev, '_each_mdev_device', lambda: self.devices)
    ]):
        for mdev_type, mdev_uuid in mdev_specs:
            hostdev.spawn_mdev(mdev_type, mdev_uuid, mdev_placement,
                               self.log)
    for inst, dev in zip(instances, self.devices):
        dev_inst = []
        for mdev_type in dev.mdev_types:
            dev_inst.extend(mdev_type.instances)
        self.assertEqual(inst, dev_inst)
oVirt/vdsm
[ 129, 183, 129, 68, 1351274855 ]
def borat(self, mess, args):
    """ Random quotes from the DEVOPS_BORAT twitter account """
    myfeed = parse('http://api.twitter.com/1/statuses/user_timeline.rss?screen_name=DEVOPS_BORAT')
    items = myfeed['entries']
    return choice(items).description
errbotio/err-devops-borat
[ 6, 4, 6, 1, 1337606195 ]
def jesus(self, mess, args):
    """ Random quotes from the devops_jesus twitter account """
    myfeed = parse('http://api.twitter.com/1/statuses/user_timeline.rss?screen_name=devops_jesus')
    items = myfeed['entries']
    return choice(items).description
errbotio/err-devops-borat
[ 6, 4, 6, 1, 1337606195 ]
def __init__(self, plugin, address):
    self.plugin = plugin
    # Call constructor of the parent class
    asynchat.async_chat.__init__(self)
    # Set up input line terminator
    self.set_terminator('\r\n')
    # Initialize input data buffer
    self.buffer = ''
    # create and connect a socket
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    eg.RestartAsyncore()
    self.settimeout(1.0)
    try:
        self.connect(address)
    except:
        pass
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def handle_expt(self):
    # connection failed
    self.plugin.isSessionRunning = False
    self.plugin.TriggerEvent("NoConnection")
    self.close()
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def collect_incoming_data(self, data):
    """Called with data holding an arbitrary amount of received data."""
    self.buffer = self.buffer + data
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def __call__(self): self.plugin.DoCommand(self.value)
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def __call__(self, Param): self.plugin.DoCommand(self.value + " " + Param)
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def __call__(self): self.plugin.DoCommand("5000 " + self.value)
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def __init__(self):
    self.host = "localhost"
    self.port = 2663
    self.isSessionRunning = False
    self.timeline = ""
    self.waitStr = None
    self.waitFlag = threading.Event()
    self.PlayState = -1
    self.lastMessage = {}
    self.lastSubtitleNum = 0
    self.lastSubtitlesEnabled = False
    self.lastAudioTrackNum = 0
    group = self.AddGroup('Requests')
    for className, scancode, descr in ttRequests:
        clsAttributes = dict(name=descr, value=scancode)
        cls = new.classobj(className, (stdAction,), clsAttributes)
        group.AddAction(cls)
    group = self.AddGroup('Commands')
    for className, scancode, descr, ParamDescr in ttCommands:
        clsAttributes = dict(name=descr, value=scancode)
        if ParamDescr == "":
            if className[0:3] == "IP_":
                cls = new.classobj(className, (stdAction,), clsAttributes)
            else:
                cls = new.classobj(className, (wmAction,), clsAttributes)
        else:
            cls = new.classobj(className, (stdActionWithStringParameter,), clsAttributes)
            cls.parameterDescription = ParamDescr
        group.AddAction(cls)
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def ValueUpdate(self, text):
    if text == self.waitStr:
        self.waitStr = None
        self.waitFlag.set()
        return
    header = text[0:4]
    state = text[5:].decode('utf-8')
    self.lastMessage[header] = state
    ttEvent = self.ttEvents.get(header, None)
    if ttEvent is not None:
        if type(ttEvent) == type({}):
            eventString = ttEvent.get(state, None)
            if eventString is not None:
                self.TriggerEvent(eventString)
            else:
                self.TriggerEvent(header, [state])
        elif type(ttEvent) == type(()):
            suffix2 = ttEvent[1].get(state, None)
            if suffix2 is not None:
                self.TriggerEvent(ttEvent[0] + "." + suffix2)
            else:
                self.TriggerEvent(ttEvent[0] + "." + str(state))
        else:
            if state == "":
                self.TriggerEvent(ttEvent)
            else:
                self.TriggerEvent(ttEvent, [state])
        return
    else:
        self.TriggerEvent(header, [state])
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def DoCommand(self, cmdstr):
    self.waitFlag.clear()
    self.waitStr = cmdstr
    if not self.isSessionRunning:
        self.session = TheaterTekSession(self, (self.host, self.port))
        self.isSessionRunning = True
    try:
        self.session.sendall(cmdstr + "\r\n")
    except:
        self.isSessionRunning = False
        self.TriggerEvent('close')
        self.session.close()
    self.waitFlag.wait(1.0)
    self.waitStr = None
    self.waitFlag.set()
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def Configure( self, host="localhost", port=2663, dummy1=None, dummy2=None
EventGhost/EventGhost
[ 368, 86, 368, 69, 1390085124 ]
def load_font(font_path):
    """ Load a new TTF font into Blender, and return the font object """
    # get the original list of fonts (before we add a new one)
    original_fonts = bpy.data.fonts.keys()
XXLRay/libreshot
[ 1, 1, 1, 4, 1442911504 ]
def __init__(self, db, subscription, ds, address, timestamp_range=None, port=None, page_size=50):
    self.db = db
    self.sub = subscription
    self.table_nodes = "s{acct}_Nodes".format(acct=self.sub)
    self.table_links = "s{acct}_ds{id}_Links".format(acct=self.sub, id=ds)
    self.table_links_in = "s{acct}_ds{id}_LinksIn".format(acct=self.sub, id=ds)
    self.table_links_out = "s{acct}_ds{id}_LinksOut".format(acct=self.sub, id=ds)
    self.ds = ds
    self.ip_start, self.ip_end = sam.common.determine_range_string(address)
    self.page_size = page_size
    self.port = port
    if timestamp_range:
        self.time_range = timestamp_range
    else:
        linksModel = sam.models.links.Links(db, self.sub, self.ds)
        tr = linksModel.get_timerange()
        self.time_range = (tr['min'], tr['max'])
    if self.db.dbname == 'mysql':
        self.elapsed = '(UNIX_TIMESTAMP(MAX(timestamp)) - UNIX_TIMESTAMP(MIN(timestamp)))'
        self.divop = 'DIV'
    else:
        self.elapsed = '(MAX(timestamp) - MIN(timestamp))'
        self.divop = '/'
    sam.common.sqlite_udf(self.db)
riolet/SAM
[ 175, 17, 175, 5, 1467750068 ]
def build_where_clause(self, timestamp_range=None, port=None, protocol=None, rounding=True):
    """
    Build a WHERE SQL clause that covers basic timerange, port, and protocol filtering.

    :param timestamp_range: start and end times as unix timestamps (integers). Default is all time.
    :type timestamp_range: tuple[int, int]
    :param port: exclusively report traffic destined for this port, if specified.
    :type port: int or str
    :param protocol: exclusively report traffic using this protocol
    :type protocol: str
    :param rounding: round each time stamp to the nearest quantization mark.
        (db records are quantized for conciseness)
    :type rounding: bool
    :return: String SQL clause
    :rtype: str
    """
    clauses = []
    t_start = 0
    t_end = 0
    if timestamp_range:
        t_start = timestamp_range[0]
        t_end = timestamp_range[1]
        if rounding:
            # rounding to 5 minutes, for use with the Syslog table
            if t_start > 150:
                t_start -= 150
            if t_end <= 2 ** 31 - 150:
                t_end += 149
        if self.db.dbname == 'sqlite':
            clauses.append("timestamp BETWEEN $tstart AND $tend")
        else:
            clauses.append("timestamp BETWEEN FROM_UNIXTIME($tstart) AND FROM_UNIXTIME($tend)")
    if port:
        clauses.append("port = $port")
    if protocol:
        clauses.append("protocols LIKE $protocol")
        protocol = "%{0}%".format(protocol)
    qvars = {'tstart': t_start, 'tend': t_end, 'port': port, 'protocol': protocol}
    where = str(web.db.reparam("\n AND ".join(clauses), qvars))
    if where:
        where = " AND " + where
    return where
riolet/SAM
[ 175, 17, 175, 5, 1467750068 ]
def get_details_ports(self, page=1, order="-links"):
    sort_options = ['links', 'port']
    first_result = (page - 1) * self.page_size
    qvars = {
        'links_table': self.table_links,
        'start': self.ip_start,
        'end': self.ip_end,
        'first': first_result,
        'size': self.page_size,
        'WHERE': self.build_where_clause(self.time_range, self.port),
    }
    if order and order[0] == '-':
        sort_dir = "DESC"
    else:
        sort_dir = "ASC"
    if order and order[1:] in sort_options:
        sort_by = order[1:]
    else:
        sort_by = sort_options[0]
    qvars['order'] = "{0} {1}".format(sort_by, sort_dir)
    query = """
        SELECT port AS 'port', sum(links) AS 'links'
        FROM {links_table}
        WHERE dst BETWEEN $start AND $end
         {WHERE}
        GROUP BY port
        ORDER BY {order}
        LIMIT $first, $size;
        """.format(**qvars)
    return list(sam.common.db.query(query, vars=qvars))
riolet/SAM
[ 175, 17, 175, 5, 1467750068 ]
def get_mutually_exclusive_required_option(options, *selections):
    """
    Validates that exactly one of the 2 given options is specified.
    Returns the name of the found option.
    """
    selected = [sel for sel in selections if options.get(sel)]
    if len(selected) != 1:
        selection_string = ', '.join(f'--{selection}' for selection in selections)
        raise CommandError(f'Must specify exactly one of {selection_string}')
    return selected[0]
eduNEXT/edunext-platform
[ 28, 7, 28, 10, 1414072000 ]
def validate_dependent_option(options, dependent_option, depending_on_option):
    """
    Validates that option_1 is specified if dependent_option is specified.
    """
    if options.get(dependent_option) and not options.get(depending_on_option):
        raise CommandError(f'Option --{dependent_option} requires option --{depending_on_option}.')
eduNEXT/edunext-platform
[ 28, 7, 28, 10, 1414072000 ]
def addTemplate(core): mobileTemplate = MobileTemplate()
ProjectSWGCore/NGECore2
[ 23, 70, 23, 56, 1372673790 ]
def __init__(self, dae, nk):
    self.dae = dae
    self.nk = nk
    self._gaussNewtonObjF = []

    mapSize = len(self.dae.xNames())*(self.nk+1) + len(self.dae.pNames())
    V = C.msym('dvs', mapSize)
    self._dvMap = nmheMaps.VectorizedReadOnlyNmheMap(self.dae, self.nk, V)

    self._boundMap = nmheMaps.WriteableNmheMap(self.dae, self.nk)
    self._guessMap = nmheMaps.WriteableNmheMap(self.dae, self.nk)

    self._U = C.msym('u', self.nk, len(self.dae.uNames()))
    self._outputMapGenerator = nmheMaps.NmheOutputMapGenerator(self, self._U)
    self._outputMap = nmheMaps.NmheOutputMap(self._outputMapGenerator,
                                             self._dvMap.vectorize(), self._U)

    self._constraints = Constraints()
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def lookup(self, name, timestep=None):
    try:
        return self._dvMap.lookup(name, timestep=timestep)
    except NameError:
        pass
    try:
        return self._outputMap.lookup(name, timestep)
    except NameError:
        pass
    raise NameError("unrecognized name \"" + name + "\"")
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def guess(self,name,val,timestep=None): self._guessMap.setVal(name,val,timestep=timestep)
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def setObj(self, obj):
    if hasattr(self, '_obj'):
        raise ValueError("don't change the objective function")
    self._obj = obj
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def _setupDynamicsConstraints(self, endTime, traj):
    # Todo: add parallelization
    # Todo: get endTime right
    g = []
    nicp = 1
    deg = 4
    p = self._dvMap.pVec()
    for k in range(self.nk):
        newton = Newton(LagrangePoly, self.dae, 1, nicp, deg, 'RADAU')
        newton.setupStuff(endTime)

        X0_i = self._dvMap.xVec(k)
        U_i = self._U[k, :].T

        # guess
        if traj is None:
            newton.isolver.setOutput(1, 0)
        else:
            X = C.DMatrix([[traj.lookup(name, timestep=k, degIdx=j) for j in range(1, traj.dvMap._deg+1)] \
                           for name in traj.dvMap._xNames])
            Z = C.DMatrix([[traj.lookup(name, timestep=k, degIdx=j) for j in range(1, traj.dvMap._deg+1)] \
                           for name in traj.dvMap._zNames])
            newton.isolver.setOutput(C.veccat([X, Z]), 0)
        _, Xf_i = newton.isolver.call([X0_i, U_i, p])
        X0_i_plus = self._dvMap.xVec(k+1)
        g.append(Xf_i - X0_i_plus)
    return g
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def runSolver(self, U, trajTrue=None):
    # make sure all bounds are set
    (xMissing, pMissing) = self._guessMap.getMissing()
    msg = []
    for name in xMissing:
        msg.append("you forgot to set a guess for \"" + name + "\" at timesteps: " + str(xMissing[name]))
    for name in pMissing:
        msg.append("you forgot to set a guess for \"" + name + "\"")
    if len(msg) > 0:
        raise ValueError('\n'.join(msg))

    lbx, ubx = zip(*(self._boundMap.vectorize()))
    xk = C.DMatrix(list(self._guessMap.vectorize()))
    for k in range(100):
        ############# plot stuff ###############
        print "iteration: ", k
ghorn/rawesome
[ 10, 9, 10, 17, 1348591411 ]
def _load_lib():
    """Load xlearn shared library"""
    lib_path = find_lib_path()
    if len(lib_path) == 0:
        return None
    lib = ctypes.cdll.LoadLibrary(lib_path[0])
    return lib
PKU-Cloud-Lab/xLearn
[ 3039, 537, 3039, 195, 1497082171 ]
def _check_call(ret):
    """Check the return value of C API call

    This function will raise exception when error occurs.
    Wrap every API call with this function

    Parameters
    ----------
    ret : int
        return value from API calls
    """
    if ret != 0:
        msg = ""
        # raise XLearnError()
        _LIB.XLearnGetLastError.restype = ctypes.POINTER(ctypes.c_ubyte)
        ptr = _LIB.XLearnGetLastError()
        idx = 0
        while(ptr[idx] != 0):
            msg += chr(ptr[idx])
            idx += 1
        raise XLearnError(msg)
PKU-Cloud-Lab/xLearn
[ 3039, 537, 3039, 195, 1497082171 ]
def c_str(string):
    """Create ctypes char * from a Python string.

    Parameters
    ----------
    string : string type
        Python string.

    Returns
    -------
    str : c_char_p
        A char pointer that can be passed to C API.

    Examples
    --------
    >>> x = c_str("Hello, world!")
    >>> print x.value
    Hello, world!
    """
    return ctypes.c_char_p(string)
PKU-Cloud-Lab/xLearn
[ 3039, 537, 3039, 195, 1497082171 ]
def c_str(string):
    """Create ctypes char * from a Python string.

    Parameters
    ----------
    string : string type
        Python string.

    Returns
    -------
    str : c_char_p
        A char pointer that can be passed to C API.

    Examples
    --------
    >>> x = c_str("Hello, world!")
    >>> print(x.value)
    Hello, world!
    """
    return ctypes.c_char_p(string.encode('utf-8'))
PKU-Cloud-Lab/xLearn
[ 3039, 537, 3039, 195, 1497082171 ]
def test_successful_provider_removal():
    """
    Here we give the module a text file with PROVIDER: written in it,
    it should remove that line in the file
    """
    remove_provider = remove.RemoveProviderR()
    remove.web = WebDummy()  # override the web variable in remove.py
    test_provider = "PROV"
    expected_providers_contents = ['What:\n', 'Test:']  # what we expect to see in providers.txt after we call GET
    # create the file
    with open(PROVIDERS_FILE_PATH, 'w') as f:
        f.writelines([
            "What:",
            "\n",
            test_provider + ":",
            "\n",
            "Test:"
        ])
    assert remove_provider.GET(test_provider) == "removed " + test_provider
    # read the file and see if it has removed the line with the test_provider
    with open(PROVIDERS_FILE_PATH, 'r') as f:
        provider_contents = f.readlines()
    delete_file(PROVIDERS_FILE_PATH)  # delete the file
    assert provider_contents == expected_providers_contents
CyberReboot/vcontrol
[ 5, 12, 5, 64, 1470093229 ]
def check_label_shapes(labels, preds, wrap=False, shape=False):
    """Helper function for checking shape of label and prediction

    Parameters
    ----------
    labels : list of `NDArray`
        The labels of the data.
    preds : list of `NDArray`
        Predicted values.
    wrap : boolean
        If True, wrap labels/preds in a list if they are single NDArray
    shape : boolean
        If True, check the shape of labels and preds;
        Otherwise only check their length.
    """
    if not shape:
        label_shape, pred_shape = len(labels), len(preds)
    else:
        label_shape, pred_shape = labels.shape, preds.shape

    if label_shape != pred_shape:
        raise ValueError("Shape of labels {} does not match shape of "
                         "predictions {}".format(label_shape, pred_shape))

    if wrap:
        if isinstance(labels, ndarray.ndarray.NDArray):
            labels = [labels]
        if isinstance(preds, ndarray.ndarray.NDArray):
            preds = [preds]

    return labels, preds
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name, output_names=None, label_names=None, **kwargs):
    self.name = str(name)
    self.output_names = output_names
    self.label_names = label_names
    self._kwargs = kwargs
    self.reset()
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get_config(self):
    """Save configurations of metric. Can be recreated
    from configs with metric.create(**config)
    """
    config = self._kwargs.copy()
    config.update({
        'metric': self.__class__.__name__,
        'name': self.name,
        'output_names': self.output_names,
        'label_names': self.label_names})
    return config
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def update(self, labels, preds):
    """Updates the internal evaluation result.

    Parameters
    ----------
    labels : list of `NDArray`
        The labels of the data.
    preds : list of `NDArray`
        Predicted values.
    """
    raise NotImplementedError()
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get(self):
    """Gets the current evaluation result.

    Returns
    -------
    names : list of str
        Name of the metrics.
    values : list of float
        Value of the evaluations.
    """
    if self.num_inst == 0:
        return (self.name, float('nan'))
    else:
        return (self.name, self.sum_metric / self.num_inst)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def create(metric, *args, **kwargs):
    """Creates evaluation metric from metric names or instances of EvalMetric
    or a custom metric function.

    Parameters
    ----------
    metric : str or callable
        Specifies the metric to create.
        This argument must be one of the below:

        - Name of a metric.
        - An instance of `EvalMetric`.
        - A list, each element of which is a metric or a metric name.
        - An evaluation function that computes custom metric for a given batch of
          labels and predictions.
    *args : list
        Additional arguments to metric constructor.
        Only used when metric is str.
    **kwargs : dict
        Additional arguments to metric constructor.
        Only used when metric is str

    Examples
    --------
    >>> def custom_metric(label, pred):
    ...     return np.mean(np.abs(label - pred))
    ...
    >>> metric1 = mx.metric.create('acc')
    >>> metric2 = mx.metric.create(custom_metric)
    >>> metric3 = mx.metric.create([metric1, metric2, 'rmse'])
    """
    if callable(metric):
        return CustomMetric(metric, *args, **kwargs)
    elif isinstance(metric, list):
        composite_metric = CompositeEvalMetric()
        for child_metric in metric:
            composite_metric.add(create(child_metric, *args, **kwargs))
        return composite_metric

    return _create(metric, *args, **kwargs)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, metrics=None, name='composite',
             output_names=None, label_names=None):
    super(CompositeEvalMetric, self).__init__(
        'composite', output_names=output_names, label_names=label_names)
    if metrics is None:
        metrics = []
    self.metrics = [create(i) for i in metrics]
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get_metric(self, index):
    """Returns a child metric.

    Parameters
    ----------
    index : int
        Index of child metric in the list of metrics.
    """
    try:
        return self.metrics[index]
    except IndexError:
        return ValueError("Metric index {} is out of range 0 and {}".format(
            index, len(self.metrics)))
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def update(self, labels, preds):
    """Updates the internal evaluation result.

    Parameters
    ----------
    labels : list of `NDArray`
        The labels of the data.
    preds : list of `NDArray`
        Predicted values.
    """
    for metric in self.metrics:
        metric.update(labels, preds)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get(self):
    """Returns the current evaluation result.

    Returns
    -------
    names : list of str
        Name of the metrics.
    values : list of float
        Value of the evaluations.
    """
    names = []
    values = []
    for metric in self.metrics:
        name, value = metric.get()
        if isinstance(name, string_types):
            name = [name]
        if isinstance(value, numeric_types):
            value = [value]
        names.extend(name)
        values.extend(value)
    return (names, values)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, axis=1, name='accuracy',
             output_names=None, label_names=None):
    super(Accuracy, self).__init__(
        name, axis=axis,
        output_names=output_names, label_names=label_names)
    self.axis = axis
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, top_k=1, name='top_k_accuracy',
             output_names=None, label_names=None):
    super(TopKAccuracy, self).__init__(
        name, top_k=top_k,
        output_names=output_names, label_names=label_names)
    self.top_k = top_k
    assert(self.top_k > 1), 'Please use Accuracy if top_k is no more than 1'
    self.name += '_%d' % self.top_k
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self):
    self.true_positives = 0
    self.false_negatives = 0
    self.false_positives = 0
    self.true_negatives = 0
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def precision(self):
    if self.true_positives + self.false_positives > 0:
        return float(self.true_positives) / (self.true_positives + self.false_positives)
    else:
        return 0.
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def recall(self):
    if self.true_positives + self.false_negatives > 0:
        return float(self.true_positives) / (self.true_positives + self.false_negatives)
    else:
        return 0.
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def fscore(self):
    if self.precision + self.recall > 0:
        return 2 * self.precision * self.recall / (self.precision + self.recall)
    else:
        return 0.
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def total_examples(self):
    return self.false_negatives + self.false_positives + \
           self.true_negatives + self.true_positives
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='f1',
             output_names=None, label_names=None, average="macro"):
    self.average = average
    self.metrics = _BinaryClassificationMetrics()
    EvalMetric.__init__(self, name=name,
                        output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def reset(self):
    """Resets the internal evaluation result to initial state."""
    self.sum_metric = 0.
    self.num_inst = 0.
    self.metrics.reset_stats()
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, ignore_label, axis=-1, name='perplexity',
             output_names=None, label_names=None):
    super(Perplexity, self).__init__(
        name, ignore_label=ignore_label,
        output_names=output_names, label_names=label_names)
    self.ignore_label = ignore_label
    self.axis = axis
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get(self):
    """Returns the current evaluation result.

    Returns
    -------
    Tuple of (str, float)
        Representing name of the metric and evaluation result.
    """
    return (self.name, math.exp(self.sum_metric/self.num_inst))
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='mae', output_names=None, label_names=None): super(MAE, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='mse', output_names=None, label_names=None): super(MSE, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='rmse', output_names=None, label_names=None): super(RMSE, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, eps=1e-12, name='cross-entropy',
             output_names=None, label_names=None):
    super(CrossEntropy, self).__init__(
        name, eps=eps,
        output_names=output_names, label_names=label_names)
    self.eps = eps
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, eps=1e-12, name='nll-loss',
             output_names=None, label_names=None):
    super(NegativeLogLikelihood, self).__init__(
        name, eps=eps,
        output_names=output_names, label_names=label_names)
    self.eps = eps
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='pearsonr', output_names=None, label_names=None): super(PearsonCorrelation, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='loss', output_names=None, label_names=None): super(Loss, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='torch', output_names=None, label_names=None): super(Torch, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, name='caffe', output_names=None, label_names=None): super(Caffe, self).__init__( name, output_names=output_names, label_names=label_names)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def __init__(self, feval, name=None, allow_extra_outputs=False,
             output_names=None, label_names=None):
    if name is None:
        name = feval.__name__
        if name.find('<') != -1:
            name = 'custom(%s)' % name
    super(CustomMetric, self).__init__(
        name, feval=feval,
        allow_extra_outputs=allow_extra_outputs,
        output_names=output_names, label_names=label_names)
    self._feval = feval
    self._allow_extra_outputs = allow_extra_outputs
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def get_config(self): raise NotImplementedError("CustomMetric cannot be serialized")
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def np(numpy_feval, name=None, allow_extra_outputs=False):
    """Creates a custom evaluation metric that receives its inputs as numpy arrays.

    Parameters
    ----------
    numpy_feval : callable(label, pred)
        Custom evaluation function that receives labels and predictions for a minibatch
        as numpy arrays and returns the corresponding custom metric as a floating point number.
    name : str, optional
        Name of the custom metric.
    allow_extra_outputs : bool, optional
        Whether prediction output is allowed to have extra outputs. This is useful in cases
        like RNN where states are also part of output which can then be fed back to the RNN
        in the next step. By default, extra outputs are not allowed.

    Returns
    -------
    float
        Custom metric corresponding to the provided labels and predictions.

    Example
    -------
    >>> def custom_metric(label, pred):
    ...     return np.mean(np.abs(label-pred))
    ...
    >>> metric = mx.metric.np(custom_metric)
    """
    def feval(label, pred):
        """Internal eval function."""
        return numpy_feval(label, pred)

    feval.__name__ = numpy_feval.__name__
    return CustomMetric(feval, name, allow_extra_outputs)
TuSimple/mxnet
[ 28, 25, 28, 1, 1457693796 ]
def Args(parser):
    """Register flags for this command."""
    parser.add_argument(
        'name', help='Name of the function to be called.',
        type=util.ValidateFunctionNameOrRaise)
    parser.add_argument(
        '--data', default='',
        help='Data passed to the function (JSON string)')
KaranToor/MA450
[ 1, 1, 1, 4, 1484697944 ]
def test_can_write_simple_identifier(self):
    escaped = cypher_escape("foo")
    assert escaped == "foo"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_can_write_identifier_containing_back_ticks(self):
    escaped = cypher_escape("foo `bar`")
    assert escaped == "`foo ``bar```"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_cannot_write_none_identifier(self):
    with self.assertRaises(TypeError):
        _ = cypher_escape(None)
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_none(self):
    encoded = cypher_repr(None)
    assert encoded == u"null"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_true(self):
    encoded = cypher_repr(True)
    assert encoded == u"true"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_zero(self):
    encoded = cypher_repr(0)
    assert encoded == u"0"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_negative_integer(self):
    encoded = cypher_repr(-123)
    assert encoded == u"-123"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_zero(self):
    encoded = cypher_repr(0.0)
    assert encoded == u"0.0"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_negative_float(self):
    encoded = cypher_repr(-123.456)
    assert encoded == u"-123.456"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_bytes(self):
    encoded = cypher_repr(b"hello, world")
    assert encoded == u"'hello, world'"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_bytes_with_escaped_chars(self):
    encoded = cypher_repr(b"hello, 'world'", quote=u"'")
    assert encoded == u"'hello, \\'world\\''"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_empty_string(self):
    encoded = cypher_repr(u"")
    assert encoded == u"''"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_backspace(self):
    encoded = cypher_repr(u"\b")
    assert encoded == u"'\\b'"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_new_line(self):
    encoded = cypher_repr(u"\n")
    assert encoded == u"'\\n'"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]
def test_should_encode_horizontal_tab(self):
    encoded = cypher_repr(u"\t")
    assert encoded == u"'\\t'"
technige/cypy
[ 5, 2, 5, 1, 1445556635 ]