language: string (2 classes)
func_code_string: string (length 63 to 466k)
python
def _databunch_load_empty(cls, path, fname:str='export.pkl'):
    "Load an empty `DataBunch` from the exported file in `path/fname` with optional `tfms`."
    sd = LabelLists.load_empty(path, fn=fname)
    return sd.databunch()
python
def generate_checksums(directory, blacklist=_BLACKLIST):
    """
    Compute a checksum for each file in `directory`, with the exception of
    files specified in `blacklist`.

    Args:
        directory (str): Absolute or relative path to the directory.
        blacklist (list/set/tuple): List of blacklisted filenames. Only
            filenames are checked, not paths!

    Returns:
        dict: Dict in format ``{fn: md5_hash}``.

    Note:
        File paths are returned as absolute paths from package root.

    Raises:
        UserWarning: When `directory` doesn't exist.
    """
    if not os.path.exists(directory):
        raise UserWarning("'%s' doesn't exist!" % directory)

    hashes = {}
    for root, dirs, files in os.walk(directory):
        for fn in sorted(files):
            # skip files on blacklist
            if fn in blacklist:
                continue

            fn = os.path.join(root, fn)

            # compute hash of the file; read in binary mode, md5 needs bytes
            with open(fn, "rb") as f:
                checksum = hashlib.md5(f.read())

            fn = _get_required_fn(fn, directory)
            hashes[fn] = checksum.hexdigest()

    return hashes
python
def normalizeGlyphNote(value):
    """
    Normalizes Glyph Note.

    * **value** must be a :ref:`type-string`.
    * Returned value is an unencoded ``unicode`` string
    """
    if not isinstance(value, basestring):
        raise TypeError("Note must be a string, not %s."
                        % type(value).__name__)
    return unicode(value)
java
public void addMultipleItems(int times, final Object... values) {
    for (int i = 0; i < times; i++) {
        addItemAsArray(values);
    }
}
java
public Object parseIdRefElement(Element ele) {
    // A generic reference to any name of any bean.
    String refName = ele.getAttribute(BEAN_REF_ATTRIBUTE);
    if (!StringUtils.hasLength(refName)) {
        // A reference to the id of another bean in the same XML file.
        refName = ele.getAttribute(LOCAL_REF_ATTRIBUTE);
        if (!StringUtils.hasLength(refName)) {
            error("Either 'bean' or 'local' is required for <idref> element", ele);
            return null;
        }
    }
    if (!StringUtils.hasText(refName)) {
        error("<idref> element contains empty target attribute", ele);
        return null;
    }
    RuntimeBeanNameReference ref = new RuntimeBeanNameReference(refName);
    ref.setSource(extractSource(ele));
    return ref;
}
java
@SuppressWarnings("WeakerAccess") public static NumberField buildRMST(int requestingPlayer, Message.MenuIdentifier targetMenu, CdjStatus.TrackSourceSlot slot, CdjStatus.TrackType trackType) { return new NumberField(((requestingPlayer & 0x0ff) << 24) | ((targetMenu.protocolValue & 0xff) << 16) | ((slot.protocolValue & 0xff) << 8) | (trackType.protocolValue & 0xff)); }
java
public static int getIntValue(String primaryKey, String secondaryKey) {
    Object val = CFG.get(primaryKey);
    if (val == null) {
        val = CFG.get(secondaryKey);
        if (val == null) {
            throw new SofaRpcRuntimeException("Not found key: " + primaryKey + "/" + secondaryKey);
        }
    }
    return Integer.parseInt(val.toString());
}
java
public RolloutGroupConditionBuilder errorAction(final RolloutGroupErrorAction action, final String expression) {
    conditions.setErrorAction(action);
    conditions.setErrorActionExp(expression);
    return this;
}
java
public void marshall(AudioNormalizationSettings audioNormalizationSettings, ProtocolMarshaller protocolMarshaller) {
    if (audioNormalizationSettings == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(audioNormalizationSettings.getAlgorithm(), ALGORITHM_BINDING);
        protocolMarshaller.marshall(audioNormalizationSettings.getAlgorithmControl(), ALGORITHMCONTROL_BINDING);
        protocolMarshaller.marshall(audioNormalizationSettings.getTargetLkfs(), TARGETLKFS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def buildFITSName(geisname):
    """Build a new FITS filename for a GEIS input image."""
    # User wants to make a FITS copy and update it...
    _indx = geisname.rfind('.')
    _fitsname = geisname[:_indx] + '_' + geisname[_indx + 1:-1] + 'h.fits'
    return _fitsname
java
public Attribute removeAttributeWithPrefix(CharSequence prefix, CharSequence name) {
    for (Iterator<Attribute> it = event.attributes.iterator(); it.hasNext(); ) {
        Attribute attr = it.next();
        if (attr.localName.equals(name) && attr.namespacePrefix.equals(prefix)) {
            it.remove();
            return attr;
        }
    }
    return null;
}
python
def deleteFile(self, CorpNum, MgtKeyType, MgtKey, FileID, UserID=None):
    """ Delete an attached file.
        args
            CorpNum : member company registration number
            MgtKeyType : management key type, one of ['SELL', 'BUY', 'TRUSTEE']
            MgtKey : partner management key
            FileID : file ID to delete
            UserID : Popbill member user ID
        return
            processing result, consists of code and message
        raise
            PopbillException
    """
    if MgtKeyType not in self.__MgtKeyTypes:
        raise PopbillException(-99999999, "The management key type is not valid.")
    if MgtKey == None or MgtKey == "":
        raise PopbillException(-99999999, "The management key was not provided.")
    if FileID == None or FileID == "":
        raise PopbillException(-99999999, "The file ID was not provided.")

    postData = ''

    return self._httppost('/Taxinvoice/' + MgtKeyType + "/" + MgtKey + "/Files/" + FileID,
                          postData, CorpNum, UserID, 'DELETE')
java
public static AppIdNamespace parseEncodedAppIdNamespace(String encodedAppIdNamespace) {
    if (encodedAppIdNamespace == null) {
        throw new IllegalArgumentException("appIdNamespaceString may not be null");
    }
    int index = encodedAppIdNamespace.indexOf(NamespaceResources.NAMESPACE_SEPARATOR);
    if (index == -1) {
        return new AppIdNamespace(encodedAppIdNamespace, "");
    }
    String appId = encodedAppIdNamespace.substring(0, index);
    String namespace = encodedAppIdNamespace.substring(index + 1);
    if (namespace.length() == 0) {
        throw new IllegalArgumentException(
                "encodedAppIdNamespace with empty namespace may not contain a '"
                        + NamespaceResources.NAMESPACE_SEPARATOR + "'");
    }
    return new AppIdNamespace(appId, namespace);
}
java
public boolean validateContentSpec(final ContentSpec contentSpec, final String username) {
    boolean valid = preValidateContentSpec(contentSpec);
    if (!postValidateContentSpec(contentSpec, username)) {
        valid = false;
    }
    return valid;
}
java
public EClass getGCFARC() {
    if (gcfarcEClass == null) {
        gcfarcEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI)
                .getEClassifiers().get(452);
    }
    return gcfarcEClass;
}
java
private List<Alert> _getAlertsByOwner(String alertname, PrincipalUser owner, boolean populateMetaFieldsOnly) {
    List<Alert> result;
    if (alertname != null && !alertname.isEmpty()) {
        result = new ArrayList<>();
        Alert alert = alertService.findAlertByNameAndOwner(alertname, owner);
        if (alert != null) {
            result.add(alert);
        } else {
            throw new WebApplicationException(Response.Status.NOT_FOUND.getReasonPhrase(), Response.Status.NOT_FOUND);
        }
    } else {
        if (owner.isPrivileged()) {
            result = populateMetaFieldsOnly ? alertService.findAllAlerts(true) : alertService.findAllAlerts(false);
        } else {
            result = populateMetaFieldsOnly ? alertService.findAlertsByOwner(owner, true) : alertService.findAlertsByOwner(owner, false);
        }
    }
    return result;
}
python
def from_base(cls, base, repo):
    """
    Create a :class:`DXF` object which uses the same host, settings and
    session as an existing :class:`DXFBase` object.

    :param base: Existing :class:`DXFBase` object.
    :type base: :class:`DXFBase`

    :param repo: Name of the repository to access on the registry.
        Typically this is of the form ``username/reponame`` but for your
        own registries you don't actually have to stick to that.
    :type repo: str

    :returns: :class:`DXF` object which shares configuration and session
        with ``base`` but which can also be used to operate on the
        ``repo`` repository.
    :rtype: :class:`DXF`
    """
    # pylint: disable=protected-access
    r = cls(base._host, repo, base._auth, base._insecure, base._auth_host, base._tlsverify)
    r._token = base._token
    r._headers = base._headers
    r._sessions = [base._sessions[0]]
    return r
java
public SVGPath relativeEllipticalArc(double rx, double ry, double ar, double la, double sp, double[] xy) {
    return append(PATH_ARC_RELATIVE).append(rx).append(ry).append(ar).append(la).append(sp)
            .append(xy[0]).append(xy[1]);
}
python
def _compute_distance_scaling(self, C, rrup, mag):
    """
    Returns the distance scaling term
    """
    rscale1 = rrup + C["c2"] * (10.0 ** (C["c3"] * mag))
    return -np.log10(rscale1) - (C["c4"] * rrup)
python
def find_nic(nic_id=None, nic_mac_address=None, nic_name=None):
    """
    find the NIC according to nic id (priority), mac address or name
    :rtype : object
    :param nic_id: the NIC id
    :param nic_mac_address: the NIC mac address
    :param nic_name : name
    :return: found NIC or None if not found
    """
    LOGGER.debug("NICService.find_nic")
    if (nic_id is None or not nic_id) and (nic_name is None or not nic_name) and \
            (nic_mac_address is None or not nic_mac_address):
        raise exceptions.ArianeCallParametersError('id, name and mac_address')

    if (nic_id is not None and nic_id) and \
            ((nic_name is not None and nic_name) or (nic_mac_address is not None and nic_mac_address)):
        LOGGER.warning('NICService.find_nic - Both id and (name or mac address) are defined. '
                       'Will give you search on id.')
        nic_name = None
        nic_mac_address = None

    # both conditions must hold before dropping the name (fixed or/and mix-up)
    if (nic_name is not None and nic_name) and (nic_mac_address is not None and nic_mac_address):
        LOGGER.warning('NICService.find_nic - Both name and mac address are defined. '
                       'Will give you search on mac address.')
        nic_name = None

    params = None
    if nic_id is not None and nic_id:
        params = {'id': nic_id}
    elif nic_mac_address is not None and nic_mac_address:
        params = {'macAddress': nic_mac_address}
    elif nic_name is not None and nic_name:
        params = {'name': nic_name}

    ret = None
    if params is not None:
        args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
        response = NICService.requester.call(args)
        if response.rc == 0:
            ret = NIC.json_2_nic(response.response_content)
        elif response.rc != 404:
            err_msg = 'NICService.find_nic - Problem while finding NIC (id:' + str(nic_id) + \
                      ', name:' + str(nic_name) + \
                      ", mac address:" + str(nic_mac_address) + \
                      '. Reason: ' + str(response.response_content) + '-' + str(response.error_message) + \
                      " (" + str(response.rc) + ")"
            LOGGER.warning(err_msg)
    return ret
java
@Override
public long dynamicQueryCount(DynamicQuery dynamicQuery, Projection projection) {
    return commerceDiscountRulePersistence.countWithDynamicQuery(dynamicQuery, projection);
}
python
def delete_node(self, node_id):
    """Removes the node identified by node_id from the graph."""
    node = self.get_node(node_id)

    # Remove all edges from the node
    for e in node['edges']:
        self.delete_edge_by_id(e)

    # Remove all edges to the node
    edges = [edge_id for edge_id, edge in list(self.edges.items())
             if edge['vertices'][1] == node_id]
    for e in edges:
        self.delete_edge_by_id(e)

    # Remove the node from the node list
    del self.nodes[node_id]
    self._num_nodes -= 1
java
@Override
public CPDefinitionSpecificationOptionValue fetchByC_CSOVI(long CPDefinitionId,
        long CPDefinitionSpecificationOptionValueId, boolean retrieveFromCache) {
    Object[] finderArgs = new Object[] {
            CPDefinitionId, CPDefinitionSpecificationOptionValueId
        };

    Object result = null;

    if (retrieveFromCache) {
        result = finderCache.getResult(FINDER_PATH_FETCH_BY_C_CSOVI, finderArgs, this);
    }

    if (result instanceof CPDefinitionSpecificationOptionValue) {
        CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue =
            (CPDefinitionSpecificationOptionValue)result;

        if ((CPDefinitionId != cpDefinitionSpecificationOptionValue.getCPDefinitionId()) ||
                (CPDefinitionSpecificationOptionValueId !=
                    cpDefinitionSpecificationOptionValue.getCPDefinitionSpecificationOptionValueId())) {
            result = null;
        }
    }

    if (result == null) {
        StringBundler query = new StringBundler(4);

        query.append(_SQL_SELECT_CPDEFINITIONSPECIFICATIONOPTIONVALUE_WHERE);
        query.append(_FINDER_COLUMN_C_CSOVI_CPDEFINITIONID_2);
        query.append(_FINDER_COLUMN_C_CSOVI_CPDEFINITIONSPECIFICATIONOPTIONVALUEID_2);

        String sql = query.toString();

        Session session = null;

        try {
            session = openSession();

            Query q = session.createQuery(sql);

            QueryPos qPos = QueryPos.getInstance(q);

            qPos.add(CPDefinitionId);
            qPos.add(CPDefinitionSpecificationOptionValueId);

            List<CPDefinitionSpecificationOptionValue> list = q.list();

            if (list.isEmpty()) {
                finderCache.putResult(FINDER_PATH_FETCH_BY_C_CSOVI, finderArgs, list);
            }
            else {
                CPDefinitionSpecificationOptionValue cpDefinitionSpecificationOptionValue = list.get(0);

                result = cpDefinitionSpecificationOptionValue;

                cacheResult(cpDefinitionSpecificationOptionValue);
            }
        }
        catch (Exception e) {
            finderCache.removeResult(FINDER_PATH_FETCH_BY_C_CSOVI, finderArgs);

            throw processException(e);
        }
        finally {
            closeSession(session);
        }
    }

    if (result instanceof List<?>) {
        return null;
    }
    else {
        return (CPDefinitionSpecificationOptionValue)result;
    }
}
java
public void setField(String name, MLArray value, int m, int n) {
    setField(name, value, getIndex(m, n));
}
python
def get(self, sid):
    """
    Constructs a InstalledAddOnExtensionContext

    :param sid: The unique Extension Sid

    :returns: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
    :rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionContext
    """
    return InstalledAddOnExtensionContext(
        self._version,
        installed_add_on_sid=self._solution['installed_add_on_sid'],
        sid=sid,
    )
java
protected boolean hasMessageBody(ClientHttpResponse response) throws IOException {
    HttpStatus responseStatus = response.getStatusCode();
    if (responseStatus == HttpStatus.NO_CONTENT || responseStatus == HttpStatus.NOT_MODIFIED) {
        return false;
    }
    long contentLength = response.getHeaders().getContentLength();
    return contentLength != 0;
}
python
def convert(self, type_from, type_to, data):
    """Parses data with one format and composes it with another.

    :param type_from: The unique name of the format to parse with
    :param type_to: The unique name of the format to compose with
    :param data: The text to convert
    """
    try:
        return self.compose(type_to, self.parse(type_from, data))
    except Exception as e:
        raise ValueError(
            "Couldn't convert '{from_}' to '{to}'. Possibly "
            "because the parser of '{from_}' generates a "
            "data structure incompatible with the composer "
            "of '{to}'. This is the original error: \n\n"
            "{error}: {message}".format(from_=type_from,
                                        to=type_to,
                                        error=e.__class__.__name__,
                                        message=e.message))
python
def pack_req(cls, modify_order_op, order_id, price, qty,
             adjust_limit, trd_env, acc_id, trd_mkt, conn_id):
    """Convert from user request for place order to PLS request"""
    from futuquant.common.pb.Trd_ModifyOrder_pb2 import Request
    req = Request()
    serial_no = get_unique_id32()
    req.c2s.packetID.serialNo = serial_no
    req.c2s.packetID.connID = conn_id

    req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env]
    req.c2s.header.accID = acc_id
    req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt]

    req.c2s.orderID = int(order_id)
    req.c2s.modifyOrderOp = MODIFY_ORDER_OP_MAP[modify_order_op]
    req.c2s.forAll = False

    if modify_order_op == ModifyOrderOp.NORMAL:
        req.c2s.qty = qty
        req.c2s.price = price
        req.c2s.adjustPrice = adjust_limit != 0
        req.c2s.adjustSideAndLimit = adjust_limit

    return pack_pb_req(req, ProtoId.Trd_ModifyOrder, conn_id, serial_no)
python
def add_device_items(self, item, device):
    """
    Add the various items from the device to the node

    :param str item: item key
    :param dict device: dictionary containing items
    """
    if item in ('aux', 'console'):
        self.node['properties'][item] = device[item]
    elif item.startswith('slot'):
        # if self.device_info['model'] == 'c7200':
        #     if item != 'slot0':
        #         self.node['properties'][item] = device[item]
        # else:
        self.node['properties'][item] = device[item]
    elif item == 'connections':
        self.connections = device[item]
    elif INTERFACE_RE.search(item) or VBQ_INT_RE.search(item):
        self.interfaces.append({'from': item, 'to': device[item]})
    elif NUMBER_RE.search(item):
        if self.device_info['type'] == 'EthernetSwitch':
            self.calc_ethsw_port(item, device[item])
        elif self.device_info['type'] == 'FrameRelaySwitch':
            self.calc_frsw_port(item, device[item])
    elif MAPINT_RE.search(item):
        self.add_mapping((item, device[item]))
    elif item == 'cnfg':
        new_config = os.path.join('configs', 'i%s_startup-config.cfg' % self.node['id'])
        self.node['properties']['startup_config'] = new_config
        self.config.append({'old': fix_path(device[item]), 'new': new_config})
    elif item.startswith('wic'):
        self.add_wic(item, device[item])
    elif item == 'symbol':
        self.set_symbol(device[item])
    elif item == 'nics':
        self.node['properties']['adapters'] = device[item]
    elif item == 'image':
        self.node['properties']['vmname'] = device[item]
    elif item == 'vbox_id' or item == 'qemu_id':
        self.node[item] = device[item]
java
public static SkbShellArgument[] newArgumentArray(SkbShellArgument... args) {
    if (args == null) {
        return null;
    }
    Set<SkbShellArgument> ret = new HashSet<>();
    for (SkbShellArgument arg : args) {
        if (arg != null) {
            ret.add(arg);
        }
    }
    // size the result to the filtered set, not the input,
    // so the nulls removed above are not reintroduced as padding
    return ret.toArray(new SkbShellArgument[ret.size()]);
}
java
private void createTasksForTable(Table table,
                                 Collection<IndexSnapshotRequestConfig.PartitionRanges> partitionRanges,
                                 Map<Integer, Long> pidToLocalHSIDs,
                                 AtomicInteger numTables,
                                 SnapshotRegistry.Snapshot snapshotRecord) {
    // no work on this node
    if (pidToLocalHSIDs.isEmpty()) {
        return;
    }

    // create a null data target
    final DevNullSnapshotTarget dataTarget = new DevNullSnapshotTarget();
    final Runnable onClose = new TargetStatsClosure(dataTarget, table.getTypeName(), numTables, snapshotRecord);
    dataTarget.setOnCloseHandler(onClose);
    m_targets.add(dataTarget);

    // go over all local sites, create a task for each source site
    for (IndexSnapshotRequestConfig.PartitionRanges partitionRange : partitionRanges) {
        Long localHSId = pidToLocalHSIDs.get(partitionRange.partitionId);

        // The partition may not exist on this node. If so, keep calm and carry on
        if (localHSId != null) {
            // based on the source partition, the predicate is different
            final SnapshotTableTask task =
                    new SnapshotTableTask(table,
                                          new SnapshotDataFilter[0],
                                          createIndexExpressionForTable(table, partitionRange.ranges),
                                          false);
            task.setTarget(dataTarget);
            placeTask(task, Arrays.asList(localHSId));
        }
    }
}
java
public void removeTableDef(TableDefinition tableDef) {
    assert tableDef != null;
    assert tableDef.getAppDef() == this;
    m_tableMap.remove(tableDef.getTableName());
}
python
def dumps(obj, mesh_filename=None, *args, **kwargs):  # pylint: disable=unused-argument
    '''
    obj: A dictionary mapping names to a 3-dimension array.
    mesh_filename: If provided, this value is included in the <DataFileName>
      attribute, which Meshlab doesn't seem to use.

    TODO Maybe reconstruct this using xml.etree
    '''
    point_template = '<point x="%f" y="%f" z="%f" name="%s"/>\n'
    file_template = """
<!DOCTYPE PickedPoints>
<PickedPoints>
 <DocumentData>
  <DateTime time="16:00:00" date="2014-12-31"/>
  <User name="bodylabs"/>
  <DataFileName name="%s"/>
 </DocumentData>
%s
</PickedPoints>
"""
    from blmath.numerics import isnumericarray
    if not isinstance(obj, dict) or not all([isnumericarray(point) for point in obj.itervalues()]):
        raise ValueError('obj should be a dict of points')
    points = '\n'.join([point_template % (tuple(xyz) + (name,)) for name, xyz in obj.iteritems()])
    return file_template % (mesh_filename, points)
java
public synchronized <CommandSubclass extends Command> void setupJacksonAnnotatedCommandSerializationAndDeserialization(Class<CommandSubclass> commandSubclassKlass) {
    checkState(!running);
    checkState(!initialized);
    checkState(!setupConversion);
    RaftRPC.setupJacksonAnnotatedCommandSerializationAndDeserialization(mapper, commandSubclassKlass);
    setupConversion = true;
}
java
public void connectionClosed(ConnectionEvent event) {
    Object connection = event.getConnectionHandle();

    removeConnection(connection);

    if (cm.getCachedConnectionManager() != null) {
        cm.getCachedConnectionManager().unregisterConnection(cm, this, connection);
    }

    if (connectionHandles.isEmpty() && !isEnlisted())
        cm.returnConnectionListener(this, false);
}
java
private void writeNewClassDesc(ObjectStreamClass classDesc) throws IOException {
    output.writeUTF(classDesc.getName());
    output.writeLong(classDesc.getSerialVersionUID());
    byte flags = classDesc.getFlags();
    boolean externalizable = classDesc.isExternalizable();
    if (externalizable) {
        if (protocolVersion == PROTOCOL_VERSION_1) {
            flags &= NOT_SC_BLOCK_DATA;
        } else {
            // Change for 1.2. Objects can be saved in old format
            // (PROTOCOL_VERSION_1) or in the 1.2 format (PROTOCOL_VERSION_2).
            flags |= SC_BLOCK_DATA;
        }
    }
    output.writeByte(flags);
    if ((SC_ENUM | SC_SERIALIZABLE) != classDesc.getFlags()) {
        writeFieldDescriptors(classDesc, externalizable);
    } else {
        // enums write no fields
        output.writeShort(0);
    }
}
python
def createProduct(self, powerups):
    """
    Create a new L{Product} instance which confers the given powerups.

    @type powerups: C{list} of powerup item types

    @rtype: L{Product}
    @return: The new product instance.
    """
    types = [qual(powerup).decode('ascii') for powerup in powerups]
    for p in self.store.parent.query(Product):
        for t in types:
            if t in p.types:
                raise ValueError("%s is already included in a Product" % (t,))
    return Product(store=self.store.parent, types=types)
java
public void trainOnInstanceImpl(Instance inst) {
    accumulatedError = Math.abs(this.prediction(inst) - inst.classValue()) * inst.weight()
            + fadingFactor * accumulatedError;
    nError = inst.weight() + fadingFactor * nError;

    // Initialise Perceptron if necessary
    if (this.initialisePerceptron == true) {
        // Initialize numericAttributesIndex
        LinkedList<Integer> numericIndices = new LinkedList<Integer>();
        for (int i = 0; i < inst.numAttributes(); i++)
            if (inst.attribute(i).isNumeric() && i != inst.classIndex())
                numericIndices.add(i);
        numericAttributesIndex = new int[numericIndices.size()];
        int j = 0;
        for (Integer index : numericIndices)
            numericAttributesIndex[j++] = index;

        this.fadingFactor = this.fadingFactorOption.getValue();
        this.initialisePerceptron = false; // not in resetLearningImpl() because it needs Instance!
        this.weightAttribute = new double[numericAttributesIndex.length + 1];
        for (int i = 0; i < numericAttributesIndex.length + 1; i++) {
            //if (inst.attribute(i).isNumeric())
            weightAttribute[i] = 2 * this.classifierRandom.nextDouble() - 1;
        }
        // Update Learning Rate
        learningRatio = learningRatioOption.getValue();
        this.learningRateDecay = learningRateDecayOption.getValue();
    }

    // Update attribute statistics
    this.perceptronInstancesSeen += inst.weight();
    this.perceptronYSeen += inst.weight();

    for (int j = 0; j < numericAttributesIndex.length; j++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(numericAttributesIndex[j], inst);
        double value = inst.value(instAttIndex);
        perceptronattributeStatistics.addToValue(j, value * inst.weight());
        squaredperceptronattributeStatistics.addToValue(j, value * value * inst.weight());
    }
    double value = inst.classValue();
    this.perceptronsumY += value * inst.weight();
    this.squaredperceptronsumY += value * value * inst.weight();

    if (constantLearningRatioDecayOption.isSet() == false) {
        learningRatio = learningRatioOption.getValue() / (1 + perceptronInstancesSeen * learningRateDecay);
    }

    //double prediction = this.updateWeights(inst,learningRatio);
    //accumulatedError = Math.abs(prediction-inst.classValue()) + fadingFactor*accumulatedError;

    this.updateWeights(inst, learningRatio);
}
java
public static Map<QName, String> getAttributes(Node n) {
    Map<QName, String> map = new LinkedHashMap<QName, String>();
    NamedNodeMap m = n.getAttributes();
    if (m != null) {
        final int len = m.getLength();
        for (int i = 0; i < len; i++) {
            Attr a = (Attr) m.item(i);
            map.put(getQName(a), a.getValue());
        }
    }
    return map;
}
java
public void setFile(Element el, String key, Resource value) {
    if (value != null && value.toString().length() > 0)
        el.setAttribute(key, value.getAbsolutePath());
}
python
def plot_item(self, funcname):
    """Plot item"""
    index = self.currentIndex()
    if self.__prepare_plot():
        key = self.model.get_key(index)
        try:
            self.plot(key, funcname)
        except (ValueError, TypeError) as error:
            QMessageBox.critical(self, _("Plot"),
                                 _("<b>Unable to plot data.</b>"
                                   "<br><br>Error message:<br>%s"
                                   ) % str(error))
java
public static <E> EntryList makeEntryList(final Iterable<E> entries,
                                          final StaticArrayEntry.GetColVal<E, ByteBuffer> getter,
                                          final StaticBuffer lastColumn,
                                          final int limit) {
    return StaticArrayEntryList.ofByteBuffer(new Iterable<E>() {
        @Override
        public Iterator<E> iterator() {
            return Iterators.filter(entries.iterator(), new FilterResultColumns<E>(lastColumn, limit, getter));
        }
    }, getter);
}
java
@Override
public <T> List<T> search(Name base, String filter, SearchControls controls,
                          ContextMapper<T> mapper, DirContextProcessor processor) {
    assureReturnObjFlagSet(controls);
    ContextMapperCallbackHandler<T> handler = new ContextMapperCallbackHandler<T>(mapper);
    search(base, filter, controls, handler, processor);
    return handler.getList();
}
python
def ehh_decay(h, truncate=False):
    """Compute the decay of extended haplotype homozygosity (EHH)
    moving away from the first variant.

    Parameters
    ----------
    h : array_like, int, shape (n_variants, n_haplotypes)
        Haplotype array.
    truncate : bool, optional
        If True, the return array will exclude trailing zeros.

    Returns
    -------
    ehh : ndarray, float, shape (n_variants, )
        EHH at successive variants from the first variant.
    """

    # check inputs
    # N.B., ensure int8 so we can use cython optimisation
    h = HaplotypeArray(np.asarray(h), copy=False)
    if h.min() < 0:
        raise NotImplementedError('missing calls are not supported')

    # initialise
    n_variants = h.n_variants  # number of rows, i.e., variants
    n_haplotypes = h.n_haplotypes  # number of columns, i.e., haplotypes
    n_pairs = (n_haplotypes * (n_haplotypes - 1)) // 2

    # compute the shared prefix length between all pairs of haplotypes
    spl = pairwise_shared_prefix_lengths(memoryview_safe(np.asarray(h)))

    # compute EHH by counting the number of shared prefixes extending beyond
    # each variant
    minlength = None if truncate else n_variants + 1
    b = np.bincount(spl, minlength=minlength)
    c = np.cumsum(b[::-1])[:-1]
    ehh = (c / n_pairs)[::-1]

    return ehh
java
public static FieldCoordinates coordinates(String parentType, String fieldName) {
    assertValidName(parentType);
    assertValidName(fieldName);
    return new FieldCoordinates(parentType, fieldName);
}
python
def list_l3_agent_hosting_routers(self, router, **_params):
    """Fetches a list of L3 agents hosting a router."""
    return self.get((self.router_path + self.L3_AGENTS) % router,
                    params=_params)
java
public static byte[] readAllBytes(java.nio.file.Path path) throws IOException {
    try (SeekableByteChannel channel = Files.newByteChannel(path);
         InputStream in = Channels.newInputStream(channel)) {
        long size = channel.size();
        if (size > (long) MAX_BUFFER_SIZE) {
            throw new OutOfMemoryError("Required array size too large");
        }
        return read(in, (int) size);
    }
}
python
def follow(self, something, follow=True):
    """Follow a user, question, topic, or collection.

    :param Author/Question/Topic/Collection something: the object to follow
    :param bool follow: True --> follow, False --> unfollow
    :return: True on success, False on failure
    :rtype: bool
    """
    from .question import Question
    from .topic import Topic
    from .collection import Collection

    if isinstance(something, Author):
        if something.url == self.url:
            return False
        data = {
            '_xsrf': something.xsrf,
            'method': 'follow_member' if follow else 'unfollow_member',
            'params': json.dumps({'hash_id': something.hash_id})
        }
        res = self._session.post(Follow_Author_Url, data=data)
        return res.json()['r'] == 0
    elif isinstance(something, Question):
        data = {
            '_xsrf': something.xsrf,
            'method': 'follow_question' if follow else 'unfollow_question',
            'params': json.dumps({'question_id': str(something.qid)})
        }
        res = self._session.post(Follow_Question_Url, data=data)
        return res.json()['r'] == 0
    elif isinstance(something, Topic):
        data = {
            '_xsrf': something.xsrf,
            'method': 'follow_topic' if follow else 'unfollow_topic',
            'params': json.dumps({'topic_id': something.tid})
        }
        res = self._session.post(Follow_Topic_Url, data=data)
        return res.json()['r'] == 0
    elif isinstance(something, Collection):
        data = {
            '_xsrf': something.xsrf,
            'favlist_id': something.cid
        }
        res = self._session.post(
            Follow_Collection_Url if follow else Unfollow_Collection_Url,
            data=data)
        return res.json()['r'] == 0
    else:
        raise ValueError('argument something needs to be '
                         'zhihu.Author, zhihu.Question'
                         ', Zhihu.Topic or Zhihu.Collection object.')
python
def validate(self, raw_data, **kwargs):
    """The raw_data is returned unchanged."""
    super(DateTimeField, self).validate(raw_data, **kwargs)
    try:
        if isinstance(raw_data, datetime.datetime):
            self.converted = raw_data
        elif self.serial_format is None:
            # parse as iso8601
            self.converted = parse(raw_data)
        else:
            self.converted = datetime.datetime.strptime(raw_data, self.serial_format)
        return raw_data
    except (ParseError, ValueError) as e:
        msg = self.messages['parse'] % dict(cls=self.__class__.__name__,
                                            data=raw_data,
                                            format=self.serial_format)
        raise ValidationException(msg, raw_data)
java
public void send(Address dst, Object obj) throws Exception {
    ch.send(dst, obj);
}
python
def empty(self):
    """
    Indicator whether DataFrame is empty.

    True if DataFrame is entirely empty (no items), meaning any of the
    axes are of length 0.

    Returns
    -------
    bool
        If DataFrame is empty, return True, if not return False.

    See Also
    --------
    Series.dropna
    DataFrame.dropna

    Notes
    -----
    If DataFrame contains only NaNs, it is still not considered empty. See
    the example below.

    Examples
    --------
    An example of an actual empty DataFrame. Notice the index is empty:

    >>> df_empty = pd.DataFrame({'A' : []})
    >>> df_empty
    Empty DataFrame
    Columns: [A]
    Index: []
    >>> df_empty.empty
    True

    If we only have NaNs in our DataFrame, it is not considered empty! We
    will need to drop the NaNs to make the DataFrame empty:

    >>> df = pd.DataFrame({'A' : [np.nan]})
    >>> df
        A
    0 NaN
    >>> df.empty
    False
    >>> df.dropna().empty
    True
    """
    return any(len(self._get_axis(a)) == 0 for a in self._AXIS_ORDERS)
java
public static base_response restart(nitro_service client) throws Exception {
    dbsmonitors restartresource = new dbsmonitors();
    return restartresource.perform_operation(client, "restart");
}
java
private static String[] decodeAuthAmqPlain(String response) {
    Logger logger = LoggerFactory.getLogger(SERVICE_AMQP_PROXY_LOGGER);
    String[] credentials = null;

    if ((response != null) && (response.trim().length() > 0)) {
        ByteBuffer buffer = ByteBuffer.wrap(response.getBytes());

        @SuppressWarnings("unused")
        String loginKey = getShortString(buffer);
        @SuppressWarnings("unused")
        AmqpType ltype = getType(buffer);
        String username = getLongString(buffer);

        @SuppressWarnings("unused")
        String passwordKey = getShortString(buffer);
        @SuppressWarnings("unused")
        AmqpType ptype = getType(buffer);
        String password = getLongString(buffer);

        if (logger.isDebugEnabled()) {
            String s = ".decodeAuthAmqPlain(): Username = " + username;
            logger.debug(CLASS_NAME + s);
        }

        credentials = new String[] { username, password };
    }

    return credentials;
}
java
@Deprecated
public void pushNotificationClickedEvent(final Bundle extras) {
    CleverTapAPI cleverTapAPI = weakReference.get();
    if (cleverTapAPI == null) {
        Logger.d("CleverTap Instance is null.");
    } else {
        cleverTapAPI.pushNotificationClickedEvent(extras);
    }
}
java
static Object wrapColor4(float red, float green, float blue, float alpha) {
    ByteBuffer temp = ByteBuffer.allocate(4 * 4);
    temp.putFloat(red);
    temp.putFloat(green);
    temp.putFloat(blue);
    temp.putFloat(alpha);
    temp.flip();
    return s_wrapperProvider.wrapColor(temp, 0);
}
java
public void setGroups(java.util.Collection<GroupIdentifier> groups) {
    if (groups == null) {
        this.groups = null;
        return;
    }
    this.groups = new com.amazonaws.internal.SdkInternalList<GroupIdentifier>(groups);
}
java
public void getTraceSummaryLine(StringBuilder buff) {
    // Get the common fields for control messages
    super.getTraceSummaryLine(buff);

    buff.append(",requestID=");
    buff.append(getRequestID());

    buff.append(",cardinality=");
    buff.append(getCardinality());
}
python
def title(self):
    """
    Returns the title for this scene based on its information.

    :return     <str>
    """
    if self.currentMode() == XCalendarScene.Mode.Day:
        return self.currentDate().toString('dddd, MMMM dd, yyyy')
    elif self.currentMode() == XCalendarScene.Mode.Week:
        title = nativestring(self.minimumDate().toString('dddd, MMMM dd'))
        title += ' - '
        title += nativestring(self.maximumDate().toString('dddd, MMMM dd, yyyy'))
        return title
    elif self.currentMode() == XCalendarScene.Mode.Month:
        return self.currentDate().toString('MMMM yyyy')
    else:
        return ''
python
def is_win_python35_or_earlier():
    """
    Convenience method to determine if the current platform is Windows and
    Python version 3.5 or earlier.

    Returns:
        bool: True if the current platform is Windows and the Python
        interpreter is 3.5 or earlier; False otherwise.
    """
    # The version check is parenthesized so it only applies together with
    # the platform check ("and" binds tighter than "or", which previously
    # made this return True on non-Windows Python 3.5).
    return sys.platform.startswith("win") and (
        sys.version_info.major < 3
        or (sys.version_info.major == 3 and sys.version_info.minor < 6))
python
def pts_rotate(pts=[], angle=[0.0], center=(0.0, 0.0)):
    '''Return given points rotated around a center point in N dimensions.

    Angle is list of rotation in radians for each pair of axis.
    '''
    assert isinstance(pts, list) and len(pts) > 0
    l_pt_prev = None
    for pt in pts:
        assert isinstance(pt, tuple)
        l_pt = len(pt)
        assert l_pt > 1
        for i in pt:
            assert isinstance(i, float)
        if l_pt_prev is not None:
            assert l_pt == l_pt_prev
        l_pt_prev = l_pt
    assert isinstance(angle, list)
    l_angle = len(angle)
    assert l_angle == l_pt - 1
    for i in angle:
        assert isinstance(i, float)
        assert abs(i) <= 2 * pi
    assert isinstance(center, tuple)
    assert len(center) == l_pt
    for i in center:
        assert isinstance(i, float)
    return [pt_rotate(pt, angle, center) for pt in pts]
java
@Override
protected <T extends NodeInterface> Set<T> getRelatedNodesReverse(final SecurityContext securityContext, final NodeInterface obj, final Class destinationType, final Predicate<GraphObject> predicate) {

    Set<T> relatedNodes = new LinkedHashSet<>();

    try {
        final Object source = relation.getSource().get(securityContext, obj, predicate);
        if (source != null) {

            if (source instanceof Iterable) {

                Iterable<T> nodes = (Iterable<T>) source;
                for (final T n : nodes) {
                    relatedNodes.add(n);
                }

            } else {

                relatedNodes.add((T) source);
            }
        }

    } catch (Throwable t) {
        logger.warn("Unable to fetch related node: {}", t.getMessage());
    }

    return relatedNodes;
}
java
public EntityRole getPrebuiltEntityRole(UUID appId, String versionId, UUID entityId, UUID roleId) {
    return getPrebuiltEntityRoleWithServiceResponseAsync(appId, versionId, entityId, roleId)
            .toBlocking().single().body();
}
java
public Observable<List<ApplicationInsightsComponentExportConfigurationInner>> createAsync(String resourceGroupName, String resourceName, ApplicationInsightsComponentExportRequest exportProperties) {
    return createWithServiceResponseAsync(resourceGroupName, resourceName, exportProperties)
            .map(new Func1<ServiceResponse<List<ApplicationInsightsComponentExportConfigurationInner>>, List<ApplicationInsightsComponentExportConfigurationInner>>() {
                @Override
                public List<ApplicationInsightsComponentExportConfigurationInner> call(ServiceResponse<List<ApplicationInsightsComponentExportConfigurationInner>> response) {
                    return response.body();
                }
            });
}
java
@Override
public void writeLong(long v) throws IOException {
    work[0] = (byte) (0xffL & v);
    work[1] = (byte) (0xffL & (v >> 8));
    work[2] = (byte) (0xffL & (v >> 16));
    work[3] = (byte) (0xffL & (v >> 24));
    work[4] = (byte) (0xffL & (v >> 32));
    work[5] = (byte) (0xffL & (v >> 40));
    work[6] = (byte) (0xffL & (v >> 48));
    work[7] = (byte) (0xffL & (v >> 56));
    write(work, 0, 8);
}
java
public static void applyToText(CharSequence charSequence, Typeface typeface) {
    applyToText(charSequence, typeface, 0, charSequence.length());
}
java
public static boolean waitForBundleStartup(BundleContext context, Bundle bundle, int secsToWait) {
    if ((bundle.getState() & Bundle.ACTIVE) == 0) { // Wait for it to start up!
        if (((bundle.getState() & Bundle.RESOLVED) != 0)
                || ((bundle.getState() & Bundle.INSTALLED) != 0)) {
            try {
                bundle.start();
            } catch (BundleException e) {
                e.printStackTrace();
            }
        }
        if ((bundle.getState() & Bundle.ACTIVE) == 0) { // Wait for it to start up!
            Thread thread = Thread.currentThread();
            BundleStartupListener bundleStartupListener = null;
            context.addBundleListener(bundleStartupListener =
                    new BundleStartupListener(thread, bundleContext, bundle));
            // Wait 15 seconds for the ClassService to come up while the activator starts this service
            synchronized (thread) {
                try {
                    // Will notify me when it is up
                    thread.wait((secsToWait == -1) ? DEFAULT_SERVICE_WAIT_SECS * 1000 : secsToWait * 1000);
                } catch (InterruptedException ex) {
                    ex.printStackTrace();
                }
            }
            context.removeBundleListener(bundleStartupListener);
        }
    }
    return ((bundle.getState() & Bundle.ACTIVE) != 0);
}
java
@Override
public R visitTypeParameter(TypeParameterElement e, P p) {
    assert e.getKind() == TYPE_PARAMETER : "Bad kind on TypeParameterElement";
    return defaultAction(e, p);
}
python
def copytree(source_directory, destination_directory, ignore=None):
    """
    Recursively copy the contents of a source directory
    into a destination directory.
    Both directories must exist.

    This function does not copy the root directory ``source_directory``
    into ``destination_directory``.

    Since ``shutil.copytree(src, dst)`` requires ``dst`` not to exist,
    we cannot use it for our purposes.

    Code adapted from http://stackoverflow.com/a/12686557

    :param string source_directory: the source directory, already existing
    :param string destination_directory: the destination directory, already existing
    """
    if os.path.isdir(source_directory):
        if not os.path.isdir(destination_directory):
            os.makedirs(destination_directory)
        files = os.listdir(source_directory)
        if ignore is not None:
            ignored = ignore(source_directory, files)
        else:
            ignored = set()
        for f in files:
            if f not in ignored:
                copytree(
                    os.path.join(source_directory, f),
                    os.path.join(destination_directory, f),
                    ignore
                )
    else:
        shutil.copyfile(source_directory, destination_directory)
java
@Override
public Map<K, V> peekAll(final Iterable<? extends K> keys) {
    Map<K, CacheEntry<K, V>> map = new HashMap<K, CacheEntry<K, V>>();
    for (K k : keys) {
        CacheEntry<K, V> e = execute(k, SPEC.peekEntry(k));
        if (e != null) {
            map.put(k, e);
        }
    }
    return heapCache.convertCacheEntry2ValueMap(map);
}
java
public void setHighlightSections(final boolean HIGHLIGHT) {
    if (null == highlightSections) {
        _highlightSections = HIGHLIGHT;
        fireTileEvent(REDRAW_EVENT);
    } else {
        highlightSections.set(HIGHLIGHT);
    }
}
java
public static Class<?> rawTypeOf(Type type) {
    if (type instanceof Class) {
        return (Class) type;
    } else if (type instanceof ParameterizedType) {
        return (Class) ((ParameterizedType) type).getRawType();
    } else {
        throw E.unexpected("type not recognized: %s", type);
    }
}
python
def factor_schur(z, DPhival, G, A):
    M, N = G.shape
    P, N = A.shape

    # Multiplier for inequality constraints
    l = z[N+P:N+P+M]

    # Slacks
    s = z[N+P+M:]

    # Sigma matrix
    SIG = diags(l/s, 0)

    # Augmented Jacobian
    H = DPhival + mydot(G.T, mydot(SIG, G))

    # Factor H
    LU_H = myfactor(H)

    # Compute H^{-1}A^{T}
    HinvAt = mysolve(LU_H, A.T)

    # Compute Schur complement AH^{-1}A^{T}
    S = mydot(A, HinvAt)

    # Factor Schur complement
    LU_S = myfactor(S)

    LU = (LU_S, LU_H)
    return LU
java
public static List<HostAndPort> splitToHostsAndPorts(String hostPortQuorumList) {
    // split the address list
    String[] strings = StringUtils.getStrings(hostPortQuorumList);
    int len = 0;
    if (strings != null) {
        len = strings.length;
    }
    List<HostAndPort> list = new ArrayList<HostAndPort>(len);
    if (strings != null) {
        for (String s : strings) {
            list.add(HostAndPort.fromString(s.trim()).withDefaultPort(DEFAULT_PORT));
        }
    }
    return list;
}
java
public Set<String> getVisibleQueues(PermissionManager permissionManager) {
    Set<String> ret = new HashSet<String>();

    for (QueueDefinition queueDefinition : m_queues) {
        if (permissionManager.hasPermission(FixedPermissions.ADMIN)) {
            ret.add(queueDefinition.getName());
            continue;
        }

        if (!ret.contains(queueDefinition.getName())) {
            if (permissionManager.hasPermission(queueDefinition.getPermission())
                    || permissionManager.hasPermission(queueDefinition.getReadPermission())) {
                ret.add(queueDefinition.getName());
            }
        }
    }
    return ret;
}
java
private static void listAllOpenFileDescriptors(PrintWriter writer) throws IOException, InterruptedException {
    writer.println();
    writer.println("All open files");
    writer.println("==============");
    File[] files = new File("/proc/self/fd").listFiles();
    if (files != null) {
        for (File file : files) {
            try {
                writer.println(Util.resolveSymlink(file));
            } catch (IOException e) {
                // If we fail to resolve the symlink, just print the file.
                writer.println(file.getCanonicalPath());
            }
        }
    }
}
python
def plugin_valid(self, filepath):
    """
    checks to see if plugin ends with one of the approved extensions
    """
    plugin_valid = False
    for extension in self.extensions:
        if filepath.endswith(".{}".format(extension)):
            plugin_valid = True
            break
    return plugin_valid
java
static PublicKey buildX509Key(AlgorithmId algid, BitArray key)
        throws IOException, InvalidKeyException {
    /*
     * Use the algid and key parameters to produce the ASN.1 encoding
     * of the key, which will then be used as the input to the
     * key factory.
     */
    DerOutputStream x509EncodedKeyStream = new DerOutputStream();
    encode(x509EncodedKeyStream, algid, key);
    X509EncodedKeySpec x509KeySpec = new X509EncodedKeySpec(x509EncodedKeyStream.toByteArray());

    try {
        // Instantiate the key factory of the appropriate algorithm
        KeyFactory keyFac = KeyFactory.getInstance(algid.getName());

        // Generate the public key
        return keyFac.generatePublic(x509KeySpec);
    } catch (NoSuchAlgorithmException e) {
        // Return generic X509Key with opaque key data (see below)
    } catch (InvalidKeySpecException e) {
        throw new InvalidKeyException(e.getMessage(), e);
    }

    /*
     * Try again using JDK1.1-style for backwards compatibility.
     */
    String classname = "";
    try {
        Properties props;
        String keytype;
        Provider sunProvider;

        sunProvider = Security.getProvider("SUN");
        if (sunProvider == null)
            throw new InstantiationException();
        classname = sunProvider.getProperty("PublicKey.X.509." + algid.getName());
        if (classname == null) {
            throw new InstantiationException();
        }

        Class keyClass = null;
        try {
            keyClass = Class.forName(classname);
        } catch (ClassNotFoundException e) {
            ClassLoader cl = ClassLoader.getSystemClassLoader();
            if (cl != null) {
                keyClass = cl.loadClass(classname);
            }
        }

        Object inst = null;
        X509Key result;

        if (keyClass != null)
            inst = keyClass.newInstance();

        if (inst instanceof X509Key) {
            result = (X509Key) inst;
            result.algid = algid;
            result.setKey(key);
            result.parseKeyBits();
            return result;
        }
    } catch (ClassNotFoundException e) {
    } catch (InstantiationException e) {
    } catch (IllegalAccessException e) {
        // this should not happen.
        throw new IOException(classname + " [internal error]");
    }

    X509Key result = new X509Key(algid, key);
    return result;
}
java
public void write(char[] buf) throws IOException {
    if (writer != null) {
        writer.write(buf);
    } else {
        write(buf, 0, buf.length);
    }
}
python
def collect_table_content(table_bboxes, elems):
    """
    Returns a list of elements that are contained inside
    the corresponding supplied bbox.
    """
    # list of table content chars
    table_contents = [[] for _ in range(len(table_bboxes))]
    prev_content = None
    prev_bbox = None
    for cid, c in enumerate(elems):
        # Annotations should not fall outside alone
        if isinstance(c, LTAnno):
            if prev_content is not None:
                prev_content.append(c)
            continue
        # Generally speaking table contents should be included sequentially
        # and we can avoid checking all tables for elems inside
        # Elements only need to intersect the bbox for table as some
        # formatting of fonts may result in slightly out of bbox text
        if prev_bbox is not None and intersect(prev_bbox, c.bbox):
            prev_content.append(c)
            continue
        # Search the rest of the tables for membership when done with
        # the current one
        for table_id, table_bbox in enumerate(table_bboxes):
            if intersect(table_bbox, c.bbox):
                prev_bbox = table_bbox
                prev_content = table_contents[table_id]
                prev_content.append(c)
                break
    return table_contents
java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    try {
        Field client = getClass().getDeclaredField("client");
        client.setAccessible(true);
        client.set(this, buildClient(trustSelfSigned));
    } catch (NoSuchFieldException e) {
        throw new InternalConfigurationException("Failed to set client fields", e);
    } catch (IllegalAccessException e) {
        throw new InternalConfigurationException("Failed to set client fields", e);
    } catch (SecurityException e) {
        throw new InternalConfigurationException("Failed to set client fields", e);
    } catch (IllegalArgumentException e) {
        throw new InternalConfigurationException("Failed to set client fields", e);
    }
}
java
public InternalServerErrorException withErrorDetails(ErrorDetail... errorDetails) {
    if (this.errorDetails == null) {
        setErrorDetails(new java.util.ArrayList<ErrorDetail>(errorDetails.length));
    }
    for (ErrorDetail ele : errorDetails) {
        this.errorDetails.add(ele);
    }
    return this;
}
java
void outputComplementDirect(StringBuffer buf) {
    if (!surrogatesDirect && getContainsBmp() == NONE)
        buf.append("[\u0000-\uFFFF]");
    else {
        buf.append("[^");
        inClassOutputDirect(buf);
        buf.append(']');
    }
}
java
private ProposalResponse sendProposalSerially(TransactionRequest proposalRequest, Collection<Peer> peers) throws ProposalException {

    ProposalException lastException = new ProposalException("ProposalRequest failed.");

    for (Peer peer : peers) {
        proposalRequest.submitted = false;

        try {
            Collection<ProposalResponse> proposalResponses = sendProposal(proposalRequest, Collections.singletonList(peer));
            if (proposalResponses.isEmpty()) {
                logger.warn(format("Proposal request to peer %s failed", peer));
            }
            ProposalResponse proposalResponse = proposalResponses.iterator().next();
            ChaincodeResponse.Status status = proposalResponse.getStatus();

            if (status.getStatus() < 400) {
                return proposalResponse;
            } else if (status.getStatus() > 499) { // server error may work on other peer.
                lastException = new ProposalException(format("Channel %s got exception on peer %s %d. %s ",
                        name, peer, status.getStatus(), proposalResponse.getMessage()));
            } else { // 400 to 499
                throw new ProposalException(format("Channel %s got exception on peer %s %d. %s ",
                        name, peer, status.getStatus(), proposalResponse.getMessage()));
            }
        } catch (Exception e) {
            lastException = new ProposalException(format("Channel %s failed proposal on peer %s %s",
                    name, peer.getName(), e.getMessage()), e);
            logger.warn(lastException.getMessage());
        }
    }

    throw lastException;
}
python
def addPartsToVSLC(
        self, vslc_id, allele1_id, allele2_id, zygosity_id=None,
        allele1_rel=None, allele2_rel=None):
    """
    Here we add the parts to the VSLC.  While traditionally alleles
    (reference or variant loci) are added, you can add any node (such as
    sequence_alterations for unlocated variations) to a vslc if they are
    known to be paired.  However, if a sequence_alteration's loci is
    unknown, it probably should be added directly to the GVC.

    :param vslc_id:
    :param allele1_id:
    :param allele2_id:
    :param zygosity_id:
    :param allele1_rel:
    :param allele2_rel:

    :return:
    """
    # vslc has parts allele1/allele2
    if allele1_id is not None:
        self.addParts(allele1_id, vslc_id, allele1_rel)
    if allele2_id is not None and allele2_id.strip() != '':
        self.addParts(allele2_id, vslc_id, allele2_rel)

    # figure out zygosity if it's not supplied
    if zygosity_id is None:
        if allele1_id == allele2_id:
            zygosity_id = self.globaltt['homozygous']
        else:
            zygosity_id = self.globaltt['heterozygous']

    if zygosity_id is not None:
        self.graph.addTriple(vslc_id, self.globaltt['has_zygosity'], zygosity_id)

    return
java
private SetterTarget findSetterTarget(String path)
        throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {
    String[] _accessStack = path.split(REGEXP__CHAR_DOT);
    if (0 == _accessStack.length) {
        throw new IllegalArgumentException(path);
    }
    int _setterIndex = _accessStack.length - 1;
    Object _toChange = getObject();
    for (int _index = 0; _index < _setterIndex; _index++) {
        String _getterName = computeGetterName(_accessStack[_index]);
        Method _getter = _toChange.getClass().getMethod(_getterName, (Class<?>[]) null);
        _toChange = _getter.invoke(_toChange, (Object[]) null);
    }
    String _setterName = computeSetterName(_accessStack[_setterIndex]);
    SetterTarget _target = new SetterTarget(_toChange, _setterName);
    return _target;
}
java
public static MozuUrl deleteOrderItemUrl(String returnId, String returnItemId) {
    // template placeholders must match the names passed to formatUrl below
    UrlFormatter formatter = new UrlFormatter("/api/commerce/returns/{returnId}/items/{returnItemId}?updatemode={updateMode}&version={version}");
    formatter.formatUrl("returnId", returnId);
    formatter.formatUrl("returnItemId", returnItemId);
    return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
}
python
def from_fp(self, file_pointer, comment_lead=['c']):
    """
    Read a CNF formula from a file pointer. A file pointer should be
    specified as an argument. The only default argument is
    ``comment_lead``, which can be used for parsing specific comment
    lines.

    :param file_pointer: a file pointer to read the formula from.
    :param comment_lead: a list of characters leading comment lines

    :type file_pointer: file pointer
    :type comment_lead: list(str)

    Usage example:

    .. code-block:: python

        >>> with open('some-file.cnf', 'r') as fp:
        ...     cnf1 = CNF()
        ...     cnf1.from_fp(fp)
        >>>
        >>> with open('another-file.cnf', 'r') as fp:
        ...     cnf2 = CNF(from_fp=fp)
    """
    self.nv = 0
    self.clauses = []
    self.comments = []
    comment_lead = tuple('p') + tuple(comment_lead)

    for line in file_pointer:
        line = line.strip()
        if line:
            if line[0] not in comment_lead:
                cl = [int(l) for l in line.split()[:-1]]
                self.nv = max([abs(l) for l in cl] + [self.nv])
                self.clauses.append(cl)
            elif not line.startswith('p cnf '):
                self.comments.append(line)
python
def space_time_cluster(catalog, t_thresh, d_thresh):
    """
    Cluster detections in space and time.

    Use to separate repeaters from other events. Clusters by distance
    first, then removes events in those groups that are at different times.

    :type catalog: obspy.core.event.Catalog
    :param catalog: Catalog of events to be clustered
    :type t_thresh: float
    :param t_thresh: Maximum inter-event time threshold in seconds
    :type d_thresh: float
    :param d_thresh: Maximum inter-event distance in km

    :returns: list of :class:`obspy.core.event.Catalog` objects
    :rtype: list

    >>> from eqcorrscan.utils.clustering import space_time_cluster
    >>> from obspy.clients.fdsn import Client
    >>> from obspy import UTCDateTime
    >>> client = Client("https://earthquake.usgs.gov")
    >>> starttime = UTCDateTime("2002-01-01")
    >>> endtime = UTCDateTime("2002-02-01")
    >>> cat = client.get_events(starttime=starttime, endtime=endtime,
    ...                         minmagnitude=6)
    >>> groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
    """
    initial_spatial_groups = space_cluster(catalog=catalog, d_thresh=d_thresh,
                                           show=False)
    # Need initial_spatial_groups to be lists at the moment
    initial_spatial_lists = []
    for group in initial_spatial_groups:
        initial_spatial_lists.append(list(group))
    # Check within these groups and throw them out if they are not close in
    # time.
    groups = []
    for group in initial_spatial_lists:
        for master in group:
            for event in group:
                if abs(event.preferred_origin().time -
                       master.preferred_origin().time) > t_thresh:
                    # If greater, then just put the event in on its own
                    groups.append([event])
                    group.remove(event)
        groups.append(group)
    return [Catalog(group) for group in groups]
python
def allow_blank(self, form, name):
    """
    Allow blank determines if the form might be completely empty. If it's
    empty it will result in a None as the saved value for the ForeignKey.
    """
    if self.blank is not None:
        return self.blank

    model = form._meta.model
    field = model._meta.get_field(self.get_field_name(form, name))
    return field.blank
python
def _parse_track_spread(self, d1, d2, interp=True, phys=False,
                        simple=_USESIMPLE):
    """Determine the spread around the track"""
    if not hasattr(self, '_allErrCovs'):
        self._determine_stream_spread(simple=simple)
    okaySpreadR = ['r', 'vr', 'vt', 'z', 'vz', 'phi']
    okaySpreadXY = ['x', 'y', 'z', 'vx', 'vy', 'vz']
    okaySpreadLB = ['ll', 'bb', 'dist', 'vlos', 'pmll', 'pmbb']
    # Determine which coordinate system we're in
    coord = [False, False, False]  # R, XY, LB
    if d1.lower() in okaySpreadR and d2.lower() in okaySpreadR:
        coord[0] = True
    elif d1.lower() in okaySpreadXY and d2.lower() in okaySpreadXY:
        coord[1] = True
    elif d1.lower() in okaySpreadLB and d2.lower() in okaySpreadLB:
        coord[2] = True
    else:
        raise NotImplementedError("plotting the spread for coordinates from different systems not implemented yet ...")
    # Get the right 2D Jacobian
    indxDict = {}
    indxDict['r'] = 0
    indxDict['vr'] = 1
    indxDict['vt'] = 2
    indxDict['z'] = 3
    indxDict['vz'] = 4
    indxDict['phi'] = 5
    indxDictXY = {}
    indxDictXY['x'] = 0
    indxDictXY['y'] = 1
    indxDictXY['z'] = 2
    indxDictXY['vx'] = 3
    indxDictXY['vy'] = 4
    indxDictXY['vz'] = 5
    indxDictLB = {}
    indxDictLB['ll'] = 0
    indxDictLB['bb'] = 1
    indxDictLB['dist'] = 2
    indxDictLB['vlos'] = 3
    indxDictLB['pmll'] = 4
    indxDictLB['pmbb'] = 5
    if coord[0]:
        relevantCov = self._allErrCovs
        relevantDict = indxDict
        if phys:  # apply scale factors
            tcov = copy.copy(relevantCov)
            scaleFac = numpy.array([self._ro, self._vo, self._vo,
                                    self._ro, self._vo, 1.])
            tcov *= numpy.tile(scaleFac, (6, 1))
            tcov *= numpy.tile(scaleFac, (6, 1)).T
            relevantCov = tcov
    elif coord[1]:
        relevantCov = self._allErrCovsXY
        relevantDict = indxDictXY
        if phys:  # apply scale factors
            tcov = copy.copy(relevantCov)
            scaleFac = numpy.array([self._ro, self._ro, self._ro,
                                    self._vo, self._vo, self._vo])
            tcov *= numpy.tile(scaleFac, (6, 1))
            tcov *= numpy.tile(scaleFac, (6, 1)).T
            relevantCov = tcov
    elif coord[2]:
        relevantCov = self._allErrCovsLBUnscaled
        relevantDict = indxDictLB
    indx0 = numpy.array([[relevantDict[d1.lower()], relevantDict[d1.lower()]],
                         [relevantDict[d2.lower()], relevantDict[d2.lower()]]])
    indx1 = numpy.array([[relevantDict[d1.lower()], relevantDict[d2.lower()]],
                         [relevantDict[d1.lower()], relevantDict[d2.lower()]]])
    cov = relevantCov[:, indx0, indx1]  # cov contains all nTrackChunks covs
    if not interp:
        out = numpy.empty((self._nTrackChunks, 2))
        eigDir = numpy.array([1., 0.])
        for ii in range(self._nTrackChunks):
            covEig = numpy.linalg.eig(cov[ii])
            minIndx = numpy.argmin(covEig[0])
            # this is the direction of the transverse spread
            minEigvec = covEig[1][:, minIndx]
            if numpy.sum(minEigvec * eigDir) < 0.:
                minEigvec *= -1.  # Keep them pointing in the same direction
            out[ii] = minEigvec * numpy.sqrt(covEig[0][minIndx])
            eigDir = minEigvec
    else:
        # We slerp the minor eigenvector and interpolate the eigenvalue
        # First store all of the eigenvectors on the track
        allEigval = numpy.empty(self._nTrackChunks)
        allEigvec = numpy.empty((self._nTrackChunks, 2))
        eigDir = numpy.array([1., 0.])
        for ii in range(self._nTrackChunks):
            covEig = numpy.linalg.eig(cov[ii])
            minIndx = numpy.argmin(covEig[0])
            # this is the direction of the transverse spread
            minEigvec = covEig[1][:, minIndx]
            if numpy.sum(minEigvec * eigDir) < 0.:
                minEigvec *= -1.  # Keep them pointing in the same direction
            allEigval[ii] = numpy.sqrt(covEig[0][minIndx])
            allEigvec[ii] = minEigvec
            eigDir = minEigvec
        # Now interpolate where needed
        interpEigval = \
            interpolate.InterpolatedUnivariateSpline(self._thetasTrack,
                                                     allEigval, k=3)
        interpolatedEigval = interpEigval(self._interpolatedThetasTrack)
        # Interpolate in chunks
        interpolatedEigvec = numpy.empty((len(self._interpolatedThetasTrack), 2))
        for ii in range(self._nTrackChunks - 1):
            slerpOmega = numpy.arccos(numpy.sum(allEigvec[ii] * allEigvec[ii + 1]))
            slerpts = (self._interpolatedThetasTrack - self._thetasTrack[ii]) / \
                (self._thetasTrack[ii + 1] - self._thetasTrack[ii])
            slerpIndx = (slerpts >= 0.) * (slerpts <= 1.)
            for jj in range(2):
                interpolatedEigvec[slerpIndx, jj] = \
                    (numpy.sin((1 - slerpts[slerpIndx]) * slerpOmega) * allEigvec[ii, jj]
                     + numpy.sin(slerpts[slerpIndx] * slerpOmega) * allEigvec[ii + 1, jj]) / numpy.sin(slerpOmega)
        out = numpy.tile(interpolatedEigval.T, (2, 1)).T * interpolatedEigvec
    if coord[2]:
        # if LB, undo rescalings that were applied before
        out[:, 0] *= self._ErrCovsLBScale[relevantDict[d1.lower()]]
        out[:, 1] *= self._ErrCovsLBScale[relevantDict[d2.lower()]]
    return (out[:, 0], out[:, 1])
python
def find_method(self, decl):
    """Find class method to call for declaration based on name."""
    name = decl.name
    method = None
    try:
        method = getattr(self, u'do_{}'.format(name.replace('-', '_')))
    except AttributeError:
        if name.startswith('data-'):
            method = getattr(self, 'do_data_any')
        elif name.startswith('attr-'):
            method = getattr(self, 'do_attr_any')
        else:
            log(WARN, u'Missing method {}'.format(
                name.replace('-', '_')).encode('utf-8'))
    if method:
        self.record_coverage_line(decl.source_line)
        return method
    else:
        return lambda x, y, z: None
java
public boolean barIsEmptyForAllVoices(Bar bar) {
    for (Object m_voice : m_voices) {
        Voice v = (Voice) m_voice;
        if (!v.barIsEmpty(bar))
            return false;
    }
    return true;
}
java
@ObjectiveCName("appendArrayGenericType:types:") public static void appendArrayGenericType(StringBuilder out, Type[] types) { if (types.length == 0) { return; } appendGenericType(out, types[0]); for (int i = 1; i < types.length; i++) { out.append(','); appendGenericType(out, types[i]); } }
python
def sparse_dot_product_attention(q, k, v, bi, use_map_fn, experts_params):
  """Sparse multihead self-attention.

  Perform an approximation of the full multihead attention by dispatching
  the tokens using their keys/values. Thus the attention matrix is only
  computed each time on a subset of the tokens.

  Notes:
   * The function doesn't perform scaling here (multihead_attention does
  the /sqrt(depth)).
   * The padding should have been removed (so batch size should be 1 but
  length contains the elements from all different batches)
   * Right now, only self-attention is supported so length_q and length_kv
  should be identical and the function will add a triangular mask.
   * If bi.order is not None, the bias is added inside this function to
  prevent attention to the future.

  Args:
    q (tf.Tensor): Queries of shape [batch, heads, length_q, depth_k]
    k (tf.Tensor): Keys of shape [batch, heads, length_q, depth_k]
    v (tf.Tensor): Values of shape [batch, heads, length_kv, depth_v]
    bi (BatchInfo): Contains the batch coordinates and sequence order
    use_map_fn (bool): Use either tf.map_fn or a Python for loop to compute
      the heads separately
    experts_params (dict): Additional params for the local expert

  Returns:
    tf.Tensor: Approximation of Softmax(Q.K) * V, of shape
      [batch, heads, length_q, depth_v]
  """
  batch_size, nb_heads, _, depth = common_layers.shape_list(q)

  @expert_utils.add_name_scope()
  def flatten_first_dims(x):
    """Reshape such that x is [num_heads, -1, depth]."""
    # Case 1: Either constant batch size of size 1 or batch already flattened
    if x.get_shape().as_list()[0] == 1:
      return tf.squeeze(x, axis=0)
    # Case 2: Flatten batch dimension
    x = tf.transpose(x, perm=[1, 0, 2, 3])
    x = tf.reshape(x, [nb_heads, -1, depth])
    return x

  def flatten_batch(x):
    if x is None:
      return x
    return expert_utils.flatten_all_but_last(x)

  q = flatten_first_dims(q)
  k = flatten_first_dims(k)
  v = flatten_first_dims(v)
  bi = BatchInfo(
      coordinates=flatten_batch(bi.coordinates),
      order=flatten_batch(bi.order),
  )

  # Unstack heads
  list_q = tf.unstack(q)  # list[tf.Tensor(shape=[batch * length, depth])]
  list_k = tf.unstack(k)
  list_v = tf.unstack(v)

  list_gates_q = []
  list_gates_k = []

  total_loss = 0.0
  # There might be a more optimized way to compute all heads at once
  for single_q, single_k, _ in zip(list_q, list_k, list_v):
    # Each head gets its own dispatcher
    lsh_gating = LshGating(
        depth=single_q.get_shape().as_list()[-1], **experts_params)

    list_gates_q.append(lsh_gating.get_gates(single_q))
    list_gates_k.append(lsh_gating.get_gates(single_k))

  gates_q = tf.stack(list_gates_q)
  gates_k = tf.stack(list_gates_k)

  # Process each head separately.
  v_out = map_fn_switch(
      lambda args: dot_product_single_head(bi=bi, *args),
      elems=(q, k, v, gates_q, gates_k),
      dtype=tf.float32,
      parallel_iterations=2,
      use_map_fn=use_map_fn,
  )

  # Restore original shape as expected by multihead_attention
  if isinstance(batch_size, int) and batch_size == 1:
    v_out = tf.expand_dims(v_out, axis=0)  # Restore batch_size = 1
  else:
    v_out = tf.reshape(v_out, [nb_heads, batch_size, -1, depth])
    v_out = tf.transpose(v_out, [1, 0, 2, 3])
  return v_out, total_loss / nb_heads
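A toy numpy sketch of the underlying idea: hash queries and keys into buckets and attend only within matching buckets. The hashing here is a made-up random projection, not the LshGating class used above.

import numpy as np

def bucketed_attention(q, k, v, nb_buckets=4, seed=0):
    """Toy sparse attention: tokens only attend within their hash bucket."""
    rng = np.random.RandomState(seed)
    proj = rng.randn(q.shape[-1], nb_buckets)
    q_buckets = np.argmax(q @ proj, axis=-1)  # bucket id per query, [length]
    k_buckets = np.argmax(k @ proj, axis=-1)
    out = np.zeros_like(v)  # queries in empty buckets keep zero output
    for b in range(nb_buckets):
        qi = np.where(q_buckets == b)[0]
        ki = np.where(k_buckets == b)[0]
        if len(qi) == 0 or len(ki) == 0:
            continue
        logits = q[qi] @ k[ki].T                              # [nq, nk]
        weights = np.exp(logits - logits.max(axis=-1, keepdims=True))
        weights /= weights.sum(axis=-1, keepdims=True)        # row softmax
        out[qi] = weights @ v[ki]
    return out

q = k = v = np.random.RandomState(1).randn(8, 16)  # [length, depth]
print(bucketed_attention(q, k, v).shape)  # (8, 16)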
java
private SearchResult<String> searchObjectIds(String indexName, QueryBuilder queryBuilder, int start, int size, List<String> sortOptions, String docType) throws IOException { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(queryBuilder); searchSourceBuilder.from(start); searchSourceBuilder.size(size); if (sortOptions != null && !sortOptions.isEmpty()) { for (String sortOption : sortOptions) { SortOrder order = SortOrder.ASC; String field = sortOption; int index = sortOption.indexOf(":"); if (index > 0) { field = sortOption.substring(0, index); order = SortOrder.valueOf(sortOption.substring(index + 1)); } searchSourceBuilder.sort(new FieldSortBuilder(field).order(order)); } } // Generate the actual request to send to ES. SearchRequest searchRequest = new SearchRequest(indexName); searchRequest.types(docType); searchRequest.source(searchSourceBuilder); SearchResponse response = elasticSearchClient.search(searchRequest); List<String> result = new LinkedList<>(); response.getHits().forEach(hit -> result.add(hit.getId())); long count = response.getHits().getTotalHits(); return new SearchResult<>(count, result); }
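The "field:ORDER" parsing that the loop above performs is easy to isolate; a sketch of just that piece in Python (the 'ASC'/'DESC' strings stand in for the Elasticsearch SortOrder enum):

def parse_sort_option(sort_option):
    """Split 'field' or 'field:DESC' into (field, order), defaulting to ASC."""
    field, sep, order = sort_option.partition(':')
    return field, (order if sep else 'ASC')

print(parse_sort_option('name'))      # ('name', 'ASC')
print(parse_sort_option('age:DESC'))  # ('age', 'DESC')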
python
def make_url(path, protocol=None, hosts=None):
    """Make a URL given a path, and optionally, a protocol and a sequence of
    hosts to select from at random.

    :param path: The Archive.org path.
    :type path: str

    :param protocol: (optional) The HTTP protocol to use. "https://" is used
                     by default.
    :type protocol: str

    :param hosts: (optional) A sequence of hosts; one is chosen at random.
                  The default host is "archive.org".
    :type hosts: sequence

    :rtype: str
    :returns: An absolute URI.
    """
    protocol = 'https://' if not protocol else protocol
    host = hosts[random.randrange(len(hosts))] if hosts else 'archive.org'
    return protocol + host + path.strip()
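Assuming the function above is importable, a quick usage example; the host names other than archive.org are illustrative, not guaranteed endpoints:

print(make_url('/details/some-item'))
# -> https://archive.org/details/some-item

print(make_url('/metadata/some-item',
               protocol='http://',
               hosts=['ia801504.us.archive.org', 'ia601504.us.archive.org']))
# -> one of the two hosts, chosen at random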
python
def comparable(self): """str: comparable representation of the path specification.""" string_parts = [] if self.data_stream: string_parts.append('data stream: {0:s}'.format(self.data_stream)) if self.inode is not None: string_parts.append('inode: {0:d}'.format(self.inode)) if self.location is not None: string_parts.append('location: {0:s}'.format(self.location)) return self._GetComparable(sub_comparable_string=', '.join(string_parts))
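A minimal stand-in showing the comparable string this property builds; the class here is a hypothetical mock, not the real path specification type:

class FakePathSpec(object):
    def __init__(self, data_stream=None, inode=None, location=None):
        self.data_stream = data_stream
        self.inode = inode
        self.location = location

    @property
    def comparable(self):
        parts = []
        if self.data_stream:
            parts.append('data stream: {0:s}'.format(self.data_stream))
        if self.inode is not None:
            parts.append('inode: {0:d}'.format(self.inode))
        if self.location is not None:
            parts.append('location: {0:s}'.format(self.location))
        return ', '.join(parts)

print(FakePathSpec(inode=42, location='/tmp/file.txt').comparable)
# -> inode: 42, location: /tmp/file.txt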
java
public static Container findContainerByIdOrByName( String name, DockerClient dockerClient ) { Container result = null; List<Container> containers = dockerClient.listContainersCmd().withShowAll( true ).exec(); for( Container container : containers ) { List<String> names = Arrays.asList( container.getNames()); // Docker containers are prefixed with '/'. // At least, those we created, since their parent is the Docker daemon. if( container.getId().equals( name ) || names.contains( "/" + name )) { result = container; break; } } return result; }
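The same id-or-name matching in Python, run over plain dicts shaped like Docker's /containers/json reply; the sample records are fabricated:

def find_container_by_id_or_name(name, containers):
    """containers: list of dicts shaped like Docker's /containers/json reply."""
    for container in containers:
        # Docker prefixes container names with '/'.
        if container['Id'] == name or '/' + name in container['Names']:
            return container
    return None

sample = [{'Id': 'abc123', 'Names': ['/web']},
          {'Id': 'def456', 'Names': ['/db']}]
print(find_container_by_id_or_name('db', sample))      # matches by name
print(find_container_by_id_or_name('abc123', sample))  # matches by id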
python
def top(self, topn, by='counts'):
    """
    Get the top ``topn`` features in the :class:`.FeatureSet`\.

    Parameters
    ----------
    topn : int
        Number of features to return.
    by : str (default: 'counts')
        How features should be sorted. Must be 'counts' or 'documentCounts'.

    Returns
    -------
    list
        ``(feature, value)`` tuples for the top ``topn`` features, sorted
        by ``by`` in descending order.
    """
    if by not in ['counts', 'documentCounts']:
        raise ValueError('kwarg `by` must be "counts" or "documentCounts"')

    cvalues = getattr(self, by)
    order = argsort(list(cvalues.values()))[::-1][:topn]
    keys = list(cvalues.keys())
    return [(self.index[keys[i]], cvalues[keys[i]]) for i in order]
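The argsort-reverse-slice idiom from the return statement, in isolation on a made-up counts dict:

from numpy import argsort

counts = {'the': 120, 'of': 87, 'stream': 15, 'spread': 9}
keys = list(counts.keys())
values = list(counts.values())
order = argsort(values)[::-1][:2]  # indices of the two largest counts
print([(keys[i], values[i]) for i in order])
# -> [('the', 120), ('of', 87)]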
python
def delete_group_action(model, request): """Delete group from database. """ try: groups = model.parent.backend uid = model.model.name del groups[uid] groups() model.parent.invalidate() except Exception as e: return { 'success': False, 'message': str(e) } localizer = get_localizer(request) message = localizer.translate(_( 'deleted_group', default='Deleted group from database' )) return { 'success': True, 'message': message }
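The success/failure dict shape this action returns is easy to exercise on its own; a sketch with a plain dict as a fake backend, without the surrounding framework:

def delete_from_backend(backend, uid):
    """Mirror the try/except response shape used by the action above."""
    try:
        del backend[uid]
    except Exception as e:
        return {'success': False, 'message': str(e)}
    return {'success': True, 'message': 'Deleted group from database'}

backend = {'editors': object()}
print(delete_from_backend(backend, 'editors'))  # success
print(delete_from_backend(backend, 'missing'))  # failure with KeyError text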