language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
java
public boolean quotesRequired(final String csvColumn, final CsvContext context, final CsvPreference preference) {
    return columnNumbers.contains(context.getColumnNumber());
}
python
def is_dicom(filename):
    '''returns Boolean of whether the given file has the DICOM magic number'''
    try:
        # Open in binary mode: the magic number is raw bytes, and a text-mode
        # read would never compare equal to a bytes literal on Python 3.
        with open(filename, 'rb') as f:
            d = f.read(132)
        return d[128:132] == b"DICM"
    except (IOError, OSError):
        # Narrowed from a bare except: only I/O failures mean "not DICOM" here.
        return False
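A quick usage sketch (illustrative only; the file paths are hypothetical):

# Hypothetical paths, for illustration.
print(is_dicom("scan_001.dcm"))   # True for a real DICOM file
print(is_dicom("notes.txt"))      # False: no "DICM" marker at byte offset 128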
java
@Nullable
public VATCountryData getVATCountryData (@Nonnull final Locale aLocale) {
    ValueEnforcer.notNull (aLocale, "Locale");
    final Locale aCountry = CountryCache.getInstance ().getCountry (aLocale);
    return m_aVATItemsPerCountry.get (aCountry);
}
java
public static void combineWlpAndRaXmls(final String adapterName, RaConnector raConnector, WlpRaConnector wlpRaConnector) throws InvalidPropertyException, UnavailableException { raConnector.copyWlpSettings(wlpRaConnector); WlpRaResourceAdapter wlpResourceAdapter = wlpRaConnector.getResourceAdapter(); RaResourceAdapter raResourceAdapter = raConnector.getResourceAdapter(); if (wlpResourceAdapter != null) { raResourceAdapter.copyWlpSettings(wlpResourceAdapter); // process resourceadapter level config-property if (wlpResourceAdapter.getConfigProperties() != null) { List<WlpRaConfigProperty> wlpConfigProperties = wlpResourceAdapter.getConfigProperties(); for (WlpRaConfigProperty wlpConfigProperty : wlpConfigProperties) { if (wlpConfigProperty.addWlpPropertyToMetatype()) { if (raResourceAdapter.isConfigPropertyAlreadyDefined(wlpConfigProperty.getWlpPropertyName())) throw new InvalidPropertyException(Tr.formatMessage(tc, "J2CA9908.duplicate.copy", wlpConfigProperty.getWlpPropertyName(), adapterName)); else { RaConfigProperty property = new RaConfigProperty(); property.copyWlpSettings(wlpConfigProperty); raResourceAdapter.getConfigProperties().add(property); } } else { RaConfigProperty raConfigProperty = raResourceAdapter.getConfigPropertyById(wlpConfigProperty.getWlpPropertyName()); if (raConfigProperty == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9909.missing.matching.config.prop", wlpConfigProperty.getWlpPropertyName(), adapterName)); else raConfigProperty.copyWlpSettings(wlpConfigProperty); } } } // process resourceadapter level adminobject List<WlpRaAdminObject> wlpAdminObjects = wlpResourceAdapter.getAdminObjects(); if (wlpAdminObjects != null) { for (WlpRaAdminObject wlpAdminObject : wlpAdminObjects) { RaAdminObject raAdminObject = raResourceAdapter.getAdminObject(wlpAdminObject.getAdminObjectInterface(), wlpAdminObject.getAdminObjectClass()); if (raAdminObject == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9910.missing.matching.adminobject", wlpAdminObject.getAdminObjectInterface(), wlpAdminObject.getAdminObjectClass(), adapterName)); else { raAdminObject.copyWlpSettings(wlpAdminObject); if (wlpAdminObject.getConfigProperties() != null) { List<WlpRaConfigProperty> wlpConfigProperties = wlpAdminObject.getConfigProperties(); for (WlpRaConfigProperty wlpConfigProperty : wlpConfigProperties) { if (wlpConfigProperty.addWlpPropertyToMetatype()) { if (raAdminObject.isConfigPropertyAlreadyDefined(wlpConfigProperty.getWlpPropertyName())) throw new InvalidPropertyException(Tr.formatMessage(tc, "J2CA9908.duplicate.copy", wlpConfigProperty.getWlpPropertyName(), adapterName)); else { RaConfigProperty property = new RaConfigProperty(); property.copyWlpSettings(wlpConfigProperty); raAdminObject.getConfigProperties().add(property); } } else { RaConfigProperty raConfigProperty = raAdminObject.getConfigPropertyById(wlpConfigProperty.getWlpPropertyName()); if (raConfigProperty == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9909.missing.matching.config.prop", wlpConfigProperty.getWlpPropertyName(), adapterName)); else raConfigProperty.copyWlpSettings(wlpConfigProperty); } } } } } } // process outbound-resourceadapter WlpRaOutboundResourceAdapter wlpOutboundAdapter = wlpResourceAdapter.getOutboundResourceAdapter(); if (wlpOutboundAdapter != null) { RaOutboundResourceAdapter raOutboundAdapter = raResourceAdapter.getOutboundResourceAdapter(); if (wlpOutboundAdapter.getConnectionDefinitions() != null) { List<WlpRaConnectionDefinition> 
wlpConnectionDefinitions = wlpOutboundAdapter.getConnectionDefinitions(); for (WlpRaConnectionDefinition wlpConnectionDefinition : wlpConnectionDefinitions) { RaConnectionDefinition raConnectionDefinition = raOutboundAdapter.getConnectionDefinitionByInterface(wlpConnectionDefinition.getConnectionFactoryInterface()); if (raConnectionDefinition == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9911.missing.matching.type", "connection-definition", wlpConnectionDefinition.getConnectionFactoryInterface(), ConnectionDefinition.class.getSimpleName(), adapterName)); else { raConnectionDefinition.copyWlpSettings(wlpConnectionDefinition); if (wlpConnectionDefinition.getConfigProperties() != null) { List<WlpRaConfigProperty> wlpConfigProperties = wlpConnectionDefinition.getConfigProperties(); // process connection-definition config-property for (WlpRaConfigProperty wlpConfigProperty : wlpConfigProperties) { if (wlpConfigProperty.addWlpPropertyToMetatype()) { if (raConnectionDefinition.isConfigPropertyAlreadyDefined(wlpConfigProperty.getWlpPropertyName())) throw new InvalidPropertyException(Tr.formatMessage(tc, "J2CA9908.duplicate.copy", wlpConfigProperty.getWlpPropertyName(), adapterName)); else { RaConfigProperty property = new RaConfigProperty(); property.copyWlpSettings(wlpConfigProperty); raConnectionDefinition.getConfigProperties().add(property); } } else { RaConfigProperty raConfigProperty = raConnectionDefinition.getConfigPropertyById(wlpConfigProperty.getWlpPropertyName()); if (raConfigProperty == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9909.missing.matching.config.prop", wlpConfigProperty.getWlpPropertyName(), adapterName)); else raConfigProperty.copyWlpSettings(wlpConfigProperty); } } } } } } } // process inbound-resourceadapter WlpRaInboundResourceAdapter wlpInboundAdapter = wlpResourceAdapter.getInboundResourceAdapter(); if (wlpInboundAdapter != null) { RaInboundResourceAdapter raInboundAdapter = raResourceAdapter.getInboundResourceAdapter(); // process messageadapter WlpRaMessageAdapter wlpMessageAdapter = wlpInboundAdapter.getMessageAdapter(); if (wlpMessageAdapter != null) { if (wlpMessageAdapter.getMessageListeners() != null) { RaMessageAdapter raMessageAdapter = raInboundAdapter.getMessageAdapter(); List<WlpRaMessageListener> wlpMessageListeners = wlpMessageAdapter.getMessageListeners(); for (WlpRaMessageListener wlpMessageListener : wlpMessageListeners) { RaMessageListener raMessageListener = raMessageAdapter == null ? 
null : raMessageAdapter.getMessageListenerByType(wlpMessageListener.getMessageListenerType()); if (raMessageListener == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9911.missing.matching.type", "messagelistener", wlpMessageListener.getMessageListenerType(), Activation.class.getSimpleName(), adapterName)); else { raMessageListener.copyWlpSettings(wlpMessageListener); if (wlpMessageListener.getActivationSpec() != null) { WlpRaActivationSpec wlpActivationSpec = wlpMessageListener.getActivationSpec(); RaActivationSpec raActivationSpec = raMessageListener.getActivationSpec(); if (raActivationSpec == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9911.missing.matching.type", "activationspec", raMessageListener.getMessageListenerType(), Activation.class.getSimpleName(), adapterName)); else if (wlpActivationSpec.getConfigProperties() != null) { List<WlpRaConfigProperty> wlpConfigProperties = wlpActivationSpec.getConfigProperties(); for (WlpRaConfigProperty wlpConfigProperty : wlpConfigProperties) { if (wlpConfigProperty.addWlpPropertyToMetatype()) if (raActivationSpec.isConfigPropertyAlreadyDefined(wlpConfigProperty.getWlpPropertyName())) throw new InvalidPropertyException(Tr.formatMessage(tc, "J2CA9908.duplicate.copy", wlpConfigProperty.getWlpPropertyName(), adapterName)); else { RaConfigProperty property = new RaConfigProperty(); property.copyWlpSettings(wlpConfigProperty); raActivationSpec.getConfigProperties().add(property); } else { RaConfigProperty raConfigProperty = raActivationSpec.getConfigPropertyById(wlpConfigProperty.getWlpPropertyName()); if (raConfigProperty == null) throw new UnavailableException(Tr.formatMessage(tc, "J2CA9909.missing.matching.config.prop", wlpConfigProperty.getWlpPropertyName(), adapterName)); else raConfigProperty.copyWlpSettings(wlpConfigProperty); } } } } } } } } } } }
python
def from_iso_datetime(datetimestring, use_dateutil=True):
    """Parse an ISO8601-formatted datetime string and return a datetime object.

    Use dateutil's parser if possible and return a timezone-aware datetime.
    """
    if not _iso8601_datetime_re.match(datetimestring):
        raise ValueError('Not a valid ISO8601-formatted datetime string')
    # Use dateutil's parser if possible
    if dateutil_available and use_dateutil:
        return parser.isoparse(datetimestring)
    else:
        # Strip off timezone info.
        return datetime.datetime.strptime(datetimestring[:19], '%Y-%m-%dT%H:%M:%S')
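Illustrative usage (assumes the module-level `dateutil_available` flag and `_iso8601_datetime_re` regex referenced inside the function):

dt = from_iso_datetime("2021-06-01T12:30:00+02:00")
print(dt.isoformat())  # timezone-aware via dateutil's isoparse; naive otherwise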
python
def main(): """ NAME plotdi_e.py DESCRIPTION plots equal area projection from dec inc data and cones of confidence (Fisher, kent or Bingham or bootstrap). INPUT FORMAT takes dec/inc as first two columns in space delimited file SYNTAX plotdi_e.py [command line options] OPTIONS -h prints help message and quits -i for interactive parameter entry -f FILE, sets input filename on command line -Fish plots unit vector mean direction, alpha95 -Bing plots Principal direction, Bingham confidence ellipse -Kent plots unit vector mean direction, confidence ellipse -Boot E plots unit vector mean direction, bootstrapped confidence ellipse -Boot V plots unit vector mean direction, distribution of bootstrapped means """ dist='F' # default distribution is Fisherian mode=1 title="" EQ={'eq':1} if len(sys.argv) > 0: if '-h' in sys.argv: # check if help is needed print(main.__doc__) sys.exit() # graceful quit if '-i' in sys.argv: # ask for filename file=input("Enter file name with dec, inc data: ") dist=input("Enter desired distrubution: [Fish]er, [Bing]ham, [Kent] [Boot] [default is Fisher]: ") if dist=="":dist="F" if dist=="Bing":dist="B" if dist=="Kent":dist="K" if dist=="Boot": type=input(" Ellipses or distribution of vectors? [E]/V ") if type=="" or type=="E": dist="BE" else: dist="BE" else: # if '-f' in sys.argv: ind=sys.argv.index('-f') file=sys.argv[ind+1] else: print('you must specify a file name') print(main.__doc__) sys.exit() if '-Bing' in sys.argv:dist='B' if '-Kent' in sys.argv:dist='K' if '-Boot' in sys.argv: ind=sys.argv.index('-Boot') type=sys.argv[ind+1] if type=='E': dist='BE' elif type=='V': dist='BV' EQ['bdirs']=2 pmagplotlib.plot_init(EQ['bdirs'],5,5) else: print(main.__doc__) sys.exit() pmagplotlib.plot_init(EQ['eq'],5,5) # # get to work f=open(file,'r') data=f.readlines() # DIs= [] # set up list for dec inc data DiRecs=[] pars=[] nDIs,rDIs,npars,rpars=[],[],[],[] mode =1 for line in data: # read in the data from standard input DiRec={} rec=line.split() # split each line on space to get records DIs.append((float(rec[0]),float(rec[1]),1.)) DiRec['dec']=rec[0] DiRec['inc']=rec[1] DiRec['direction_type']='l' DiRecs.append(DiRec) # split into two modes ppars=pmag.doprinc(DIs) # get principal directions for rec in DIs: angle=pmag.angle([rec[0],rec[1]],[ppars['dec'],ppars['inc']]) if angle>90.: rDIs.append(rec) else: nDIs.append(rec) if dist=='B': # do on whole dataset title="Bingham confidence ellipse" bpars=pmag.dobingham(DIs) for key in list(bpars.keys()): if key!='n':print(" ",key, '%7.1f'%(bpars[key])) if key=='n':print(" ",key, ' %i'%(bpars[key])) npars.append(bpars['dec']) npars.append(bpars['inc']) npars.append(bpars['Zeta']) npars.append(bpars['Zdec']) npars.append(bpars['Zinc']) npars.append(bpars['Eta']) npars.append(bpars['Edec']) npars.append(bpars['Einc']) if dist=='F': title="Fisher confidence cone" if len(nDIs)>3: fpars=pmag.fisher_mean(nDIs) print("mode ",mode) for key in list(fpars.keys()): if key!='n':print(" ",key, '%7.1f'%(fpars[key])) if key=='n':print(" ",key, ' %i'%(fpars[key])) mode+=1 npars.append(fpars['dec']) npars.append(fpars['inc']) npars.append(fpars['alpha95']) # Beta npars.append(fpars['dec']) isign=abs(fpars['inc']) / fpars['inc'] npars.append(fpars['inc']-isign*90.) #Beta inc npars.append(fpars['alpha95']) # gamma npars.append(fpars['dec']+90.) # Beta dec npars.append(0.) 
#Beta inc if len(rDIs)>3: fpars=pmag.fisher_mean(rDIs) print("mode ",mode) for key in list(fpars.keys()): if key!='n':print(" ",key, '%7.1f'%(fpars[key])) if key=='n':print(" ",key, ' %i'%(fpars[key])) mode+=1 rpars.append(fpars['dec']) rpars.append(fpars['inc']) rpars.append(fpars['alpha95']) # Beta rpars.append(fpars['dec']) isign=abs(fpars['inc']) / fpars['inc'] rpars.append(fpars['inc']-isign*90.) #Beta inc rpars.append(fpars['alpha95']) # gamma rpars.append(fpars['dec']+90.) # Beta dec rpars.append(0.) #Beta inc if dist=='K': title="Kent confidence ellipse" if len(nDIs)>3: kpars=pmag.dokent(nDIs,len(nDIs)) print("mode ",mode) for key in list(kpars.keys()): if key!='n':print(" ",key, '%7.1f'%(kpars[key])) if key=='n':print(" ",key, ' %i'%(kpars[key])) mode+=1 npars.append(kpars['dec']) npars.append(kpars['inc']) npars.append(kpars['Zeta']) npars.append(kpars['Zdec']) npars.append(kpars['Zinc']) npars.append(kpars['Eta']) npars.append(kpars['Edec']) npars.append(kpars['Einc']) if len(rDIs)>3: kpars=pmag.dokent(rDIs,len(rDIs)) print("mode ",mode) for key in list(kpars.keys()): if key!='n':print(" ",key, '%7.1f'%(kpars[key])) if key=='n':print(" ",key, ' %i'%(kpars[key])) mode+=1 rpars.append(kpars['dec']) rpars.append(kpars['inc']) rpars.append(kpars['Zeta']) rpars.append(kpars['Zdec']) rpars.append(kpars['Zinc']) rpars.append(kpars['Eta']) rpars.append(kpars['Edec']) rpars.append(kpars['Einc']) else: # assume bootstrap if dist=='BE': if len(nDIs)>5: BnDIs=pmag.di_boot(nDIs) Bkpars=pmag.dokent(BnDIs,1.) print("mode ",mode) for key in list(Bkpars.keys()): if key!='n':print(" ",key, '%7.1f'%(Bkpars[key])) if key=='n':print(" ",key, ' %i'%(Bkpars[key])) mode+=1 npars.append(Bkpars['dec']) npars.append(Bkpars['inc']) npars.append(Bkpars['Zeta']) npars.append(Bkpars['Zdec']) npars.append(Bkpars['Zinc']) npars.append(Bkpars['Eta']) npars.append(Bkpars['Edec']) npars.append(Bkpars['Einc']) if len(rDIs)>5: BrDIs=pmag.di_boot(rDIs) Bkpars=pmag.dokent(BrDIs,1.) print("mode ",mode) for key in list(Bkpars.keys()): if key!='n':print(" ",key, '%7.1f'%(Bkpars[key])) if key=='n':print(" ",key, ' %i'%(Bkpars[key])) mode+=1 rpars.append(Bkpars['dec']) rpars.append(Bkpars['inc']) rpars.append(Bkpars['Zeta']) rpars.append(Bkpars['Zdec']) rpars.append(Bkpars['Zinc']) rpars.append(Bkpars['Eta']) rpars.append(Bkpars['Edec']) rpars.append(Bkpars['Einc']) title="Bootstrapped confidence ellipse" elif dist=='BV': if len(nDIs)>5: pmagplotlib.plot_eq(EQ['eq'],nDIs,'Data') BnDIs=pmag.di_boot(nDIs) pmagplotlib.plot_eq(EQ['bdirs'],BnDIs,'Bootstrapped Eigenvectors') if len(rDIs)>5: BrDIs=pmag.di_boot(rDIs) if len(nDIs)>5: # plot on existing plots pmagplotlib.plot_di(EQ['eq'],rDIs) pmagplotlib.plot_di(EQ['bdirs'],BrDIs) else: pmagplotlib.plot_eq(EQ['eq'],rDIs,'Data') pmagplotlib.plot_eq(EQ['bdirs'],BrDIs,'Bootstrapped Eigenvectors') pmagplotlib.draw_figs(EQ) ans=input('s[a]ve, [q]uit ') if ans=='q':sys.exit() if ans=='a': files={} for key in list(EQ.keys()): files[key]='BE_'+key+'.svg' pmagplotlib.save_plots(EQ,files) sys.exit() if len(nDIs)>5: pmagplotlib.plot_conf(EQ['eq'],title,DiRecs,npars,1) if len(rDIs)>5 and dist!='B': pmagplotlib.plot_conf(EQ['eq'],title,[],rpars,0) elif len(rDIs)>5 and dist!='B': pmagplotlib.plot_conf(EQ['eq'],title,DiRecs,rpars,1) pmagplotlib.draw_figs(EQ) ans=input('s[a]ve, [q]uit ') if ans=='q':sys.exit() if ans=='a': files={} for key in list(EQ.keys()): files[key]=key+'.svg' pmagplotlib.save_plots(EQ,files)
java
@Override
public int read(SegmentIndexBuffer sib, File sibFile) throws IOException {
    check(sibFile);
    RandomAccessFile raf = new RandomAccessFile(sibFile, "r");
    FileChannel channel = raf.getChannel();
    readVersion(channel);
    int length = sib.read(channel);
    length += STORAGE_VERSION_LENGTH;
    channel.close();
    raf.close();
    if (_logger.isTraceEnabled()) {
        _logger.trace("read " + sibFile.getAbsolutePath());
    }
    return length;
}
java
public String substringData(int offset, int count) throws DOMException {
    if ((offset < 0) || (count < 0)) {
        throw new DOMException(DOMException.INDEX_SIZE_ERR, null);
    }
    String data = getData();
    if (offset > data.length()) {
        throw new DOMException(DOMException.INDEX_SIZE_ERR, null);
    }
    if (offset + count > data.length()) {
        return data.substring(offset);
    } else {
        return data.substring(offset, offset + count);
    }
}
python
def batch_size(self, batch_size):
    """Limits the number of documents returned in one batch. Each batch
    requires a round trip to the server. It can be adjusted to optimize
    performance and limit data transfer.

    .. note:: batch_size can not override MongoDB's internal limits on the
       amount of data it will return to the client in a single batch (i.e
       if you set batch size to 1,000,000,000, MongoDB will currently only
       return 4-16MB of results per batch).

    Raises :exc:`TypeError` if `batch_size` is not an integer.
    Raises :exc:`ValueError` if `batch_size` is less than ``0``.

    :Parameters:
      - `batch_size`: The size of each batch of results requested.
    """
    if not isinstance(batch_size, integer_types):
        raise TypeError("batch_size must be an integer")
    if batch_size < 0:
        raise ValueError("batch_size must be >= 0")
    # A batch size of 1 is bumped to 2 (the legacy "x == 1 and 2 or x"
    # idiom, rewritten as an equivalent conditional expression).
    self.__batch_size = 2 if batch_size == 1 else batch_size
    return self
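A hedged usage sketch (assuming the familiar PyMongo-style cursor this method appears to belong to; `collection` is hypothetical):

# batch_size() returns the cursor itself, so it chains with other options.
cursor = collection.find({"status": "active"}).batch_size(100)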
java
protected static Charset getCharset(final Map<String, String> properties) {
    String charsetName = properties.get("charset");
    try {
        return charsetName == null ? Charset.defaultCharset() : Charset.forName(charsetName);
    } catch (IllegalArgumentException ex) {
        InternalLogger.log(Level.ERROR, "Invalid charset: " + charsetName);
        return Charset.defaultCharset();
    }
}
java
public DataSetIterator getTrainIterator() { return new DataSetIterator() { @Override public DataSet next(int i) { throw new UnsupportedOperationException(); } @Override public List<String> getLabels() { return backedIterator.getLabels(); } @Override public int inputColumns() { return backedIterator.inputColumns(); } @Override public void remove() { throw new UnsupportedOperationException(); } @Override public int totalOutcomes() { return backedIterator.totalOutcomes(); } @Override public boolean resetSupported() { return backedIterator.resetSupported(); } @Override public boolean asyncSupported() { return backedIterator.asyncSupported(); } @Override public void reset() { resetPending.set(true); } @Override public int batch() { return backedIterator.batch(); } @Override public void setPreProcessor(DataSetPreProcessor dataSetPreProcessor) { backedIterator.setPreProcessor(dataSetPreProcessor); } @Override public DataSetPreProcessor getPreProcessor() { return backedIterator.getPreProcessor(); } @Override public boolean hasNext() { if (resetPending.get()) { if (resetSupported()) { backedIterator.reset(); counter.set(0); resetPending.set(false); } else throw new UnsupportedOperationException("Reset isn't supported by underlying iterator"); } val state = backedIterator.hasNext(); if (state && counter.get() < numTrain) return true; else return false; } @Override public DataSet next() { counter.incrementAndGet(); val p = backedIterator.next(); if (counter.get() == 1 && firstTrain == null) { // first epoch ever, we'll save first dataset and will use it to check for equality later firstTrain = p.copy(); firstTrain.detach(); } else if (counter.get() == 1) { // epoch > 1, comparing first dataset to previously stored dataset. they should be equal int cnt = 0; if (!p.getFeatures().equalsWithEps(firstTrain.getFeatures(), 1e-5)) throw new ND4JIllegalStateException("First examples do not match. Randomization was used?"); } return p; } }; }
python
def has_privileged_access(executable):
    """
    Check if an executable has the right to attach to Ethernet and TAP adapters.

    :param executable: executable path

    :returns: True or False
    """
    if sys.platform.startswith("win"):
        # do not check anything on Windows
        return True
    if sys.platform.startswith("darwin"):
        if os.stat(executable).st_uid == 0:
            return True
    if os.geteuid() == 0:
        # we are root, so we should have privileged access.
        return True
    if os.stat(executable).st_uid == 0 and \
            (os.stat(executable).st_mode & stat.S_ISUID or os.stat(executable).st_mode & stat.S_ISGID):
        # the executable is owned by root and has the setuid/setgid bit set.
        return True

    # test if the executable has the CAP_NET_RAW capability (Linux only)
    try:
        if sys.platform.startswith("linux") and "security.capability" in os.listxattr(executable):
            caps = os.getxattr(executable, "security.capability")
            # test the 2nd byte and check if the 13th bit (CAP_NET_RAW) is set
            if struct.unpack("<IIIII", caps)[1] & 1 << 13:
                return True
    except (AttributeError, OSError) as e:
        log.error("could not determine if CAP_NET_RAW capability is set for {}: {}".format(executable, e))

    return False
python
def append(self, cls, infer_hidden: bool = False, **kwargs) -> Encoder:
    """
    Extends sequence with new Encoder. 'dtype' gets passed into Encoder instance
    if not present in parameters and supported by specific Encoder type.

    :param cls: Encoder type.
    :param infer_hidden: Whether the number of hidden units should be inferred
                         from the previous encoder.
    :param kwargs: Named arbitrary parameters for Encoder.
    :return: Instance of Encoder.
    """
    params = dict(kwargs)
    if infer_hidden:
        params['num_hidden'] = self.get_num_hidden()

    sig_params = inspect.signature(cls.__init__).parameters
    if 'dtype' in sig_params and 'dtype' not in kwargs:
        params['dtype'] = self.dtype
    encoder = cls(**params)
    self.encoders.append(encoder)
    return encoder
python
def visit_Name(self, node: ast.Name) -> Any:
    """Load the variable by looking it up in the variable look-up and in the built-ins."""
    if not isinstance(node.ctx, ast.Load):
        raise NotImplementedError(
            "Can only compute a value of Load on a name {}, but got context: {}".format(node.id, node.ctx))

    result = None  # type: Optional[Any]

    if node.id in self._name_to_value:
        result = self._name_to_value[node.id]

    if result is None and hasattr(builtins, node.id):
        result = getattr(builtins, node.id)

    if result is None and node.id != "None":
        # The variable refers to a name local of the lambda (e.g., a target in the generator expression).
        # Since we evaluate generator expressions with runtime compilation, None is returned here as a placeholder.
        return PLACEHOLDER

    self.recomputed_values[node] = result
    return result
java
public Observable<Page<RedisResourceInner>> listNextAsync(final String nextPageLink) {
    return listNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<RedisResourceInner>>, Page<RedisResourceInner>>() {
            @Override
            public Page<RedisResourceInner> call(ServiceResponse<Page<RedisResourceInner>> response) {
                return response.body();
            }
        });
}
java
public void stroke(Canvas canvas, Align align, float x, float y) {
    float sy = y + bounds.y();
    for (TextLayout line : lines) {
        float sx = x + bounds.x() + align.getX(line.width(), textWidth());
        canvas.strokeText(line, sx, sy);
        sy += line.ascent() + line.descent() + line.leading();
    }
}
java
@Override public synchronized void resolve(JingleSession session) throws XMPPException, SmackException, InterruptedException { this.setResolveInit(); for (TransportCandidate candidate : this.getCandidatesList()) { if (candidate instanceof ICECandidate) { ICECandidate iceCandidate = (ICECandidate) candidate; iceCandidate.removeCandidateEcho(); } } this.clear(); // Create a transport candidate for each ICE negotiator candidate we have. ICENegociator iceNegociator = negociatorsMap.get(server); for (Candidate candidate : iceNegociator.getSortedCandidates()) try { Candidate.CandidateType type = candidate.getCandidateType(); ICECandidate.Type iceType; if (type.equals(Candidate.CandidateType.ServerReflexive)) iceType = ICECandidate.Type.srflx; else if (type.equals(Candidate.CandidateType.PeerReflexive)) iceType = ICECandidate.Type.prflx; else if (type.equals(Candidate.CandidateType.Relayed)) iceType = ICECandidate.Type.relay; else iceType = ICECandidate.Type.host; // JBW/GW - 17JUL08: Figure out the zero-based NIC number for this candidate. short nicNum = 0; try { Enumeration<NetworkInterface> nics = NetworkInterface.getNetworkInterfaces(); short i = 0; NetworkInterface nic = NetworkInterface.getByInetAddress(candidate.getAddress().getInetAddress()); while (nics.hasMoreElements()) { NetworkInterface checkNIC = nics.nextElement(); if (checkNIC.equals(nic)) { nicNum = i; break; } i++; } } catch (SocketException e1) { LOGGER.log(Level.WARNING, "exeption", e1); } TransportCandidate transportCandidate = new ICECandidate(candidate.getAddress().getInetAddress().getHostAddress(), 1, nicNum, String.valueOf(random.nextInt(Integer.MAX_VALUE)), candidate.getPort(), "1", candidate.getPriority(), iceType); transportCandidate.setLocalIp(candidate.getBase().getAddress().getInetAddress().getHostAddress()); transportCandidate.setPort(getFreePort()); try { transportCandidate.addCandidateEcho(session); } catch (SocketException e) { LOGGER.log(Level.WARNING, "exception", e); } this.addCandidate(transportCandidate); LOGGER.fine("Candidate addr: " + candidate.getAddress().getInetAddress() + "|" + candidate.getBase().getAddress().getInetAddress() + " Priority:" + candidate.getPriority()); } catch (UtilityException e) { LOGGER.log(Level.WARNING, "exception", e); } catch (UnknownHostException e) { LOGGER.log(Level.WARNING, "exception", e); } // Get a Relay Candidate from XMPP Server if (RTPBridge.serviceAvailable(connection)) { // try { String localIp; int network; // JBW/GW - 17JUL08: ICENegotiator.getPublicCandidate() always returned null in JSTUN 1.7.0, and now the API doesn't exist in JSTUN 1.7.1 // if (iceNegociator.getPublicCandidate() != null) { // localIp = iceNegociator.getPublicCandidate().getBase().getAddress().getInetAddress().getHostAddress(); // network = iceNegociator.getPublicCandidate().getNetwork(); // } // else { { localIp = BridgedResolver.getLocalHost(); network = 0; } sid = random.nextInt(Integer.MAX_VALUE); RTPBridge rtpBridge = RTPBridge.getRTPBridge(connection, String.valueOf(sid)); TransportCandidate localCandidate = new ICECandidate( rtpBridge.getIp(), 1, network, String.valueOf(random.nextInt(Integer.MAX_VALUE)), rtpBridge.getPortA(), "1", 0, ICECandidate.Type.relay); localCandidate.setLocalIp(localIp); TransportCandidate remoteCandidate = new ICECandidate( rtpBridge.getIp(), 1, network, String.valueOf(random.nextInt(Integer.MAX_VALUE)), rtpBridge.getPortB(), "1", 0, ICECandidate.Type.relay); remoteCandidate.setLocalIp(localIp); localCandidate.setSymmetric(remoteCandidate); 
remoteCandidate.setSymmetric(localCandidate); localCandidate.setPassword(rtpBridge.getPass()); remoteCandidate.setPassword(rtpBridge.getPass()); localCandidate.setSessionId(rtpBridge.getSid()); remoteCandidate.setSessionId(rtpBridge.getSid()); localCandidate.setConnection(this.connection); remoteCandidate.setConnection(this.connection); addCandidate(localCandidate); // } // catch (UtilityException e) { // LOGGER.log(Level.WARNING, "exception", e); // } // catch (UnknownHostException e) { // LOGGER.log(Level.WARNING, "exception", e); // } // Get Public Candidate From XMPP Server // JBW/GW - 17JUL08 - ICENegotiator.getPublicCandidate() always returned null in JSTUN 1.7.0, and now it doesn't exist in JSTUN 1.7.1 // if (iceNegociator.getPublicCandidate() == null) { if (true) { String publicIp = RTPBridge.getPublicIP(connection); if (publicIp != null && !publicIp.equals("")) { Enumeration<NetworkInterface> ifaces = null; try { ifaces = NetworkInterface.getNetworkInterfaces(); } catch (SocketException e) { LOGGER.log(Level.WARNING, "exception", e); } // If detect this address in local machine, don't use it. boolean found = false; while (ifaces.hasMoreElements() && !false) { NetworkInterface iface = ifaces.nextElement(); Enumeration<InetAddress> iaddresses = iface.getInetAddresses(); while (iaddresses.hasMoreElements()) { InetAddress iaddress = iaddresses.nextElement(); if (iaddress.getHostAddress().indexOf(publicIp) > -1) { found = true; break; } } } if (!found) { try { TransportCandidate publicCandidate = new ICECandidate( publicIp, 1, 0, String.valueOf(random.nextInt(Integer.MAX_VALUE)), getFreePort(), "1", 0, ICECandidate.Type.srflx); publicCandidate.setLocalIp(InetAddress.getLocalHost().getHostAddress()); try { publicCandidate.addCandidateEcho(session); } catch (SocketException e) { LOGGER.log(Level.WARNING, "exception", e); } addCandidate(publicCandidate); } catch (UnknownHostException e) { LOGGER.log(Level.WARNING, "exception", e); } } } } } this.setResolveEnd(); }
java
public static <T extends Comparable<? super T>> List<T> sortInplace(List<T> list) {
    Collections.sort(list);
    return list;
}
python
def rewind(self):
    '''rewind to start'''
    self._index = 0
    self.percent = 0
    self.messages = {}
    self._flightmode_index = 0
    self._timestamp = None
    self.flightmode = None
    self.params = {}
java
public void warnf(String format, Object... params) {
    doLogf(Level.WARN, FQCN, format, params, null);
}
python
def execute(self, input_data):
    ''' Okay this worker is going to build graphs from PCAP Bro output logs '''
    # Grab the Bro log handles from the input
    bro_logs = input_data['pcap_bro']

    # Weird log
    if 'weird_log' in bro_logs:
        stream = self.workbench.stream_sample(bro_logs['weird_log'])
        self.weird_log_graph(stream)

    # HTTP log
    gsleep()
    stream = self.workbench.stream_sample(bro_logs['http_log'])
    self.http_log_graph(stream)

    # Files log
    gsleep()
    stream = self.workbench.stream_sample(bro_logs['files_log'])
    self.files_log_graph(stream)

    return {'output': 'go to http://localhost:7474/browser and execute this query '
                      '"match (s:origin), (t:file), p=allShortestPaths((s)--(t)) return p"'}
java
private void writeObject(java.io.ObjectOutputStream out) throws IOException {
    if (delegateMethods != null) {
        delegateMethods.clear();
    }
    out.defaultWriteObject();
}
java
public IfcMedicalDeviceTypeEnum createIfcMedicalDeviceTypeEnumFromString(EDataType eDataType, String initialValue) {
    IfcMedicalDeviceTypeEnum result = IfcMedicalDeviceTypeEnum.get(initialValue);
    if (result == null)
        throw new IllegalArgumentException(
                "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
}
java
private void processXMLResourceEnvRefs(List<? extends ResourceEnvRef> resourceEnvRefs) throws InjectionException { for (ResourceEnvRef resourceEnvRef : resourceEnvRefs) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "processing : " + resourceEnvRef); //If XML has previously been read and an injection binding with the same //jndi name has been created, get the current injection binding and merge //the new env entry into it. String jndiName = resourceEnvRef.getName(); InjectionBinding<Resource> injectionBinding = ivAllAnnotationsCollection.get(jndiName); if (injectionBinding != null) { ((ResourceInjectionBinding) injectionBinding).merge(resourceEnvRef); } else { Class<?> injectionType = null; // d367834.10 String injectionTypeName = null; String targetName = null; String targetClassName = null; String mappedName = resourceEnvRef.getMappedName(); String description = getDescription(resourceEnvRef); String lookup = resourceEnvRef.getLookupName(); // F743-21028.4 if (lookup != null) { lookup = lookup.trim(); } List<InjectionTarget> targets = resourceEnvRef.getInjectionTargets(); try { injectionTypeName = resourceEnvRef.getTypeName(); injectionType = loadTypeClass(injectionTypeName, jndiName); // d476227 d476227.1 if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "injectionType : " + injectionType); Resource resourceAnnotation = new ResourceImpl(jndiName, injectionType == null ? Object.class : injectionType, // d701306.1 mappedName, description, lookup); // F743-21028.4 injectionBinding = new ResourceInjectionBinding(resourceAnnotation, injectionTypeName, lookup, ResourceXMLType.RESOURCE_ENV_REF, ivNameSpaceConfig); // d479669 addInjectionBinding(injectionBinding); // Support multiple injection targets if (!targets.isEmpty()) { for (InjectionTarget target : targets) { targetClassName = target.getInjectionTargetClassName(); targetName = target.getInjectionTargetName(); // Would have been nice if an exception were thrown // here if the type was NOT one of the supported types // for resoruce-env-ref, but since that has not been done // historically, it cannot be done now without the // possibility of breaking an existing application. injectionBinding.addInjectionTarget(injectionType, targetName, targetClassName); } } } catch (Exception e) { FFDCFilter.processException(e, CLASS_NAME + ".processXMLResourceEnvRefs", "454", this, new Object[] { resourceEnvRef, jndiName, injectionBinding, targetName, targetClassName }); InjectionException icex; icex = new InjectionException("Failed to process the XML for " + "resource-env-ref " + resourceEnvRef, e); Tr.error(tc, "FAILED_TO_PROCESS_XML_CWNEN0032E", "resource-env-ref", resourceEnvRef); throw icex; } } } }
java
public void fieldsToControls() {
    super.fieldsToControls();
    for (int iRowIndex = 0; iRowIndex < m_vComponentCache.size(); iRowIndex++) {
        ComponentCache componentCache = (ComponentCache) m_vComponentCache.elementAt(iRowIndex);
        if (componentCache != null) {
            // Move the data to the model
            for (int iColumnIndex = 0; iColumnIndex < componentCache.m_rgcompoments.length; iColumnIndex++) {
                this.fieldToData(componentCache, iRowIndex, iColumnIndex);
            }
        }
    }
}
python
def detect_version(basedir, compiler=None, **compiler_attrs):
    """Compile, link & execute a test program, in empty directory `basedir`.

    The C compiler will be updated with any keywords given via setattr.

    Parameters
    ----------
    basedir : path
        The location where the test program will be compiled and run
    compiler : str
        The distutils compiler key (e.g. 'unix', 'msvc', or 'mingw32')
    **compiler_attrs : dict
        Any extra compiler attributes, which will be set via ``setattr(cc)``.

    Returns
    -------
    A dict of properties for zmq compilation, with the following two keys:

    vers : tuple
        The ZMQ version as a tuple of ints, e.g. (2,2,0)
    settings : dict
        The compiler options used to compile the test function,
        e.g. `include_dirs`, `library_dirs`, `libs`, etc.
    """
    if compiler is None:
        compiler = get_default_compiler()
    cfile = pjoin(basedir, 'vers.cpp')
    shutil.copy(pjoin(os.path.dirname(__file__), 'vers.cpp'), cfile)

    # check if we need to link against Realtime Extensions library
    if sys.platform.startswith('linux'):
        cc = ccompiler.new_compiler(compiler=compiler)
        cc.output_dir = basedir
        if not cc.has_function('timer_create'):
            if 'libraries' not in compiler_attrs:
                compiler_attrs['libraries'] = []
            compiler_attrs['libraries'].append('rt')

    cc = get_compiler(compiler=compiler, **compiler_attrs)
    efile = test_compilation(cfile, compiler=cc)
    patch_lib_paths(efile, cc.library_dirs)

    rc, so, se = get_output_error([efile])
    if rc:
        msg = "Error running version detection script:\n%s\n%s" % (so, se)
        logging.error(msg)
        raise IOError(msg)

    handlers = {'vers': lambda val: tuple(int(v) for v in val.split('.'))}

    props = {}
    for line in (x for x in so.split('\n') if x):
        key, val = line.split(':')
        props[key] = handlers[key](val)

    return props
python
def set(self, name, value):
    """
    Set the I{value} of a property by I{name}.
    The value is validated against the definition and set
    to the default when I{value} is None.
    @param name: The property name.
    @type name: str
    @param value: The new property value.
    @type value: any
    @return: self
    @rtype: L{Properties}
    """
    self.provider(name).__set(name, value)
    return self
python
def _set_routing_system(self, v, load=False): """ Setter method for routing_system, mapped from YANG variable /routing_system (container) If this variable is read-only (config: false) in the source YANG file, then _set_routing_system is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_routing_system() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=routing_system.routing_system, is_container='container', presence=False, yang_name="routing-system", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_LEVEL_RBRIDGE'}}, namespace='urn:brocade.com:mgmt:brocade-common-def', defining_module='brocade-common-def', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """routing_system must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=routing_system.routing_system, is_container='container', presence=False, yang_name="routing-system", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'sort-priority': u'RUNNCFG_LEVEL_RBRIDGE'}}, namespace='urn:brocade.com:mgmt:brocade-common-def', defining_module='brocade-common-def', yang_type='container', is_config=True)""", }) self.__routing_system = t if hasattr(self, '_set'): self._set()
java
public static RepeatTests repeatDefault(String server) {
    return RepeatTests.with(mp20Features(server))
                      .andWith(ft20Features(server));
}
java
private static void scatter(SparseVector v, double[] z) {
    int[] index = v.getIndex();
    int used = v.getUsed();
    double[] data = v.getData();

    Arrays.fill(z, 0);
    for (int i = 0; i < used; ++i)
        z[index[i]] = data[i];
}
python
def fmt_account(account, title=None):
    """Format an Account or a DirectedAccount."""
    if title is None:
        title = account.__class__.__name__  # `Account` or `DirectedAccount`

    title = '{} ({} causal link{})'.format(
        title, len(account), '' if len(account) == 1 else 's')

    body = ''
    body += 'Irreducible effects\n'
    body += '\n'.join(fmt_ac_ria(m) for m in account.irreducible_effects)
    body += '\nIrreducible causes\n'
    body += '\n'.join(fmt_ac_ria(m) for m in account.irreducible_causes)

    return '\n' + header(title, body, under_char='*')
java
public void addListener(KeyMatcher matcher, VehicleMessage.Listener listener) {
    Log.i(TAG, "Adding listener " + listener + " to " + matcher);
    mNotifier.register(matcher, listener);
}
python
def encrypters(self):
    """A ``set`` containing all key ids (if any) to which this message was encrypted."""
    return set(m.encrypter for m in self._sessionkeys if isinstance(m, PKESessionKey))
python
def _convert(x, factor1, factor2):
    """
    Converts mixing ratio x in comp1 - comp2 tie line to that in
    c1 - c2 tie line.

    Args:
        x (float): Mixing ratio x in comp1 - comp2 tie line, a float
            between 0 and 1.
        factor1 (float): Compositional ratio between composition c1 and
            processed composition comp1. E.g., factor for
            Composition('SiO2') and Composition('O') is 2.0.
        factor2 (float): Compositional ratio between composition c2 and
            processed composition comp2.

    Returns:
        Mixing ratio in c1 - c2 tie line, a float between 0 and 1.
    """
    return x * factor2 / ((1 - x) * factor1 + x * factor2)
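A worked numeric check (illustrative; uses the SiO2/O factor of 2.0 cited in the docstring and an assumed factor2 of 1.0):

# x = 0.5, factor1 = 2.0, factor2 = 1.0:
# 0.5 * 1.0 / ((1 - 0.5) * 2.0 + 0.5 * 1.0) = 0.5 / 1.5 = 0.333...
print(_convert(0.5, 2.0, 1.0))  # ~0.3333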
python
def _set_vrrp_rbridge_global(self, v, load=False): """ Setter method for vrrp_rbridge_global, mapped from YANG variable /vrrp_rbridge_global (container) If this variable is read-only (config: false) in the source YANG file, then _set_vrrp_rbridge_global is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_vrrp_rbridge_global() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=vrrp_rbridge_global.vrrp_rbridge_global, is_container='container', presence=False, yang_name="vrrp-rbridge-global", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'vrrpv3GlobalConf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """vrrp_rbridge_global must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=vrrp_rbridge_global.vrrp_rbridge_global, is_container='container', presence=False, yang_name="vrrp-rbridge-global", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'callpoint': u'vrrpv3GlobalConf', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)""", }) self.__vrrp_rbridge_global = t if hasattr(self, '_set'): self._set()
python
def _raise_exception(cls, reason, data=None):
    """
    Raise aiohttp exception and pass payload/reason into it.
    """
    text_dict = {
        "error": reason
    }
    if data is not None:
        text_dict["errors"] = data
    raise cls(
        text=json.dumps(text_dict),
        content_type="application/json"
    )
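A hedged usage sketch, assuming the helper is called with one of aiohttp's built-in web exception classes (which accept `text` and `content_type` keyword arguments):

from aiohttp import web

# Raises a 400 with JSON body: {"error": "invalid input", "errors": {"field": "name"}}
_raise_exception(web.HTTPBadRequest, "invalid input", data={"field": "name"})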
python
def probePlane(img, origin=(0, 0, 0), normal=(1, 0, 0)):
    """
    Takes a ``vtkImageData`` and probes its scalars on a plane.

    .. hint:: |probePlane| |probePlane.py|_
    """
    plane = vtk.vtkPlane()
    plane.SetOrigin(origin)
    plane.SetNormal(normal)

    planeCut = vtk.vtkCutter()
    planeCut.SetInputData(img)
    planeCut.SetCutFunction(plane)
    planeCut.Update()
    cutActor = Actor(planeCut.GetOutput(), c=None)  # ScalarVisibilityOn
    cutActor.mapper.SetScalarRange(img.GetPointData().GetScalars().GetRange())
    return cutActor
java
public boolean startSession() {
    synchronized (sharedLock) {
        SessionData session = loadSession();
        if (isSessionActive(session)) {
            sharedLock.notifyAll();
            return false;
        } else {
            clearAll();
        }
        sharedLock.notifyAll();
    }
    return true;
}
java
public void setCaseValue(com.google.api.ads.adwords.axis.v201809.cm.ProductDimension caseValue) {
    this.caseValue = caseValue;
}
java
Map<String, Object> getCopyOfProperties() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "getCopyOfProperties");

    Map<String, Object> temp = null;
    // Make sure no-one changes the properties underneath us.
    synchronized (properties) {
        temp = new HashMap<String, Object>(properties);
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "getCopyOfProperties", temp);
    return temp;
}
java
private BigDecimal getSunTrueLongitude(BigDecimal meanAnomaly) {
    BigDecimal sinMeanAnomaly = new BigDecimal(Math.sin(convertDegreesToRadians(meanAnomaly).doubleValue()));
    BigDecimal sinDoubleMeanAnomaly = new BigDecimal(Math.sin(multiplyBy(convertDegreesToRadians(meanAnomaly),
            BigDecimal.valueOf(2)).doubleValue()));

    BigDecimal firstPart = meanAnomaly.add(multiplyBy(sinMeanAnomaly, new BigDecimal("1.916")));
    BigDecimal secondPart = multiplyBy(sinDoubleMeanAnomaly, new BigDecimal("0.020")).add(new BigDecimal("282.634"));
    BigDecimal trueLongitude = firstPart.add(secondPart);

    if (trueLongitude.doubleValue() > 360) {
        trueLongitude = trueLongitude.subtract(BigDecimal.valueOf(360));
    }
    return setScale(trueLongitude);
}
java
public double getDoubleProperty(final String name, final double defaultValue) {
    final String prop = getStringProperty(name);
    if (prop != null) {
        try {
            return Double.parseDouble(prop);
        } catch (final Exception ignored) {
            return defaultValue;
        }
    }
    return defaultValue;
}
python
def create_states_geo_zone(cls, states_geo_zone, **kwargs):
    """Create StatesGeoZone

    Create a new StatesGeoZone
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.create_states_geo_zone(states_geo_zone, async=True)
    >>> result = thread.get()

    :param async bool
    :param StatesGeoZone states_geo_zone: Attributes of statesGeoZone to create (required)
    :return: StatesGeoZone
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._create_states_geo_zone_with_http_info(states_geo_zone, **kwargs)
    else:
        (data) = cls._create_states_geo_zone_with_http_info(states_geo_zone, **kwargs)
        return data
python
def up_by_name(*filters, local_dir=".", remote_dir=DEFAULT_REMOTE_DIR, count=1):
    """Sync files whose filename attribute is highest in alphanumeric order"""
    remote_files = command.map_files_raw(remote_dir=remote_dir)
    local_files = list_local_files(*filters, local_dir=local_dir)
    greatest = sorted(local_files, key=lambda f: f.filename)
    to_sync = greatest[-count:]
    _notify_sync(Direction.up, to_sync)
    up_by_files(to_sync[::-1], remote_dir, remote_files)
java
public static ResourceReference getJQueryResourceReference() {
    ResourceReference reference;
    if (Application.exists()) {
        reference = Application.get().getJavaScriptLibrarySettings().getJQueryReference();
    } else {
        reference = JQueryResourceReference.get();
    }
    return reference;
}
java
@Override
public void onInitializeAccessibilityNodeInfo(@NonNull AccessibilityNodeInfo info) {
    super.onInitializeAccessibilityNodeInfo(info);
    if (Build.VERSION.SDK_INT >= 21) {
        info.addAction(AccessibilityNodeInfo.AccessibilityAction.ACTION_SCROLL_BACKWARD);
        info.addAction(AccessibilityNodeInfo.AccessibilityAction.ACTION_SCROLL_FORWARD);
    } else {
        info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_FORWARD);
        info.addAction(AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD);
    }
}
python
def get_speaker_volume(self):
    """Return the volume setting of the speaker."""
    if not self.camera_extended_properties:
        return None

    speaker = self.camera_extended_properties.get('speaker')
    if not speaker:
        return None

    return speaker.get('volume')
python
def find_discordant_snps( self, individual1, individual2, individual3=None, save_output=False ): """ Find discordant SNPs between two or three individuals. Parameters ---------- individual1 : Individual reference individual (child if `individual2` and `individual3` are parents) individual2 : Individual comparison individual individual3 : Individual other parent if `individual1` is child and `individual2` is a parent save_output : bool specifies whether to save output to a CSV file in the output directory Returns ------- pandas.DataFrame discordant SNPs and associated genetic data References ---------- ..[1] David Pike, "Search for Discordant SNPs in Parent-Child Raw Data Files," David Pike's Utilities, http://www.math.mun.ca/~dapike/FF23utils/pair-discord.php ..[2] David Pike, "Search for Discordant SNPs when given data for child and both parents," David Pike's Utilities, http://www.math.mun.ca/~dapike/FF23utils/trio-discord.php """ self._remap_snps_to_GRCh37([individual1, individual2, individual3]) df = individual1.snps # remove nulls for reference individual df = df.loc[df["genotype"].notnull()] # add SNPs shared with `individual2` df = df.join(individual2.snps["genotype"], rsuffix="2") genotype1 = "genotype_" + individual1.get_var_name() genotype2 = "genotype_" + individual2.get_var_name() if individual3 is None: df = df.rename(columns={"genotype": genotype1, "genotype2": genotype2}) # find discordant SNPs between reference and comparison individuals df = df.loc[ df[genotype2].notnull() & ( (df[genotype1].str.len() == 1) & (df[genotype2].str.len() == 1) & (df[genotype1] != df[genotype2]) ) | ( (df[genotype1].str.len() == 2) & (df[genotype2].str.len() == 2) & (df[genotype1].str[0] != df[genotype2].str[0]) & (df[genotype1].str[0] != df[genotype2].str[1]) & (df[genotype1].str[1] != df[genotype2].str[0]) & (df[genotype1].str[1] != df[genotype2].str[1]) ) ] if save_output: save_df_as_csv( df, self._output_dir, "discordant_snps_" + individual1.get_var_name() + "_" + individual2.get_var_name() + "_GRCh37.csv", ) else: # add SNPs shared with `individual3` df = df.join(individual3.snps["genotype"], rsuffix="3") genotype3 = "genotype_" + individual3.get_var_name() df = df.rename( columns={ "genotype": genotype1, "genotype2": genotype2, "genotype3": genotype3, } ) # find discordant SNPs between child and two parents df = df.loc[ ( df[genotype2].notnull() & ( (df[genotype1].str.len() == 1) & (df[genotype2].str.len() == 1) & (df[genotype1] != df[genotype2]) ) | ( (df[genotype1].str.len() == 2) & (df[genotype2].str.len() == 2) & (df[genotype1].str[0] != df[genotype2].str[0]) & (df[genotype1].str[0] != df[genotype2].str[1]) & (df[genotype1].str[1] != df[genotype2].str[0]) & (df[genotype1].str[1] != df[genotype2].str[1]) ) ) | ( df[genotype3].notnull() & ( (df[genotype1].str.len() == 1) & (df[genotype3].str.len() == 1) & (df[genotype1] != df[genotype3]) ) | ( (df[genotype1].str.len() == 2) & (df[genotype3].str.len() == 2) & (df[genotype1].str[0] != df[genotype3].str[0]) & (df[genotype1].str[0] != df[genotype3].str[1]) & (df[genotype1].str[1] != df[genotype3].str[0]) & (df[genotype1].str[1] != df[genotype3].str[1]) ) ) | ( df[genotype2].notnull() & df[genotype3].notnull() & (df[genotype2].str.len() == 2) & (df[genotype2].str[0] == df[genotype2].str[1]) & (df[genotype2] == df[genotype3]) & (df[genotype1] != df[genotype2]) ) ] if save_output: save_df_as_csv( df, self._output_dir, "discordant_snps_" + individual1.get_var_name() + "_" + individual2.get_var_name() + "_" + individual3.get_var_name() + 
"_GRCh37.csv", ) return df
python
def encrypt_file(src, dest, csv_keys):
    """Encrypt a file with the specific GPG keys and write out
    to the specified path"""
    keys = massage_keys(csv_keys.split(','))
    cryptorito.encrypt(src, dest, keys)
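A hedged usage sketch (the paths and key ids below are hypothetical; the exact key format depends on what `massage_keys` accepts):

# Encrypt to two recipients, given as a comma-separated string of key ids.
encrypt_file("secrets.txt", "secrets.txt.gpg", "KEYID1,KEYID2")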
python
def print_brokers(cluster_config, brokers):
    """Print the list of brokers that will be restarted.

    :param cluster_config: the cluster configuration
    :type cluster_config: map
    :param brokers: the brokers that will be restarted
    :type brokers: map of broker ids and host names
    """
    print("Will restart the following brokers in {0}:".format(cluster_config.name))
    for id, host in brokers:
        print("  {0}: {1}".format(id, host))
python
def register_for_deleted_resource(self, resource_id):
    """Registers for notification of a deleted resource.

    ``ResourceReceiver.deletedResources()`` is invoked when the
    specified resource is deleted or removed from this bin.

    arg:    resource_id (osid.id.Id): the ``Id`` of the ``Resource``
            to monitor
    raise:  NullArgument - ``resource_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for
    # osid.resource.ResourceNotificationSession.register_for_deleted_resource
    if not MONGO_LISTENER.receivers[self._ns][self._receiver]['d']:
        MONGO_LISTENER.receivers[self._ns][self._receiver]['d'] = []
    if isinstance(MONGO_LISTENER.receivers[self._ns][self._receiver]['d'], list):
        MONGO_LISTENER.receivers[self._ns][self._receiver]['d'].append(resource_id.get_identifier())
python
def temp(self, name, templates=None, n_indents=None, skipping=False):
    """
    Get specific template of chosen programming language.

    Parameters
    ----------
    :param name : string
        The key name of the template.
    :param templates : string, default: None
        The template with placeholders.
    :param n_indents : int, default: None
        The number of indentations.
    :param skipping : bool, default: False
        Whether to skip the initial indentation.

    Returns
    -------
    return : string
        The wanted template string.
    """
    if templates is None:
        templates = self.TEMPLATES.get(self.target_language)
    keys = name.split('.')
    key = keys.pop(0).lower()
    template = templates.get(key, None)
    if template is not None:
        if isinstance(template, str):
            if n_indents is not None:
                template = self.indent(template, n_indents, skipping)
            return template
        else:
            keys = '.'.join(keys)
            return self.temp(keys, template, skipping=False)
    else:
        class_name = self.__class__.__name__
        estimator_type = getattr(self, 'estimator_type') if \
            hasattr(self, 'estimator_type') else 'classifier'
        path = join(dirname(__file__), 'estimator',
                    estimator_type, class_name, 'templates',
                    self.target_language, name + '.txt')
        if isfile(path):
            with open(path, 'r') as file_:
                template = file_.read()
            if n_indents is not None:
                template = self.indent(template, n_indents, skipping)
            return template
        else:
            err = "Template '{}' wasn't found.".format(name)
            raise AttributeError(err)
java
public String getRawWMIObjectOutput(String wmiClass) throws WMIException {
    String rawData;
    try {
        if (this.properties != null || this.filters != null) {
            rawData = getWMIStub().queryObject(wmiClass, this.properties, this.filters, this.namespace, this.computerName);
        } else {
            rawData = getWMIStub().listObject(wmiClass, this.namespace, this.computerName);
        }
    } catch (WMIException ex) {
        Logger.getLogger(WMI4Java.class.getName()).log(Level.SEVERE, GENERIC_ERROR_MSG, ex);
        throw new WMIException(ex);
    }
    return rawData;
}
java
@Override
public String getUserFacingMessage() {
    final StringBuilder bldr = new StringBuilder();
    bldr.append("RESOLUTION FAILURE ");
    final String name = getName();
    if (name != null) {
        bldr.append(" in ");
        bldr.append(name);
    }
    bldr.append("\n\treason: ");
    final String msg = getMessage();
    if (msg != null) {
        bldr.append(msg);
    } else {
        bldr.append("Unknown");
    }
    bldr.append("\n");
    return bldr.toString();
}
java
public static Compressor getCompressor(byte code) {
    Compressor compressor = TYPE_COMPRESSOR_MAP.get(code);
    if (compressor == null) {
        throw new SofaRpcRuntimeException("Compressor Not Found :\"" + code + "\"!");
    }
    return compressor;
}
java
private void quicksort(short[] a, int[] indexes, int left, int right) {
    if (right <= left)
        return;
    int i = partition(a, indexes, left, right);
    quicksort(a, indexes, left, i - 1);
    quicksort(a, indexes, i + 1, right);
}
python
def release_singleton(self):
    '''deletes the data that lets our program know if it is running as
    singleton when calling check_if_open, i.e. check_if_open will return
    false after calling this
    '''
    with suppress(KeyError):
        del self.cfg['is_programming_running_info']
    with suppress(FileNotFoundError, AttributeError):
        os.remove(self.check_file)
java
@Override
protected void renderTagEnd() {
    try {
        if (StringUtils.isNotEmpty(this.output)) {
            this.output = toString(this.escape ? escape(this.output) : this.output);
            JspWriter writer = this.pageContext.getOut();
            boolean renderTag = renderTag()
                    && (StringUtils.isNotBlank(this.tagName) || StringUtils.isNotBlank(getClasses()));
            if (renderTag) {
                super.renderTagStart();
            }
            writer.write(this.output);
            if (renderTag) {
                super.renderTagEnd();
            }
        }
    } catch (IOException ioex) {
        LOG.error(ioex.getMessage(), ioex);
    }
}
java
private void delete(Node node) {
    if (handHot == node) {
        handHot = node.next;
    }
    if (handCold == node) {
        handCold = node.next;
    }
    if (handTest == node) {
        handTest = node.next;
    }
    node.remove();
}
java
@Override public IfixResourceWritable parseFileToResource(File assetFile, File metadataFile, String contentUrl) throws RepositoryException { ArtifactMetadata artifactMetadata = explodeArtifact(assetFile, metadataFile); // Throw an exception if there is no metadata and properties, we get the name and readme from it if (artifactMetadata == null) { throw new RepositoryArchiveException("Unable to find sibling metadata zip for " + assetFile.getName() + " so do not have the required information", assetFile); } extractFiles(artifactMetadata); _jarPayload = assetFile; // Extract iFix xml file from iFix jar file ParserBase.ExtractedFileInformation xmlFileInfo = extractFileFromArchive(_jarPayload.getAbsolutePath(), ".*lib\\/fixes.*\\.xml"); IFixInfo ifixInfo; try { DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document doc = docBuilder.parse(xmlFileInfo.getExtractedFile()); ifixInfo = IFixInfo.fromDocument(doc); } catch (Exception e) { throw new RepositoryArchiveInvalidEntryException("Parse failure", xmlFileInfo.getSourceArchive(), xmlFileInfo.getSelectedPathFromArchive(), e); } // create asset and update with info from iFix jar file IfixResourceWritable resource = WritableResourceFactory.createIfix(null); resource.setName(getFixId(ifixInfo, xmlFileInfo)); resource.setDisplayPolicy(DisplayPolicy.HIDDEN); resource.setWebDisplayPolicy(DisplayPolicy.HIDDEN); // create the provider info and store in local asset resource.setProviderName("IBM"); // parse the jar manifest String appliesTo = parseManifestForAppliesTo(_jarPayload); // set the local extension information resource.setAppliesTo(appliesTo); resource.setProvideFix(getProvides(ifixInfo, xmlFileInfo)); // a list of fixed APARs // add the readme as an attachment resource.addAttachment(_readmePayload, AttachmentType.DOCUMENTATION); // Find the date with the most recent update date resource.setDate(getLatestDateOfAnyFile(ifixInfo, xmlFileInfo)); // add content and upload addContent(resource, _jarPayload, _jarPayload.getName(), artifactMetadata, contentUrl); return resource; }
java
@Override public int doEndTag() throws JspException { if (!scopeSpecified) { pageContext.removeAttribute(var); } else { pageContext.removeAttribute(var, scope); } return EVAL_PAGE; }
python
def _time_to_expiry(expires): """ Determines the seconds until an HTTP header "Expires" timestamp :param expires: HTTP response "Expires" header :return: seconds until the "Expires" time, clamped to 0 if it is unparseable or already past. total_seconds() is used because .seconds ignores the days component and misbehaves for negative deltas. """ try: expires_dt = datetime.strptime(str(expires), '%a, %d %b %Y %H:%M:%S %Z') delta = expires_dt - datetime.utcnow() return max(int(delta.total_seconds()), 0) except ValueError: return 0
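A quick round-trip check of that parsing logic with a synthetic header (the format string is the one used above; "GMT" is accepted by %Z):

from datetime import datetime, timedelta

# Build an "Expires" value one hour ahead, then parse it back.
header = (datetime.utcnow() + timedelta(hours=1)).strftime(
    '%a, %d %b %Y %H:%M:%S GMT')
expires_dt = datetime.strptime(header, '%a, %d %b %Y %H:%M:%S %Z')
print(int((expires_dt - datetime.utcnow()).total_seconds()))  # ~3600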
java
public boolean doLocalCriteria(StringBuffer strFilter, boolean bIncludeFileName, Vector<BaseField> vParamList) { // Default BaseListener return super.doLocalCriteria(strFilter, bIncludeFileName, vParamList); // If can't handle remote }
python
def _batch_json_to_instances(self, json_dicts: List[JsonDict]) -> List[Instance]: """ Converts a list of JSON objects into a list of :class:`~allennlp.data.instance.Instance`s. By default, this expects that a "batch" consists of a list of JSON blobs which would individually be predicted by :func:`predict_json`. In order to use this method for batch prediction, :func:`_json_to_instance` should be implemented by the subclass, or if the instances have some dependency on each other, this method should be overridden directly. """ instances = [] for json_dict in json_dicts: instances.append(self._json_to_instance(json_dict)) return instances
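The batching contract is easy to see with a toy stand-in (plain Python in place of the allennlp Predictor/Instance types; the "instance" here is just a string):

from typing import Any, Dict, List

JsonDict = Dict[str, Any]

class TinyPredictor:
    """Subclasses implement _json_to_instance; batching maps it over the list."""
    def _json_to_instance(self, json_dict: JsonDict) -> str:
        return json_dict["sentence"].lower()  # toy "instance"

    def _batch_json_to_instances(self, json_dicts: List[JsonDict]) -> List[str]:
        return [self._json_to_instance(d) for d in json_dicts]

print(TinyPredictor()._batch_json_to_instances(
    [{"sentence": "Hello"}, {"sentence": "World"}]))  # ['hello', 'world']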
java
@Override public Collection<CRL> engineGetCRLs(CRLSelector selector) throws CertStoreException { if (selector == null) { Set<CRL> matches = new HashSet<>(); matchX509CRLs(new X509CRLSelector(), matches); matches.addAll(otherCRLs); return matches; } if (selector instanceof X509CRLSelector == false) { Set<CRL> matches = new HashSet<>(); matchX509CRLs(selector, matches); for (CRL crl : otherCRLs) { if (selector.match(crl)) { matches.add(crl); } } return matches; } if (crlIssuers.isEmpty()) { return Collections.<CRL>emptySet(); } X509CRLSelector x509Selector = (X509CRLSelector)selector; // see if the issuer is specified Collection<X500Principal> issuers = x509Selector.getIssuers(); if (issuers != null) { HashSet<CRL> matches = new HashSet<>(16); for (X500Principal issuer : issuers) { Object entry = crlIssuers.get(issuer); if (entry == null) { // empty } else if (entry instanceof X509CRL) { X509CRL crl = (X509CRL)entry; if (x509Selector.match(crl)) { matches.add(crl); } } else { // List // See crlIssuers javadoc. @SuppressWarnings("unchecked") List<X509CRL> list = (List<X509CRL>)entry; for (X509CRL crl : list) { if (x509Selector.match(crl)) { matches.add(crl); } } } } return matches; } // cannot use index, iterate all Set<CRL> matches = new HashSet<>(16); matchX509CRLs(x509Selector, matches); return matches; }
python
def recv_data(self): """ Grab the next frame and put it on the matrix. list(bytearray(...)) gives integer byte values on both Python 2 and 3; map(ord, ...) returned a lazy iterator on Python 3, so len() and slicing failed. """ data, addr = self.sock.recvfrom(self.packetsize) matrix = list(bytearray(data.strip())) if len(matrix) == self.packetsize: self.matrix = matrix[:-4]
java
public void download(GenericUrl requestUrl, OutputStream outputStream) throws IOException { download(requestUrl, null, outputStream); }
python
def status(name, maximum=None, minimum=None, absolute=False, free=False): ''' Return the current disk usage stats for the named mount point name Disk mount or directory for which to check used space maximum The maximum disk utilization minimum The minimum disk utilization absolute By default, the utilization is measured in percentage. Set the `absolute` flag to use kilobytes. .. versionadded:: 2016.11.0 free By default, `minimum` & `maximum` refer to the amount of used space. Set to `True` to evaluate the free space instead. ''' # Monitoring state, no changes will be made so no test interface needed ret = {'name': name, 'result': False, 'comment': '', 'changes': {}, 'data': {}} # Data field for monitoring state # Validate extrema if maximum is not None: if not absolute: maximum, comment = _validate_int('maximum', maximum, [0, 100]) else: maximum, comment = _validate_int('maximum', maximum, strip='KB') ret['comment'] += comment if minimum is not None: if not absolute: minimum, comment = _validate_int('minimum', minimum, [0, 100]) else: minimum, comment = _validate_int('minimum', minimum, strip='KB') ret['comment'] += comment if minimum is not None and maximum is not None: if minimum >= maximum: ret['comment'] += 'minimum must be less than maximum ' if ret['comment']: return ret data = __salt__['disk.usage']() # Validate name if name not in data: ret['comment'] += ('Disk mount {0} not present. '.format(name)) return _status_path(name, ret, minimum, maximum, absolute, free) else: return _status_mount(name, ret, minimum, maximum, absolute, free, data)
java
public void marshall(RegisterPatchBaselineForPatchGroupRequest registerPatchBaselineForPatchGroupRequest, ProtocolMarshaller protocolMarshaller) { if (registerPatchBaselineForPatchGroupRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(registerPatchBaselineForPatchGroupRequest.getBaselineId(), BASELINEID_BINDING); protocolMarshaller.marshall(registerPatchBaselineForPatchGroupRequest.getPatchGroup(), PATCHGROUP_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def postcmd(self, stop: bool, line: str) -> bool: """Hook method executed just after a command dispatch is finished. Overridden so the prompt always displays the cwd. :param stop: if True, the command has indicated the application should exit :param line: the command line text for this command :return: if this is True, the application will exit after this command and the postloop() will run """ self._set_prompt() return stop
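A runnable toy shell showing where that hook fits (stdlib cmd; the cwd-tracking prompt mirrors the method above, while _set_prompt itself is not shown in the snippet):

import cmd
import os

class Shell(cmd.Cmd):
    prompt = os.getcwd() + "> "

    def postcmd(self, stop: bool, line: str) -> bool:
        # Refresh the prompt after every command so it tracks the cwd.
        self.prompt = os.getcwd() + "> "
        return stop

    def do_cd(self, arg):
        os.chdir(arg or os.path.expanduser("~"))

    def do_quit(self, arg):
        return True  # postcmd receives this as stop and propagates it

# Shell().cmdloop()  # interactive, so left commented out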
python
def _collapse_device(self, node, flat): """Collapse device hierarchy into a flat folder.""" items = [item for branch in node.branches for item in self._collapse_device(branch, flat) if item] show_all = not flat or self._quickmenu_actions == 'all' methods = node.methods if show_all else [ method for method in node.methods if method.method in self._quickmenu_actions ] if flat: items.extend(methods) else: items.append(MenuSection(None, methods)) return items
python
def get_field_info(model): """ Given a model class, returns a `FieldInfo` instance, which is a `namedtuple`, containing metadata about the various field types on the model including information about their relationships. """ opts = model._meta.concrete_model._meta pk = _get_pk(opts) fields = _get_fields(opts) forward_relations = _get_forward_relationships(opts) reverse_relations = _get_reverse_relationships(opts) fields_and_pk = _merge_fields_and_pk(pk, fields) relationships = _merge_relationships(forward_relations, reverse_relations) return FieldInfo(pk, fields, forward_relations, reverse_relations, fields_and_pk, relationships)
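FieldInfo itself is a namedtuple; a stand-in with toy values shows the shape of what is returned (field names inferred from the constructor call above, values invented for illustration):

from collections import OrderedDict, namedtuple

FieldInfo = namedtuple('FieldInfo', [
    'pk', 'fields', 'forward_relations', 'reverse_relations',
    'fields_and_pk', 'relationships',
])

# Toy values standing in for real model introspection results.
info = FieldInfo(
    pk='id',
    fields=OrderedDict(name=str),
    forward_relations={},
    reverse_relations={},
    fields_and_pk=OrderedDict(id=int, name=str),
    relationships={},
)
print(info.pk, list(info.fields_and_pk))  # id ['id', 'name']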
java
private Set<IGroupMember> toGroupMembers(List<String> groupNames, String fname) { final Set<IGroupMember> groups = new HashSet<>(); for (String groupName : groupNames) { // Assumes the groupName case matches the DB values. EntityIdentifier[] gs = GroupService.searchForGroups( groupName, IGroupConstants.SearchMethod.DISCRETE, IPerson.class); IGroupMember group; if (gs != null && gs.length > 0) { group = GroupService.findGroup(gs[0].getKey()); } else { // An actual group key might be specified, so try looking up group directly group = GroupService.findGroup(groupName); } if (group == null) { throw new IllegalArgumentException( "No group '" + groupName + "' found when importing portlet: " + fname); } groups.add(group); } return groups; }
python
def make_proxy_method(cls, name): """Creates a proxy function that can be used by Flasks routing. The proxy instantiates the Mocha subclass and calls the appropriate method. :param name: the name of the method to create a proxy for """ i = cls() view = getattr(i, name) for decorator in cls.decorators: view = decorator(view) @functools.wraps(view) def proxy(**forgettable_view_args): # Always use the global request object's view_args, because they # can be modified by intervening function before an endpoint or # wrapper gets called. This matches Flask's behavior. del forgettable_view_args if hasattr(i, "before_request"): response = i.before_request(name, **request.view_args) if response is not None: return response before_view_name = "before_" + name if hasattr(i, before_view_name): before_view = getattr(i, before_view_name) response = before_view(**request.view_args) if response is not None: return response response = view(**request.view_args) # You can also return a dict or None, it will pass it to render if isinstance(response, dict) or response is None: response = response or {} if hasattr(i, "_renderer"): response = i._renderer(response) else: _template = build_endpoint_route_name(cls, view.__name__) _template = utils.list_replace([".", ":"], "/", _template) _template = "%s.%s" % (_template, cls.template_markup) # Set the title from the nav title, if not set _meta_title = getattr(g, "__META__", {}).get("title") if (not _meta_title or _meta_title == "") and get_view_attr(view, "title"): page_attr(title=get_view_attr(view, "title")) response.setdefault("_template", _template) response = i.render(**response) if not isinstance(response, Response): response = make_response(response) for ext in cls._ext: response = ext(response) after_view_name = "after_" + name if hasattr(i, after_view_name): after_view = getattr(i, after_view_name) response = after_view(response) if hasattr(i, "after_request"): response = i.after_request(name, response) return response return proxy
java
public final BELScriptParser.set_statement_group_return set_statement_group() throws RecognitionException { BELScriptParser.set_statement_group_return retval = new BELScriptParser.set_statement_group_return(); retval.start = input.LT(1); Object root_0 = null; Token string_literal19=null; Token STATEMENT_GROUP_KEYWORD20=null; Token char_literal21=null; Token OBJECT_IDENT23=null; BELScriptParser.quoted_value_return quoted_value22 = null; Object string_literal19_tree=null; Object STATEMENT_GROUP_KEYWORD20_tree=null; Object char_literal21_tree=null; Object OBJECT_IDENT23_tree=null; paraphrases.push("in set statement group."); try { // BELScript.g:66:5: ( 'SET' STATEMENT_GROUP_KEYWORD '=' ( quoted_value | OBJECT_IDENT ) ) // BELScript.g:67:5: 'SET' STATEMENT_GROUP_KEYWORD '=' ( quoted_value | OBJECT_IDENT ) { root_0 = (Object)adaptor.nil(); string_literal19=(Token)match(input,24,FOLLOW_24_in_set_statement_group226); string_literal19_tree = (Object)adaptor.create(string_literal19); adaptor.addChild(root_0, string_literal19_tree); STATEMENT_GROUP_KEYWORD20=(Token)match(input,STATEMENT_GROUP_KEYWORD,FOLLOW_STATEMENT_GROUP_KEYWORD_in_set_statement_group228); STATEMENT_GROUP_KEYWORD20_tree = (Object)adaptor.create(STATEMENT_GROUP_KEYWORD20); adaptor.addChild(root_0, STATEMENT_GROUP_KEYWORD20_tree); char_literal21=(Token)match(input,25,FOLLOW_25_in_set_statement_group230); char_literal21_tree = (Object)adaptor.create(char_literal21); adaptor.addChild(root_0, char_literal21_tree); // BELScript.g:67:39: ( quoted_value | OBJECT_IDENT ) int alt4=2; int LA4_0 = input.LA(1); if ( (LA4_0==QUOTED_VALUE) ) { alt4=1; } else if ( (LA4_0==OBJECT_IDENT) ) { alt4=2; } else { NoViableAltException nvae = new NoViableAltException("", 4, 0, input); throw nvae; } switch (alt4) { case 1 : // BELScript.g:67:40: quoted_value { pushFollow(FOLLOW_quoted_value_in_set_statement_group233); quoted_value22=quoted_value(); state._fsp--; adaptor.addChild(root_0, quoted_value22.getTree()); } break; case 2 : // BELScript.g:67:55: OBJECT_IDENT { OBJECT_IDENT23=(Token)match(input,OBJECT_IDENT,FOLLOW_OBJECT_IDENT_in_set_statement_group237); OBJECT_IDENT23_tree = (Object)adaptor.create(OBJECT_IDENT23); adaptor.addChild(root_0, OBJECT_IDENT23_tree); } break; } } retval.stop = input.LT(-1); retval.tree = (Object)adaptor.rulePostProcessing(root_0); adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); paraphrases.pop(); } catch (RecognitionException re) { reportError(re); recover(input,re); retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); } finally { } return retval; }
java
public Quaternionf rotateAxis(float angle, Vector3fc axis, Quaternionf dest) { return rotateAxis(angle, axis.x(), axis.y(), axis.z(), dest); }
java
public void setBucketLifeCycle(String bucketName, String lifeCycle) throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException, InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException, InternalException, InvalidArgumentException { if ((lifeCycle == null) || "".equals(lifeCycle)) { throw new InvalidArgumentException("life cycle cannot be empty"); } Map<String, String> headerMap = new HashMap<>(); headerMap.put("Content-Length", Integer.toString(lifeCycle.length())); Map<String, String> queryParamMap = new HashMap<>(); queryParamMap.put("lifecycle", ""); HttpResponse response = executePut(bucketName, null, headerMap, queryParamMap, lifeCycle, 0); response.body().close(); }
python
def stops_when(iterable, condition): # type: (Iterable, Union[Callable, Any]) -> Iterable """Stop yielding items when a condition arise. Args: iterable: the iterable to filter. condition: if the callable returns True once, stop yielding items. If it's not a callable, it will be converted to one as `lambda condition: condition == item`. Example: >>> list(stops_when(range(10), lambda x: x > 5)) [0, 1, 2, 3, 4, 5] >>> list(stops_when(range(10), 7)) [0, 1, 2, 3, 4, 5, 6] """ if not callable(condition): cond_value = condition def condition(x): return x == cond_value return itertools.takewhile(lambda x: not condition(x), iterable)
python
def __find_caller(stack_info=False): """ Find the stack frame of the caller so that we can note the source file name, line number and function name. """ frame = logging.currentframe() # On some versions of IronPython, currentframe() returns None if # IronPython isn't run with -X:Frames. if frame: frame = frame.f_back caller_info = '(unknown file)', 0, '(unknown function)', None while hasattr(frame, 'f_code'): co = frame.f_code if _logone_src in os.path.normcase(co.co_filename): frame = frame.f_back continue tb_info = None if stack_info: with StringIO() as _buffer: _buffer.write('Traceback (most recent call last):\n') traceback.print_stack(frame, file=_buffer) tb_info = _buffer.getvalue().strip() caller_info = co.co_filename, frame.f_lineno, co.co_name, tb_info break return caller_info
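The same outward walk can be expressed more compactly with the stdlib inspect module; a simplified sketch (no stack_info handling, and "outside this module" is approximated by comparing filenames against __file__):

import inspect

def find_caller():
    """Return (filename, lineno, function) of the first frame outside this file."""
    for frameinfo in inspect.stack()[1:]:
        if frameinfo.filename != __file__:
            return frameinfo.filename, frameinfo.lineno, frameinfo.function
    return '(unknown file)', 0, '(unknown function)'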
java
private SourceRefRange findSourceRefRange(List<Reference> refList, InputId inputId) { checkNotNull(inputId); // TODO(bashir): We can do binary search here, but since this is fast enough // right now, we just do a linear search for simplicity. int lastBefore = -1; int firstAfter = refList.size(); int index = 0; checkState(inputOrder.containsKey(inputId), inputId.getIdName()); int sourceInputOrder = inputOrder.get(inputId); for (Reference ref : refList) { checkNotNull(ref.getInputId()); int order = inputOrder.get(ref.getInputId()); if (order < sourceInputOrder) { lastBefore = index; } else if (order > sourceInputOrder) { firstAfter = index; break; } index++; } return new SourceRefRange(refList, lastBefore, firstAfter); }
java
private static boolean setName(String ns, String name, Schema property) { boolean apply = false; final String cleanName = StringUtils.trimToNull(name); final String useName; if (!isEmpty(cleanName) && !cleanName.equals(((SchemaImpl) property).getName())) { useName = cleanName; apply = true; } else { useName = null; } final String cleanNS = StringUtils.trimToNull(ns); final String useNS; if (!isEmpty(cleanNS)) { useNS = cleanNS; apply = true; } else { useNS = null; } // Set everything or nothing if (apply) { getXml(property).name(useName).namespace(useNS); } return apply; }
java
public CertificateDescriptionInner createOrUpdate(String resourceGroupName, String resourceName, String certificateName) { return createOrUpdateWithServiceResponseAsync(resourceGroupName, resourceName, certificateName).toBlocking().single().body(); }
java
public static List<SecStrucInfo> getSecStrucInfo(Structure s) { List<SecStrucInfo> listSSI = new ArrayList<SecStrucInfo>(); GroupIterator iter = new GroupIterator(s); while (iter.hasNext()) { Group g = iter.next(); if (g.hasAminoAtoms()) { Object p = g.getProperty(Group.SEC_STRUC); if (p != null) { SecStrucInfo ss = (SecStrucInfo) p; listSSI.add(ss); } } } return listSSI; }
python
def write_external_data_tensors(model, filepath): # type: (ModelProto, Text) -> ModelProto """ Write external data of all tensors to files on disk. Note: This function also strips basepath information from all tensors' external_data fields. @params model: Model object which is the source of tensors to serialize. filepath: System path to the directory which should be treated as base path for external data. @return The modified model object. """ for tensor in _get_all_tensors(model): if uses_external_data(tensor): save_external_data(tensor, filepath) tensor.ClearField(str('raw_data')) return model
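A typical call sequence, sketched under the assumption that this is onnx's external_data_helper (the file paths are illustrative, and the API should be checked against your onnx version):

import onnx
from onnx.external_data_helper import write_external_data_tensors

model = onnx.load("model.onnx")  # tensors already tagged with external_data
model = write_external_data_tensors(model, "model_data_dir")
onnx.save(model, "model_stripped.onnx")  # raw_data now lives on disk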
python
def convert_to_row_table(self, add_units=True): ''' Converts the block into row-titled elements. These elements are copied into the returned table, which can be much longer than the original block. Args: add_units: Indicates if units should be appended to each row item. Returns: A row-titled table representing the data in the block. ''' rtable = [] if add_units: relavent_units = self.get_relavent_units() # Create a row for each data element for row_index in range(self.start[0], self.end[0]): for column_index in range(self.start[1], self.end[1]): cell = self.table[row_index][column_index] if cell is not None and isinstance(cell, (int, float, long)): titles = self._find_titles(row_index, column_index) titles.append(cell) if add_units: titles.append(relavent_units.get((row_index, column_index))) rtable.append(titles) # If we had all 'titles', just return the original block if not rtable: for row_index in range(self.start[0], self.end[0]): row = [] rtable.append(row) for column_index in range(self.start[1], self.end[1]): row.append(self.table[row_index][column_index]) if add_units: row.append(relavent_units.get((row_index, column_index))) return rtable
python
def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_files, stat_files, background_file, veto_file, veto_name, out_dir, tags=None): """ This function sets up exact match coincidence and background estimation using a folded interval technique. """ if tags is None: tags = [] make_analysis_dir(out_dir) logging.info('Setting up coincidence for injection') if len(hdfbank) > 1: raise ValueError('This coincidence method only supports a ' 'pregenerated template bank') hdfbank = hdfbank[0] if len(workflow.ifos) > 2: raise ValueError('This coincidence method only supports two-ifo searches') # Wall time knob and memory knob factor = int(workflow.cp.get_opt_tags('workflow-coincidence', 'parallelization-factor', tags)) ffiles = {} ifiles = {} for ifo, ffi in zip(*full_data_trig_files.categorize_by_attr('ifo')): ffiles[ifo] = ffi[0] ifos, files = inj_trig_files.categorize_by_attr('ifo') # ifos list is used later for ifo, ifi in zip(ifos, files): ifiles[ifo] = ifi[0] ifo0, ifo1 = ifos[0], ifos[1] combo = [(FileList([ifiles[ifo0], ifiles[ifo1]]), "injinj"), (FileList([ifiles[ifo0], ffiles[ifo1]]), "injfull"), (FileList([ifiles[ifo1], ffiles[ifo0]]), "fullinj"), ] bg_files = {'injinj':[], 'injfull':[], 'fullinj':[]} for trig_files, ctag in combo: findcoinc_exe = PyCBCFindCoincExecutable(workflow.cp, 'coinc', ifos=workflow.ifos, tags=tags + [ctag], out_dir=out_dir) for i in range(factor): group_str = '%s/%s' % (i, factor) coinc_node = findcoinc_exe.create_node(trig_files, hdfbank, stat_files, veto_file, veto_name, group_str, tags=[str(i)]) bg_files[ctag] += coinc_node.output_files workflow.add_node(coinc_node) return setup_statmap_inj(workflow, bg_files, background_file, hdfbank, out_dir, tags=tags)
java
public long nextLong(long origin, long bound) { if (origin >= bound) throw new IllegalArgumentException(BAD_RANGE); return internalNextLong(origin, bound); }
python
async def _flush(self, request: 'Request', stacks: List[Stack]): """ Perform the actual sending to platform. This is separated from `flush()` since it needs to be inside a middleware call. """ for stack in stacks: await self.platform.send(request, stack)
java
@SuppressWarnings("unchecked") public <S, D> TypeMap<S, D> get(Class<S> sourceType, Class<D> destinationType, String typeMapName) { TypeMap<S, D> typeMap = (TypeMap<S, D>) typeMaps.get(TypePair.of(sourceType, destinationType, typeMapName)); if (typeMap != null) return typeMap; for (TypePair<?, ?> typePair : getPrimitiveWrapperTypePairs(sourceType, destinationType, typeMapName)) { typeMap = (TypeMap<S, D>) typeMaps.get(typePair); if (typeMap != null) return typeMap; } return null; }
python
def load_tar_lzma_data(tlfile): """Load example sinogram data from a .tar.lzma file""" tmpname = extract_lzma(tlfile) # open tar file fields_real = [] fields_imag = [] phantom = [] parms = {} with tarfile.open(tmpname, "r") as t: members = t.getmembers() members.sort(key=lambda x: x.name) for m in members: n = m.name f = t.extractfile(m) if n.startswith("fdtd_info"): for ln in f.readlines(): ln = ln.decode() if ln.count("=") == 1: key, val = ln.split("=") parms[key.strip()] = float(val.strip()) elif n.startswith("phantom"): phantom.append(np.loadtxt(f)) elif n.startswith("field"): if n.endswith("imag.txt"): fields_imag.append(np.loadtxt(f)) elif n.endswith("real.txt"): fields_real.append(np.loadtxt(f)) try: os.remove(tmpname) except OSError: pass phantom = np.array(phantom) sino = np.array(fields_real) + 1j * np.array(fields_imag) angles = np.linspace(0, 2 * np.pi, sino.shape[0], endpoint=False) return sino, angles, phantom, parms
java
public static boolean incrementColexicographically( LongTuple t, LongTuple min, LongTuple max, MutableLongTuple result) { Utils.checkForEqualSize(t, min); Utils.checkForEqualSize(t, max); Utils.checkForEqualSize(t, result); if (result != t) { result.set(t); } return incrementColexicographically( result, min, max, 0); }
python
def restart(self, subthread=None): """Restarts the loop function Tries to restart the loop thread using the current thread. Raises RuntimeError if a previous call to Loop.start was not made. :param subthread: True/False value used when calling Loop.start(subthread=subthread). If set to None it uses the same value as the last call to Loop.start. """ if self._in_subthread is None: raise RuntimeError('A call to start must first be placed before restart') self.stop(silent=True) if subthread is None: subthread = self._in_subthread self.__init__(self.target, self.args, self.kwargs, self.on_stop) self.start(subthread=subthread)
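A miniature restartable loop capturing the same start/stop/restart contract (an independent sketch, not the class above; it omits the subthread and on_stop machinery):

import threading
import time

class MiniLoop:
    def __init__(self, target):
        self.target = target
        self._stop = threading.Event()
        self._thread = None

    def start(self):
        self._stop.clear()
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def _run(self):
        while not self._stop.is_set():
            self.target()
            time.sleep(0.01)

    def stop(self):
        self._stop.set()
        if self._thread is not None:
            self._thread.join()

    def restart(self):
        if self._thread is None:
            raise RuntimeError('a call to start must come before restart')
        self.stop()
        self.start()

loop = MiniLoop(lambda: None)
loop.start()
loop.restart()
loop.stop()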
python
def program_rtr_all_nwk_next_hop(self, tenant_id, rout_id, next_hop, excl_list): """Program the next hop for all networks of a tenant. """ namespace = self.find_rtr_namespace(rout_id) if namespace is None: LOG.error("Unable to find namespace for router %s", rout_id) return False net_list = self.get_network_by_tenant(tenant_id) for net in net_list: subnet_lst = self.get_subnets_for_net(net.get('id')) for subnet_elem in subnet_lst: subnet = subnet_elem.get('cidr').split('/')[0] subnet_and_mask = subnet_elem.get('cidr') if subnet not in excl_list: args = ['route', 'add', '-net', subnet_and_mask, 'gw', next_hop] ret = self.program_rtr(args, rout_id, namespace=namespace) if not ret: LOG.error("Program router returned error for %s", rout_id) return False return True
java
protected void addPartialInfo(ClassDoc cd, Content contentTree) { addPreQualifiedStrongClassLink(LinkInfoImpl.Kind.TREE, cd, contentTree); }
python
def _id(self): r""" The `SSHKey`'s ``id`` field, or if that is not defined, its ``fingerprint`` field. If neither field is defined, accessing this attribute raises a `TypeError`. """ if self.get("id") is not None: return self.id elif self.get("fingerprint") is not None: return self.fingerprint else: raise TypeError('SSHKey has neither .id nor .fingerprint')
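The fallback-attribute pattern in self-contained form (a dict-backed stand-in for the resource object; the real SSHKey class is not shown here):

class Key(dict):
    """Prefer .id, fall back to .fingerprint, else raise."""
    @property
    def _id(self):
        for field in ("id", "fingerprint"):
            if self.get(field) is not None:
                return self[field]
        raise TypeError("Key has neither .id nor .fingerprint")

print(Key(fingerprint="aa:bb")._id)     # aa:bb
print(Key(id=42, fingerprint="x")._id)  # 42 (id wins)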
python
def load(self): """ Extract tabular data as |TableData| instances from an Excel file. |spreadsheet_load_desc| :return: Loaded |TableData| iterator. |TableData| created for each sheet in the workbook. |load_table_name_desc| =================== ==================================== Format specifier Value after the replacement =================== ==================================== ``%(filename)s`` Filename of the workbook ``%(sheet)s`` Name of the sheet ``%(format_name)s`` ``"spreadsheet"`` ``%(format_id)s`` |format_id_desc| ``%(global_id)s`` |global_id| =================== ==================================== :rtype: |TableData| iterator :raises pytablereader.DataError: If the header row is not found. :raises pytablereader.error.OpenError: If failed to open the source file. """ import xlrd self._validate() self._logger.logging_load() try: workbook = xlrd.open_workbook(self.source) except xlrd.biffh.XLRDError as e: raise OpenError(e) for worksheet in workbook.sheets(): self._worksheet = worksheet if self._is_empty_sheet(): continue self.__extract_not_empty_col_idx() try: start_row_idx = self._get_start_row_idx() except DataError: continue rows = [ self.__get_row_values(row_idx) for row_idx in range(start_row_idx + 1, self._row_count) ] self.inc_table_count() headers = self.__get_row_values(start_row_idx) yield TableData( self._make_table_name(), headers, rows, dp_extractor=self.dp_extractor, type_hints=self._extract_type_hints(headers), )
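The core of that per-sheet loop, pared down to direct xlrd calls (a sketch with an illustrative path; it assumes an xlrd version that can open the given file and that the header sits on row 0, whereas the loader above searches for the header row):

import xlrd

workbook = xlrd.open_workbook("data.xls")  # illustrative path
for sheet in workbook.sheets():
    if sheet.nrows == 0:
        continue  # skip empty sheets, as the loader above does
    headers = sheet.row_values(0)
    rows = [sheet.row_values(i) for i in range(1, sheet.nrows)]
    print(sheet.name, headers, len(rows))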
python
def _get_vispy_font_filename(face, bold, italic): """Fetch a remote vispy font""" name = face + '-' name += 'Regular' if not bold and not italic else '' name += 'Bold' if bold else '' name += 'Italic' if italic else '' name += '.ttf' return load_data_file('fonts/%s' % name)
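Since the name is a pure function of (face, bold, italic), the scheme can be checked directly; note that bold plus italic concatenates to 'BoldItalic':

def font_filename(face, bold, italic):
    name = face + '-'
    name += 'Regular' if not bold and not italic else ''
    name += 'Bold' if bold else ''
    name += 'Italic' if italic else ''
    return name + '.ttf'

for bold in (False, True):
    for italic in (False, True):
        print(font_filename('OpenSans', bold, italic))
# OpenSans-Regular.ttf, OpenSans-Italic.ttf,
# OpenSans-Bold.ttf, OpenSans-BoldItalic.ttf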
java
@Override public StopFleetActionsResult stopFleetActions(StopFleetActionsRequest request) { request = beforeClientExecution(request); return executeStopFleetActions(request); }
java
public com.squareup.okhttp.Call getCharactersCharacterIdWalletJournalAsync(Integer characterId, String datasource, String ifNoneMatch, Integer page, String token, final ApiCallback<List<CharacterWalletJournalResponse>> callback) throws ApiException { com.squareup.okhttp.Call call = getCharactersCharacterIdWalletJournalValidateBeforeCall(characterId, datasource, ifNoneMatch, page, token, callback); Type localVarReturnType = new TypeToken<List<CharacterWalletJournalResponse>>() { }.getType(); apiClient.executeAsync(call, localVarReturnType, callback); return call; }
python
def remove(self, handler_id=None): """Remove a previously added handler and stop sending logs to its sink. Parameters ---------- handler_id : |int| or ``None`` The id of the sink to remove, as it was returned by the |add| method. If ``None``, all handlers are removed. The pre-configured handler is guaranteed to have the index ``0``. Raises ------ ValueError If ``handler_id`` is not ``None`` but there is no active handler with such id. Examples -------- >>> i = logger.add(sys.stderr, format="{message}") >>> logger.info("Logging") Logging >>> logger.remove(i) >>> logger.info("No longer logging") """ with self._lock: handlers = self._handlers.copy() if handler_id is None: for handler in handlers.values(): handler.stop() handlers.clear() else: try: handler = handlers.pop(handler_id) except KeyError: raise ValueError("There is no existing handler with id '%s'" % handler_id) handler.stop() levelnos = (h.levelno for h in handlers.values()) self.__class__._min_level = min(levelnos, default=float("inf")) self.__class__._handlers = handlers