Columns: language — string, 2 classes (python, java); func_code_string — string, lengths 63 to 466k characters.
python
def mount_http_adapter(self, protocol=None, max_retries=None,
                       status_forcelist=None, host=None):
    """Mount an HTTP adapter to the :class:`ArchiveSession <ArchiveSession>`
    object.

    :type protocol: str
    :param protocol: HTTP protocol to mount your adapter to (e.g. 'https://').

    :type max_retries: int, object
    :param max_retries: The number of times to retry a failed request.
                        This can also be an `urllib3.Retry` object.

    :type status_forcelist: list
    :param status_forcelist: A list of status codes (as int's) to retry on.

    :type host: str
    :param host: The host to mount your adapter to.
    """
    protocol = protocol if protocol else self.protocol
    host = host if host else 'archive.org'
    if max_retries is None:
        max_retries = self.http_adapter_kwargs.get('max_retries', 3)
    if not status_forcelist:
        status_forcelist = [500, 501, 502, 503, 504]
    if max_retries and isinstance(max_retries, (int, float)):
        max_retries = Retry(total=max_retries,
                            connect=max_retries,
                            read=max_retries,
                            redirect=False,
                            method_whitelist=Retry.DEFAULT_METHOD_WHITELIST,
                            status_forcelist=status_forcelist,
                            backoff_factor=1)
    self.http_adapter_kwargs['max_retries'] = max_retries
    max_retries_adapter = HTTPAdapter(**self.http_adapter_kwargs)
    # Don't mount on s3.us.archive.org, only archive.org!
    # IA-S3 requires a more complicated retry workflow.
    self.mount('{0}//{1}'.format(protocol, host), max_retries_adapter)
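A usage sketch (not part of the dataset row above): it assumes a session created with internetarchive.get_session(); the retry count and status list below are illustrative values, not library defaults.

# Hypothetical usage of mount_http_adapter; values are illustrative.
import internetarchive

session = internetarchive.get_session()
session.mount_http_adapter(max_retries=5, status_forcelist=[502, 503])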
python
def cli(env):
    """Summary info about tickets."""
    mask = ('openTicketCount, closedTicketCount, '
            'openBillingTicketCount, openOtherTicketCount, '
            'openSalesTicketCount, openSupportTicketCount, '
            'openAccountingTicketCount')
    account = env.client['Account'].getObject(mask=mask)

    table = formatting.Table(['Status', 'count'])

    nested = formatting.Table(['Type', 'count'])
    nested.add_row(['Accounting', account['openAccountingTicketCount']])
    nested.add_row(['Billing', account['openBillingTicketCount']])
    nested.add_row(['Sales', account['openSalesTicketCount']])
    nested.add_row(['Support', account['openSupportTicketCount']])
    nested.add_row(['Other', account['openOtherTicketCount']])
    nested.add_row(['Total', account['openTicketCount']])

    table.add_row(['Open', nested])
    table.add_row(['Closed', account['closedTicketCount']])
    env.fout(table)
java
@Override
public void setTrafficClass(int trafficClass) {
    try {
        channel.socket().setTrafficClass(trafficClass);
    } catch (SocketException e) {
        throw new RuntimeIoException(e);
    }
}
java
@NonNull
public EmailIntentBuilder cc(@NonNull String cc) {
    checkEmail(cc);
    this.cc.add(cc);
    return this;
}
java
public WrappedByteBuffer put(byte[] v, int offset, int length) {
    _autoExpand(length);
    for (int i = 0; i < length; i++) {
        _buf.put(v[offset + i]);
    }
    return this;
}
python
def poll_for_server_running(job_id):
    """
    Poll for the job to start running and post the SERVER_READY_TAG.
    """
    sys.stdout.write('Waiting for server in {0} to initialize ...'.format(job_id))
    sys.stdout.flush()
    desc = dxpy.describe(job_id)

    # Keep checking until the server has begun or it has failed.
    while SERVER_READY_TAG not in desc['tags'] and desc['state'] != 'failed':
        time.sleep(SLEEP_PERIOD)
        sys.stdout.write('.')
        sys.stdout.flush()
        desc = dxpy.describe(job_id)

    # If the server job failed, provide friendly advice.
    if desc['state'] == 'failed':
        msg = RED('Error:') + ' Server failed to run.\n'
        msg += 'You may want to check the job logs by running: '
        msg += BOLD('dx watch {0}'.format(job_id))
        err_exit(msg)
java
public void parameterizeChannel(Channel channel) {
    if (this.ordering != null) {
        channel.setLocalStrategy(LocalStrategy.SORT,
                this.ordering.getInvolvedIndexes(),
                this.ordering.getFieldSortDirections());
    } else if (this.groupedFields != null) {
        boolean[] dirs = new boolean[this.groupedFields.size()];
        Arrays.fill(dirs, true);
        channel.setLocalStrategy(LocalStrategy.SORT,
                Utils.createOrderedFromSet(this.groupedFields), dirs);
    }
}
java
public com.google.appengine.v1.ScriptHandlerOrBuilder getScriptOrBuilder() {
    if (handlerTypeCase_ == 3) {
        return (com.google.appengine.v1.ScriptHandler) handlerType_;
    }
    return com.google.appengine.v1.ScriptHandler.getDefaultInstance();
}
python
def strip_tashkeel(text):
    """Strip vowels from a text, including Shadda.
    The stripped marks are:
        - FATHA, DAMMA, KASRA
        - SUKUN
        - SHADDA
        - FATHATAN, DAMMATAN, KASRATAN
    @param text: arabic text.
    @type text: unicode.
    @return: return a stripped text.
    @rtype: unicode.
    """
    if not text:
        return text
    elif is_vocalized(text):
        for char in TASHKEEL:
            text = text.replace(char, '')
    return text
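A quick illustrative call, assuming strip_tashkeel is imported together with its module-level TASHKEEL and is_vocalized helpers (as in pyarabic.araby):

# -*- coding: utf-8 -*-
# Illustrative only: strips FATHA/SHADDA/etc. from a vocalized word.
from pyarabic.araby import strip_tashkeel

print(strip_tashkeel(u'السَّلام'))  # -> u'السلام'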
java
public void marshall(AuthenticationResultType authenticationResultType, ProtocolMarshaller protocolMarshaller) {
    if (authenticationResultType == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(authenticationResultType.getAccessToken(), ACCESSTOKEN_BINDING);
        protocolMarshaller.marshall(authenticationResultType.getExpiresIn(), EXPIRESIN_BINDING);
        protocolMarshaller.marshall(authenticationResultType.getTokenType(), TOKENTYPE_BINDING);
        protocolMarshaller.marshall(authenticationResultType.getRefreshToken(), REFRESHTOKEN_BINDING);
        protocolMarshaller.marshall(authenticationResultType.getIdToken(), IDTOKEN_BINDING);
        protocolMarshaller.marshall(authenticationResultType.getNewDeviceMetadata(), NEWDEVICEMETADATA_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public void deletePersistentAttributes() {
    if (persistenceAdapter == null) {
        throw new IllegalStateException(
            "Attempting to delete persistence attributes without a configured persistence adapter");
    }
    if (persistenceAttributesSet) {
        persistenceAdapter.deleteAttributes(requestEnvelope);
        persistentAttributes = null;
        persistenceAttributesSet = false;
    }
}
python
def _is_catalysis(bpe):
    """Return True if the element is Catalysis."""
    return isinstance(bpe, _bp('Catalysis')) or \
        isinstance(bpe, _bpimpl('Catalysis'))
java
public TRMFacade getTRMFacade() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "getTRMFacade", this);

    // Instantiate DA manager to interface to WLM
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "getTRMFacade", _trmFacade);
    return _trmFacade;
}
java
@Override
protected void initializeImpl(Map<String, String> configuration) {
    // create configuration holder
    ConfigurationHolder configurationHolder = new ConfigurationHolderImpl(configuration);

    this.fileContentParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.FILE_CONTENT_PARAMETER_NAME_PROPERTY_KEY);
    if (this.fileContentParameter == null) {
        this.fileContentParameter = "file";
    }
    this.fileNameParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.FILE_NAME_PARAMETER_NAME_PROPERTY_KEY);
    if (this.fileNameParameter == null) {
        this.fileNameParameter = "filename";
    }
    this.priorityParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.PRIORITY_PARAMETER_NAME_PROPERTY_KEY);
    if (this.priorityParameter == null) {
        this.priorityParameter = "priority";
    }
    this.targetAddressParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.TARGET_ADDRESS_PARAMETER_NAME_PROPERTY_KEY);
    if (this.targetAddressParameter == null) {
        this.targetAddressParameter = "targetaddress";
    }
    this.targetNameParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.TARGET_NAME_PARAMETER_NAME_PROPERTY_KEY);
    if (this.targetNameParameter == null) {
        this.targetNameParameter = "targetname";
    }
    this.senderNameParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.SENDER_NAME_PARAMETER_NAME_PROPERTY_KEY);
    if (this.senderNameParameter == null) {
        this.senderNameParameter = "sendername";
    }
    this.senderFaxNumberParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.SENDER_FAX_NUMBER_PARAMETER_NAME_PROPERTY_KEY);
    if (this.senderFaxNumberParameter == null) {
        this.senderFaxNumberParameter = "senderfaxnumber";
    }
    this.senderEMailParameter = configurationHolder.getConfigurationValue(
            HTTPRequestParserConfigurationConstants.SENDER_EMAIL_PARAMETER_NAME_PROPERTY_KEY);
    if (this.senderEMailParameter == null) {
        this.senderEMailParameter = "senderemail";
    }
}
python
def runExperiment(args):
    """
    Run experiment. What did you think this does?

    args is a dict representing the parameters. We do it this way to support
    multiprocessing. args contains one or more of the following keys:

    @param featureNoise (float) Noise level to add to the features during
                               inference. Default: None
    @param locationNoise (float) Noise level to add to the locations during
                               inference. Default: None
    @param numObjects  (int)   The number of objects we will train.
                               Default: 10
    @param numPoints   (int)   The number of points on each object.
                               Default: 10
    @param numLocations (int)  For each point, the number of locations to
                               choose from. Default: 10
    @param numFeatures (int)   For each point, the number of features to
                               choose from. Default: 10
    @param numColumns  (int)   The total number of cortical columns in
                               network. Default: 2
    @param networkType (string) The type of network to use. Options are:
                               "MultipleL4L2Columns",
                               "MultipleL4L2ColumnsWithTopology" and
                               "MultipleL4L2ColumnsWithRandomTopology".
                               Default: "MultipleL4L2Columns"
    @param longDistanceConnections (float) The probability that a column will
                               connect to a distant column. Only relevant when
                               using the random topology network type.
                               If > 1, will instead be taken as desired number
                               of long-distance connections per column.
    @param settlingTime (int)  Number of iterations we wait to let columns
                               stabilize. Important for multicolumn
                               experiments with lateral connections.
    @param includeRandomLocation (bool) If True, a random location SDR will be
                               generated during inference for each feature.
    @param enableFeedback (bool) If True, enable feedback, default is True
    @param numAmbiguousLocations (int) number of ambiguous locations.
                               Ambiguous locations will be present during
                               inference if this parameter is set to a
                               positive number.

    The method returns the args dict updated with multiple additional keys
    representing accuracy metrics.
    """
    numObjects = args.get("numObjects", 10)
    numLocations = args.get("numLocations", 10)
    numFeatures = args.get("numFeatures", 10)
    numColumns = args.get("numColumns", 2)
    networkType = args.get("networkType", "MultipleL4L2Columns")
    longDistanceConnections = args.get("longDistanceConnections", 0)
    locationNoise = args.get("locationNoise", 0.0)
    featureNoise = args.get("featureNoise", 0.0)
    numPoints = args.get("numPoints", 10)
    trialNum = args.get("trialNum", 42)
    plotInferenceStats = args.get("plotInferenceStats", True)
    settlingTime = args.get("settlingTime", 3)
    includeRandomLocation = args.get("includeRandomLocation", False)
    enableFeedback = args.get("enableFeedback", True)
    numAmbiguousLocations = args.get("numAmbiguousLocations", 0)
    numInferenceRpts = args.get("numInferenceRpts", 1)
    l2Params = args.get("l2Params", None)
    l4Params = args.get("l4Params", None)

    # Create the objects
    objects = createObjectMachine(
        machineType="simple",
        numInputBits=20,
        sensorInputSize=150,
        externalInputSize=2400,
        numCorticalColumns=numColumns,
        numFeatures=numFeatures,
        numLocations=numLocations,
        seed=trialNum
    )
    objects.createRandomObjects(numObjects, numPoints=numPoints,
                                numLocations=numLocations,
                                numFeatures=numFeatures)

    r = objects.objectConfusion()
    print "Average common pairs in objects=", r[0],
    print ", locations=", r[1], ", features=", r[2]

    # print "Total number of objects created:", len(objects.getObjects())
    # print "Objects are:"
    # for o in objects:
    #     pairs = objects[o]
    #     pairs.sort()
    #     print str(o) + ": " + str(pairs)

    # Setup experiment and train the network
    name = "convergence_O%03d_L%03d_F%03d_C%03d_T%03d" % (
        numObjects, numLocations, numFeatures, numColumns, trialNum
    )
    exp = L4L2Experiment(
        name,
        numCorticalColumns=numColumns,
        L2Overrides=l2Params,
        L4Overrides=l4Params,
        networkType=networkType,
        longDistanceConnections=longDistanceConnections,
        inputSize=150,
        externalInputSize=2400,
        numInputBits=20,
        seed=trialNum,
        enableFeedback=enableFeedback,
    )

    exp.learnObjects(objects.provideObjectsToLearn())

    # For inference, we will check and plot convergence for each object. For
    # each object, we create a sequence of random sensations for each column.
    # We will present each sensation for settlingTime time steps to let it
    # settle and ensure it converges.
    numCorrectClassifications = 0
    classificationPerSensation = numpy.zeros(settlingTime * numPoints)
    for objectId in objects:
        exp.sendReset()

        obj = objects[objectId]
        objectSensations = {}
        for c in range(numColumns):
            objectSensations[c] = []

        if numColumns > 1:
            # Create sequence of random sensations for this object for all
            # columns. At any point in time, ensure each column touches a
            # unique loc,feature pair on the object. It is ok for a given
            # column to sense a loc,feature pair more than once. The total
            # number of sensations is equal to the number of points on the
            # object.
            for sensationNumber in range(len(obj)):
                # Randomly shuffle points for each sensation
                objectCopy = [pair for pair in obj]
                random.shuffle(objectCopy)
                for c in range(numColumns):
                    # stay multiple steps on each sensation
                    for _ in xrange(settlingTime):
                        objectSensations[c].append(objectCopy[c])
        else:
            # Create sequence of sensations for this object for one column.
            # The total number of sensations is equal to the number of points
            # on the object. No point should be visited more than once.
            objectCopy = [pair for pair in obj]
            random.shuffle(objectCopy)
            for pair in objectCopy:
                # stay multiple steps on each sensation
                for _ in xrange(settlingTime):
                    objectSensations[0].append(pair)

        inferConfig = {
            "object": objectId,
            "numSteps": len(objectSensations[0]),
            "pairs": objectSensations,
            "noiseLevel": featureNoise,
            "locationNoise": locationNoise,
            "includeRandomLocation": includeRandomLocation,
            "numAmbiguousLocations": numAmbiguousLocations,
        }

        inferenceSDRs = objects.provideObjectToInfer(inferConfig)

        exp.infer(inferenceSDRs, objectName=objectId, reset=False)

        classificationPerSensation += numpy.array(
            exp.statistics[objectId]["Correct classification"])

        if exp.isObjectClassified(objectId, minOverlap=30):
            numCorrectClassifications += 1

        if plotInferenceStats:
            exp.plotInferenceStats(
                fields=["L2 Representation",
                        "Overlap L2 with object",
                        "L4 Representation"],
                experimentID=objectId,
                onePlot=False,
            )

    convergencePoint, accuracy = exp.averageConvergencePoint(
        "L2 Representation", 30, 40, settlingTime)
    classificationAccuracy = float(numCorrectClassifications) / numObjects
    classificationPerSensation = classificationPerSensation / numObjects

    print "# objects {} # features {} # locations {} # columns {} trial # {} network type {}".format(
        numObjects, numFeatures, numLocations, numColumns, trialNum, networkType)
    print "Average convergence point=", convergencePoint
    print "Classification accuracy=", classificationAccuracy
    print

    # Return our convergence point as well as all the parameters and objects
    args.update({"objects": objects.getObjects()})
    args.update({"convergencePoint": convergencePoint})
    args.update({"classificationAccuracy": classificationAccuracy})
    args.update({"classificationPerSensation": classificationPerSensation.tolist()})

    # Can't pickle experiment so can't return it for batch multiprocessing
    # runs. However this is very useful for debugging when running in a
    # single thread.
    if plotInferenceStats:
        args.update({"experiment": exp})
    return args
java
public void connectionReleased(IManagedConnectionEvent<C> event) {
    IPhynixxManagedConnection<C> proxy = event.getManagedConnection();
    if (!proxy.hasCoreConnection()) {
        return;
    } else {
        this.releaseConnection(proxy);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Proxy " + proxy + " released");
    }
}
java
public Tuple3<ReadOnlyStyledDocument<PS, SEG, S>, RichTextChange<PS, SEG, S>,
        MaterializedListModification<Paragraph<PS, SEG, S>>> replace(
        int from, int to, ReadOnlyStyledDocument<PS, SEG, S> replacement) {
    return replace(from, to, x -> replacement);
}
python
def _set_cngn_mon_del_pkt(self, v, load=False):
    """
    Setter method for cngn_mon_del_pkt, mapped from YANG variable
    /tm_state/cngn_mon_del_pkt (container)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_cngn_mon_del_pkt is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_cngn_mon_del_pkt() directly.

    YANG Description: TM delete pkt config
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=cngn_mon_del_pkt.cngn_mon_del_pkt,
            is_container='container',
            presence=False,
            yang_name="cngn-mon-del-pkt",
            rest_name="cngn-mon-del-pkt",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions={u'tailf-common': {
                u'callpoint': u'sysdiag-cngn-mon-del-pkt',
                u'cli-suppress-show-path': None}},
            namespace='urn:brocade.com:mgmt:brocade-sysdiag-operational',
            defining_module='brocade-sysdiag-operational',
            yang_type='container',
            is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """cngn_mon_del_pkt must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=cngn_mon_del_pkt.cngn_mon_del_pkt, is_container='container', presence=False, yang_name="cngn-mon-del-pkt", rest_name="cngn-mon-del-pkt", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'sysdiag-cngn-mon-del-pkt', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-sysdiag-operational', defining_module='brocade-sysdiag-operational', yang_type='container', is_config=False)""",
        })

    self.__cngn_mon_del_pkt = t
    if hasattr(self, '_set'):
        self._set()
java
public static void writeHeader(Writer writer, int dpi, String rankDir, String id,
        List<String> attribLines) throws IOException {
    // Default settings
    if (attribLines == null) {
        attribLines = new ArrayList<>();
    } else {
        attribLines = new ArrayList<>(attribLines);
    }
    attribLines.add("node [shape=box];"); // add ...

    // DPI and Rankdir
    StringBuilder header = new StringBuilder("digraph " + id + " {\n");
    if (dpi > 0) {
        header.append("dpi=").append(dpi).append(";\n");
    }
    header.append("rankdir=").append(StringUtils.isNotBlank(rankDir) ? rankDir : "LR").append(";\n");

    // Additional lines
    for (String line : attribLines) {
        line = line.trim();
        header.append(line).append(line.endsWith(";") ? "\n" : ";\n");
    }

    DotUtils.writeln(writer, header.toString());
}
java
public void marshall(AlgorithmStatusItem algorithmStatusItem, ProtocolMarshaller protocolMarshaller) {
    if (algorithmStatusItem == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(algorithmStatusItem.getName(), NAME_BINDING);
        protocolMarshaller.marshall(algorithmStatusItem.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(algorithmStatusItem.getFailureReason(), FAILUREREASON_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
private void ensureCapacity(final int index, final int length) {
    if (index < 0) {
        throw new IndexOutOfBoundsException("index cannot be negative: index=" + index);
    }

    final long resultingPosition = index + (long) length;
    final int currentArrayLength = byteArray.length;
    if (resultingPosition > currentArrayLength) {
        if (currentArrayLength >= MAX_ARRAY_LENGTH) {
            throw new IndexOutOfBoundsException(
                "index=" + index + " length=" + length + " maxCapacity=" + MAX_ARRAY_LENGTH);
        }
        byteArray = Arrays.copyOf(byteArray, calculateExpansion(currentArrayLength, (int) resultingPosition));
    }
}
java
public Nfs3MkdirResponse sendMkdir(NfsMkdirRequest request) throws IOException {
    Nfs3MkdirResponse response = new Nfs3MkdirResponse();
    _rpcWrapper.callRpcNaked(request, response);
    return response;
}
python
def _get(self, route, stream=False):
    """
    Run a GET request against a URL. Returns the response, which can
    optionally be streamed.
    """
    log.debug("Running GET request against %s" % route)
    return r.get(self._url(route), auth=c.auth, stream=stream)
java
public void merge(String parity, String source, String codecId, int[] checksums)
        throws IOException {
    if (FSNamesystem.LOG.isDebugEnabled()) {
        FSNamesystem.LOG.debug("merge " + parity + " to " + source);
    }
    if (isInSafeMode()) {
        throw new SafeModeException("merge: cannot merge " + parity + " to "
            + source, safeMode);
    }

    // Verify parity and source
    if (source == null || source.isEmpty() || parity == null || parity.isEmpty()) {
        throw new IOException("merge: source file name or parity file name is empty");
    }
    // Verify checksums
    if (checksums == null || checksums.length == 0) {
        throw new IOException("merge: checksum array is empty or null");
    }
    // Verify codec
    RaidCodec codec = RaidCodec.getCodec(codecId);
    if (codec == null) {
        throw new IOException("merge: codec " + codecId + " doesn't exist");
    }

    INode[] sourceINodes = dir.getExistingPathINodes(source);
    INode[] parityINodes = dir.getExistingPathINodes(parity);
    writeLock();
    try {
        // write permissions for the source
        if (isPermissionEnabled) {
            if (isPermissionCheckingEnabled(sourceINodes)) {
                checkPathAccess(source, sourceINodes, FsAction.WRITE);
            }
            if (isPermissionCheckingEnabled(parityINodes)) {
                checkPathAccess(parity, parityINodes, FsAction.READ); // read the file
                checkParentAccess(parity, parityINodes, FsAction.WRITE); // for delete
            }
        }

        INode sinode = sourceINodes[sourceINodes.length - 1];
        INode pinode = parityINodes[parityINodes.length - 1];
        if (sinode == null || pinode == null) {
            throw new IOException("merge: source file or parity file doesn't exist");
        }
        if (sinode.isUnderConstruction() || pinode.isUnderConstruction()) {
            throw new IOException("merge: source file or parity file is under construction");
        }
        if (sinode.isDirectory() || pinode.isDirectory()) {
            throw new IOException("merge: source file or parity file is a directory");
        }
        if (sinode instanceof INodeHardLinkFile || pinode instanceof INodeHardLinkFile) {
            throw new IOException("merge: source file or parity file is hardlinked");
        }

        INodeFile sourceINode = (INodeFile) sinode;
        INodeFile parityINode = (INodeFile) pinode;
        if (sourceINode.getStorageType() != StorageType.REGULAR_STORAGE
            || parityINode.getStorageType() != StorageType.REGULAR_STORAGE) {
            throw new IOException("merge: source file or parity file doesn't support merge");
        }
        if (sourceINode.getModificationTime() != parityINode.getModificationTime()) {
            throw new IOException(
                "merge: source file and parity file don't have the same modification time");
        }
        if (parityINode.getReplication() != codec.parityReplication) {
            throw new IOException(
                "merge: parity file's replication doesn't match codec's parity replication");
        }

        BlockInfo[] sourceBlks = sourceINode.getBlocks();
        BlockInfo[] parityBlks = parityINode.getBlocks();
        if (sourceBlks == null || sourceBlks.length == 0) {
            throw new IOException("merge: " + source + " is empty");
        }
        if (parityBlks == null || parityBlks.length == 0) {
            throw new IOException("merge: " + parity + " is empty");
        }
        if (checksums.length != sourceBlks.length) {
            throw new IOException("merge: checksum length " + checksums.length
                + " doesn't match number of source blocks " + sourceBlks.length);
        }
        int expectedParityBlocks = codec.getNumStripes(sourceBlks.length)
            * codec.numParityBlocks;
        if (expectedParityBlocks != parityBlks.length) {
            throw new IOException("merge: expect parity blocks " + expectedParityBlocks
                + " doesn't match number of parity blocks " + parityBlks.length);
        }

        if (NameNode.stateChangeLog.isDebugEnabled()) {
            NameNode.stateChangeLog.debug("DIR* NameSystem.merge: " + parity
                + " to " + source);
        }
        dir.mergeInternal(parityINodes, sourceINodes, parity, source, codec, checksums);
    } finally {
        writeUnlock();
    }
    getEditLog().logSync();
    if (auditLog.isInfoEnabled()) {
        logAuditEvent(getCurrentUGI(), Server.getRemoteIp(), "merge", parity,
            source, getLastINode(sourceINodes));
    }
}
python
def removeSubscriber(self, email):
    """Remove a subscriber from this workitem.

    If the subscriber has not been added, no further action is performed.

    :param email: the subscriber's email
    """
    headers, raw_data = self._perform_subscribe()
    missing_flag, raw_data = self._remove_subscriber(email, raw_data)
    if missing_flag:
        return
    self._update_subscribe(headers, raw_data)
    self.log.info("Successfully removed subscriber %s for <Workitem %s>",
                  email, self)
java
public Map<X500Principal, SigningPolicy> parse(Reader reader) throws SigningPolicyException {
    Map<X500Principal, SigningPolicy> policies = new HashMap<X500Principal, SigningPolicy>();
    BufferedReader bufferedReader = new BufferedReader(reader);
    try {
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            line = line.trim();
            // read line until some line that needs to be parsed.
            if (!isValidLine(line)) {
                continue;
            }
            logger.debug("Line to parse: " + line);
            String caDN = null;
            if (line.startsWith(ACCESS_ID_PREFIX)) {
                logger.debug("Check if it is CA and get the DN " + line);
                caDN = getCaDN(line, caDN);
                boolean usefulEntry = true;
                Boolean posNegRights = null;
                // check for neg or pos rights with restrictions
                checkRights(policies, bufferedReader, caDN, usefulEntry, posNegRights);
            } // JGLOBUS-94
        }
    } catch (IOException exp) {
        throw new SigningPolicyException("", exp);
    } finally {
        cleanupReaders(reader, bufferedReader);
    }
    return policies;
}
java
public static boolean isTodoItem(final Document todoItemDoc) {
    return todoItemDoc.containsKey(ID_KEY)
        && todoItemDoc.containsKey(TASK_KEY)
        && todoItemDoc.containsKey(CHECKED_KEY);
}
python
def _get_md_files(self):
    """Get all markdown files."""
    all_f = _all_files_matching_ext(os.getcwd(), "md")
    exclusions = [
        "*.egg/*",
        "*.eggs/*",
        "*build/*"
    ] + self.exclusions
    return sorted([f for f in all_f if not _is_excluded(f, exclusions)])
java
@Override
public DefaultJsonWriter value(JavaScriptObject value) {
    if (value == null) {
        return nullValue();
    }
    writeDeferredName();
    beforeValue(false);
    out.append(stringify(value));
    return this;
}
java
protected void initialize() {
    if (m_folderList != null) {
        // ensure folders are sorted starting with parent folders
        Collections.sort(m_folderList, I_CmsResource.COMPARE_ROOT_PATH);
    }
    if (m_fileList != null) {
        // ensure files are sorted starting with files in parent folders
        Collections.sort(m_fileList, I_CmsResource.COMPARE_ROOT_PATH);
    }
    if (m_deletedFolderList != null) {
        // ensure deleted folders are sorted starting with child folders
        Collections.sort(m_deletedFolderList, I_CmsResource.COMPARE_ROOT_PATH);
        Collections.reverse(m_deletedFolderList);
    }
}
java
public static Geometry singleSideBuffer(Geometry geometry, double distance) {
    if (geometry == null) {
        return null;
    }
    return computeSingleSideBuffer(geometry, distance, new BufferParameters());
}
python
def coords(self) -> Iterator[Tuple[float, float, float]]:
    """Iterates on longitudes, latitudes and altitudes."""
    data = self.data[self.data.longitude.notnull()]
    yield from zip(data["longitude"], data["latitude"], data["altitude"])
python
def get_venv(self, requirements=None, interpreter='', uuid='', options=None):
    """Find a venv that serves these requirements, if any."""
    lines = self._read_cache()
    return self._select(lines, requirements, interpreter, uuid=uuid, options=options)
python
def tvd(x0, rho, gamma):
    """
    Proximal operator for the total variation denoising penalty

    Requires scikit-image to be installed

    Parameters
    ----------
    x0 : array_like
        The starting or initial point used in the proximal update step

    rho : float
        Momentum parameter for the proximal step (larger value -> stays
        closer to x0)

    gamma : float
        A constant that weights how strongly to enforce the constraint

    Returns
    -------
    theta : array_like
        The parameter vector found after running the proximal update step.
        If scikit-image cannot be imported, a warning is printed and x0 is
        returned unchanged.
    """
    try:
        from skimage.restoration import denoise_tv_bregman
    except ImportError:
        print('Error: scikit-image not found. TVD will not work.')
        return x0

    return denoise_tv_bregman(x0, rho / gamma)
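A short sketch of calling the operator above on a noisy array; scikit-image must be installed, and the rho/gamma values are arbitrary illustrations:

# Sketch: smooth a noisy 2-D array with the proximal operator above.
import numpy as np

noisy = np.random.rand(64, 64)
smoothed = tvd(noisy, rho=1.0, gamma=0.5)  # weight passed on is rho / gamma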
python
def _set_required_secrets(self, required_secrets, token_secrets):
    """
    Sets required secrets
    """
    if self.user_params.build_type.value == BUILD_TYPE_ORCHESTRATOR:
        required_secrets += token_secrets

    if not required_secrets:
        return

    secrets = self.template['spec']['strategy']['customStrategy'].setdefault('secrets', [])
    existing = set(secret_mount['secretSource']['name']
                   for secret_mount in secrets)
    required_secrets = set(required_secrets)

    already_set = required_secrets.intersection(existing)
    if already_set:
        logger.debug("secrets %s are already set", already_set)

    for secret in required_secrets - existing:
        secret_path = os.path.join(SECRETS_PATH, secret)
        logger.info("Configuring %s secret at %s", secret, secret_path)

        secrets.append({
            'secretSource': {
                'name': secret,
            },
            'mountPath': secret_path,
        })
java
public static boolean isValueReference(String expression) {
    if (null != expression) {
        int start = expression.indexOf("#{");
        if ((start >= 0) && (expression.indexOf('}', start + 1) >= 0)) {
            return true;
        }
    }
    return false;
}
python
def easeInOutCubic(n):
    """A cubic tween function that accelerates, reaches the midpoint, and then
    decelerates.

    Args:
      n (float): The time progress, starting at 0.0 and ending at 1.0.

    Returns:
      (float) The line progress, starting at 0.0 and ending at 1.0. Suitable
      for passing to getPointOnLine().
    """
    _checkRange(n)
    n = 2 * n
    if n < 1:
        return 0.5 * n**3
    else:
        n = n - 2
        return 0.5 * (n**3 + 2)
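Spot-checking the curve at a few points makes the accelerate/decelerate shape visible; the endpoints and midpoint map to themselves:

# Illustrative check of the tween above (assumes _checkRange is in scope).
for t in (0.0, 0.25, 0.5, 0.75, 1.0):
    print(t, easeInOutCubic(t))
# 0.0 -> 0.0, 0.25 -> 0.0625, 0.5 -> 0.5, 0.75 -> 0.9375, 1.0 -> 1.0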
java
public static boolean copyFile(final File source, final File destination)
        throws IOException, FileIsADirectoryException {
    return copyFile(source, destination, true);
}
python
def iterate_elements(parent):
    """
    Helper function that iterates over child Nodes/Elements of a parent
    Node/Element.

    :param parent: object of Element class, representing the parent element.
    """
    element = parent.firstChild
    while element is not None:
        yield element
        element = element.nextSibling
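It works with any DOM-style node exposing firstChild/nextSibling, e.g. the standard library's minidom:

# Example with xml.dom.minidom nodes.
from xml.dom.minidom import parseString

doc = parseString('<root><a/><b/>text</root>')
for child in iterate_elements(doc.documentElement):
    print(child.nodeName)  # a, b, #text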
python
def _init_helper(self, vars_):
    """Overwrite defaults (if they exist) with arguments passed to constructor"""
    for k in vars_:
        if k == 'kwargs':
            for kwarg in vars_[k]:
                setattr(self, kwarg, vars_[k][kwarg])
        elif k != 'self':
            setattr(self, k, vars_[k])
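A typical calling pattern (the host class below is hypothetical, for illustration only) is to pass vars() from __init__ so every argument and keyword becomes an attribute:

# Hypothetical host class demonstrating the helper above.
class Config(object):
    _init_helper = _init_helper

    def __init__(self, host='localhost', port=8080, **kwargs):
        self._init_helper(vars())

c = Config(port=9090, debug=True)
print(c.host, c.port, c.debug)  # localhost 9090 True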
java
@Programmatic // for use by fixtures
public SummernoteEditorToDoItem newToDo(
        final String description,
        final SummernoteEditorToDoItem.Category category,
        final SummernoteEditorToDoItem.Subcategory subcategory,
        final String userName,
        final LocalDate dueBy,
        final BigDecimal cost,
        final BigDecimal previousCost) {
    final SummernoteEditorToDoItem toDoItem =
            container.newTransientInstance(SummernoteEditorToDoItem.class);
    toDoItem.setDescription(description);
    toDoItem.setCategory(category);
    toDoItem.setSubcategory(subcategory);
    toDoItem.setOwnedBy(userName);
    toDoItem.setDueBy(dueBy);
    toDoItem.setCost(cost);
    toDoItem.setPreviousCost(previousCost);
    container.persist(toDoItem);
    container.flush();
    return toDoItem;
}
python
def list_more(fn, offset, size, batch_size, *args):
    """List all data using the fn."""
    if size < 0:
        expected_total_size = six.MAXSIZE
    else:
        expected_total_size = size
        batch_size = min(size, batch_size)
    response = None
    total_count_got = 0
    while True:
        ret = fn(*args, offset=offset, size=batch_size)
        if response is None:
            response = ret
        else:
            response.merge(ret)
        count = ret.get_count()
        total = ret.get_total()
        offset += count
        total_count_got += count
        batch_size = min(batch_size, expected_total_size - total_count_got)
        if count == 0 or offset >= total or total_count_got >= expected_total_size:
            break
    return response
java
public double getAccumulatedExecutionTimeCurrentThread(Device device) {
    KernelProfile profile = KernelManager.instance().getProfile(getClass());
    synchronized (profile) {
        KernelDeviceProfile deviceProfile = profile.getDeviceProfile(device);
        if (deviceProfile == null) {
            return Double.NaN;
        }
        return deviceProfile.getCumulativeElapsedTimeAllCurrentThread() / KernelProfile.MILLION;
    }
}
python
def _compute(self, seed, gsim, num_events, imt):
    """
    :param seed: a random seed or None if the seed is already set
    :param gsim: a GSIM instance
    :param num_events: the number of seismic events
    :param imt: an IMT instance
    :returns: (gmf(num_sites, num_events), stddev_inter(num_events),
              epsilons(num_events))
    """
    rctx = getattr(self.rupture, 'rupture', self.rupture)
    if seed is not None:
        numpy.random.seed(seed)
    dctx = self.dctx.roundup(gsim.minimum_distance)
    if self.truncation_level == 0:
        assert self.correlation_model is None
        mean, _stddevs = gsim.get_mean_and_stddevs(
            self.sctx, rctx, dctx, imt, stddev_types=[])
        mean = gsim.to_imt_unit_values(mean)
        mean.shape += (1, )
        mean = mean.repeat(num_events, axis=1)
        return (mean,
                numpy.zeros(num_events, F32),
                numpy.zeros(num_events, F32))
    elif self.truncation_level is None:
        distribution = scipy.stats.norm()
    else:
        assert self.truncation_level > 0
        distribution = scipy.stats.truncnorm(
            - self.truncation_level, self.truncation_level)

    num_sids = len(self.sids)
    if gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {StdDev.TOTAL}:
        # If the GSIM provides only total standard deviation, we need
        # to compute mean and total standard deviation at the sites
        # of interest.
        # In this case, we also assume no correlation model is used.
        if self.correlation_model:
            raise CorrelationButNoInterIntraStdDevs(
                self.correlation_model, gsim)

        mean, [stddev_total] = gsim.get_mean_and_stddevs(
            self.sctx, rctx, dctx, imt, [StdDev.TOTAL])
        stddev_total = stddev_total.reshape(stddev_total.shape + (1, ))
        mean = mean.reshape(mean.shape + (1, ))

        total_residual = stddev_total * rvs(
            distribution, num_sids, num_events)
        gmf = gsim.to_imt_unit_values(mean + total_residual)
        stddev_inter = numpy.empty(num_events, F32)
        stddev_inter.fill(numpy.nan)
        epsilons = numpy.empty(num_events, F32)
        epsilons.fill(numpy.nan)
    else:
        mean, [stddev_inter, stddev_intra] = gsim.get_mean_and_stddevs(
            self.sctx, rctx, dctx, imt,
            [StdDev.INTER_EVENT, StdDev.INTRA_EVENT])
        stddev_intra = stddev_intra.reshape(stddev_intra.shape + (1, ))
        stddev_inter = stddev_inter.reshape(stddev_inter.shape + (1, ))
        mean = mean.reshape(mean.shape + (1, ))

        intra_residual = stddev_intra * rvs(
            distribution, num_sids, num_events)

        if self.correlation_model is not None:
            ir = self.correlation_model.apply_correlation(
                self.sites, imt, intra_residual, stddev_intra)
            # this fixes a mysterious bug: ir[row] is actually
            # a matrix of shape (E, 1) and not a vector of size E
            intra_residual = numpy.zeros(ir.shape)
            for i, val in numpy.ndenumerate(ir):
                intra_residual[i] = val

        epsilons = rvs(distribution, num_events)
        inter_residual = stddev_inter * epsilons

        gmf = gsim.to_imt_unit_values(
            mean + intra_residual + inter_residual)

    return gmf, stddev_inter.max(axis=0), epsilons
python
def close(self):
    """
    Cleans up resources and closes connection

    :return:
    """
    if self._closed:
        return
    self._closed = True
    self.queue("close", None)
    if not self._flushed.wait(timeout=self._max_flush_time):
        raise ValueError("close timed out")
python
def _assemble_and_send_request(self):
    """
    Fires off the Fedex request.

    @warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
        WHICH RESIDES ON FedexBaseService AND IS INHERITED.
    """
    # We get an exception like this when specifying an IntegratorId:
    # suds.TypeNotFound: Type not found: 'IntegratorId'
    # Setting it to None does not seem to appease it.
    del self.ClientDetail.IntegratorId
    self.logger.debug(self.WebAuthenticationDetail)
    self.logger.debug(self.ClientDetail)
    self.logger.debug(self.TransactionDetail)
    self.logger.debug(self.VersionId)
    # Fire off the query.
    return self.client.service.serviceAvailability(
        WebAuthenticationDetail=self.WebAuthenticationDetail,
        ClientDetail=self.ClientDetail,
        TransactionDetail=self.TransactionDetail,
        Version=self.VersionId,
        Origin=self.Origin,
        Destination=self.Destination,
        ShipDate=self.ShipDate,
        CarrierCode=self.CarrierCode,
        Service=self.Service,
        Packaging=self.Packaging)
java
@Override
public List<CommerceNotificationAttachment> findAll(int start, int end) {
    return findAll(start, end, null);
}
python
def Get(self, project_id):
    """Returns an existing emulator instance for the provided project_id.

    If an emulator instance doesn't yet exist, it creates one.

    Args:
      project_id: project ID

    Returns:
      a DatastoreEmulator
    """
    if project_id in self._emulators:
        return self._emulators[project_id]
    emulator = self.Create(project_id)
    self._emulators[project_id] = emulator
    return emulator
python
def print(self):
    """Print self."""
    print(
        '{dim}Identifier:{none} {cyan}{identifier}{none}\n'
        '{dim}Name:{none} {name}\n'
        '{dim}Description:{none}\n{description}'.format(
            dim=Style.DIM,
            cyan=Fore.CYAN,
            none=Style.RESET_ALL,
            identifier=self.identifier,
            name=self.name,
            description=pretty_description(self.description, indent=2)
        )
    )
    if hasattr(self, 'argument_list') and self.argument_list:
        print('{dim}Arguments:{none}'.format(
            dim=Style.DIM, none=Style.RESET_ALL))
        for argument in self.argument_list:
            argument.print(indent=2)
python
def getSQLQuery(self, count=False):
    "Returns the query without performing it. If count, the query returned will be a SELECT COUNT() instead of a SELECT"
    sqlFilters = []
    sqlValues = []
    # print self.filters
    for f in self.filters:
        filt = []
        for k, vv in f.iteritems():
            if type(vv) is types.ListType or type(vv) is types.TupleType:
                sqlValues.extend(vv)
                kk = 'OR %s ? ' % k * len(vv)
                kk = "(%s)" % kk[3:]
            else:
                kk = k
                sqlValues.append(vv)
            filt.append(kk)
        sqlFilters.append('(%s ?)' % ' ? AND '.join(filt))

    if len(sqlValues) > stp.SQLITE_LIMIT_VARIABLE_NUMBER:
        raise ValueError("""The limit number of parameters imposed by sqlite is %s. You will have to break your query into several smaller ones. Sorry about that. (actual number of parameters is: %s)""" % (stp.SQLITE_LIMIT_VARIABLE_NUMBER, len(sqlValues)))

    sqlFilters = ' OR '.join(sqlFilters)
    if len(self.tables) < 2:
        tablesStr = self.rabaClass.__name__
    else:
        tablesStr = ', '.join(self.tables)

    if len(sqlFilters) == 0:
        sqlFilters = '1'

    if count:
        sql = 'SELECT COUNT(*) FROM %s WHERE %s' % (tablesStr, sqlFilters)
    else:
        sql = 'SELECT %s.raba_id FROM %s WHERE %s' % (self.rabaClass.__name__, tablesStr, sqlFilters)

    return (sql, sqlValues)
java
public void activate() {
    if (appender instanceof OptionHandler) {
        ((OptionHandler) appender).activateOptions();
        if (LoggingLogger.ROOT_LOGGER.isDebugEnabled()) {
            LoggingLogger.ROOT_LOGGER.debugf(
                "Invoking OptionHandler.activateOptions() on appender %s (%s)",
                appender.getName(), appender.getClass().getCanonicalName());
        }
    }
}
python
def clean_jobs(self, link, job_dict=None, clean_all=False):
    """Clean up all the jobs associated with this link.

    Returns a `JobStatus` enum
    """
    failed = False
    if job_dict is None:
        job_dict = link.jobs

    for job_details in job_dict.values():
        # clean failed jobs
        if job_details.status == JobStatus.failed or clean_all:
            # clean_job(job_details.logfile, job_details.outfiles, self._dry_run)
            clean_job(job_details.logfile, {}, self._dry_run)
            job_details.status = JobStatus.ready

    if failed:
        return JobStatus.failed
    return JobStatus.done
java
public static Predicate<InetAddress> ofCidr(InetAddress baseAddress, String subnetMask) {
    requireNonNull(baseAddress, "baseAddress");
    requireNonNull(subnetMask, "subnetMask");
    checkArgument(NetUtil.isValidIpV4Address(subnetMask),
                  "subnetMask: %s (expected: an IPv4 address string)", subnetMask);

    final int maskBits = toMaskBits(subnetMask);
    return ofCidr(baseAddress, maskBits, maskBits + 96);
}
java
@SuppressWarnings("unchecked") void fetchFromRepository(Collection<ProductDefinition> installDefinition) throws RepositoryException { Collection<ResourceType> interestingTypes = new HashSet<ResourceType>(); interestingTypes.add(ResourceType.FEATURE); interestingTypes.add(ResourceType.OPENSOURCE); interestingTypes.add(ResourceType.PRODUCTSAMPLE); Map<ResourceType, Collection<? extends RepositoryResource>> resources = repoConnections.getResources(installDefinition, interestingTypes, null); Collection<EsaResource> features = (Collection<EsaResource>) resources.get(ResourceType.FEATURE); if (features != null) { repoFeatures = features; } else { repoFeatures = Collections.emptySet(); } repoSamples = new ArrayList<>(); Collection<SampleResource> samples = (Collection<SampleResource>) resources.get(ResourceType.PRODUCTSAMPLE); if (samples != null) { repoSamples.addAll(samples); } Collection<SampleResource> osiSamples = (Collection<SampleResource>) resources.get(ResourceType.OPENSOURCE); if (osiSamples != null) { repoSamples.addAll(osiSamples); } }
java
public void combine(IntSummaryStatistics other) {
    count += other.count;
    sum += other.sum;
    min = Math.min(min, other.min);
    max = Math.max(max, other.max);
}
python
def get_shelveset_work_items(self, shelveset_id):
    """GetShelvesetWorkItems.

    Get work items associated with a shelveset.

    :param str shelveset_id: Shelveset's unique ID
    :rtype: [AssociatedWorkItem]
    """
    query_parameters = {}
    if shelveset_id is not None:
        query_parameters['shelvesetId'] = self._serialize.query('shelveset_id', shelveset_id, 'str')
    response = self._send(http_method='GET',
                          location_id='a7a0c1c1-373e-425a-b031-a519474d743d',
                          version='5.0',
                          query_parameters=query_parameters)
    return self._deserialize('[AssociatedWorkItem]', self._unwrap_collection(response))
java
public static Token<EsTokenIdentifier> obtainToken(RestClient client, User user) {
    EsToken esToken = obtainEsToken(client, user);
    return EsTokenIdentifier.createTokenFrom(esToken);
}
python
def config(self, charm_id, channel=None):
    '''Get the config data for a charm.

    @param charm_id The charm's id.
    @param channel Optional channel name.
    '''
    url = '{}/{}/meta/charm-config'.format(self.url, _get_path(charm_id))
    data = self._get(_add_channel(url, channel))
    return data.json()
python
def do_ctrlc(self, arg):
    '''
    Ctrl-C sends a STOP command to the arm.
    '''
    print('STOP')
    if self.arm.is_connected():
        self.arm.write('STOP')
java
public int synthesis_blockin(Block vb) {
    // Shift out any PCM/multipliers that we returned previously
    // centerW is currently the center of the last block added
    if (centerW > vi.blocksizes[1] / 2 && pcm_returned > 8192) {
        // don't shift too much; we need to have a minimum PCM buffer of
        // 1/2 long block
        int shiftPCM = centerW - vi.blocksizes[1] / 2;
        shiftPCM = (pcm_returned < shiftPCM ? pcm_returned : shiftPCM);

        pcm_current -= shiftPCM;
        centerW -= shiftPCM;
        pcm_returned -= shiftPCM;

        if (shiftPCM != 0) {
            for (int i = 0; i < vi.channels; i++) {
                System.arraycopy(pcm[i], shiftPCM, pcm[i], 0, pcm_current);
            }
        }
    }

    lW = W;
    W = vb.W;
    //nW=-1;

    // glue_bits+=vb.glue_bits;
    // time_bits+=vb.time_bits;
    // floor_bits+=vb.floor_bits;
    // res_bits+=vb.res_bits;

    if (sequence + 1 != vb.sequence)
        granulepos = -1; // out of sequence; lose count
    sequence = vb.sequence;

    {
        int sizeW = vi.blocksizes[W];
        int _centerW = centerW + vi.blocksizes[lW] / 4 + sizeW / 4;

        int beginW = _centerW - sizeW / 2;
        int endW = beginW + sizeW;
        int beginSl = 0;
        int endSl = 0;

        // Do we have enough PCM/mult storage for the block?
        if (endW > pcm_storage) {
            // expand the storage
            pcm_storage = endW + vi.blocksizes[1];
            for (int i = 0; i < vi.channels; i++) {
                float[] foo = new float[pcm_storage];
                System.arraycopy(pcm[i], 0, foo, 0, pcm[i].length);
                pcm[i] = foo;
            }
        }

        // overlap/add PCM
        switch (W) {
            case 0:
                beginSl = 0;
                endSl = vi.blocksizes[0] / 2;
                break;
            case 1:
                beginSl = vi.blocksizes[1] / 4 - vi.blocksizes[lW] / 4;
                endSl = beginSl + vi.blocksizes[lW] / 2;
                break;
        }

        for (int j = 0; j < vi.channels; j++) {
            int _pcm = beginW;
            // the overlap/add section
            int i = 0;
            for (i = beginSl; i < endSl; i++) {
                pcm[j][_pcm + i] += vb.pcm[j][i];
            }
            // the remaining section
            for (; i < sizeW; i++) {
                pcm[j][_pcm + i] = vb.pcm[j][i];
            }
        }

        // track the frame number... This is for convenience, but also
        // making sure our last packet doesn't end with added padding. If
        // the last packet is partial, the number of samples we'll have to
        // return will be past the vb->granulepos.
        //
        // This is not foolproof! It will be confused if we begin
        // decoding at the last page after a seek or hole. In that case,
        // we don't have a starting point to judge where the last frame
        // is. For this reason, vorbisfile will always try to make sure
        // it reads the last two marked pages in proper sequence
        if (granulepos == -1) {
            granulepos = vb.granulepos;
        } else {
            granulepos += (_centerW - centerW);
            if (vb.granulepos != -1 && granulepos != vb.granulepos) {
                if (granulepos > vb.granulepos && vb.eofflag != 0) {
                    // partial last frame. Strip the padding off
                    _centerW -= (granulepos - vb.granulepos);
                } // else{ Shouldn't happen *unless* the bitstream is out of
                  // spec. Either way, believe the bitstream }
                granulepos = vb.granulepos;
            }
        }

        // Update, cleanup
        centerW = _centerW;
        pcm_current = endW;

        // if(vb.eofflag!=0)
        //     eofflag=1;
    }
    return (0);
}
python
def local_position_ned_encode(self, time_boot_ms, x, y, z, vx, vy, vz):
    '''
    The filtered local position (e.g. fused computer vision and
    accelerometers). Coordinate frame is right-handed, Z-axis down
    (aeronautical frame, NED / north-east-down convention)

    time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
    x            : X Position (float)
    y            : Y Position (float)
    z            : Z Position (float)
    vx           : X Speed (float)
    vy           : Y Speed (float)
    vz           : Z Speed (float)
    '''
    return MAVLink_local_position_ned_message(time_boot_ms, x, y, z, vx, vy, vz)
java
public double process(Point2D_F64 a, Point2D_F64 b, Se3_F64 a_to_b, Point3D_F64 Xa) {
    PerspectiveOps.convertNormToPixel(intrinsicA, a.x, a.y, pixelN);
    PerspectiveOps.convertNormToPixel(intrinsicA, Xa.x / Xa.z, Xa.y / Xa.z, pixelX);

    double error = pixelN.distance2(pixelX);

    a_to_b.transform(Xa, Xb);

    PerspectiveOps.convertNormToPixel(intrinsicB, b.x, b.y, pixelN);
    PerspectiveOps.convertNormToPixel(intrinsicB, Xb.x / Xb.z, Xb.y / Xb.z, pixelX);

    return (error + pixelN.distance2(pixelX)) / 2;
}
java
@PostConstruct
public void onInit() {
    isExecutorAvailable = isExecutorOnClasspath();
    if (identityProvider.isUnsatisfied()) {
        setIdentityProvider(new EJBContextIdentityProvider(context));
    } else {
        setIdentityProvider(identityProvider.get());
    }
    setManagerFactory(new RuntimeManagerFactoryImpl());
    super.onInit();
}
python
def _adjust_inferential_results_for_parameter_constraints(self, constraints):
    """
    Ensure that parameters that were constrained during estimation do not
    have any values shown for inferential results. After all, no inference
    was performed.

    Parameters
    ----------
    constraints : list of ints, or None.
        If list, should contain the positions in the array of all estimated
        parameters that were constrained to their initial values.

    Returns
    -------
    None.
    """
    if constraints is not None:
        # Ensure the model object has inferential results
        inferential_attributes = ["standard_errors",
                                  "tvalues",
                                  "pvalues",
                                  "robust_std_errs",
                                  "robust_t_stats",
                                  "robust_p_vals"]
        assert all([hasattr(self, x) for x in inferential_attributes])
        assert hasattr(self, "params")

        all_names = self.params.index.tolist()

        for series in [getattr(self, x) for x in inferential_attributes]:
            for pos in constraints:
                series.loc[all_names[pos]] = np.nan

    return None
python
def guess_segments_lines(segments, lines, nearline_tolerance=5.0):
    """Given segments, return an array of line numbers, or -1 if the segment
    doesn't belong to any line."""
    ys = segments[:, 1]
    # each row a y, each column a distance to each line
    closeness = numpy.abs(numpy.subtract.outer(ys, lines))
    line_of_y = numpy.argmin(closeness, axis=1)
    distance = numpy.min(closeness, axis=1)
    bad = distance > numpy.mean(distance) + nearline_tolerance * numpy.std(distance)
    line_of_y[bad] = -1
    return line_of_y
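A toy run (illustrative values): segment y-coordinates near two line positions are labelled with the index of the nearest line:

import numpy

# Segments as rows of [x, y]; only column 1 (y) is used above.
segments = numpy.array([[0, 10.2], [0, 49.7], [0, 10.5]])
lines = numpy.array([10.0, 50.0])
print(guess_segments_lines(segments, lines))  # -> [0 1 0]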
java
public DatabaseMetaData filterDataBaseMetaData(JdbcTemplate jdbcTemplate, Connection con,
        DatabaseMetaData databaseMetaData) throws Exception {
    return databaseMetaData;
}
java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    // read values
    in.defaultReadObject();

    OutputStream output = getOutputStream();
    if (cachedContent != null) {
        output.write(cachedContent);
    }
    output.close();

    cachedContent = null;
}
python
def ListOf(cls, **kwargs):
    """A property that is a list of `cls`."""
    def _list_load(value):
        return [cls.load(d) for d in value]
    return Property(types=list, load=_list_load, default=list, **kwargs)
java
public void columns(int numOfColumns) {
    int columns = checkColumns(numOfColumns, 0, 0);
    String reason = NO_ELEMENT_FOUND;
    if (columns < 0 && getElement().is().present()) {
        reason = "Element not table";
    }
    assertTrue(reason, columns >= 0);
    assertEquals("Number of columns mismatch", numOfColumns, columns);
}
python
def exists(name, attributes):
    '''
    Make sure the given attributes exist on the file/directory

    name
        The path to the file/directory

    attributes
        The attributes that should exist on the file/directory, this is
        accepted as an array, with key and value split with an equals sign.
        If you want to specify a hex value then add 0x to the beginning of
        the value.
    '''
    ret = {'name': name,
           'result': True,
           'comment': '',
           'changes': {}}

    if not os.path.exists(name):
        ret['result'] = False
        ret['comment'] = "File or directory doesn't exist"
        return ret

    current_attrs = __salt__['xattr.list'](name)
    current_ids = current_attrs.keys()

    for attr in attributes:
        attr_id, attr_val = attr.split("=")
        attr_hex = attr_val.startswith("0x")

        if attr_hex:
            # Remove spaces and new lines so we can match these
            current_attrs[attr_id] = __salt__['xattr.read'](
                name, attr_id, hex=True).replace(" ", "").replace("\n", "")
            attr_val = attr_val[2:].replace(" ", "")

        if attr_id not in current_attrs:
            value_matches = False
        else:
            value_matches = ((current_attrs[attr_id] == attr_val) or
                             (attr_hex and current_attrs[attr_id] == attr_val))

        if attr_id in current_ids and value_matches:
            continue
        else:
            ret['changes'][attr_id] = attr_val
            __salt__['xattr.write'](name, attr_id, attr_val, attr_hex)

    if not ret['changes']:
        ret['comment'] = 'All values existed correctly.'

    return ret
python
def get_service_instance_from_managed_object(mo_ref, name='<unnamed>'):
    '''
    Retrieves the service instance from a managed object.

    mo_ref
        Reference to a managed object (of type vim.ManagedEntity).

    name
        Name of managed object. This field is optional.
    '''
    if not name:
        name = mo_ref.name
    log.trace('[%s] Retrieving service instance from managed object', name)
    si = vim.ServiceInstance('ServiceInstance')
    si._stub = mo_ref._stub
    return si
java
private String escapeToken(String token) {
    String newToken = (String) escMap.get(token);
    if (newToken == null)
        return token;
    return newToken;
}
python
def post_soup(self, *args, **kwargs):
    """
    Shortcut for ``post`` which returns a ``BeautifulSoup`` element
    """
    return BeautifulSoup(self.post(*args, **kwargs).text)
java
public EClass getGSFLW() {
    if (gsflwEClass == null) {
        gsflwEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(475);
    }
    return gsflwEClass;
}
java
public void removeDuplicateRules() {
    int prod1, prod2;
    int[] tmp1, tmp2;

    for (prod1 = getProductionCount() - 1; prod1 >= 0; prod1--) {
        tmp1 = getProduction(prod1);
        for (prod2 = prod1 - 1; prod2 >= 0; prod2--) {
            tmp2 = getProduction(prod2);
            if (tmp1[0] == tmp2[0]) {
                if (equal(tmp1, tmp2)) {
                    removeProduction(prod1);
                    break;
                }
            }
        }
    }
}
python
def simple_vertex_array(self, program, buffer, *attributes,
                        index_buffer=None, index_element_size=4) -> 'VertexArray':
    '''
    Create a :py:class:`VertexArray` object.

    Args:
        program (Program): The program used when rendering.
        buffer (Buffer): The buffer.
        attributes (list): A list of attribute names.

    Keyword Args:
        index_element_size (int): byte size of each index element, 1, 2 or 4.
        index_buffer (Buffer): An index buffer.

    Returns:
        :py:class:`VertexArray` object
    '''
    if type(buffer) is list:
        raise SyntaxError('Change simple_vertex_array to vertex_array')

    content = [(buffer, detect_format(program, attributes)) + attributes]
    return self.vertex_array(program, content, index_buffer, index_element_size)
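A hedged usage sketch against the moderngl API (the shader source and attribute name are illustrative, not fixed by the method above):

# Sketch: build a VAO from one buffer via simple_vertex_array (moderngl).
import numpy as np
import moderngl

ctx = moderngl.create_standalone_context()
prog = ctx.program(
    vertex_shader='#version 330\nin vec2 in_vert;'
                  'void main() { gl_Position = vec4(in_vert, 0.0, 1.0); }',
    fragment_shader='#version 330\nout vec4 color;'
                    'void main() { color = vec4(1.0); }',
)
vbo = ctx.buffer(np.array([0, 0, 1, 0, 0, 1], dtype='f4').tobytes())
vao = ctx.simple_vertex_array(prog, vbo, 'in_vert')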
java
public static void frustumM(float[] m, int offset,
        float left, float right, float bottom, float top,
        float near, float far) {
    if (left == right) {
        throw new IllegalArgumentException("left == right");
    }
    if (top == bottom) {
        throw new IllegalArgumentException("top == bottom");
    }
    if (near == far) {
        throw new IllegalArgumentException("near == far");
    }
    if (near <= 0.0f) {
        throw new IllegalArgumentException("near <= 0.0f");
    }
    if (far <= 0.0f) {
        throw new IllegalArgumentException("far <= 0.0f");
    }
    final float r_width  = 1.0f / (right - left);
    final float r_height = 1.0f / (top - bottom);
    final float r_depth  = 1.0f / (near - far);
    final float x = 2.0f * (near * r_width);
    final float y = 2.0f * (near * r_height);
    final float A = (right + left) * r_width;
    final float B = (top + bottom) * r_height;
    final float C = (far + near) * r_depth;
    final float D = 2.0f * (far * near * r_depth);

    m[offset + 0] = x;
    m[offset + 5] = y;
    m[offset + 8] = A;
    m[offset + 9] = B;
    m[offset + 10] = C;
    m[offset + 14] = D;
    m[offset + 11] = -1.0f;
    m[offset + 1] = 0.0f;
    m[offset + 2] = 0.0f;
    m[offset + 3] = 0.0f;
    m[offset + 4] = 0.0f;
    m[offset + 6] = 0.0f;
    m[offset + 7] = 0.0f;
    m[offset + 12] = 0.0f;
    m[offset + 13] = 0.0f;
    m[offset + 15] = 0.0f;
}
python
def recordbatch(self, auth, resource, entries, defer=False):
    """Records a list of historical entries to the resource specified.

    Builds a request that writes a list of historical entries to the
    specified resource.

    Args:
        auth: Takes the device cik
        resource: Takes the dataport alias or rid.
        entries: A list of entries to write to the resource.
    """
    return self._call('recordbatch', auth, [resource, entries], defer)
java
public static Collection<Info> getPendingCollaborations(BoxAPIConnection api) {
    URL url = PENDING_COLLABORATIONS_URL.build(api.getBaseURL());

    BoxAPIRequest request = new BoxAPIRequest(api, url, "GET");
    BoxJSONResponse response = (BoxJSONResponse) request.send();
    JsonObject responseJSON = JsonObject.readFrom(response.getJSON());

    int entriesCount = responseJSON.get("total_count").asInt();
    Collection<BoxCollaboration.Info> collaborations =
            new ArrayList<BoxCollaboration.Info>(entriesCount);
    JsonArray entries = responseJSON.get("entries").asArray();
    for (JsonValue entry : entries) {
        JsonObject entryObject = entry.asObject();
        BoxCollaboration collaboration = new BoxCollaboration(api, entryObject.get("id").asString());
        BoxCollaboration.Info info = collaboration.new Info(entryObject);
        collaborations.add(info);
    }

    return collaborations;
}
python
def proxy_callback_allowed(service, pgturl):
    """Check if a given proxy callback is allowed for the given service identifier."""
    if hasattr(settings, 'MAMA_CAS_SERVICES'):
        return _is_allowed('proxy_callback_allowed', service, pgturl)
    return _is_valid_service_url(service)
python
def connect(self, index):
    """Connect signals needed for dependency updates.

    Pre- and post-delete signals have to be handled separately, as:

    * in the pre-delete signal we have the information which objects to
      rebuild, but affected relations are still present, so a rebuild
      would reflect in the wrong (outdated) indices
    * in the post-delete signal indices can be rebuilt correctly, but
      there is no information which objects to rebuild, as affected
      relations were already deleted

    To bypass this, the list of objects should be stored in the pre-delete
    signal and indexing should be triggered in the post-delete signal.
    """
    self.index = index

    signal = ElasticSignal(self, 'process', pass_kwargs=True)
    signal.connect(post_save, sender=self.model)
    signal.connect(pre_delete, sender=self.model)

    pre_delete_signal = ElasticSignal(self, 'process_predelete', pass_kwargs=True)
    pre_delete_signal.connect(pre_delete, sender=self.model)

    post_delete_signal = ElasticSignal(self, 'process_delete', pass_kwargs=True)
    post_delete_signal.connect(post_delete, sender=self.model)

    return [signal, pre_delete_signal, post_delete_signal]
java
public final <T> ConfigurationModule setMultiple(final Param<T> opt, final Iterable<String> values) {
    ConfigurationModule c = deepCopy();
    for (final String val : values) {
        c = c.set(opt, val);
    }
    return c;
}
python
def retry(default=None):
    """Retry functions after failures"""
    def decorator(func):
        """Retry decorator"""
        @functools.wraps(func)
        def _wrapper(*args, **kw):
            for pos in range(1, MAX_RETRIES):
                try:
                    return func(*args, **kw)
                except (RuntimeError, requests.ConnectionError) as error:
                    LOGGER.warning("Failed: %s, %s", type(error), error)
                    # Wait a bit before retrying
                    for _ in range(pos):
                        _rest()
            LOGGER.warning("Request Aborted")
            return default
        return _wrapper
    return decorator
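A minimal sketch of applying the decorator: MAX_RETRIES, LOGGER and _rest are module globals in the original, stubbed below so the demo is self-contained (this only works as-is when the stubs live in the same module as retry):

import functools
import logging
import time

import requests

MAX_RETRIES = 4                       # stub for the module global
LOGGER = logging.getLogger(__name__)  # stub for the module global

def _rest():
    time.sleep(0.1)  # stub back-off step

@retry(default='')
def fetch(url):
    return requests.get(url, timeout=5).text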
python
def decrypt_seal(self, data: bytes) -> bytes:
    """
    Decrypt bytes data with a curve25519 version of the ed25519 key pair

    :param data: Encrypted data
    :return: decrypted bytes
    """
    curve25519_public_key = libnacl.crypto_sign_ed25519_pk_to_curve25519(self.vk)
    curve25519_secret_key = libnacl.crypto_sign_ed25519_sk_to_curve25519(self.sk)
    return libnacl.crypto_box_seal_open(data, curve25519_public_key, curve25519_secret_key)
python
def intersects_segment(self, other):
    """
    Returns True if the intersection of self and the segment other is not
    the null set, otherwise returns False. The algorithm is O(log n).
    Requires the list to be coalesced.
    """
    i = _bisect_left(self, other)
    return ((i != 0) and (other[0] < self[i-1][1])) \
        or ((i != len(self)) and (other[1] > self[i][0]))
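The same check written stand-alone with the stdlib bisect, to show the logic on a plain sorted, coalesced list of segments:

from bisect import bisect_left

def intersects(spans, other):
    # spans: sorted, coalesced list of (start, end) tuples
    i = bisect_left(spans, other)
    return ((i != 0 and other[0] < spans[i - 1][1])
            or (i != len(spans) and other[1] > spans[i][0]))

print(intersects([(0, 5), (10, 15)], (4, 6)))   # True: overlaps (0, 5)
print(intersects([(0, 5), (10, 15)], (5, 10)))  # False: touches endpoints only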
java
public static void extract(InputStream is, File outputFolder) throws IOException {
    ZipInputStream zis = new ZipInputStream(is);
    ZipEntry entry;
    byte[] buffer = new byte[1024];
    while ((entry = zis.getNextEntry()) != null) {
        // Note: the entry name is used as-is; callers must trust the archive,
        // since a name containing ".." could escape outputFolder ("zip slip").
        File outputFile = new File(outputFolder.getCanonicalPath() + File.separatorChar + entry.getName());
        // Make sure the parent directories exist before writing the entry.
        File outputParent = new File(outputFile.getParent());
        outputParent.mkdirs();
        if (entry.isDirectory()) {
            if (!outputFile.exists()) {
                outputFile.mkdir();
            }
        } else {
            // Stream the entry's bytes to disk in 1 KiB chunks.
            try (FileOutputStream fos = new FileOutputStream(outputFile)) {
                int len;
                while ((len = zis.read(buffer)) > 0) {
                    fos.write(buffer, 0, len);
                }
            }
        }
    }
}
java
@Override @SuppressWarnings("ReferenceEquality") protected boolean evaluateDependencies(final Dependency dependency, final Dependency nextDependency, final Set<Dependency> dependenciesToRemove) { Dependency main; //CSOFF: InnerAssignment if ((main = getMainGemspecDependency(dependency, nextDependency)) != null) { if (main == dependency) { mergeDependencies(dependency, nextDependency, dependenciesToRemove); } else { mergeDependencies(nextDependency, dependency, dependenciesToRemove); return true; //since we merged into the next dependency - skip forward to the next in mainIterator } } else if ((main = getMainSwiftDependency(dependency, nextDependency)) != null) { if (main == dependency) { mergeDependencies(dependency, nextDependency, dependenciesToRemove); } else { mergeDependencies(nextDependency, dependency, dependenciesToRemove); return true; //since we merged into the next dependency - skip forward to the next in mainIterator } } else if ((main = getMainAndroidDependency(dependency, nextDependency)) != null) { if (main == dependency) { mergeDependencies(dependency, nextDependency, dependenciesToRemove); } else { mergeDependencies(nextDependency, dependency, dependenciesToRemove); return true; //since we merged into the next dependency - skip forward to the next in mainIterator } } else if ((main = getMainDotnetDependency(dependency, nextDependency)) != null) { if (main == dependency) { mergeDependencies(dependency, nextDependency, dependenciesToRemove); } else { mergeDependencies(nextDependency, dependency, dependenciesToRemove); return true; //since we merged into the next dependency - skip forward to the next in mainIterator } } //CSON: InnerAssignment return false; }
java
public Version getBaseVersion() throws UnsupportedRepositoryOperationException, RepositoryException { checkValid(); if (!this.isNodeType(Constants.MIX_VERSIONABLE)) { throw new UnsupportedRepositoryOperationException("Node is not versionable " + getPath()); } PropertyData bvProp = (PropertyData)dataManager.getItemData(nodeData(), new QPathEntry(Constants.JCR_BASEVERSION, 1), ItemType.PROPERTY); return (Version)dataManager.getItemByIdentifier(ValueDataUtil.getString(bvProp.getValues().get(0)), true, false); }
java
void processAMDModules(Iterable<CompilerInput> inputs) { for (CompilerInput input : inputs) { input.setCompiler(this); Node root = checkNotNull(input.getAstRoot(this)); new TransformAMDToCJSModule(this).process(null, root); } }
java
private StructuredProxyPushSupplier getStructuredProxyPushSupplier(String channelName) throws DevFailed {
    StructuredProxyPushSupplier structuredProxyPushSupplier = StructuredProxyPushSupplierHelper.narrow(proxySupplier);
    if (structuredProxyPushSupplier == null) {
        Except.throw_event_system_failed("API_NotificationServiceFailed",
                "Failed to narrow the structured push supplier (hint: make sure the notifd daemon is running on this host)",
                "EventConsumer.connect_event_channel");
        return null; // Just to remove warning
    }
    // Connect to the proxy consumer
    try {
        structuredProxyPushSupplier.connect_structured_push_consumer(_this(orb));
    } catch (NullPointerException e) {
        e.printStackTrace();
        Except.throw_event_system_failed("API_NotificationServiceFailed",
                e + " detected when subscribing to " + channelName,
                "EventConsumer.connect_event_channel");
    } catch (org.omg.CosEventChannelAdmin.AlreadyConnected ex) {
        Except.throw_event_system_failed("API_NotificationServiceFailed",
                "Failed to connect the push supplier due to a CosEventChannelAdmin.AlreadyConnected exception",
                "EventConsumer.connect_event_channel");
    } catch (org.omg.CosEventChannelAdmin.TypeError ex) {
        Except.throw_event_system_failed("API_NotificationServiceFailed",
                "Failed to connect the push supplier due to a CosEventChannelAdmin.TypeError exception",
                "EventConsumer.connect_event_channel");
    }
    return structuredProxyPushSupplier;
}
java
@Override public boolean eIsSet(int featureID) { switch (featureID) { case AfplibPackage.BNG__PGRP_NAME: return PGRP_NAME_EDEFAULT == null ? pGrpName != null : !PGRP_NAME_EDEFAULT.equals(pGrpName); case AfplibPackage.BNG__TRIPLETS: return triplets != null && !triplets.isEmpty(); } return super.eIsSet(featureID); }
python
def get_iter(self, path): """ :param path: the :obj:`Gtk.TreePath`-struct :type path: :obj:`Gtk.TreePath` :raises: :class:`ValueError` if `path` doesn't exist :returns: a :obj:`Gtk.TreeIter` :rtype: :obj:`Gtk.TreeIter` Returns an iterator pointing to `path`. If `path` does not exist :class:`ValueError` is raised. """ path = self._coerce_path(path) success, aiter = super(TreeModel, self).get_iter(path) if not success: raise ValueError("invalid tree path '%s'" % path) return aiter
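A short PyGObject usage sketch of the override above; the model contents are illustrative.

import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

store = Gtk.ListStore(str)
store.append(["row 0"])

it = store.get_iter(Gtk.TreePath.new_from_string("0"))   # valid path
try:
    store.get_iter(Gtk.TreePath.new_from_string("5"))    # no such row
except ValueError as exc:
    print(exc)  # invalid tree path '5'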
java
public LexiconEntryInfo replaceCategory(CcgCategory newCategory) { return new LexiconEntryInfo(newCategory, lexiconTrigger, lexiconIndex, spanStart, spanEnd, triggerSpanStart, triggerSpanEnd); }
python
def erase_display(self, method=EraseMethod.ALL_MOVE): """ Clear the screen or part of the screen. Arguments: method: One of these possible values: EraseMethod.END or 0: Clear from cursor to the end of the screen. EraseMethod.START or 1: Clear from cursor to the start of the screen. EraseMethod.ALL_MOVE or 2: Clear all, and move home. EraseMethod.ALL_ERASE or 3: Clear all, and erase scrollback buffer. EraseMethod.ALL_MOVE_ERASE or 4: Like doing 2 and 3 in succession. This is a feature of Colr. It is not standard. Default: EraseMethod.ALL_MOVE (2) """ return self.chained(erase.display(method))
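Usage sketch: `screen` stands for an instance of the class defining erase_display, and EraseMethod is assumed importable from the same module.

screen.erase_display()                       # EraseMethod.ALL_MOVE: clear + home
screen.erase_display(EraseMethod.ALL_ERASE)  # clear and wipe scrollback buffer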
python
def read_byte(self, addr): """Read a single byte from static memory area (blocks 0-14). """ if addr < 0 or addr > 127: raise ValueError("invalid byte address") log.debug("read byte at address {0} ({0:02X}h)".format(addr)) cmd = "\x01" + chr(addr) + "\x00" + self.uid return self.transceive(cmd)[-1]
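For reference, the 7-byte Type 1 Tag (Topaz) READ frame the method assembles, shown here for byte address 08h and an illustrative 4-byte UID of 01 02 03 04.

# 01 08 00 01 02 03 04
# |  |  |  '---UID---'   four UID bytes appended from self.uid
# |  |  '-- DATA byte (ignored by READ, sent as 00h)
# |  '-- ADD: the byte address within static memory
# '-- command code (READ = 01h)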
python
def intent(self, intent):
    '''Decorator to register an intent handler.'''
    def _handler(func):
        self._handlers['IntentRequest'][intent] = func
        return func
    return _handler
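A hedged usage sketch: `app` stands for an instance of the class defining intent(), and the handler signature is framework-specific.

@app.intent('HelloIntent')
def handle_hello(request):
    # Dispatched when an IntentRequest named 'HelloIntent' arrives.
    return "Hello!"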
java
@Override public void configureMachine( TargetHandlerParameters parameters, String machineId ) throws TargetException { this.logger.fine( "Configuring machine '" + machineId + "': nothing to configure." ); }
java
public static LogEntry of(String logName, MonitoredResource resource, Payload<?> payload) { return newBuilder(payload).setLogName(logName).setResource(resource).build(); }
python
def hpimpute(self, data: ['SASdata', str] = None, code: str = None, freq: str = None, id: str = None, impute: str = None, input: [str, list, dict] = None, performance: str = None, procopts: str = None, stmtpassthrough: str = None, **kwargs: dict) -> 'SASresults':
    """
    Python method to call the HPIMPUTE procedure

    Documentation link:
    https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=prochp&docsetTarget=prochp_hpimpute_toc.htm&locale=en

    :param data: SASdata object or string. This parameter is required.
    :param code: The code variable can only be a string type.
    :param freq: The freq variable can only be a string type.
    :param id: The id variable can only be a string type.
    :param impute: The impute variable can only be a string type.
    :param input: The input variable can be a string, list or dict type. It refers to the dependent, y, or label variable.
    :param performance: The performance variable can only be a string type.
    :param procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
    :param stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
    :return: SAS Result Object
    """
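A heavily hedged usage sketch: it assumes a live SAS session and that hpimpute is exposed on a saspy product object in the usual way (the product object name is a guess, and the statement strings are placeholders, not verified PROC HPIMPUTE syntax; see the linked documentation).

import saspy

sas = saspy.SASsession()
cars = sas.sasdata('cars', libref='sashelp')

# Product object name and statement strings below are illustrative only.
ml = sas.sasml()
res = ml.hpimpute(data=cars, input='cylinders', impute='cylinders / value=6')
print(res)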
java
public java.util.List<java.util.List<TagFilter>> getOnPremisesTagSetList() { if (onPremisesTagSetList == null) { onPremisesTagSetList = new com.amazonaws.internal.SdkInternalList<java.util.List<TagFilter>>(); } return onPremisesTagSetList; }