language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def main():
    """Run the program.

    Converts a tabular input file (.tab/.txt/.tsv, .csv, or .xls/.xlsx)
    into a gzip-compressed, tab-separated output file.
    """
    args = parse_arguments()
    # Dispatch on the lower-cased extension of the input file.
    ext = os.path.splitext(args.input_file)[-1].lower()
    with gzip.open(args.output_file, mode='wt') as outfile:
        csvwriter = csv.writer(outfile, delimiter=str('\t'), lineterminator='\n')
        try:
            if ext in ('.tab', '.txt', '.tsv'):
                with open(args.input_file) as infile:
                    for line in infile:
                        outline = line.strip().split('\t')
                        csvwriter.writerow(outline)
            elif ext == '.csv':
                with open(args.input_file) as infile:
                    for line in infile:
                        outline = line.strip().split(',')
                        csvwriter.writerow(outline)
            elif ext in ('.xls', '.xlsx'):
                # Excel input: only the first worksheet is converted.
                workbook = xlrd.open_workbook(args.input_file)
                worksheet = workbook.sheets()[0]
                for rownum in range(worksheet.nrows):
                    csvwriter.writerow(worksheet.row_values(rownum))
            else:
                print('{"proc.error":"File extension not recognized."}')
        except Exception:
            # Emit a machine-readable error line, then re-raise so the
            # traceback still reaches the caller/logs.
            print('{"proc.error":"Corrupt or unrecognized file."}')
            raise
java
/**
 * Marshalls the given UntagResourceRequest fields (resource ARN and tag key
 * list) into the protocol-specific wire format.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(UntagResourceRequest untagResourceRequest, ProtocolMarshaller protocolMarshaller) {
    if (untagResourceRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(untagResourceRequest.getResourceArn(), RESOURCEARN_BINDING);
        protocolMarshaller.marshall(untagResourceRequest.getTagKeyList(), TAGKEYLIST_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def binary(self):
    """ return encoded representation """
    # The length of the creation field selects between the old external
    # pid tag (1-byte creation) and the new one (4-byte creation).
    creation_size = len(self.creation)
    if creation_size == 1:
        return (
            b_chr(_TAG_PID_EXT) +
            self.node.binary() + self.id + self.serial + self.creation
        )
    elif creation_size == 4:
        return (
            b_chr(_TAG_NEW_PID_EXT) +
            self.node.binary() + self.id + self.serial + self.creation
        )
    else:
        raise OutputException('unknown pid type')
python
def load_from_rho_file(self, filename):
    """Convenience function that loads two parameter sets from a rho.dat
    file, as used by CRMod for forward resistivity/phase models.

    Parameters
    ----------
    filename: string, file path
        filename of rho.dat file

    Returns
    -------
    cid_mag: int
        ID of magnitude parameter set
    cid_pha: int
        ID of phase parameter set
    """
    # First row is a header line; column 0 holds magnitudes, column 1 phases.
    data = np.loadtxt(filename, skiprows=1)
    cid_mag = self.add_data(data[:, 0])
    cid_pha = self.add_data(data[:, 1])
    return cid_mag, cid_pha
java
/**
 * Extracts the set of constraint names stored in the ALL_CONSTRAINTS
 * context property. Returns an empty set when the property is unset.
 */
private Set<String> getConstraintNameList(){
    Set<String> result = new HashSet<>();
    String propertyValue = getProperty(ConstraintContextProperty.ALL_CONSTRAINTS);
    if(propertyValue == null)
        return result;
    // The property value is an array-encoded string; split on the separator.
    StringTokenizer attributeTokens = StringUtils.splitArrayString(propertyValue, String.valueOf(ArrayUtils.VALUE_SEPARATION));
    while(attributeTokens.hasMoreTokens()){
        String nextToken = attributeTokens.nextToken();
        result.add(nextToken);
    }
    return result;
}
java
/**
 * Asserts that a required property value is present.
 *
 * @param key property name, used in the error message
 * @param val value to validate
 * @return val, unchanged, when non-null
 * @throws RuntimeException when val is null
 */
private <T> T assertReqProp(String key,T val) {
    if(val == null){
        throw new RuntimeException("The property [" + key + "] is not present ");
    }
    return val;
}
java
/**
 * Merges the points of the given batch into this one, but only when the two
 * batches are compatible.
 *
 * @param that the batch whose points should be absorbed
 * @return true when the merge happened, false when the batches are not
 *         merge-able (this batch is left untouched)
 */
public boolean mergeIn(final BatchPoints that) {
    final boolean compatible = isMergeAbleWith(that);
    if (!compatible) {
        return false;
    }
    this.points.addAll(that.points);
    return true;
}
java
/**
 * Adds a folder to this container, lazily creating the backing list on the
 * first call.
 *
 * @param folder non-null folder to add
 */
public final void addFolder(@NotNull final Folder folder) {
    Contract.requireArgNotNull("folder", folder);
    if (folders == null) {
        folders = new ArrayList<>();
    }
    folders.add(folder);
}
python
def kuhn_munkres(G):  # maximum profit bipartite matching in O(n^4)
    """Maximum profit perfect matching

    for minimum cost perfect matching just inverse the weights

    :param G: squared weight matrix of a complete bipartite graph
    :complexity: :math:`O(n^4)`
    """
    assert len(G) == len(G[0])
    n = len(G)
    mu = [None] * n  # Empty matching
    mv = [None] * n
    lu = [max(row) for row in G]  # Trivial labels
    lv = [0] * n
    for u0 in range(n):
        if mu[u0] is None:  # Free node
            while True:
                au = [False] * n  # Empty alternating tree
                av = [False] * n
                # Grow an alternating tree from u0; stop once an augmenting
                # path is found, otherwise relax the labels and retry.
                if improve_matching(G, u0, mu, mv, au, av, lu, lv):
                    break
                improve_labels(G, au, av, lu, lv)
    # mu is the matching; sum(lu) + sum(lv) equals the matching's profit.
    return (mu, sum(lu) + sum(lv))
java
/**
 * Enables or disables XML content auto correction from a String flag and
 * logs the resulting state at INIT level.
 *
 * @param xmlContentAutoCorrect "true" to enable, anything else disables
 */
public void setXmlContentAutoCorrect(String xmlContentAutoCorrect) {
    m_xmlContentAutoCorrect = Boolean.valueOf(xmlContentAutoCorrect).booleanValue();
    if (CmsLog.INIT.isInfoEnabled()) {
        CmsLog.INIT.info(
            Messages.get().getBundle().key(
                m_xmlContentAutoCorrect
                ? Messages.INIT_XMLCONTENT_AUTOCORRECT_ENABLED_0
                : Messages.INIT_XMLCONTENT_AUTOCORRECT_DISABLED_0));
    }
}
java
/**
 * Drapes a line string onto a set of triangles: the line is split at every
 * intersection with the triangle edges, and each resulting vertex is then
 * assigned a Z value through the DrapeFilter backed by the spatial index.
 */
public static Geometry drapeLineString(LineString line, Geometry triangles, STRtree sTRtree) {
    GeometryFactory factory = line.getFactory();
    //Split the triangles in lines to perform all intersections
    Geometry triangleLines = LinearComponentExtracter.getGeometry(triangles, true);
    Geometry diffExt = lineMerge(line.difference(triangleLines), factory);
    // Apply the drape filter in place to set coordinates from the index.
    CoordinateSequenceFilter drapeFilter = new DrapeFilter(sTRtree);
    diffExt.apply(drapeFilter);
    return diffExt;
}
python
def treeReduce(self, f, depth=2):
    """
    Reduces the elements of this RDD in a multi-level tree pattern.

    :param depth: suggested depth of the tree (default: 2)

    >>> add = lambda x, y: x + y
    >>> rdd = sc.parallelize([-5, -4, -3, -2, -1, 1, 2, 3, 4], 10)
    >>> rdd.treeReduce(add)
    -5
    >>> rdd.treeReduce(add, 1)
    -5
    >>> rdd.treeReduce(add, 2)
    -5
    >>> rdd.treeReduce(add, 5)
    -5
    >>> rdd.treeReduce(add, 10)
    -5
    """
    if depth < 1:
        raise ValueError("Depth cannot be smaller than 1 but got %d." % depth)

    # Use the second entry to indicate whether this is a dummy value.
    zeroValue = None, True

    def op(x, y):
        # Combine two (value, is_dummy) pairs, skipping dummy entries so
        # that empty partitions do not contribute to the reduction.
        if x[1]:
            return y
        elif y[1]:
            return x
        else:
            return f(x[0], y[0]), False

    reduced = self.map(lambda x: (x, False)).treeAggregate(zeroValue, op, op, depth)
    if reduced[1]:
        # Everything folded away as dummies: no real elements existed.
        raise ValueError("Cannot reduce empty RDD.")
    return reduced[0]
python
def _next_sample_index(self):
    """ShuffledMux chooses its next sample stream randomly,
    conditioned on the stream weights.
    """
    # Draw one stream index with probability proportional to its
    # (normalized) weight.
    return self.rng.choice(self.n_streams,
                           p=(self.stream_weights_ / self.weight_norm_))
java
/**
 * Returns the commit info JSON for the asset identified by the path.
 *
 * @throws ServiceException NOT_FOUND when the asset or its commit info is
 *         missing
 */
@Override
@Path("{package}/{asset}")
public JSONObject get(String assetPath, Map<String,String> headers)
        throws ServiceException, JSONException {
    AssetServices assetServices = ServiceLocator.getAssetServices();
    // NOTE(review): substring(7) strips a fixed-length leading path prefix
    // before the package/asset portion -- confirm against the routing.
    AssetInfo asset = assetServices.getAsset(assetPath.substring(7), true);
    if (asset == null)
        throw new ServiceException(ServiceException.NOT_FOUND, "Asset not found: " + assetPath);
    if (asset.getCommitInfo() == null)
        throw new ServiceException(ServiceException.NOT_FOUND, "Commit Info not found: " + assetPath);
    return asset.getCommitInfo().getJson();
}
python
def isAuxilied(self):
    """ Returns if the object is separating and applying to
    a benefic considering good aspects.
    """
    benefics = [const.VENUS, const.JUPITER]
    # Good aspects only: conjunction (0), sextile (60), trine (120).
    return self.__sepApp(benefics, aspList=[0, 60, 120])
java
/**
 * Collects every child node that is a DataNode.
 *
 * @return an unmodifiable list of this node's DataNode children
 */
public List<DataNode> dataNodes() {
    final List<DataNode> found = new ArrayList<>();
    for (final Node child : childNodes) {
        if (child instanceof DataNode) {
            found.add((DataNode) child);
        }
    }
    return Collections.unmodifiableList(found);
}
java
/**
 * Parses a string into the declared type of the property at the given path.
 * Returns null when the property type is unknown, and the raw string when
 * no transformer is registered for that type.
 */
public Object parseAsPropertyType(String stringToParse, String propertyPath) {
    Class propertyType = getPropertyType(propertyPath);
    if (propertyType == null) {
        return null;
    }
    NumberTransformer parser = transformers.get(propertyType);
    return parser == null ? stringToParse : parser.parseObject(stringToParse);
}
python
def static_urls_js():
    """
    Add global variables to JavaScript about the location and latest version of
    transpiled files.

    Usage::

        {% static_urls_js %}
    """
    if apps.is_installed('django.contrib.staticfiles'):
        # staticfiles is installed: honour its (possibly remote) base URL.
        from django.contrib.staticfiles.storage import staticfiles_storage
        static_base_url = staticfiles_storage.base_url
    else:
        # Fall back to the raw STATIC_URL setting.
        static_base_url = PrefixNode.handle_simple("STATIC_URL")
    transpile_base_url = urljoin(static_base_url, 'js/transpile/')
    return {
        'static_base_url': static_base_url,
        'transpile_base_url': transpile_base_url,
        'version': LAST_RUN['version']
    }
java
/**
 * Creates a node token spanning the level's position range and registers it:
 * the token is added to the token collection, propagated to the parent level
 * (when present), and set as parent of all tokens already collected at this
 * level. Does nothing when the level has no node or no position range.
 */
private void createNodeMappings(MtasTokenIdFactory mtasTokenIdFactory, Level level,
    Level parentLevel) {
    MtasToken nodeToken;
    if (level.node != null && level.positionStart != null
        && level.positionEnd != null) {
        nodeToken = new MtasTokenString(mtasTokenIdFactory.createTokenId(), level.node, "");
        nodeToken.setOffset(level.offsetStart, level.offsetEnd);
        nodeToken.setRealOffset(level.realOffsetStart, level.realOffsetEnd);
        nodeToken.addPositionRange(level.positionStart, level.positionEnd);
        tokenCollection.add(nodeToken);
        if (parentLevel != null) {
            parentLevel.tokens.add(nodeToken);
        }
        // only for first mapping(?)
        for (MtasToken token : level.tokens) {
            token.setParentId(nodeToken.getId());
        }
    }
}
java
/**
 * Returns a copy of the request headers that is safe to log: cookie values
 * are masked, and repeated headers are folded into a single comma-separated
 * value as described by RFC 2616 section 4.2.
 */
public Map<String, String> getPrintableHeaders() {
    final Map<String, String> headers = new HashMap<String, String>(
        request.headers().entries().size());
    for (final Entry<String, String> header : request.headers().entries()) {
        if (header.getKey().toLowerCase().equals("cookie")) {
            // null out the cookies
            headers.put(header.getKey(), "*******");
        } else {
            // http://tools.ietf.org/html/rfc2616#section-4.2
            if (headers.containsKey(header.getKey())) {
                headers.put(header.getKey(),
                    headers.get(header.getKey()) + "," + header.getValue());
            } else {
                headers.put(header.getKey(), header.getValue());
            }
        }
    }
    return headers;
}
java
/**
 * Unboxes a Double[] into a double[], substituting valueForNull for null
 * elements. Returns null when the input array itself is null.
 */
public static double[] unbox(final Double[] a, final double valueForNull) {
    if (a == null) {
        return null;
    }
    // Delegate to the range-based overload over the whole array.
    return unbox(a, 0, a.length, valueForNull);
}
java
/**
 * Enumerates the given source set by delegating to the mapper registered
 * for that set.
 */
@Override
public Enumerable<T> enumerate(SET source) throws E, MappingException {
    return getMapperFromSet(source).enumerate(source);
}
java
/**
 * Returns the (lazily created) WebSphereEjbServices for an application.
 * Creation and lookup are guarded by the ejbServices map's monitor, so each
 * application ID maps to exactly one instance.
 */
@Override
public WebSphereEjbServices getWebSphereEjbServices(String applicationID) {
    WebSphereEjbServices services = null;
    synchronized (ejbServices) {
        services = ejbServices.get(applicationID);
        if (services == null) {
            services = new WebSphereEjbServicesImpl(applicationID, ejbDescriptorMap);
            ejbServices.put(applicationID, services);
        }
    }
    return services;
}
java
/**
 * Handles a transaction boundary command: refreshes its topology id and
 * forwards it down the interceptor chain with the shared return handler.
 */
private Object handleTxCommand(TxInvocationContext ctx, TransactionBoundaryCommand command) {
    if (trace)
        log.tracef("handleTxCommand for command %s, origin %s", command, getOrigin(ctx));
    updateTopologyId(command);
    return invokeNextAndHandle(ctx, command, handleTxReturn);
}
java
/**
 * Asynchronously deletes a pattern from the given application version and
 * notifies the callback with the resulting operation status.
 */
public ServiceFuture<OperationStatus> deletePatternAsync(UUID appId, String versionId, UUID patternId,
    final ServiceCallback<OperationStatus> serviceCallback) {
    return ServiceFuture.fromResponse(
        deletePatternWithServiceResponseAsync(appId, versionId, patternId), serviceCallback);
}
python
def defaultcolour(self, colour):
    """
    Auxiliary method to choose a default colour.
    Give me a user provided colour : if it is None, I change it to the
    default colour, respecting negative.
    Plus, if the image is in RGB mode and you give me 128 for a gray, I
    translate this to the expected (128, 128, 128) ...
    """
    if colour is None:
        # Default: black for negative images, white otherwise. A bare
        # scalar for mode "L", an RGB triple for everything else.
        if self.negative:
            return 0 if self.pilimage.mode == "L" else (0, 0, 0)
        return 255 if self.pilimage.mode == "L" else (255, 255, 255)
    # Expand a scalar gray level to an RGB triple when needed. bool is
    # excluded to match the original exact-int check (type(colour) is int).
    if self.pilimage.mode == "RGB" and isinstance(colour, int) \
            and not isinstance(colour, bool):
        return (colour, colour, colour)
    return colour
java
/**
 * Returns whether the AMQP error denotes a general, service-side condition
 * (server busy, timeout, or resource limit exceeded).
 * NOTE(review): relies on reference equality of Symbol constants -- assumes
 * Symbols are interned; confirm before comparing Symbols from other sources.
 */
static boolean isGeneralError(Symbol amqpError) {
    return (amqpError == ClientConstants.SERVER_BUSY_ERROR
            || amqpError == ClientConstants.TIMEOUT_ERROR
            || amqpError == AmqpErrorCode.ResourceLimitExceeded);
}
java
/**
 * Builds a waiter that polls DescribeInstances until the instance exists,
 * retrying up to 40 times with a fixed 5-second delay between attempts.
 */
public Waiter<DescribeInstancesRequest> instanceExists() {
    return new WaiterBuilder<DescribeInstancesRequest, DescribeInstancesResult>()
        .withSdkFunction(new DescribeInstancesFunction(client))
        .withAcceptors(new InstanceExists.IsTrueMatcher(), new InstanceExists.IsInvalidInstanceIDNotFoundMatcher())
        .withDefaultPollingStrategy(new PollingStrategy(new MaxAttemptsRetryStrategy(40), new FixedDelayStrategy(5)))
        .withExecutorService(executorService).build();
}
java
/**
 * Lists files matching a glob pattern, normalizing the pattern through the
 * GCS path representation first, then choosing between concurrent,
 * flat-listing, or default Hadoop glob strategies.
 */
@Override
public FileStatus[] globStatus(Path pathPattern, PathFilter filter) throws IOException {
    checkOpen();

    logger.atFine().log("GHFS.globStatus: %s", pathPattern);
    // URI does not handle glob expressions nicely, for the purpose of
    // fully-qualifying a path we can URI-encode them.
    // Using toString() to avoid Path(URI) constructor.
    Path encodedPath = new Path(pathPattern.toUri().toString());
    // We convert pathPattern to GCS path and then to Hadoop path to ensure that it ends up in
    // the correct format. See note in getHadoopPath for more information.
    Path encodedFixedPath = getHadoopPath(getGcsPath(encodedPath));
    // Decode URI-encoded path back into a glob path.
    Path fixedPath = new Path(URI.create(encodedFixedPath.toString()));
    logger.atFine().log("GHFS.globStatus fixedPath: %s => %s", pathPattern, fixedPath);

    // Prefer flat-listing strategies when enabled and the pattern allows it.
    if (enableConcurrentGlob && couldUseFlatGlob(fixedPath)) {
        return concurrentGlobInternal(fixedPath, filter);
    }
    if (enableFlatGlob && couldUseFlatGlob(fixedPath)) {
        return flatGlobInternal(fixedPath, filter);
    }
    return super.globStatus(fixedPath, filter);
}
python
def is_bot_the_only_committer(self, pr):
    """
    Checks if the bot is the only committer for the given pull request.

    :param pr: pull request to check
    :return: bool - True if the bot is the only committer
    """
    committer = self.provider.get_pull_request_committer(
        self.user_repo, pr)
    # flatten the list and remove duplicates
    committer_set = set([c.login for c in committer])
    # it's impossible to get the bots login if this is an integration, just check that
    # there's only one commit in the commit history.
    if self.integration or getattr(self.provider, 'name', '') == 'gitlab':
        return len(committer_set) == 1
    # check that there's exactly one committer in this PRs commit history and
    # that the committer is the bot
    return len(committer_set) == 1 and self.provider.is_same_user(self.bot, committer[0])
java
/**
 * Looks up a field by sequence number, but only when this record matches
 * the given table name; returns null otherwise.
 */
public BaseField getField(String strTableName, int iFieldSeq)   // Lookup this field
{
    if (this.getRecord(strTableName) != this)
        return null;    // Different table, so this is not our field
    return this.getField(iFieldSeq);
}
java
/**
 * Adds GROUP BY columns to this query. A null array is treated as a no-op
 * and simply returns this query unchanged.
 *
 * @param groupbyColumns expressions to group by, possibly null
 * @return the query with grouping applied, or this query for null input
 */
public Query groupby(final Expression... groupbyColumns) {
    return (groupbyColumns == null) ? this : groupby(Arrays.asList(groupbyColumns));
}
java
/**
 * Returns whether the given type is structured, i.e. its meta type is
 * either ENTITY or COMPLEX.
 */
public static boolean isStructuredType(Type type) {
    MetaType metaType = type.getMetaType();
    return metaType == MetaType.ENTITY || metaType == MetaType.COMPLEX;
}
java
/**
 * Convenience overload: delegates to the full url() variant with a supplier
 * that always yields null for the final argument.
 */
static public BagObject url (String urlString, Bag postData, String postDataMimeType) {
    return url (urlString, postData, postDataMimeType, () -> null);
}
python
def _ensure_opened(self):
    """Start monitors, or restart after a fork.

    Hold the lock when calling this.
    """
    if not self._opened:
        self._opened = True
        self._update_servers()

        # Start or restart the events publishing thread.
        if self._publish_tp or self._publish_server:
            self.__events_executor.open()

    # Ensure that the monitors are open.
    for server in itervalues(self._servers):
        server.open()
python
def getControllerAxisTypeNameFromEnum(self, eAxisType):
    """returns the name of an EVRControllerAxisType enum value. This function is deprecated in favor of the new IVRInput system."""
    # Thin wrapper over the corresponding OpenVR function-table entry.
    fn = self.function_table.getControllerAxisTypeNameFromEnum
    result = fn(eAxisType)
    return result
java
/**
 * Builds (on first call) and populates the SYSTEM_CROSSREFERENCE system
 * table, which describes foreign-key relationships between accessible
 * tables: one row per imported/exported column pair of each FOREIGN KEY
 * constraint, in JDBC getCrossReference() column layout.
 */
final Table SYSTEM_CROSSREFERENCE() {

    Table t = sysTables[SYSTEM_CROSSREFERENCE];

    if (t == null) {
        // First call: create the table definition (columns + pseudo-PK).
        t = createBlankTable(sysTableHsqlNames[SYSTEM_CROSSREFERENCE]);

        addColumn(t, "PKTABLE_CAT", SQL_IDENTIFIER);
        addColumn(t, "PKTABLE_SCHEM", SQL_IDENTIFIER);
        addColumn(t, "PKTABLE_NAME", SQL_IDENTIFIER);     // not null
        addColumn(t, "PKCOLUMN_NAME", SQL_IDENTIFIER);    // not null
        addColumn(t, "FKTABLE_CAT", SQL_IDENTIFIER);
        addColumn(t, "FKTABLE_SCHEM", SQL_IDENTIFIER);
        addColumn(t, "FKTABLE_NAME", SQL_IDENTIFIER);     // not null
        addColumn(t, "FKCOLUMN_NAME", SQL_IDENTIFIER);    // not null
        addColumn(t, "KEY_SEQ", Type.SQL_SMALLINT);       // not null
        addColumn(t, "UPDATE_RULE", Type.SQL_SMALLINT);   // not null
        addColumn(t, "DELETE_RULE", Type.SQL_SMALLINT);   // not null
        addColumn(t, "FK_NAME", SQL_IDENTIFIER);
        addColumn(t, "PK_NAME", SQL_IDENTIFIER);
        addColumn(t, "DEFERRABILITY", Type.SQL_SMALLINT); // not null

        // order: FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, and KEY_SEQ
        // added for unique: FK_NAME
        // false PK, as FKTABLE_CAT, FKTABLE_SCHEM and/or FK_NAME
        // may be null
        HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
            sysTableHsqlNames[SYSTEM_CROSSREFERENCE].name, false,
            SchemaObject.INDEX);

        t.createPrimaryKey(name, new int[] {
            4, 5, 6, 8, 11
        }, false);

        return t;
    }

    PersistentStore store = database.persistentStoreCollection.getStore(t);

    // calculated column values
    String  pkTableCatalog;
    String  pkTableSchema;
    String  pkTableName;
    String  pkColumnName;
    String  fkTableCatalog;
    String  fkTableSchema;
    String  fkTableName;
    String  fkColumnName;
    Integer keySequence;
    Integer updateRule;
    Integer deleteRule;
    String  fkName;
    String  pkName;
    Integer deferrability;

    // Intermediate holders
    Iterator      tables;
    Table         table;
    Table         fkTable;
    Table         pkTable;
    int           columnCount;
    int[]         mainCols;
    int[]         refCols;
    Constraint[]  constraints;
    Constraint    constraint;
    int           constraintCount;
    HsqlArrayList fkConstraintsList;
    Object[]      row;
    DITableInfo   pkInfo;
    DITableInfo   fkInfo;

    // column number mappings
    final int ipk_table_cat   = 0;
    final int ipk_table_schem = 1;
    final int ipk_table_name  = 2;
    final int ipk_column_name = 3;
    final int ifk_table_cat   = 4;
    final int ifk_table_schem = 5;
    final int ifk_table_name  = 6;
    final int ifk_column_name = 7;
    final int ikey_seq        = 8;
    final int iupdate_rule    = 9;
    final int idelete_rule    = 10;
    final int ifk_name        = 11;
    final int ipk_name        = 12;
    final int ideferrability  = 13;

    tables = database.schemaManager.databaseObjectIterator(SchemaObject.TABLE);
    pkInfo = new DITableInfo();
    fkInfo = new DITableInfo();

    // We must consider all the constraints in all the user tables, since
    // this is where reference relationships are recorded.  However, we
    // are only concerned with Constraint.FOREIGN_KEY constraints here
    // because their corresponing Constraint.MAIN entries are essentially
    // duplicate data recorded in the referenced rather than the
    // referencing table.  Also, we skip constraints where either
    // the referenced, referencing or both tables are not accessible
    // relative to the session of the calling context
    fkConstraintsList = new HsqlArrayList();

    while (tables.hasNext()) {
        table = (Table) tables.next();

        if (table.isView() || !isAccessibleTable(table)) {
            continue;
        }

        constraints     = table.getConstraints();
        constraintCount = constraints.length;

        for (int i = 0; i < constraintCount; i++) {
            constraint = (Constraint) constraints[i];

            if (constraint.getConstraintType() == Constraint.FOREIGN_KEY
                    && isAccessibleTable(constraint.getRef())) {
                fkConstraintsList.add(constraint);
            }
        }
    }

    // Now that we have all of the desired constraints, we need to
    // process them, generating one row in our ouput table for each
    // imported/exported column pair of each constraint.
    // Do it.
    for (int i = 0; i < fkConstraintsList.size(); i++) {
        constraint     = (Constraint) fkConstraintsList.get(i);
        pkTable        = constraint.getMain();

        pkInfo.setTable(pkTable);

        pkTableName = pkInfo.getName();
        fkTable     = constraint.getRef();

        fkInfo.setTable(fkTable);

        fkTableName    = fkInfo.getName();
        pkTableCatalog = pkTable.getCatalogName().name;
        pkTableSchema  = pkTable.getSchemaName().name;
        fkTableCatalog = fkTable.getCatalogName().name;
        fkTableSchema  = fkTable.getSchemaName().name;
        mainCols       = constraint.getMainColumns();
        refCols        = constraint.getRefColumns();
        columnCount    = refCols.length;
        fkName         = constraint.getRefName().name;
        pkName         = constraint.getMainName().name;
        deferrability  = ValuePool.getInt(constraint.getDeferability());

        //pkName = constraint.getMainIndex().getName().name;
        deleteRule = ValuePool.getInt(constraint.getDeleteAction());
        updateRule = ValuePool.getInt(constraint.getUpdateAction());

        // One output row per referencing/referenced column pair.
        for (int j = 0; j < columnCount; j++) {
            keySequence  = ValuePool.getInt(j + 1);
            pkColumnName = pkInfo.getColName(mainCols[j]);
            fkColumnName = fkInfo.getColName(refCols[j]);
            row          = t.getEmptyRowData();

            row[ipk_table_cat]   = pkTableCatalog;
            row[ipk_table_schem] = pkTableSchema;
            row[ipk_table_name]  = pkTableName;
            row[ipk_column_name] = pkColumnName;
            row[ifk_table_cat]   = fkTableCatalog;
            row[ifk_table_schem] = fkTableSchema;
            row[ifk_table_name]  = fkTableName;
            row[ifk_column_name] = fkColumnName;
            row[ikey_seq]        = keySequence;
            row[iupdate_rule]    = updateRule;
            row[idelete_rule]    = deleteRule;
            row[ifk_name]        = fkName;
            row[ipk_name]        = pkName;
            row[ideferrability]  = deferrability;

            t.insertSys(store, row);
        }
    }

    return t;
}
python
def AddSerializedFile(self, serialized_file_desc_proto): """Adds the FileDescriptorProto and its types to this pool. Args: serialized_file_desc_proto: A bytes string, serialization of the FileDescriptorProto to add. """ # pylint: disable=g-import-not-at-top from google.protobuf import descriptor_pb2 file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString( serialized_file_desc_proto) self.Add(file_desc_proto)
java
/**
 * Closes every stream in the iterable via IOUtils.closeStream, so each
 * stream is closed independently of the others.
 */
static void closeAllStreams(Iterable<EditLogInputStream> streams) {
    for (EditLogInputStream s : streams) {
        IOUtils.closeStream(s);
    }
}
java
/**
 * Registers additional interfaces to be exported by the endpoint.
 *
 * @param additional interface classes to add
 * @return this builder, for chaining
 */
public EndpointBuilder<T> export(Class<?>... additional) {
    this.additionalInterfaces.addAll(Arrays.asList(additional));
    return this;
}
java
/**
 * Processes the client's Finished handshake message: enforces client
 * authentication requirements, verifies the message against the handshake
 * hash, records renegotiation data, replies with our own change_cipher_spec
 * and Finished when doing a full handshake, and finally caches the session.
 */
private void clientFinished(Finished mesg) throws IOException {
    if (debug != null && Debug.isOn("handshake")) {
        mesg.print(System.out);
    }

    /*
     * Verify if client did send the certificate when client
     * authentication was required, otherwise server should not proceed
     */
    if (doClientAuth == SSLEngineImpl.clauth_required) {
        // get X500Principal of the end-entity certificate for X509-based
        // ciphersuites, or Kerberos principal for Kerberos ciphersuites
        session.getPeerPrincipal();
    }

    /*
     * Verify if client did send clientCertificateVerify message following
     * the client Certificate, otherwise server should not proceed
     */
    if (needClientVerify) {
        fatalSE(Alerts.alert_handshake_failure,
            "client did not send certificate verify message");
    }

    /*
     * Verify the client's message with the "before" digest of messages,
     * and forget about continuing to use that digest.
     */
    boolean verified = mesg.verify(handshakeHash, Finished.CLIENT,
        session.getMasterSecret());

    if (!verified) {
        fatalSE(Alerts.alert_handshake_failure,
            "client 'finished' message doesn't verify");
        // NOTREACHED
    }

    /*
     * save client verify data for secure renegotiation
     */
    if (secureRenegotiation) {
        clientVerifyData = mesg.getVerifyData();
    }

    /*
     * OK, it verified. If we're doing the full handshake, add that
     * "Finished" message to the hash of handshake messages, then send
     * the change_cipher_spec and Finished message.
     */
    if (!resumingSession) {
        input.digestNow();
        sendChangeCipherAndFinish(true);
    }

    /*
     * Update the session cache only after the handshake completed, else
     * we're open to an attack against a partially completed handshake.
     */
    session.setLastAccessedTime(System.currentTimeMillis());

    if (!resumingSession && session.isRejoinable()) {
        ((SSLSessionContextImpl)sslContext.engineGetServerSessionContext())
            .put(session);
        if (debug != null && Debug.isOn("session")) {
            System.out.println(
                "%% Cached server session: " + session);
        }
    } else if (!resumingSession &&
            debug != null && Debug.isOn("session")) {
        System.out.println(
            "%% Didn't cache non-resumable server session: "
            + session);
    }
}
java
/**
 * Converts a value using the type conversion registered for the type key.
 * Null values and null type keys pass through unchanged. When the value
 * implements Listener, it is notified before and after the conversion.
 *
 * @throws IllegalArgumentException when no conversion is registered
 */
public static Object convert(Object typeKey, Object value) {
    if (value==null) {
        return null;
    }
    if (typeKey==null) {
        return value;
    }
    Conversion<?> conversion=getTypeConversion(typeKey,value);
    // Convert the value
    if (conversion!=null) {
        if (value instanceof Listener) {
            ((Listener)value).beforeConversion(typeKey);
        }
        Object result=conversion.convert(value);
        if (value instanceof Listener) {
            // The listener may replace the converted result.
            result=((Listener)value).afterConversion(typeKey,result);
        }
        return result;
    } else {
        throw new IllegalArgumentException("Could not find type "+
            "conversion for type \""+typeKey+"\" (value = \""+value+"\")");
    }
}
python
def set_hosts(sld, tld, hosts):
    '''
    Sets DNS host records settings for the requested domain.

    returns True if the host records were set successfully

    sld
        SLD of the domain name

    tld
        TLD of the domain name

    hosts
        Must be passed as a list of Python dictionaries, with each dictionary
        containing the following keys:

        - **hostname**
        - **recordtype** - One of ``A``, ``AAAA``, ``CNAME``, ``MX``, ``MXE``,
          ``TXT``, ``URL``, ``URL301``, or ``FRAME``
        - **address** - URL or IP address
        - **ttl** - An integer between 60 and 60000 (default: ``1800``)

        Additionally, the ``mxpref`` key can be present, but must be
        accompanied by an ``emailtype`` key.

    CLI Example:

    .. code-block:: bash

        salt 'my-minion' namecheap_domains_dns.set_hosts sld tld hosts
    '''
    opts = salt.utils.namecheap.get_opts('namecheap.domains.dns.setHosts')
    opts['SLD'] = sld
    opts['TLD'] = tld
    # The API expects 1-based, per-record numbered parameters
    # (HostName1, RecordType1, ...).
    i = 1
    for hostrecord in hosts:
        str_i = six.text_type(i)
        opts['HostName' + str_i] = hostrecord['hostname']
        opts['RecordType' + str_i] = hostrecord['recordtype']
        opts['Address' + str_i] = hostrecord['address']
        if 'ttl' in hostrecord:
            opts['TTL' + str_i] = hostrecord['ttl']
        if 'mxpref' in hostrecord:
            opts['MXPref' + str_i] = hostrecord['mxpref']
            # NOTE(review): EmailType is set globally, not per-record -- the
            # last record carrying mxpref wins. Confirm this is intended.
            opts['EmailType'] = hostrecord['emailtype']
        i += 1
    response_xml = salt.utils.namecheap.post_request(opts)
    if response_xml is None:
        return False
    dnsresult = response_xml.getElementsByTagName('DomainDNSSetHostsResult')[0]
    return salt.utils.namecheap.string_to_value(dnsresult.getAttribute('IsSuccess'))
java
/**
 * Parses an IPv6 address string (including "::" shorthand and IPv4-mapped
 * notation) into an IPv6Address.
 *
 * @throws IllegalArgumentException when the string is null or unparsable
 */
public static IPv6Address fromString(final String string) {
    if (string == null)
        throw new IllegalArgumentException("can not parse [null]");

    // Normalize first: rewrite IPv4-mapped notation, then expand "::".
    final String withoutIPv4MappedNotation = IPv6AddressHelpers.rewriteIPv4MappedNotation(string);
    final String longNotation = IPv6AddressHelpers.expandShortNotation(withoutIPv4MappedNotation);

    // The original string is passed along for error reporting.
    final long[] longs = tryParseStringArrayIntoLongArray(string, longNotation);

    IPv6AddressHelpers.validateLongs(longs);

    return IPv6AddressHelpers.mergeLongArrayIntoIPv6Address(longs);
}
java
/**
 * Writes a security buffer descriptor into b at the given offset:
 * 2-byte length, 2-byte allocated length, then 4-byte buffer offset.
 */
public static void writeSecurityBuffer(short length, short allocated, int bufferOffset,
    byte[] b, int offset) {
    ByteUtilities.writeShort(length, b, offset);
    ByteUtilities.writeShort(allocated, b, offset + 2);
    ByteUtilities.writeInt(bufferOffset, b, offset + 4);
}
python
def unicode_escape_sequence_fix(self, value):
    """Decode literal unicode escape sequences found in *value*.

    The config may spell unicode characters either directly in UTF-8 or
    with escape sequences; all of the following produce the Greek delta:

    Δ
    \\N{GREEK CAPITAL LETTER DELTA}
    \\U00000394
    \\u0394
    """
    pattern = r"\\\\|\\u\w{4}|\\U\w{8}|\\N\{([^}\\]|\\.)+\}"

    def _decode(match):
        text = match.group(0)
        # we don't escape an escaped backslash
        if text == r"\\":
            return text
        return text.encode("utf-8").decode("unicode-escape")

    return re.sub(pattern, _decode, value)
java
/**
 * Removes all CPSpecificationOption entities that belong to the given group.
 */
@Override
public void removeByGroupId(long groupId) {
    for (CPSpecificationOption cpSpecificationOption : findByGroupId(
            groupId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
        remove(cpSpecificationOption);
    }
}
python
def read(self, num):
    """Read ``num`` number of bytes from the stream. Note that this will
    automatically resets/ends the current bit-reading if it does not
    end on an even byte AND ``self.padded`` is True. If ``self.padded`` is
    True, then the entire stream is treated as a bitstream.

    :num: number of bytes to read
    :returns: the read bytes, or empty string if EOF has been reached
    """
    start_pos = self.tell()

    if self.padded:
        # we toss out any uneven bytes
        self._bits.clear()
        res = utils.binary(self._stream.read(num))
    else:
        # Unpadded: pull bits off the bitstream and repack them as bytes.
        bits = self.read_bits(num * 8)
        res = bits_to_bytes(bits)
        res = utils.binary(res)

    end_pos = self.tell()
    self._update_consumed_ranges(start_pos, end_pos)

    return res
python
def readTicks(self, start, end):
    ''' read ticks '''
    # Scan the tick column family between the start and end row keys and
    # convert each raw row result into a tick object.
    rows = self.__hbase.scanTable(self.tableName(HBaseDAM.TICK),
                                  [HBaseDAM.TICK], start, end)
    return [self.__rowResultToTick(row) for row in rows]
python
def _check_operators(self, operators):
    """ Check Inputs

    This method checks that the input operators and weights are correctly
    formatted

    Parameters
    ----------
    operators : list, tuple or np.ndarray
        List of linear operator class instances

    Returns
    -------
    np.array
        operators

    Raises
    ------
    TypeError
        For invalid input type
    ValueError
        For an empty operator list or operators missing "op"/"cost" methods

    """
    if not isinstance(operators, (list, tuple, np.ndarray)):
        raise TypeError('Invalid input type, operators must be a list, '
                        'tuple or numpy array.')

    operators = np.array(operators)

    if not operators.size:
        raise ValueError('Operator list is empty.')

    for operator in operators:
        # Each operator must expose callable "op" and "cost" methods.
        if not hasattr(operator, 'op'):
            raise ValueError('Operators must contain "op" method.')
        if not hasattr(operator, 'cost'):
            raise ValueError('Operators must contain "cost" method.')
        operator.op = check_callable(operator.op)
        operator.cost = check_callable(operator.cost)

    return operators
python
def ast_str(self, indent=0):
    """Render this node as a single line of a tree-style dump.

    Kwargs:
        indent (int): The number of indentation levels.
    """
    prefix = indent * '| '
    line_no = self.line or 0
    col_no = self.column or 0
    node_kind = type(self).__name__
    spelling = getattr(self, 'name', '[no spelling]')
    # The optional result is shown parenthesized right after the kind.
    suffix = ' ({})'.format(self.result) if hasattr(self, 'result') else ''
    return '{}[{}:{}] {}{}: {}'.format(
        prefix, line_no, col_no, node_kind, suffix, spelling)
java
/**
 * Returns whether the given token name is a configured session token for
 * the site. Site keys stored without a port default to port 80.
 */
public boolean isSessionToken(String site, String token) {
    // Add a default port
    if (!site.contains(":")) {
        site = site + (":80");
    }
    HttpSessionTokensSet siteTokens = sessionTokens.get(site);
    if (siteTokens == null)
        return false;
    return siteTokens.isSessionToken(token);
}
java
/**
 * Counts the encoded characters preceding a NUL terminator, starting at p.
 * For encodings whose minimum character length is more than one byte, the
 * terminator is only recognized when all minLength() bytes are zero.
 * NOTE(review): assumes a terminator is present before "end"; confirm the
 * caller guarantees this, since the loop itself has no bound check.
 */
public final int strLengthNull(byte[]bytes, int p, int end) {
    int n = 0;

    while (true) {
        if (bytes[p] == 0) {
            // Candidate terminator; verify the remaining minLength bytes.
            int len = minLength();

            if (len == 1) return n;
            int q = p + 1;

            while (len > 1) {
                if (bytes[q] != 0) break;
                q++;
                len--;
            }
            if (len == 1) return n;
        }
        // Advance by one encoded character and keep counting.
        p += length(bytes, p, end);
        n++;
    }
}
python
def _wrap_function_return(val):
    """
    Recursively walks each thing in val, opening lists and dictionaries,
    converting all occurrences of UnityGraphProxy to an SGraph,
    UnitySFrameProxy to SFrame, and UnitySArrayProxy to SArray.
    """
    # Exact type checks (type(...) is ...) are deliberate: subclasses of the
    # proxies/containers are returned unchanged by the final else branch.
    if type(val) is _UnityGraphProxy:
        return _SGraph(_proxy = val)
    elif type(val) is _UnitySFrameProxy:
        return _SFrame(_proxy = val)
    elif type(val) is _UnitySArrayProxy:
        return _SArray(_proxy = val)
    elif type(val) is _UnityModel:
        # we need to cast it up to the appropriate type
        uid = val.get_uid()
        if uid in class_uid_to_class:
            return class_uid_to_class[uid](_proxy=val)
        else:
            return val
    elif type(val) is list:
        return [_wrap_function_return(i) for i in val]
    elif type(val) is dict:
        return dict( (i, _wrap_function_return(val[i])) for i in val)
    else:
        return val
java
/**
 * Deletes the named KAM store schema when schema management is enabled;
 * otherwise truncates it by re-running the database-specific setup script.
 *
 * @return true when the schema was deleted, false when only truncated
 * @throws IOException when running a script fails
 */
@Override
public boolean deleteKAMStoreSchema(DBConnection dbc, String schemaName) throws IOException {
    boolean deleteSchemas = getSchemaManagementStatus(dbc);
    if (deleteSchemas) {
        runScripts(dbc, "/" + dbc.getType() + DELETE_KAM_SQL_PATH, schemaName, deleteSchemas);
    } else {
        // Truncate the schema instead of deleting it.
        InputStream sqlStream = null;
        if (dbc.isMysql()) {
            sqlStream = getClass().getResourceAsStream(
                "/" + dbc.getType() + KAM_SQL_PATH + "1.sql");
        } else if (dbc.isOracle()) {
            sqlStream = getClass().getResourceAsStream(
                "/" + dbc.getType() + KAM_SQL_PATH + "0.sql");
        }

        if (sqlStream != null) {
            runScript(dbc, sqlStream, schemaName);
        }
    }
    return deleteSchemas;
}
java
/**
 * Sends an RTPBridge IQ for the given session to the connection's service
 * domain and synchronously waits for the response.
 *
 * @return the bridge response, or null when the connection is not connected
 */
@SuppressWarnings("deprecation")
public static RTPBridge getRTPBridge(XMPPConnection connection, String sessionID)
        throws NotConnectedException, InterruptedException {
    if (!connection.isConnected()) {
        return null;
    }

    RTPBridge rtpPacket = new RTPBridge(sessionID);
    rtpPacket.setTo(RTPBridge.NAME + "." + connection.getXMPPServiceDomain());

    StanzaCollector collector = connection.createStanzaCollectorAndSend(rtpPacket);

    RTPBridge response = collector.nextResult();

    // Cancel the collector.
    collector.cancel();

    return response;
}
python
def _connectToAPI(self): """ :return: A tweepy.API object that performs the queries """ #authorize twitter, initialize tweepy auth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret) auth.set_access_token(self.access_key, self.access_secret) api = tweepy.API(auth) return api
java
/**
 * Fetches all cacheselector resources via the nitro service, applying the
 * given retrieval options.
 */
public static cacheselector[] get(nitro_service service, options option) throws Exception{
    cacheselector obj = new cacheselector();
    cacheselector[] response = (cacheselector[])obj.get_resources(service,option);
    return response;
}
java
/**
 * Returns the boolean value stored under {@code key}, or {@code dfl} when
 * the key is absent.
 *
 * @throws IllegalArgumentException when a value exists for the key but is
 *         not the boolean alternative of the stored union type
 */
public boolean getBooleanOrDefault(int key, boolean dfl) {
    // Look up by integer key, substituting the default when absent.
    Any3<Boolean, Integer, String> value = data.getOrDefault(Any2.<Integer, String>left(key), Any3.<Boolean, Integer, String>create1(dfl));
    // The stored value must actually be the boolean alternative.
    return value.get1().orElseThrow(() -> new IllegalArgumentException("expected boolean argument for param " + key));
}
python
def return_opml_response(self, context, **response_kwargs):
    ''' Returns export data as an opml file. '''
    # Swap in the OPML template and render with an XML content type.
    self.template_name = 'fiction_outlines/outline.opml'
    response = super().render_to_response(context, content_type='text/xml', **response_kwargs)
    # Serve as a download, named after the slugified outline title.
    response['Content-Disposition'] = 'attachment; filename="{}.opml"'.format(slugify(self.object.title))
    return response
python
def _initiate_replset(self, port, name, maxwait=30):
    """Initiate replica set.

    :param port: port of a localhost member to connect to.
    :param name: replica set name, also the key into self.config_docs.
    :param maxwait: maximum number of one-second initiate retries.
    """
    # Skip initialization unless a replica set was requested; the config
    # server replica set ('configRepl') is always initialized.
    if not self.args['replicaset'] and name != 'configRepl':
        if self.args['verbose']:
            print('Skipping replica set initialization for %s' % name)
        return

    con = self.client('localhost:%i' % port)
    try:
        # Already initiated? Then just return the current status.
        rs_status = con['admin'].command({'replSetGetStatus': 1})
        return rs_status
    except OperationFailure as e:
        # not initiated yet
        for i in range(maxwait):
            try:
                con['admin'].command({'replSetInitiate': self.config_docs[name]})
                break
            except OperationFailure as e:
                # NOTE(review): e.message is Python-2-only; also, the code
                # falls through to the "initialized" message even when all
                # maxwait attempts fail — confirm this is intentional.
                print(e.message + " - will retry")
                time.sleep(1)

        if self.args['verbose']:
            print("initializing replica set '%s' with configuration: %s"
                  % (name, self.config_docs[name]))
        print("replica set '%s' initialized." % name)
python
def is_false(self):
    """
    Ensures :attr:`subject` is ``False``.
    """
    subject = self._subject
    # Delegate the actual check to unittest's assertFalse.
    self._run(unittest_case.assertFalse, (subject,))
    return ChainInspector(subject)
python
def pre_init():
    """
    The pre_init function of the plugin. Here rafcon-classes can be extended/monkey-patched or completely substituted.
    A example is given with the rafcon_execution_hooks_plugin.
    :return:
    """
    logger.info("Run pre-initiation hook of {} plugin.".format(__file__.split(os.path.sep)[-2]))

    # Example: Monkey-Path rafcon.core.script.Script class to print additional log-message while execution
    from rafcon.core.script import Script

    # Keep a reference to the original so the patched method can delegate.
    old_execute_method = Script.execute

    def new_execute_method(self, state, inputs=None, outputs=None, backward_execution=False):
        logger.debug("patched version of Script class is used.")
        result = old_execute_method(self, state, inputs, outputs, backward_execution)
        logger.debug("patched version of Script execute-method is finished with result: {}.".format(result))
        return result

    # Replace the method on the class itself: every Script instance is affected.
    Script.execute = new_execute_method
java
/**
 * Sets this matrix to a rotation of {@code ang} radians about the X axis.
 *
 * @param ang rotation angle in radians
 * @return this matrix, for chaining
 */
public Matrix4f rotationX(float ang) {
    float sin, cos;
    sin = (float) Math.sin(ang);
    // JOML's Math.cosFromSin derives the cosine from the already computed
    // sine instead of issuing a second trig call.
    cos = (float) Math.cosFromSin(sin, ang);
    // Reset to identity first, unless the matrix is already known to be one.
    if ((properties & PROPERTY_IDENTITY) == 0)
        MemUtil.INSTANCE.identity(this);
    // Only the Y/Z components change for a rotation about X.
    this._m11(cos);
    this._m12(sin);
    this._m21(-sin);
    this._m22(cos);
    _properties(PROPERTY_AFFINE | PROPERTY_ORTHONORMAL);
    return this;
}
java
/**
 * Adds credit to the view reader registered for the given receiver and
 * re-enqueues it if it became available.
 *
 * @throws IllegalStateException when no reader is registered for the id
 */
void addCredit(InputChannelID receiverId, int credit) throws Exception {
    // Ignore credit announcements once a fatal error has occurred.
    if (fatalError) {
        return;
    }

    NetworkSequenceViewReader reader = allReaders.get(receiverId);
    if (reader == null) {
        throw new IllegalStateException("No reader for receiverId = " + receiverId + " exists.");
    }

    reader.addCredit(credit);
    enqueueAvailableReader(reader);
}
java
/**
 * Applies a one-dimensional horizontal convolution with the given kernel
 * to {@code srcPixels}, writing the result TRANSPOSED into
 * {@code dstPixels} (output index advances by {@code height} per column),
 * so that two passes of this method produce a full 2D separable blur.
 *
 * @param srcPixels source ARGB pixels, row-major, width*height
 * @param dstPixels destination ARGB pixels, written column-major
 * @param width source image width
 * @param height source image height
 * @param kernel convolution kernel of length 2*radius+1
 * @param radius kernel radius
 */
private static void blur(int[] srcPixels, int[] dstPixels,
                         int width, int height,
                         float[] kernel, int radius) {
    float a;
    float r;
    float g;
    float b;

    int ca;
    int cr;
    int cg;
    int cb;

    for (int y = 0; y < height; y++) {
        // index walks down a column of the transposed destination.
        int index = y;
        int offset = y * width;

        for (int x = 0; x < width; x++) {
            a = r = g = b = 0.0f;

            for (int i = -radius; i <= radius; i++) {
                int subOffset = x + i;
                if (subOffset < 0 || subOffset >= width) {
                    // NOTE(review): (x + width) % width == x for in-range x,
                    // so out-of-range taps reuse the center pixel; confirm
                    // this clamp-to-center edge handling is intended (a wrap
                    // would be (subOffset + width) % width).
                    subOffset = (x + width) % width;
                }

                int pixel = srcPixels[offset + subOffset];
                float blurFactor = kernel[radius + i];

                a += blurFactor * ((pixel >> 24) & 0xFF);
                r += blurFactor * ((pixel >> 16) & 0xFF);
                g += blurFactor * ((pixel >> 8) & 0xFF);
                b += blurFactor * ((pixel) & 0xFF);
            }

            // Round to nearest and clamp each channel to 255.
            ca = (int) (a + 0.5f);
            cr = (int) (r + 0.5f);
            cg = (int) (g + 0.5f);
            cb = (int) (b + 0.5f);

            dstPixels[index] = ((ca > 255 ? 255 : ca) << 24) |
                               ((cr > 255 ? 255 : cr) << 16) |
                               ((cg > 255 ? 255 : cg) << 8) |
                               (cb > 255 ? 255 : cb);
            index += height;
        }
    }
}
python
def _M2_sparse(Xvar, mask_X, Yvar, mask_Y, weights=None):
    """ 2nd moment matrix exploiting zero input columns """
    # Compute the dense second-moment block for the retained columns only,
    # then scatter it into a full-size zero matrix at the masked positions.
    dense_block = _M2_dense(Xvar, Yvar, weights=weights)
    C = np.zeros((len(mask_X), len(mask_Y)))
    C[np.ix_(mask_X, mask_Y)] = dense_block
    return C
java
/**
 * Answers the local-date query with this object itself and delegates every
 * other query to the superclass.
 */
@SuppressWarnings("unchecked")
@Override
public <R> R query(TemporalQuery<R> query) {
    // This temporal already is the local date, so short-circuit that query.
    return query == TemporalQueries.localDate() ? (R) this : super.query(query);
}
java
/**
 * Builds the metric scope formats from the configured scope naming options.
 */
public static ScopeFormats fromConfig(Configuration config) {
    final String jobManagerFormat    = config.getString(MetricOptions.SCOPE_NAMING_JM);
    final String jobManagerJobFormat = config.getString(MetricOptions.SCOPE_NAMING_JM_JOB);
    final String taskManagerFormat   = config.getString(MetricOptions.SCOPE_NAMING_TM);
    final String taskManagerJobFormat = config.getString(MetricOptions.SCOPE_NAMING_TM_JOB);
    final String taskScopeFormat     = config.getString(MetricOptions.SCOPE_NAMING_TASK);
    final String operatorScopeFormat = config.getString(MetricOptions.SCOPE_NAMING_OPERATOR);

    return new ScopeFormats(
            jobManagerFormat,
            jobManagerJobFormat,
            taskManagerFormat,
            taskManagerJobFormat,
            taskScopeFormat,
            operatorScopeFormat);
}
java
/**
 * Reads the full set of Webhook resources, starting from the first page;
 * further pages are fetched lazily by the returned ResourceSet.
 */
@Override
public ResourceSet<Webhook> read(final TwilioRestClient client) {
    return new ResourceSet<>(this, client, firstPage(client));
}
java
/**
 * Asynchronously fetches raid information for the given ids.
 *
 * @param ids raid ids to query; validated before the request is issued
 * @param callback receives the resulting list of raids
 * @throws GuildWars2Exception when the id list fails validation
 * @throws NullPointerException when a required argument is null
 */
public void getRaidInfo(String[] ids, Callback<List<Raid>> callback) throws GuildWars2Exception, NullPointerException {
    // Validate ids up front so bad input fails before any network call.
    isParamValid(new ParamChecker(ids));
    // Enqueue the call; results arrive on the callback.
    gw2API.getRaidInfo(processIds(ids), GuildWars2.lang.getValue()).enqueue(callback);
}
java
/**
 * Generates a unique alias based on {@code prefix}: first the bare prefix,
 * then "prefix1", "prefix2", ... until an unused name is found. The chosen
 * alias is recorded in {@code m_usedAliases} and returned.
 *
 * @param prefix base name for the alias
 * @return a unique alias not previously returned by this method
 */
private String makeAlias(String prefix) {
    int suffix = 0;
    String result = prefix;
    // BUG FIX: the original do/while never incremented its counter, so it
    // looped forever once the bare prefix was already taken.
    while (m_usedAliases.contains(result)) {
        suffix++;
        result = prefix + suffix;
    }
    m_usedAliases.add(result);
    return result;
}
java
/**
 * Removes a workflow from the server, optionally archiving it first.
 *
 * @param workflowId id of the workflow to remove; must not be blank
 * @param archiveWorkflow whether to archive the workflow on removal
 */
public void deleteWorkflow(String workflowId, boolean archiveWorkflow) {
    Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "Workflow id cannot be blank");
    // NOTE(review): "setWorkflodId" is the (misspelled) setter generated
    // from the protobuf definition; it must match the stub's API.
    stub.removeWorkflow(
        WorkflowServicePb.RemoveWorkflowRequest.newBuilder()
            .setWorkflodId(workflowId)
            .setArchiveWorkflow(archiveWorkflow)
            .build()
    );
}
java
/**
 * Returns the item attribute maps held by this object. The live list is
 * returned, not a copy.
 */
public java.util.List<java.util.Map<String, AttributeValue>> getItems() {
    return items;
}
python
async def discover_nupnp(websession):
    """Discover bridges via NUPNP."""
    async with websession.get(URL_NUPNP) as res:
        entries = await res.json()
    # One Bridge per discovered entry, keyed by its internal IP address.
    return [Bridge(entry['internalipaddress'], websession=websession)
            for entry in entries]
java
/**
 * Reads a UTF-8 text file line by line into a linked list, keeping memory
 * usage low by never buffering the whole file. A UTF-8 BOM on the first
 * line is stripped. On failure a warning is logged and the lines read so
 * far are returned.
 *
 * @param path path of the file to read
 * @return the lines of the file (possibly partial on error)
 */
public static LinkedList<String> readLineListWithLessMemory(String path)
{
    LinkedList<String> result = new LinkedList<String>();
    String line = null;
    boolean first = true;
    BufferedReader bw = null;
    try
    {
        bw = new BufferedReader(new InputStreamReader(IOUtil.newInputStream(path), "UTF-8"));
        while ((line = bw.readLine()) != null)
        {
            if (first)
            {
                first = false;
                // Strip the UTF-8 BOM if the file starts with one.
                if (!line.isEmpty() && line.charAt(0) == '\uFEFF')
                    line = line.substring(1);
            }
            result.add(line);
        }
    }
    catch (Exception e)
    {
        logger.warning("加载" + path + "失败," + e);
    }
    finally
    {
        // BUG FIX: the original only closed the reader on the success path,
        // leaking the stream whenever readLine threw.
        if (bw != null)
        {
            try
            {
                bw.close();
            }
            catch (Exception ignored)
            {
            }
        }
    }

    return result;
}
python
def _using_stdout(self):
    """
    Return whether the handler is using sys.stdout.
    """
    stream = self.stream
    if WINDOWS and colorama:
        # On Windows with colorama, self.stream is an AnsiToWin32 wrapper,
        # so the comparison must use the wrapped stream.
        stream = stream.wrapped
    return stream is sys.stdout
python
def get_group_names(self):
    """
    Returns the set of Django group names that this user belongs to by
    virtue of LDAP group memberships.
    """
    if self._group_names is None:
        # Try the cached value first to avoid an LDAP round trip.
        self._load_cached_attr("_group_names")

    if self._group_names is None:
        # Cache miss: resolve group infos from LDAP and map each one to a
        # Django group name via the configured group type.
        group_infos = self._get_group_infos()
        self._group_names = {
            self._group_type.group_name_from_info(group_info)
            for group_info in group_infos
        }
        self._cache_attr("_group_names")

    return self._group_names
java
/**
 * Finds a response converter for the given type by consulting each
 * registered factory in order; the first non-null converter wins.
 *
 * @throws IllegalArgumentException when no factory can handle the type;
 *         the message lists every factory that was tried
 */
public Converter<NetworkResponse, ?> getResponseConverter(Type type, Annotation[] annotations) {
    checkNotNull(type, "type == null");
    checkNotNull(annotations, "annotations == null");

    for (Factory factory : converterFactories) {
        Converter<NetworkResponse, ?> candidate = factory.fromResponse(type, annotations);
        if (candidate != null) {
            return candidate;
        }
    }

    // Nothing matched: report every factory that was consulted.
    StringBuilder builder = new StringBuilder("Could not locate Response converter for ")
        .append(type)
        .append(". Tried:");
    for (Factory converterFactory : converterFactories) {
        builder.append("\n * ").append(converterFactory.getClass().getName());
    }
    throw new IllegalArgumentException(builder.toString());
}
java
/**
 * Applies every given action to the single view, always with index 0.
 * Must be called on the UI thread.
 */
@UiThread
@SafeVarargs
public static <T extends View> void run(@NonNull T view, @NonNull Action<? super T>... actions) {
    for (int i = 0; i < actions.length; i++) {
        actions[i].apply(view, 0);
    }
}
python
def get_dependants(cls, dist):
    """Yield dependant user packages for a given package name."""
    wanted = dist.lower()
    for package in cls.installed_distributions:
        for requirement in package.requires():
            # perform case-insensitive matching
            if requirement.project_name.lower() == wanted:
                yield package
java
/**
 * Logs a VERBOSE message with an attached throwable, delegating to the
 * shared logger instance.
 */
public static void v(Throwable t, String tag, String message, Object... args) {
    sLogger.v(t, tag, message, args);
}
python
def save(filepath, *args, encoding=None, **kwargs):
    """Same as `get_tikz_code()`, but actually saves the code to a file.

    :param filepath: The file to which the TikZ output will be written.
    :type filepath: str

    :param encoding: Sets the text encoding of the output file, e.g. 'utf-8'.
      For supported values: see ``codecs`` module.

    :returns: None
    """
    code = get_tikz_code(*args, filepath=filepath, **kwargs)
    # BUG FIX: use a context manager so the handle is closed even when an
    # unexpected exception escapes; the original leaked it in that case.
    with codecs.open(filepath, "w", encoding) as file_handle:
        try:
            file_handle.write(code)
        except UnicodeEncodeError:
            # We're probably using Python 2, so treat unicode explicitly
            file_handle.write(six.text_type(code).encode("utf-8"))
java
/**
 * Decodes UTF-32 bytes from the underlying buffer into {@code cbuf},
 * starting at {@code start}, for at most {@code len} chars. Code points
 * above the BMP are split into surrogate pairs; when only the high
 * surrogate fits, the low one is stashed in {@code mSurrogate} for the
 * next call. Illegal code points (surrogates, 0xFFFE/0xFFFF, values above
 * the Unicode maximum) are reported via {@code reportInvalid}. In XML 1.1
 * mode, NEL (0x85) and LSEP (0x2028) are normalized.
 *
 * @return the number of chars written, or -1 at end of input
 */
@Override
public int read(char[] cbuf, int start, int len) throws IOException {
    // Let's first ensure there's enough room...
    if (start < 0 || (start+len) > cbuf.length) {
        reportBounds(cbuf, start, len);
    }
    // Already EOF?
    if (mByteBuffer == null) {
        return -1;
    }
    if (len < 1) {
        return 0;
    }
    // len is repurposed as the exclusive end index from here on.
    len += start;
    int outPtr = start;

    // Ok, first; do we have a surrogate from last round?
    if (mSurrogate != NULL_CHAR) {
        cbuf[outPtr++] = mSurrogate;
        mSurrogate = NULL_CHAR;
        // No need to load more, already got one char
    } else {
        /* Note: we'll try to avoid blocking as much as possible. As a
         * result, we only need to get 4 bytes for a full char.
         */
        int left = (mByteBufferEnd - mBytePtr);
        if (left < 4) {
            if (!loadMore(left)) { // (legal) EOF?
                return -1;
            }
        }
    }

    byte[] buf = mByteBuffer;

    main_loop:
    while (outPtr < len) {
        int ptr = mBytePtr;
        int ch;

        // Assemble one 32-bit code point honoring the stream's byte order.
        if (mBigEndian) {
            ch = (buf[ptr] << 24) | ((buf[ptr+1] & 0xFF) << 16)
                | ((buf[ptr+2] & 0xFF) << 8) | (buf[ptr+3] & 0xFF);
        } else {
            ch = (buf[ptr] & 0xFF) | ((buf[ptr+1] & 0xFF) << 8)
                | ((buf[ptr+2] & 0xFF) << 16) | (buf[ptr+3] << 24);
        }
        mBytePtr += 4;

        // Does it need to be split to surrogates?
        // (also, we can and need to verify illegal chars)
        if (ch >= 0x7F) {
            if (ch <= 0x9F) {
                if (mXml11) { // high-order ctrl char detection...
                    if (ch != 0x85) {
                        reportInvalid(ch, outPtr-start, "(can only be included via entity in xml 1.1)");
                    }
                    ch = CONVERT_NEL_TO;
                }
            } else if (ch >= 0xD800) {
                // Illegal?
                if (ch > XmlConsts.MAX_UNICODE_CHAR) {
                    reportInvalid(ch, outPtr-start,
                                  "(above "+Integer.toHexString(XmlConsts.MAX_UNICODE_CHAR)+") ");
                }
                if (ch > 0xFFFF) { // need to split into surrogates?
                    ch -= 0x10000; // to normalize it starting with 0x0
                    cbuf[outPtr++] = (char) (0xD800 + (ch >> 10));
                    // hmmh. can this ever be 0? (not legal, at least?)
                    ch = (0xDC00 | (ch & 0x03FF));
                    // Room for second part?
                    if (outPtr >= len) { // nope
                        mSurrogate = (char) ch;
                        break main_loop;
                    }
                } else { // in 16-bit range... just need validity checks
                    if (ch < 0xE000) {
                        reportInvalid(ch, outPtr-start, "(a surrogate char) ");
                    } else if (ch >= 0xFFFE) {
                        reportInvalid(ch, outPtr-start, "");
                    }
                }
            } else if (ch == 0x2028 && mXml11) { // LSEP
                ch = CONVERT_LSEP_TO;
            }
        }
        cbuf[outPtr++] = (char) ch;
        if (mBytePtr >= mByteBufferEnd) {
            break main_loop;
        }
    }

    len = outPtr - start;
    mCharCount += len;
    return len;
}
java
/**
 * Decides whether a refresh token may be issued for the given client
 * request, preferring the client's registered grant types when a client
 * details service is available.
 */
protected boolean isSupportRefreshToken(OAuth2Request clientAuth) {
    // Without a client details service, fall back to the configured flag.
    if (clientDetailsService == null) {
        return this.supportRefreshToken;
    }
    ClientDetails client = clientDetailsService.loadClientByClientId(clientAuth.getClientId());
    return client.getAuthorizedGrantTypes().contains("refresh_token");
}
java
/**
 * Writes the whole tree ensemble as a single Graphviz "digraph" document
 * to {@code os}, delegating per-tree output to each subgraph.
 *
 * @param os destination stream for the .gv text
 * @param maxLevelsToPrintPerEdge limits edge label verbosity
 * @param detail whether to emit detailed node information
 * @param optionalTitle optional graph title, may be null
 * @param treeOptions additional print options
 */
public void printDot(PrintStream os, int maxLevelsToPrintPerEdge, boolean detail, String optionalTitle, PrintMojo.PrintTreeOptions treeOptions) {
    os.println("/*");
    os.println("Generated by:");
    // BUG FIX: the original emitted a malformed "http://https://..." URL.
    os.println("      https://github.com/h2oai/h2o-3/tree/master/h2o-genmodel/src/main/java/hex/genmodel/tools/PrintMojo.java");
    os.println("*/");
    os.println("");
    os.println("/*");
    os.println("On a mac:");
    os.println("");
    os.println("$ brew install graphviz");
    os.println("$ dot -Tpng file.gv -o file.png");
    os.println("$ open file.png");
    os.println("*/");
    os.println("");
    os.println("digraph G {");
    for (SharedTreeSubgraph sg : subgraphArray) {
        sg.printDot(os, maxLevelsToPrintPerEdge, detail, optionalTitle, treeOptions);
    }
    os.println("");
    os.println("}");
    os.println("");
}
python
def prepare_sorting_fields(self):
    """
    Determine sorting direction and sorting field based on request query
    parameters and sorting options of self
    """
    if self.sorting_parameter_name in self.request.query_params:
        # Extract sorting parameter from query string
        raw_value = self.request.query_params.get(self.sorting_parameter_name)
        self._sorting_fields = [part.strip() for part in raw_value.split(',')]

    if self._sorting_fields:
        # Turn each entry into (field, descending): translate the bare name
        # through sorting_fields_map and treat a leading '-' as descending.
        parsed = []
        for field in self._sorting_fields:
            bare_name = field.lstrip('-')
            parsed.append(
                (self.sorting_fields_map.get(bare_name, bare_name), field[0] == '-')
            )
        self._sorting_fields = parsed
java
/**
 * Stops any running application, recompiles the sources, starts watching
 * for file changes, and runs the compiled application on a fresh thread,
 * blocking until it finishes.
 */
public void compileAndRun() throws Exception {
    synchronized (this.monitor) {
        try {
            // Stop any currently running application before recompiling.
            stop();

            Class<?>[] compiledSources = compile();

            // Watch sources so subsequent edits trigger a re-run.
            monitorForChanges();

            // Run in new thread to ensure that the context classloader is setup
            this.runThread = new RunThread(compiledSources);
            this.runThread.start();
            this.runThread.join();
        }
        catch (Exception ex) {
            // With an active file watcher, keep running and just report the
            // error; without one there is no recovery path, so propagate.
            if (this.fileWatchThread == null) {
                throw ex;
            }
            else {
                ex.printStackTrace();
            }
        }
    }
}
python
def signup_or_login_with_mobile_phone(cls, phone_number, sms_code):
    '''
    Sign up, or log in, a user via mobile phone number and SMS code.

    :param phone_number: string_types
    :param sms_code: string_types

    Call request_sms_code to obtain an SMS verification code before using
    this method.
    '''
    data = {
        'mobilePhoneNumber': phone_number,
        'smsCode': sms_code
    }
    response = client.post('/usersByMobilePhone', data)
    content = response.json()
    user = cls()
    user._update_data(content)
    user._handle_save_result(True)
    # NOTE(review): when the server response omits smsCode, any stale local
    # copy is dropped — presumably to avoid persisting the code; confirm.
    if 'smsCode' not in content:
        user._attributes.pop('smsCode', None)
    return user
java
/**
 * Applies {@code operation} to every element produced by {@code iterator},
 * committing a transaction after every {@code commitCount} processed
 * objects. Per-object failures go to the operation's throwable handler;
 * whole-transaction failures go to its transaction-failure handler. An
 * optional condition from the operation can stop processing early.
 *
 * @param securityContext security context the transactions run under
 * @param iterator source of objects to process
 * @param commitCount number of objects per transaction chunk
 * @param description optional progress label for logging, may be null
 * @param operation per-object callback plus error handlers and flags
 * @param validation unused here; validation is taken from the operation
 * @return the number of objects successfully handled
 */
public <T> long bulkGraphOperation(final SecurityContext securityContext, final Iterator<T> iterator, final long commitCount, String description, final BulkGraphOperation<T> operation, boolean validation) {

    final Predicate<Long> condition = operation.getCondition();
    final App app                   = StructrApp.getInstance(securityContext);
    final boolean doValidation      = operation.doValidation();
    final boolean doCallbacks       = operation.doCallbacks();
    final boolean doNotifications   = operation.doNotifications();
    long objectCount                = 0L;
    boolean active                  = true;

    // Outer loop: one transaction per chunk, until the iterator is drained.
    while (active) {

        active = false;

        try (final Tx tx = app.tx(doValidation, doCallbacks, doNotifications)) {

            while (iterator.hasNext() && (condition == null || condition.accept(objectCount))) {

                T node = iterator.next();
                active = true;

                try {

                    boolean success = operation.handleGraphObject(securityContext, node);
                    if (success) {
                        objectCount++;
                    }

                } catch (Throwable t) {

                    operation.handleThrowable(securityContext, t, node);
                }

                // commit transaction after commitCount
                if ((objectCount % commitCount) == 0) {
                    break;
                }
            }

            tx.success();

        } catch (Throwable t) {

            // bulk transaction failed, what to do?
            operation.handleTransactionFailure(securityContext, t);
        }

        if (description != null) {
            info("{}: {} objects processed", description, objectCount);
        }
    }

    return objectCount;
}
python
def is_readable(path=None):
    """
    Test if the supplied filesystem path can be read

    :param path: A filesystem path (or None, which is never readable)
    :return: True if the path is a file that can be read. Otherwise, False
    """
    # BUG FIX: the original passed the default None straight into
    # os.path.isfile, raising TypeError instead of returning False.
    if path is None:
        return False
    return os.path.isfile(path) and os.access(path, os.R_OK)
java
/**
 * Downloads objects in order until either every object has been fetched or
 * the configured prefetch capacity (in bytes) has been reached. Each object
 * is written to its own temporary file and queued as a FetchedFile.
 */
private void fetch() throws Exception {
    for (; nextFetchIndex < objects.size() && fetchedBytes.get() <= prefetchConfig.getMaxFetchCapacityBytes(); nextFetchIndex++) {
        final T object = objects.get(nextFetchIndex);
        LOG.info("Fetching [%d]th object[%s], fetchedBytes[%d]", nextFetchIndex, object, fetchedBytes.get());
        // Each object lands in its own temp file under temporaryDirectory.
        final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
        fetchedBytes.addAndGet(download(object, outFile));
        fetchedFiles.put(new FetchedFile<>(object, outFile, getFileCloser(outFile, fetchedBytes)));
    }
}
java
/**
 * Writes forced silence for the tick range that contains the given tick.
 *
 * @param tick the tick whose containing range is silenced
 */
@Override
public void writeSilenceForced(long tick) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "writeSilenceForced", Long.valueOf(tick));

    TickRange tr = null;

    // Lock the stream while the cursor is repositioned so the lookup is
    // consistent (see defect 289889).
    synchronized (oststream) //see defect 289889
    {
        oststream.setCursor(tick);

        // Get the TickRange containing this tick
        tr = oststream.getNext();
    }

    // Write silence for the located range with the "forced" flag set.
    writeSilenceInternal(tr, true);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "writeSilenceForced");
}
java
/**
 * Computes a clock-skew adjustment for a failed request. When the failure
 * suggests skew and the service response carries a server date, the skew
 * in seconds is returned with the recommendation flag set; otherwise a
 * zero, not-recommended adjustment is returned. This is best-effort: any
 * runtime error is logged and never propagated.
 */
public ClockSkewAdjustment getAdjustment(AdjustmentRequest adjustmentRequest) {
    ValidationUtils.assertNotNull(adjustmentRequest, "adjustmentRequest");
    ValidationUtils.assertNotNull(adjustmentRequest.exception, "adjustmentRequest.exception");
    ValidationUtils.assertNotNull(adjustmentRequest.clientRequest, "adjustmentRequest.clientRequest");
    ValidationUtils.assertNotNull(adjustmentRequest.serviceResponse, "adjustmentRequest.serviceResponse");

    int timeSkewInSeconds = 0;
    boolean isAdjustmentRecommended = false;

    try {
        if (isAdjustmentRecommended(adjustmentRequest)) {
            // Estimate the skew from the server-reported date, when present.
            Date serverDate = getServerDate(adjustmentRequest);

            if (serverDate != null) {
                timeSkewInSeconds = timeSkewInSeconds(getCurrentDate(adjustmentRequest), serverDate);
                isAdjustmentRecommended = true;
            }
        }
    } catch (RuntimeException e) {
        // Clock-skew correction must never fail the request itself.
        log.warn("Unable to correct for clock skew.", e);
    }

    return new ClockSkewAdjustment(isAdjustmentRecommended, timeSkewInSeconds);
}
java
/**
 * Presents a plain {@link Type} as a {@code Typed<T>} token; the returned
 * instance simply echoes the given type from {@code getType()}.
 */
public static <T> Typed<T> wrap(final Type type) {
    return new Typed<T>() {
        @Override
        public Type getType() {
            return type;
        }
    };
}
python
def file_contents_safe(self, sentry_unit, file_name,
                       max_wait=60, fatal=False):
    """Get file contents from a sentry unit.

    Wrap amulet file_contents with retry logic to address races where a
    file checks as existing, but no longer exists by the time file_contents
    is called.  Return None if file not found.  Optionally raise if fatal
    is True.

    :param sentry_unit: amulet sentry unit to read from
    :param file_name: path of the file on the unit
    :param max_wait: maximum seconds to keep retrying (polled every 4s)
    :param fatal: raise via amulet.raise_status instead of returning None
    """
    unit_name = sentry_unit.info['unit_name']
    # BUG FIX: use None as the "not read yet" sentinel. The original used
    # False, which made an empty file indistinguishable from a missing one,
    # causing pointless retries and a wrong None/fatal result.
    file_contents = None
    tries = 0
    while file_contents is None and tries < (max_wait / 4):
        try:
            file_contents = sentry_unit.file_contents(file_name)
        except IOError:
            self.log.debug('Attempt {} to open file {} from {} '
                           'failed'.format(tries, file_name,
                                           unit_name))
            time.sleep(4)
            tries += 1

    if file_contents is not None:
        return file_contents
    elif not fatal:
        return None
    else:
        msg = 'Failed to get file contents from unit.'
        amulet.raise_status(amulet.FAIL, msg)
java
/**
 * Opens the underlying class source, wrapping any class-source failure in
 * an InfoStoreException. Warning output is deliberately deferred to the
 * caller (see defect 84235) to avoid duplicate messages per level.
 */
@Override
public void open() throws InfoStoreException {
    String methodName = "open";

    try {
        getClassSource().open();

    } catch (ClassSource_Exception e) {
        // defect 84235: we are generating multiple Warning/Error messages
        // for each error due to each level reporting them.
        // Disable the following warning and defer message generation to a
        // higher level.
        // CWWKC0026W
        //Tr.warning(tc, "ANNO_INFOSTORE_OPEN1_EXCEPTION", getHashText(), getClassSource().getHashText());
        String eMsg = "[ " + getHashText() + " ] Failed to open class source ";
        throw InfoStoreException.wrap(tc, CLASS_NAME, methodName, eMsg, e);
    }
}
python
def from_yaml():
    """ Load configuration from yaml source(s), cached to only run once """
    # NOTE(review): no memoization is visible in this body; the "cached"
    # claim presumably relies on a decorator applied elsewhere — confirm.
    default_yaml_str = snippets.get_snippet_content('hatchery.yml')
    # RoundTripLoader (ruamel.yaml) preserves ordering/comments of defaults.
    ret = yaml.load(default_yaml_str, Loader=yaml.RoundTripLoader)
    for config_path in CONFIG_LOCATIONS:
        config_path = os.path.expanduser(config_path)
        if os.path.isfile(config_path):
            with open(config_path) as config_file:
                config_dict = yaml.load(config_file, Loader=yaml.RoundTripLoader)
                # An empty config file parses to None; nothing to merge.
                if config_dict is None:
                    continue
                for k, v in config_dict.items():
                    # Only keys already present in the defaults are legal.
                    if k not in ret.keys():
                        raise ConfigError(
                            'found garbage key "{}" in {}'.format(k, config_path)
                        )
                    ret[k] = v
    return ret
python
def cp_file():
    """
    dumps databases into /backups, uploads to s3, deletes backups older than
    a month

    fab -f ./fabfile.py backup_dbs
    """
    # NOTE(review): the docstring describes a DB backup task, but the body
    # only parses CLI args and copies one file to S3 — docstring may be stale.
    args = parser.parse_args()
    copy_file(args.aws_access_key_id, args.aws_secret_access_key,
              args.bucket_name, args.file, args.s3_folder)
python
def plotMDS(data, theOrders, theLabels, theColors, theAlphas, theSizes, theMarkers, options): """Plot the MDS data. :param data: the data to plot (MDS values). :param theOrders: the order of the populations to plot. :param theLabels: the names of the populations to plot. :param theColors: the colors of the populations to plot. :param theAlphas: the alpha value for the populations to plot. :param theSizes: the sizes of the markers for each population to plot. :param theMarkers: the type of marker for each population to plot. :param options: the options. :type data: list of numpy.array :type theOrders: list :type theLabels: list :type theColors: list :type theAlphas: list :type theSizes: list :type theMarkers: list :type options: argparse.Namespace """ # Do the import import matplotlib as mpl if options.format != "X11" and mpl.get_backend() != "agg": mpl.use("Agg") import matplotlib.pyplot as plt if options.format != "X11": plt.ioff() fig = plt.figure() try: fig.subplots_adjust(right=options.adjust_right, left=options.adjust_left, bottom=options.adjust_bottom, top=options.adjust_top) except ValueError as e: raise ProgramError(e) ax = fig.add_subplot(111) # Setting the axis ax.xaxis.set_ticks_position("bottom") ax.yaxis.set_ticks_position("left") ax.spines["top"].set_visible(False) ax.spines["right"].set_visible(False) ax.spines["bottom"].set_position(("outward", 9)) ax.spines["left"].set_position(("outward", 9)) # The plot plotObject = [] labels = [] for i, index in enumerate(theOrders): try: tmp, = ax.plot(data[0][i], data[1][i], theMarkers[i], color=theColors[i], mec=theColors[i], markersize=theSizes[i], alpha=theAlphas[i]) except ValueError as e: msg = "Problem with markers: %(e)s" % locals() raise ProgramError(msg) plotObject.append(tmp) labels.append(index) # The legend prop = mpl.font_manager.FontProperties(size=options.legend_size) leg = ax.legend(plotObject, labels, loc=options.legend_position, numpoints=1, fancybox=True, prop=prop, ncol=options.legend_ncol) 
leg.get_frame().set_alpha(0.5) # The title and XY labels ax.set_title(options.title, fontsize=options.title_fontsize, weight="bold") ax.set_xlabel(options.xlabel, fontsize=options.label_fontsize) ax.set_ylabel(options.ylabel, fontsize=options.label_fontsize) # Changing the size of the tick labels for tick in ax.yaxis.get_major_ticks() + ax.xaxis.get_major_ticks(): tick.label.set_fontsize(options.axis_fontsize) if options.format == "X11": # Show the plot plt.show() else: fileName = options.out + "." + options.format try: plt.savefig(fileName, dpi=300) except IOError: msg = "%(fileName)s: can't write file" % locals() raise ProgramError(msg) except ValueError as e: colorError = False for errorMsg in str(e).split("\n"): if errorMsg.startswith("to_rgb"): colorError = True if colorError: msg = "problem with the population colors" raise ProgramError(msg) else: print str(e)