Dataset schema: language (string; 2 classes) · func_code_string (string; lengths 63 to 466k)
java
public static Collection<MessagingContext> forImportedVariables( String domain, String applicationName, Instance instance, ThoseThat thoseThat ) { Map<String,MessagingContext> result = new HashMap<> (); for( ImportedVariable var : ComponentHelpers.findAllImportedVariables( instance.getComponent()).values()) { String componentOrApplicationTemplateName = VariableHelpers.parseVariableName( var.getName()).getKey(); if( result.containsKey( componentOrApplicationTemplateName )) continue; // When we import a variable, it is either internal or external, but not both! RecipientKind kind = var.isExternal() ? RecipientKind.INTER_APP : RecipientKind.AGENTS; MessagingContext ctx = new MessagingContext( kind, domain, componentOrApplicationTemplateName, thoseThat, applicationName ); result.put( componentOrApplicationTemplateName, ctx ); } return result.values(); }
java
@NonNull private static <K, T> PagedList<T> create(@NonNull DataSource<K, T> dataSource, @NonNull Executor notifyExecutor, @NonNull Executor fetchExecutor, @Nullable BoundaryCallback<T> boundaryCallback, @NonNull Config config, @Nullable K key) { if (dataSource.isContiguous() || !config.enablePlaceholders) { int lastLoad = ContiguousPagedList.LAST_LOAD_UNSPECIFIED; if (!dataSource.isContiguous()) { //noinspection unchecked dataSource = (DataSource<K, T>) ((PositionalDataSource<T>) dataSource) .wrapAsContiguousWithoutPlaceholders(); if (key != null) { lastLoad = (Integer) key; } } ContiguousDataSource<K, T> contigDataSource = (ContiguousDataSource<K, T>) dataSource; return new ContiguousPagedList<>(contigDataSource, notifyExecutor, fetchExecutor, boundaryCallback, config, key, lastLoad); } else { return new TiledPagedList<>((PositionalDataSource<T>) dataSource, notifyExecutor, fetchExecutor, boundaryCallback, config, (key != null) ? (Integer) key : 0); } }
python
def check_reaction(reactants, products):
    """Check the stoichiometry and format of chemical reaction used for
    folder structure.

    list of reactants -> list of products
    """
    reactant_list = [reactant.split('@')[0].strip('star').strip('gas')
                     for reactant in reactants]
    product_list = [product.split('@')[0].strip('star').strip('gas')
                    for product in products]
    reactant_atoms = [extract_atoms(reactant) for reactant in reactant_list]
    product_atoms = [extract_atoms(product) for product in product_list]
    reactants = add_atoms(reactant_atoms)
    products = add_atoms(product_atoms)
    r_stars = 0
    p_stars = 0
    for i, a in enumerate(reactant_atoms):
        if a == '' or 'star' in reactant_list[i]:
            r_stars += 1
        elif isinstance(a, float):
            r_stars += a
    for a in product_atoms:
        if a == '':
            p_stars += 1
        elif isinstance(a, float):
            p_stars += a
    assert ''.join(sorted(reactants)) == ''.join(sorted(products))
java
public TafResp validate(LifeForm reading, HttpServletRequest req, HttpServletResponse resp) { return NullTafResp.singleton(); }
python
def flags(cmd, data):
    """
    Checks if there are flags in the path data

    Returns the indices of all values in the path data which are flags
    """
    if cmd.lower() == 'a':
        # a: (rx ry x-axis-rotation large-arc-flag sweep-flag x y)+
        indices = range(len(data))
        return [index for index in indices if (index % 7) in [3, 4]]
    return []
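A quick usage sketch (hypothetical SVG arc data; positions follow the 7-value parameter groups named in the comment above):

arc_data = [25, 25, -30, 0, 1, 50, -25]  # rx ry x-rot large-arc sweep x y
print(flags('a', arc_data))  # [3, 4] -- the two flag positions
print(flags('l', [10, 20]))  # [] -- only arc commands carry flags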
java
private Lifecycle getLifecycleForPhase( String phase ) throws BuildFailureException, LifecycleExecutionException { Lifecycle lifecycle = (Lifecycle) getPhaseToLifecycleMap().get( phase ); if ( lifecycle == null ) { throw new BuildFailureException( "Unable to find lifecycle for phase '" + phase + "'" ); } return lifecycle; }
java
@Override protected Calendar getDate() { Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(value); return calendar; }
python
def profile(self):
    """
    Buffered result of :meth:`build_profile`
    """
    if self._profile is None:
        self._profile = self.build_profile()
    return self._profile
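The same lazy-caching idiom shown standalone, with a hypothetical class so the behaviour is easy to test:

class Report:
    def __init__(self):
        self._profile = None

    @property
    def profile(self):
        """Buffered result of build_profile()."""
        if self._profile is None:
            self._profile = self.build_profile()  # computed once, then reused
        return self._profile

    def build_profile(self):
        print("building...")
        return {"rows": 42}

r = Report()
r.profile  # prints "building..."
r.profile  # served from cache; no second build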
java
public static byte[] str2bin(final String values, final JBBPBitOrder bitOrder) { if (values == null) { return new byte[0]; } int buff = 0; int cnt = 0; final ByteArrayOutputStream buffer = new ByteArrayOutputStream((values.length() + 7) >> 3); final boolean msb0 = bitOrder == JBBPBitOrder.MSB0; for (final char v : values.toCharArray()) { switch (v) { case '_': case ' ': continue; case '0': case 'X': case 'x': case 'Z': case 'z': { if (msb0) { buff >>= 1; } else { buff <<= 1; } } break; case '1': { if (msb0) { buff = (buff >> 1) | 0x80; } else { buff = (buff << 1) | 1; } } break; default: throw new IllegalArgumentException("Detected unsupported char '" + v + '\''); } cnt++; if (cnt == 8) { buffer.write(buff); cnt = 0; buff = 0; } } if (cnt > 0) { buffer.write(msb0 ? buff >>> (8 - cnt) : buff); } return buffer.toByteArray(); }
java
protected T setNiftyName(String niftyName) { Preconditions.checkNotNull(niftyName, "niftyName cannot be null"); this.niftyName = niftyName; return (T) this; }
java
public String getNormalisedContentTypeValue() { String contentType = getHeader(CONTENT_TYPE); if (contentType != null) { return contentType.toLowerCase(Locale.ROOT); } return null; }
java
static void registerDiscoveryClient(UUID injectorId, ReadOnlyDiscoveryClient discoveryClient, DiscoveryJmsConfig config) { DISCO_CLIENTS.put(injectorId, discoveryClient); CONFIGS.put(injectorId, config); LOG.info("Registered discovery client %s as %s", injectorId, discoveryClient); }
java
public void printDataEndForm(PrintWriter out, int iPrintOptions) { out.println("</tr>\n</table>"); if ((iPrintOptions & HtmlConstants.DETAIL_SCREEN) != 0) out.println("</td>\n</tr>"); }
java
protected void storeValidation(Validation v, XMLStreamWriter writer) throws Exception { writer.writeStartElement(XML.ELEMENT_VALIDATION); if (v.getValidConnectionChecker() != null) { storeExtension(v.getValidConnectionChecker(), writer, XML.ELEMENT_VALID_CONNECTION_CHECKER); } if (v.getCheckValidConnectionSql() != null) { writer.writeStartElement(XML.ELEMENT_CHECK_VALID_CONNECTION_SQL); writer.writeCharacters(v.getValue(XML.ELEMENT_CHECK_VALID_CONNECTION_SQL, v.getCheckValidConnectionSql())); writer.writeEndElement(); } if (v.isValidateOnMatch() != null) { writer.writeStartElement(XML.ELEMENT_VALIDATE_ON_MATCH); writer.writeCharacters(v.getValue(XML.ELEMENT_VALIDATE_ON_MATCH, v.isValidateOnMatch().toString())); writer.writeEndElement(); } if (v.isBackgroundValidation() != null) { writer.writeStartElement(XML.ELEMENT_BACKGROUND_VALIDATION); writer.writeCharacters(v.getValue(XML.ELEMENT_BACKGROUND_VALIDATION, v.isBackgroundValidation().toString())); writer.writeEndElement(); } if (v.getBackgroundValidationMillis() != null) { writer.writeStartElement(XML.ELEMENT_BACKGROUND_VALIDATION_MILLIS); writer.writeCharacters(v.getValue(XML.ELEMENT_BACKGROUND_VALIDATION_MILLIS, v.getBackgroundValidationMillis().toString())); writer.writeEndElement(); } if (v.isUseFastFail() != null) { writer.writeStartElement(XML.ELEMENT_USE_FAST_FAIL); writer.writeCharacters(v.getValue(XML.ELEMENT_USE_FAST_FAIL, v.isUseFastFail().toString())); writer.writeEndElement(); } if (v.getStaleConnectionChecker() != null) { storeExtension(v.getStaleConnectionChecker(), writer, XML.ELEMENT_STALE_CONNECTION_CHECKER); } if (v.getExceptionSorter() != null) { storeExtension(v.getExceptionSorter(), writer, XML.ELEMENT_EXCEPTION_SORTER); } writer.writeEndElement(); }
java
public Expression<Long> gte(long value) { String valueString = "'" + value + "'"; return new Expression<Long>(this, Operation.gte, valueString); }
python
def build_stack_changes(stack_name, new_stack, old_stack, new_params,
                        old_params):
    """Builds a list of strings to represent the parameters (if changed)
    and stack diff"""
    from_file = "old_%s" % (stack_name,)
    to_file = "new_%s" % (stack_name,)
    lines = difflib.context_diff(
        old_stack, new_stack,
        fromfile=from_file, tofile=to_file,
        n=7)  # ensure at least a few lines of context are displayed afterward

    template_changes = list(lines)
    log_lines = []
    if not template_changes:
        log_lines.append("*** No changes to template ***")
    param_diffs = diff_parameters(old_params, new_params)
    if param_diffs:
        log_lines.append(format_params_diff(param_diffs))
    if template_changes:
        log_lines.append("".join(template_changes))
    return log_lines
python
def eigenvectors_nrev(T, right=True):
    r"""Compute eigenvectors of transition matrix.

    Parameters
    ----------
    T : (d, d) ndarray
        Transition matrix (stochastic matrix)
    right : bool, optional
        If right=True compute right eigenvectors, left eigenvectors
        otherwise

    Returns
    -------
    eigvec : (d, d) ndarray
        The eigenvectors of T ordered with decreasing absolute value of
        the corresponding eigenvalue
    """
    if right:
        val, R = eig(T, left=False, right=True)
        # Sort eigenvectors by decreasing absolute value of the eigenvalue.
        perm = np.argsort(np.abs(val))[::-1]
        eigvec = R[:, perm]
    else:
        val, L = eig(T, left=True, right=False)
        # Sort eigenvectors by decreasing absolute value of the eigenvalue.
        perm = np.argsort(np.abs(val))[::-1]
        eigvec = L[:, perm]
    return eigvec
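For a concrete picture of the sorting step, here it is applied to a small stochastic matrix using scipy directly:

import numpy as np
from scipy.linalg import eig

T = np.array([[0.9, 0.1],
              [0.2, 0.8]])
val, R = eig(T, left=False, right=True)
perm = np.argsort(np.abs(val))[::-1]
print(val[perm].real)  # [1.  0.7] -- Perron eigenvalue first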
java
void writeTagNoClear(long bucketIndex, int posInBucket, long tag) { long tagStartIdx = getTagOffset(bucketIndex, posInBucket); // BIT BANGIN YEAAAARRHHHGGGHHH for (int i = 0; i < bitsPerTag; i++) { // second arg just does bit test in tag if ((tag & (1L << i)) != 0) { memBlock.set(tagStartIdx + i); } } }
python
def _unary(space, const, name):
    """
    Reduce the domain of variable name to be node-consistent with this
    constraint, i.e. remove those values for the variable that are not
    consistent with the constraint.

    Returns True if the domain of name was modified
    """
    if name not in const.vnames:
        return False
    # Discrete and continuous variables are handled identically here:
    # both restrict to the constraint's domain for this variable.
    values = const.domains[name]
    space.domains[name] = space.domains[name].intersection(values)
    return True
python
def leave(self, _id):
    """ Leave a room """
    key = self._gcls() + _id
    if key in SockJSRoomHandler._room:
        SockJSRoomHandler._room[key].remove(self)
        if len(SockJSRoomHandler._room[key]) == 0:
            del SockJSRoomHandler._room[key]
java
public Map<String, List<String>> getResponseHeaders() { if (myResponseHeaders == null) { myResponseHeaders = new HashMap<>(); } return myResponseHeaders; }
java
private static List<ValueInjector> getValueInjectFields(Environment environment, ClassDefine classDefine) { List<ValueInjector> valueInjectors = new ArrayList<>(8); //handle class annotation if (null != classDefine.getType().getAnnotation(Value.class)) { String suffix = classDefine.getType().getAnnotation(Value.class).name(); Arrays.stream(classDefine.getDeclaredFields()).forEach(field -> valueInjectors.add( new ValueInjector(environment, field, suffix + "." + field.getName()) )); } else { Arrays.stream(classDefine.getDeclaredFields()). filter(field -> null != field.getAnnotation(Value.class)). map(field -> new ValueInjector( environment, field, field.getAnnotation(Value.class).name()) ).forEach(valueInjectors::add); } return valueInjectors; }
java
public void handleGet(HttpRequest request, HttpResponse response, String pathInContext, String pathParams, Resource resource) throws IOException { if(log.isDebugEnabled())log.debug("Looking for "+resource); if (resource!=null && resource.exists()) { // check if directory if (resource.isDirectory()) { if (!pathInContext.endsWith("/") && !pathInContext.equals("/")) { log.debug("Redirect to directory/"); String q=request.getQuery(); StringBuffer buf=request.getRequestURL(); if (q!=null&&q.length()!=0) { buf.append('?'); buf.append(q); } response.setField(HttpFields.__Location, URI.addPaths(buf.toString(),"/")); response.setStatus(302); request.setHandled(true); return; } // See if index file exists String welcome=getHttpContext().getWelcomeFile(resource); if (welcome!=null) { // Forward to the index String ipath=URI.addPaths(pathInContext,welcome); if (_redirectWelcomeFiles) { // Redirect to the index ipath=URI.addPaths(getHttpContext().getContextPath(),ipath); response.setContentLength(0); response.sendRedirect(ipath); } else { URI uri=request.getURI(); uri.setPath(URI.addPaths(uri.getPath(),welcome)); getHttpContext().handle(ipath,pathParams,request,response); } return; } // Check modified dates if (!passConditionalHeaders(request,response,resource)) return; // If we got here, no forward to index took place sendDirectory(request,response,resource,pathInContext.length()>1); } // check if it is a file else if (resource.exists()) { // Check modified dates if (!passConditionalHeaders(request,response,resource)) return; sendData(request,response,pathInContext,resource,true); } else // don't know what it is log.warn("Unknown file type"); } }
java
public static String getNodesPathName(Node node) { final StringBuffer buffer = new StringBuffer(); if (node.getNodeType() == Node.ATTRIBUTE_NODE) { buildNodeName(((Attr) node).getOwnerElement(), buffer); buffer.append("."); buffer.append(node.getLocalName()); } else { buildNodeName(node, buffer); } return buffer.toString(); }
java
public void trace(Object message, Throwable t) { getLogger().trace(message.toString(), t); }
java
protected boolean attemptConnectWork(ConnectInfo ci) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.entry(tc, "attemptConnectWork"); } boolean returnConnectDone = true; switch (ci.action) { case (ConnectInfo.FINISH_CONNECTION): { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Finish_connection case for, local: " + ci.localAddress + " remote: " + ci.remoteAddress); } if (ci.channel.isConnectionPending()) { try { boolean connectDone = ci.channel.finishConnect(); if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Finishconnect returned " + connectDone + " for, local: " + ci.ioSocket.getSocket().getLocalSocketAddress() + " remote: " + ci.ioSocket.getSocket().getRemoteSocketAddress()); } if (!connectDone) { // Not connected yet, so just put it back in the selector. // This can happen if the network connection goes down // while connect is in selector if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "FinishConnect returned false, retrying"); } queueConnectForSelector(ci); returnConnectDone = false; break; } if (!ci.channel.isConnected()) { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "FinishConnect returned true, but not connected"); } // Add local and remote address information InetSocketAddress iaRemote = ci.remoteAddress; InetSocketAddress iaLocal = ci.localAddress; IOException e = new IOException("Connection could not be established. local=" + iaLocal + " remote=" + iaRemote); ci.setError(e); ci.tcpConnLink.connectFailed(e); break; } } catch (IOException ioe) { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "SocketChannel connect failed, local: " + ci.ioSocket.getSocket().getLocalSocketAddress() + " remote: " + ci.ioSocket.getSocket().getRemoteSocketAddress()); } if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "SocketChannel.finishConnect Exception Caught: " + ioe); } ci.setError(ioe); ci.tcpConnLink.connectFailed(ioe); break; } } else { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Connection got selected, but isConnectionPending returned false"); } returnConnectDone = false; queueConnectForSelector(ci); break; } if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "SocketChannel connected, local: " + ci.ioSocket.getSocket().getLocalSocketAddress() + " remote: " + ci.ioSocket.getSocket().getRemoteSocketAddress()); } ci.setFinishComplete(); try { ci.tcpConnLink.connectComplete(ci.ioSocket); } catch (IOException ioe) { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "SocketChannel connect failed, local: " + ci.ioSocket.getSocket().getLocalSocketAddress() + " remote: " + ci.ioSocket.getSocket().getRemoteSocketAddress()); } if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "SocketChannel.finishConnect Exception Caught: " + ioe); } ci.setError(ioe); ci.tcpConnLink.connectFailed(ioe); } break; } case (ConnectInfo.CALL_ERROR): { ci.tcpConnLink.connectFailed(ci.errorException); break; } default: { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(tc, "Should never get here - default."); } break; } } // end-switch if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.exit(tc, "attemptConnectWork returning " + returnConnectDone); } return returnConnectDone; }
python
def _component2dtype(self, component):
    """Determine the appropriate numpy datatype for an OpenJPEG component.

    Parameters
    ----------
    component : ctypes pointer to ImageCompType (image_comp_t)
        single image component structure.

    Returns
    -------
    builtins.type
        numpy datatype to be used to construct an image array
    """
    if component.prec > 16:
        msg = "Unhandled precision: {0} bits.".format(component.prec)
        raise IOError(msg)

    if component.sgnd:
        dtype = np.int8 if component.prec <= 8 else np.int16
    else:
        dtype = np.uint8 if component.prec <= 8 else np.uint16

    return dtype
python
def intervallookupone(table, start='start', stop='stop', value=None,
                      include_stop=False, strict=True):
    """
    Construct an interval lookup for the given table, returning at most one
    result for each query. E.g.::

        >>> import petl as etl
        >>> table = [['start', 'stop', 'value'],
        ...          [1, 4, 'foo'],
        ...          [3, 7, 'bar'],
        ...          [4, 9, 'baz']]
        >>> lkp = etl.intervallookupone(table, 'start', 'stop', strict=False)
        >>> lkp.search(0, 1)
        >>> lkp.search(1, 2)
        (1, 4, 'foo')
        >>> lkp.search(2, 4)
        (1, 4, 'foo')
        >>> lkp.search(2, 5)
        (1, 4, 'foo')
        >>> lkp.search(9, 14)
        >>> lkp.search(19, 140)
        >>> lkp.search(0)
        >>> lkp.search(1)
        (1, 4, 'foo')
        >>> lkp.search(2)
        (1, 4, 'foo')
        >>> lkp.search(4)
        (3, 7, 'bar')
        >>> lkp.search(5)
        (3, 7, 'bar')

    If ``strict=True``, queries returning more than one result will raise a
    `DuplicateKeyError`. If ``strict=False`` and there is more than one
    result, the first result is returned.

    Note start coordinates are included and stop coordinates are excluded
    from the interval. Use the `include_stop` keyword argument to include the
    upper bound of the interval when finding overlaps.

    """
    tree = tupletree(table, start=start, stop=stop, value=value)
    return IntervalTreeLookupOne(tree, strict=strict, include_stop=include_stop)
python
def _get_access_info(self, instance, db_key, db_name=None):
    ''' Convenience method to extract info from instance
    '''
    dsn = instance.get('dsn')
    host = instance.get('host')
    username = instance.get('username')
    password = instance.get('password')
    database = instance.get(db_key) if db_name is None else db_name
    driver = instance.get('driver')
    if not dsn:
        if not host:
            host = '127.0.0.1,1433'
        if not database:
            database = self.DEFAULT_DATABASE
        if not driver:
            driver = self.DEFAULT_DRIVER
    return dsn, host, username, password, database, driver
python
def _must_decode(value):
    """Copied from pkginfo 1.4.1, _compat module."""
    if type(value) is bytes:
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError:
            return value.decode('latin1')
    return value
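A quick demonstration of the fallback behaviour (illustrative byte strings):

print(_must_decode(b'caf\xc3\xa9'))  # 'café' -- valid UTF-8
print(_must_decode(b'caf\xe9'))      # 'café' -- invalid UTF-8, latin-1 fallback
print(_must_decode('already text'))  # non-bytes input passes through unchanged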
java
public static <T> T retrieveService(Class<T> clazz, Registry registry) { if(registry == null) { throw new IllegalArgumentException("Registry cannot be null."); } Set<T> servicesFromRegistry = registry.findByType(clazz); if (servicesFromRegistry.size() == 1) { T service = servicesFromRegistry.iterator().next(); LOG.info("Found Kura " + clazz.getCanonicalName() + " in the registry. Kura component will use that instance."); return service; } else if (servicesFromRegistry.size() > 1) { throw new IllegalStateException("Too many " + clazz.getCanonicalName() + " services found in a registry: " + servicesFromRegistry.size()); } else { throw new IllegalArgumentException( "No " + clazz.getCanonicalName() + " service instance found in a registry."); } }
java
private boolean isValidWeakMetaClass(MetaClass metaClass, MetaClassRegistry.MetaClassCreationHandle mccHandle) { if(metaClass==null) return false; boolean enableGloballyOn = (mccHandle instanceof ExpandoMetaClassCreationHandle); boolean cachedAnswerIsEMC = (metaClass instanceof ExpandoMetaClass); return (!enableGloballyOn || cachedAnswerIsEMC); }
python
def remove_unused_links(self, used):
    """Removes all saved links except the ones that are used.

    Args:
        used (list): list of used links that should not be removed.
    """
    unused = []

    self._execute("SELECT * FROM {}".format(self.LINK_STATE_TABLE))
    for row in self.cursor:
        relpath, inode, mtime = row
        inode = self._from_sqlite(inode)
        path = os.path.join(self.root_dir, relpath)

        if path in used:
            continue

        if not os.path.exists(path):
            continue

        actual_inode = get_inode(path)
        actual_mtime, _ = get_mtime_and_size(path)

        if inode == actual_inode and mtime == actual_mtime:
            logger.debug("Removing '{}' as unused link.".format(path))
            remove(path)
            unused.append(relpath)

    for relpath in unused:
        cmd = 'DELETE FROM {} WHERE path = "{}"'
        self._execute(cmd.format(self.LINK_STATE_TABLE, relpath))
python
def lmx_h1k_f64k():
    """HParams for training languagemodel_lm1b32k_packed.  880M Params."""
    hparams = lmx_base()
    hparams.hidden_size = 1024
    hparams.filter_size = 65536
    hparams.batch_size = 2048
    return hparams
python
def merge_equal_neighbors(self):
    """ Merge neighbors with same speaker. """
    IDX_LENGTH = 3
    merged = self.segs.copy()
    current_start = 0
    j = 0
    seg = self.segs.iloc[0]
    for i in range(1, self.num_segments):
        seg = self.segs.iloc[i]
        last = self.segs.iloc[i - 1]
        if seg.speaker == last.speaker:
            merged.iat[j, IDX_LENGTH] = seg.start + seg.length - current_start
        else:
            j += 1
            merged.iloc[j] = seg
            current_start = seg.start
    merged = merged.iloc[:(j + 1)]
    merged.sort_values('start', inplace=True)
    return self.update_segs(merged)
java
protected void addInstanceToTree(Instance inst, KDTreeNode node) throws Exception { if (node.isALeaf()) { int instList[] = new int[m_Instances.numInstances()]; try { System.arraycopy(m_InstList, 0, instList, 0, node.m_End + 1); // m_InstList.squeezeIn(m_End, // index); if (node.m_End < m_InstList.length - 1) System.arraycopy(m_InstList, node.m_End + 1, instList, node.m_End + 2, m_InstList.length - node.m_End - 1); instList[node.m_End + 1] = m_Instances.numInstances() - 1; } catch (ArrayIndexOutOfBoundsException ex) { System.err.println("m_InstList.length: " + m_InstList.length + " instList.length: " + instList.length + "node.m_End+1: " + (node.m_End + 1) + "m_InstList.length-node.m_End+1: " + (m_InstList.length - node.m_End - 1)); throw ex; } m_InstList = instList; node.m_End++; node.m_NodeRanges = m_EuclideanDistance.updateRanges(inst, node.m_NodeRanges); m_Splitter.setInstanceList(m_InstList); // split this leaf node if necessary double[][] universe = m_EuclideanDistance.getRanges(); if (node.numInstances() > m_MaxInstInLeaf && getMaxRelativeNodeWidth(node.m_NodeRanges, universe) > m_MinBoxRelWidth) { m_Splitter.splitNode(node, m_NumNodes, node.m_NodeRanges, universe); m_NumNodes += 2; } }// end if node is a leaf else { if (m_EuclideanDistance.valueIsSmallerEqual(inst, node.m_SplitDim, node.m_SplitValue)) { addInstanceToTree(inst, node.m_Left); afterAddInstance(node.m_Right); } else addInstanceToTree(inst, node.m_Right); node.m_End++; node.m_NodeRanges = m_EuclideanDistance.updateRanges(inst, node.m_NodeRanges); } }
java
private Map < String, Object > getProps(XmlSchemaSimpleType xsdSimpleType, final CobolAnnotations cobolAnnotations) { XmlSchemaSimpleTypeRestriction restriction = (XmlSchemaSimpleTypeRestriction) xsdSimpleType .getContent(); if (restriction != null && restriction.getBaseTypeName() != null) { QName xsdTypeName = restriction.getBaseTypeName(); List < XmlSchemaFacet > facets = restriction.getFacets(); if (xsdTypeName.equals(Constants.XSD_STRING)) { return getCobolAlphanumType(facets); } else if (xsdTypeName.equals(Constants.XSD_HEXBIN)) { return getCobolOctetStreamType(facets); } else if (xsdTypeName.equals(Constants.XSD_INT)) { return getCobolDecimalType(cobolAnnotations, Integer.class); } else if (xsdTypeName.equals(Constants.XSD_LONG)) { return getCobolDecimalType(cobolAnnotations, Long.class); } else if (xsdTypeName.equals(Constants.XSD_SHORT)) { return getCobolDecimalType(cobolAnnotations, Short.class); } else if (xsdTypeName.equals(Constants.XSD_DECIMAL)) { return getCobolDecimalType(cobolAnnotations, BigDecimal.class); } else if (xsdTypeName.equals(Constants.XSD_FLOAT)) { return getCobolDecimalType(cobolAnnotations, Float.class); } else if (xsdTypeName.equals(Constants.XSD_DOUBLE)) { return getCobolDecimalType(cobolAnnotations, Double.class); } else if (xsdTypeName.equals(Constants.XSD_UNSIGNEDINT)) { return getCobolDecimalType(cobolAnnotations, Long.class); } else if (xsdTypeName.equals(Constants.XSD_UNSIGNEDSHORT)) { return getCobolDecimalType(cobolAnnotations, Integer.class); } else if (xsdTypeName.equals(Constants.XSD_UNSIGNEDLONG)) { return getCobolDecimalType(cobolAnnotations, BigInteger.class); } else if (xsdTypeName.equals(Constants.XSD_INTEGER)) { return getCobolDecimalType(cobolAnnotations, BigInteger.class); } else { throw new Xsd2ConverterException("Unsupported xsd type " + xsdTypeName); } } else { throw new Xsd2ConverterException("Simple type without restriction " + xsdSimpleType.getQName()); } }
java
public void start() throws Exception { internalPageFactory = new InternalPageFactory(bundleContext, pageId, applicationName, pageName, pageClass, overwrites, injectionSource); internalPageFactory.register(); }
python
def group(args):
    """
    %prog group tabfile > tabfile.grouped

    Given a tab-delimited file, either group all elements within the file or
    group the elements in the value column(s) based on the key (groupby)
    column

    For example, convert this | into this
    ---------------------------------------
    a   2    3    4           | a,2,3,4,5,6
    a   5    6                | b,7,8
    b   7    8                | c,9,10,11
    c   9                     |
    c  10   11                |

    If grouping by a particular column,
    convert this              | into this:
    ---------------------------------------------
    a   2    3    4           | a   2,5   3,6   4
    a   5    6                | b   7     8
    b   7    8                | c   9,10  11
    c   9                     |
    c  10   11                |

    By default, it uniqifies all the grouped elements
    """
    from jcvi.utils.cbook import AutoVivification
    from jcvi.utils.grouper import Grouper

    p = OptionParser(group.__doc__)
    p.set_sep()
    p.add_option("--groupby", default=None, type='int',
                 help="Default column to groupby [default: %default]")
    p.add_option("--groupsep", default=',',
                 help="Separator to join the grouped elements [default: `%default`]")
    p.add_option("--nouniq", default=False, action="store_true",
                 help="Do not uniqify the grouped elements [default: %default]")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    tabfile, = args
    sep = opts.sep
    groupby = opts.groupby
    groupsep = opts.groupsep

    cols = []
    grouper = AutoVivification() if groupby is not None else Grouper()
    fp = must_open(tabfile)
    for row in fp:
        row = row.rstrip()
        atoms = row.split(sep)
        if groupby is not None:
            if len(cols) < len(atoms):
                cols = [x for x in xrange(len(atoms))]
            if groupby not in cols:
                logging.error("groupby col index `{0}` is out of range".format(groupby))
                sys.exit()

            key = atoms[groupby]
            for col in cols:
                if col == groupby:
                    continue
                if not grouper[key][col]:
                    grouper[key][col] = [] if opts.nouniq else set()
                if col < len(atoms):
                    if groupsep in atoms[col]:
                        for atom in atoms[col].split(groupsep):
                            if opts.nouniq:
                                grouper[key][col].append(atom)
                            else:
                                grouper[key][col].add(atom)
                    else:
                        if opts.nouniq:
                            grouper[key][col].append(atoms[col])
                        else:
                            grouper[key][col].add(atoms[col])
        else:
            grouper.join(*atoms)

    for key in grouper:
        if groupby is not None:
            line = []
            for col in cols:
                if col == groupby:
                    line.append(key)
                elif col in grouper[key].keys():
                    line.append(groupsep.join(grouper[key][col]))
                else:
                    line.append("na")
            print(sep.join(line))
        else:
            print(groupsep.join(key))
python
def metatiles_are_equal(tile_data_1, tile_data_2):
    """
    Return True if the two tiles are both zipped metatiles and contain the
    same set of files with the same contents. This ignores the timestamp of
    the individual files in the zip files, as well as their order or any
    other metadata.
    """
    try:
        buf_1 = StringIO.StringIO(tile_data_1)
        buf_2 = StringIO.StringIO(tile_data_2)

        with zipfile.ZipFile(buf_1, mode='r') as zip_1:
            with zipfile.ZipFile(buf_2, mode='r') as zip_2:
                return _metatile_contents_equal(zip_1, zip_2)

    except (StandardError, zipfile.BadZipFile, zipfile.LargeZipFile):
        # errors, such as files not being proper zip files, or missing
        # some attributes or contents that we expect, are treated as not
        # equal.
        pass

    return False
java
private static void createBlockFile(String blockPath) throws IOException { FileUtils.createBlockPath(blockPath, ServerConfiguration.get(PropertyKey.WORKER_DATA_FOLDER_PERMISSIONS)); FileUtils.createFile(blockPath); FileUtils.changeLocalFileToFullPermission(blockPath); LOG.debug("Created new file block, block path: {}", blockPath); }
python
def get_contacts(self, limit=100, offset=0, params={}):
    """ Get all account contacts """
    url = self.CONTACTS_URL + "?limit=%s&offset=%s" % (limit, offset)
    for key, value in params.items():
        if key == 'ids':  # 'is' compared identity, not equality; use '=='
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
python
def add(self, child):
    """
    Adds a typed child object to the component type.

    @param child: Child object to be added.
    """
    if isinstance(child, Parameter):
        self.add_parameter(child)
    elif isinstance(child, Property):
        self.add_property(child)
    elif isinstance(child, DerivedParameter):
        self.add_derived_parameter(child)
    elif isinstance(child, IndexParameter):
        self.add_index_parameter(child)
    elif isinstance(child, Constant):
        self.add_constant(child)
    elif isinstance(child, Exposure):
        self.add_exposure(child)
    elif isinstance(child, Requirement):
        self.add_requirement(child)
    elif isinstance(child, ComponentRequirement):
        self.add_component_requirement(child)
    elif isinstance(child, InstanceRequirement):
        self.add_instance_requirement(child)
    elif isinstance(child, Children):
        self.add_children(child)
    elif isinstance(child, Text):
        self.add_text(child)
    elif isinstance(child, Link):
        self.add_link(child)
    elif isinstance(child, Path):
        self.add_path(child)
    elif isinstance(child, EventPort):
        self.add_event_port(child)
    elif isinstance(child, ComponentReference):
        self.add_component_reference(child)
    elif isinstance(child, Attachments):
        self.add_attachments(child)
    else:
        raise ModelError('Unsupported child element')
python
def read_tvips_header(fh, byteorder, dtype, count, offsetsize):
    """Read TVIPS EM-MENU headers and return as dict."""
    result = {}
    header = fh.read_record(TIFF.TVIPS_HEADER_V1, byteorder=byteorder)
    for name, typestr in TIFF.TVIPS_HEADER_V1:
        result[name] = header[name].tolist()
    if header['Version'] == 2:
        header = fh.read_record(TIFF.TVIPS_HEADER_V2, byteorder=byteorder)
        if header['Magic'] != int(0xaaaaaaaa):
            log.warning('read_tvips_header: invalid TVIPS v2 magic number')
            return {}
        # decode utf16 strings
        for name, typestr in TIFF.TVIPS_HEADER_V2:
            if typestr.startswith('V'):
                s = header[name].tostring().decode('utf16', errors='ignore')
                result[name] = stripnull(s, null='\0')
            else:
                result[name] = header[name].tolist()
        # convert nm to m
        for axis in 'XY':
            header['PhysicalPixelSize' + axis] /= 1e9
            header['PixelSize' + axis] /= 1e9
    elif header['Version'] != 1:
        log.warning('read_tvips_header: unknown TVIPS header version')
        return {}
    return result
python
def load_settings(self, settings):
    """Load settings from file"""
    with open(settings) as settings_file:
        settings_dict = simplejson.load(settings_file)
        for key, value in settings_dict.items():
            self.__setattr__(key, value)
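For illustration, the same pattern with only the standard library (hypothetical settings content):

import json
import types

cfg = types.SimpleNamespace()
settings_dict = json.loads('{"host": "localhost", "port": 8080}')
for key, value in settings_dict.items():
    setattr(cfg, key, value)
print(cfg.host, cfg.port)  # localhost 8080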
java
public static Hop resolveHostByDnsSrvLookup(Hop hop) { String host = hop.getHost(); String transport = hop.getTransport(); if(transport==null) { transport = ListeningPoint.UDP; } transport = transport.toLowerCase(); Record[] records = null; try { records = new Lookup("_sip._" + transport + "." + host, Type.SRV).run(); } catch (TextParseException e) { logger.error("Impossible to parse the parameters for dns lookup", e); } if (records == null || records.length == 0) { // SRV lookup failed, use the outbound proxy directly. if(logger.isDebugEnabled()) { logger .debug("SRV lookup for host:transport " + ""+ host + "/" + transport + " returned nothing " + "-- we are going to just use the domain name directly"); } return hop; } else { Map<String, String> cachedEntry = foundCachedEntry(host, transport, (Record[]) records); if(cachedEntry == null) { SRVRecord record = (SRVRecord) records[0]; int recordPort = record.getPort(); String resolvedName = record.getTarget().toString(); try { String hostAddress= InetAddress.getByName(resolvedName).getHostAddress(); if(logger.isDebugEnabled()) { logger.debug("Did a successful DNS SRV lookup for host:transport " + ""+ host + "/" + transport + " , Host Name = " + resolvedName + " , Host IP Address = " + hostAddress + ", Host Port = " + recordPort); } Map<String, String> entry = new HashMap<String, String>(); entry.put("hostName", resolvedName); entry.put("hostAddress", hostAddress); entry.put("hostPort", ""+recordPort); cachedLookup.putIfAbsent(host + transport, entry); return new HopImpl(hostAddress, recordPort, transport); } catch (UnknownHostException e) { logger.error("Impossible to get the host address of the resolved name, " + "we are going to just use the domain name directly" + resolvedName, e); return hop; } } else { String entryResolvedName = cachedEntry.get("hostName"); String hostAddress = cachedEntry.get("hostAddress"); String hostPort = cachedEntry.get("hostPort"); if(logger.isDebugEnabled()) { logger.debug("Reusing a previous DNS SRV lookup for host:transport " + ""+ host + "/" + transport + " , Host Name = " + entryResolvedName + " , Host IP Address = " + hostAddress + ", Host Port = " + hostPort); } return new HopImpl(hostAddress, Integer.parseInt(hostPort), transport); } } }
java
public void marshall(CelebrityDetail celebrityDetail, ProtocolMarshaller protocolMarshaller) { if (celebrityDetail == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(celebrityDetail.getUrls(), URLS_BINDING); protocolMarshaller.marshall(celebrityDetail.getName(), NAME_BINDING); protocolMarshaller.marshall(celebrityDetail.getId(), ID_BINDING); protocolMarshaller.marshall(celebrityDetail.getConfidence(), CONFIDENCE_BINDING); protocolMarshaller.marshall(celebrityDetail.getBoundingBox(), BOUNDINGBOX_BINDING); protocolMarshaller.marshall(celebrityDetail.getFace(), FACE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def geodist(self, name, place1, place2, unit=None):
    """
    Return the distance between ``place1`` and ``place2`` members of the
    ``name`` key.

    The units must be one of the following: m, km, mi, ft. By default
    meters are used.
    """
    pieces = [name, place1, place2]
    if unit and unit not in ('m', 'km', 'mi', 'ft'):
        raise DataError("GEODIST invalid unit")
    elif unit:
        pieces.append(unit)
    return self.execute_command('GEODIST', *pieces)
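A usage sketch against a local Redis server, assuming redis-py 3.x whose geoadd takes flat longitude/latitude/member triples (the signature changed in later versions):

import redis

r = redis.Redis()
r.geoadd('Sicily', 13.361389, 38.115556, 'Palermo')
r.geoadd('Sicily', 15.087269, 37.502669, 'Catania')
print(r.geodist('Sicily', 'Palermo', 'Catania', unit='km'))  # ~166.27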
python
def export_model(self, export_formats, export_dir=None):
    """Exports model based on export_formats.

    Subclasses should override _export_model() to actually
    export model to local directory.

    Args:
        export_formats (list): List of formats that should be exported.
        export_dir (str): Optional dir to place the exported model.
            Defaults to self.logdir.

    Return:
        A dict that maps ExportFormats to successfully exported models.
    """
    export_dir = export_dir or self.logdir
    return self._export_model(export_formats, export_dir)
java
public void killWorkers(Set<ContainerPlan> containers) { for (ContainerPlan container : containers) { LOG.log(Level.INFO, "Find and kill container for worker {0}", container.getId()); Optional<HeronWorker> worker = multiKeyWorkerMap.lookupByWorkerId(container.getId()); if (worker.isPresent()) { LOG.log(Level.INFO, "Killing container {0} for worker {1}", new Object[]{worker.get().evaluator.getId(), worker.get().workerId}); AllocatedEvaluator evaluator = multiKeyWorkerMap.detachEvaluatorAndRemove(worker.get()); evaluator.close(); } else { LOG.log(Level.WARNING, "Did not find worker for {0}", container.getId()); } containerPlans.remove(container.getId()); } }
java
public final Cache2kBuilder<K, V> name(Class<?> _class, String _fieldName) { if (_fieldName == null) { throw new NullPointerException(); } config().setName(_class.getName() + "." + _fieldName); return this; }
python
async def _reap_payloads(self):
    """Clean up all finished payloads"""
    for task in self._tasks.copy():
        if task.done():
            self._tasks.remove(task)
            if task.exception() is not None:
                raise task.exception()
    await asyncio.sleep(0)
python
def suspended_updates():
    """
    This allows you to postpone updates to all the search indexes
    inside of a with:

        with suspended_updates():
            model1.save()
            model2.save()
            model3.save()
            model4.delete()
    """
    if getattr(local_storage, "bulk_queue", None) is None:
        local_storage.bulk_queue = defaultdict(list)

    try:
        yield
    finally:
        for index, items in local_storage.bulk_queue.items():
            index.bulk(chain(*items))
        local_storage.bulk_queue = None
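(In the source project this generator is presumably wrapped with contextlib.contextmanager.) A self-contained sketch of the same queue-and-flush pattern, with hypothetical names:

from contextlib import contextmanager

pending = []  # stand-in for local_storage.bulk_queue

@contextmanager
def suspend_writes(flush):
    """Queue writes inside the block, then flush them once on exit."""
    try:
        yield pending.append
    finally:
        flush(list(pending))  # one bulk call instead of many small ones
        pending.clear()

flushed = []
with suspend_writes(flushed.extend) as write:
    write("doc1")
    write("doc2")
print(flushed)  # ['doc1', 'doc2']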
java
public final void dispatchCharactersEvents(int nodeHandle, ContentHandler ch, boolean normalize) throws SAXException { int identity = makeNodeIdentity(nodeHandle); if (identity == DTM.NULL) return; int type = _type2(identity); if (type == DTM.ELEMENT_NODE || type == DTM.DOCUMENT_NODE) { int startNode = identity; identity = _firstch2(identity); if (DTM.NULL != identity) { int offset = -1; int length = 0; do { type = _exptype2(identity); if (type == DTM.TEXT_NODE || type == DTM.CDATA_SECTION_NODE) { int dataIndex = m_dataOrQName.elementAt(identity); if (dataIndex >= 0) { if (-1 == offset) { offset = dataIndex >>> TEXT_LENGTH_BITS; } length += dataIndex & TEXT_LENGTH_MAX; } else { if (-1 == offset) { offset = m_data.elementAt(-dataIndex); } length += m_data.elementAt(-dataIndex + 1); } } identity++; } while (_parent2(identity) >= startNode); if (length > 0) { if(normalize) m_chars.sendNormalizedSAXcharacters(ch, offset, length); else m_chars.sendSAXcharacters(ch, offset, length); } } } else if (DTM.TEXT_NODE == type || DTM.CDATA_SECTION_NODE == type) { int dataIndex = m_dataOrQName.elementAt(identity); if (dataIndex >= 0) { if (normalize) m_chars.sendNormalizedSAXcharacters(ch, dataIndex >>> TEXT_LENGTH_BITS, dataIndex & TEXT_LENGTH_MAX); else m_chars.sendSAXcharacters(ch, dataIndex >>> TEXT_LENGTH_BITS, dataIndex & TEXT_LENGTH_MAX); } else { if (normalize) m_chars.sendNormalizedSAXcharacters(ch, m_data.elementAt(-dataIndex), m_data.elementAt(-dataIndex+1)); else m_chars.sendSAXcharacters(ch, m_data.elementAt(-dataIndex), m_data.elementAt(-dataIndex+1)); } } else { int dataIndex = m_dataOrQName.elementAt(identity); if (dataIndex < 0) { dataIndex = -dataIndex; dataIndex = m_data.elementAt(dataIndex + 1); } String str = (String)m_values.elementAt(dataIndex); if(normalize) FastStringBuffer.sendNormalizedSAXcharacters(str.toCharArray(), 0, str.length(), ch); else ch.characters(str.toCharArray(), 0, str.length()); } }
python
def ones_comp_sum16(num1: int, num2: int) -> int:
    """Calculates the 1's complement sum for 16-bit numbers.

    Args:
        num1: 16-bit number.
        num2: 16-bit number.

    Returns:
        The calculated result.
    """
    carry = 1 << 16
    result = num1 + num2
    return result if result < carry else result + 1 - carry
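A few illustrative calls, with values chosen to show the end-around carry used in Internet checksums:

print(ones_comp_sum16(0x0001, 0x0002))  # 3 -- no carry out of bit 15
print(ones_comp_sum16(0x8000, 0x8000))  # 1 -- carry wraps back into bit 0
print(ones_comp_sum16(0xFFFF, 0x0001))  # 1 -- 0xFFFF acts as negative zero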
java
public List<Integer> getConnectionRetries() { List<Integer> items = new ArrayList<Integer>(); for (int i = 0; i < 10; i++) { items.add(i); } return items; }
python
def is_complex(arg):
    '''
    is_complex(x) yields True if x is a complex numeric object and False
    otherwise.

    Note that this includes anything representable as a complex number such
    as an integer or a boolean value. In effect, this makes this function an
    alias for is_number(arg).
    '''
    return (is_complex(mag(arg)) if is_quantity(arg) else
            True if isinstance(arg, numbers.Complex) else
            is_npscalar(arg, 'complex') or is_npvalue(arg, 'complex'))
python
def cli(env):
    """List routing types."""
    mgr = SoftLayer.LoadBalancerManager(env.client)
    routing_methods = mgr.get_routing_methods()
    table = formatting.KeyValueTable(['ID', 'Name'])
    table.align['ID'] = 'l'
    table.align['Name'] = 'l'
    table.sortby = 'ID'
    for routing_method in routing_methods:
        table.add_row([routing_method['id'], routing_method['name']])
    env.fout(table)
python
def find_good(control_board, actuation_steps, resistor_index, start_index,
              end_index):
    '''
    Use a binary search over the range of provided actuation_steps to find
    the maximum actuation voltage that is measured by the board feedback
    circuit using the specified feedback resistor.
    '''
    lower = start_index
    upper = end_index
    while lower < upper - 1:
        index = lower + (upper - lower) // 2  # integer midpoint
        v = actuation_steps[index]
        control_board.set_waveform_voltage(v)
        data = measure_board_rms(control_board)
        valid_data = data[data['divider resistor index'] >= 0]

        if (valid_data['divider resistor index'] < resistor_index).sum():
            # We have some measurements from another resistor.
            upper = index
        else:
            lower = index
    control_board.set_waveform_voltage(actuation_steps[lower])
    data = measure_board_rms(control_board)
    return lower, data
java
@SuppressWarnings("nls") public static void writeProjectionFile( String filePath, String extention, CoordinateReferenceSystem crs ) throws IOException { /* * fill a prj file */ String prjPath = null; if (extention != null && filePath.toLowerCase().endsWith("." + extention)) { int dotLoc = filePath.lastIndexOf("."); prjPath = filePath.substring(0, dotLoc); prjPath = prjPath + ".prj"; } else { if (!filePath.endsWith(".prj")) { prjPath = filePath + ".prj"; } else { prjPath = filePath; } } try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(prjPath))) { bufferedWriter.write(crs.toWKT()); } }
python
def copy(self):
    """Returns a deep copy of the instance."""
    clone = self.__class__()
    clone.fields = self.fields.copy()
    for k in clone.fields:
        clone.fields[k] = self.get_field(k).do_copy(clone.fields[k])
    clone.default_fields = self.default_fields.copy()
    clone.overloaded_fields = self.overloaded_fields.copy()
    clone.overload_fields = self.overload_fields.copy()
    clone.underlayer = self.underlayer
    clone.explicit = self.explicit
    clone.raw_packet_cache = self.raw_packet_cache
    clone.post_transforms = self.post_transforms[:]
    clone.__dict__["payload"] = self.payload.copy()
    clone.payload.add_underlayer(clone)
    clone.time = self.time
    clone.sent_time = self.sent_time
    return clone
java
@Override public void writeRowsArray(List<Column> columns, Object[][] rows) throws IOException { for (Object[] currentRow : rows) { if (currentRow != null) { writeRow(columns, currentRow); } else { break; } } }
java
protected void onRemoved() { List<FilterChangeListener> listeners = getAllListeners(); for (FilterChangeListener listener : listeners) { listener.onRemove(this); } }
python
def compile_geometry(lat, lon, elev):
    """
    Take in lists of lat and lon coordinates, and determine what geometry
    to create

    :param list lat: Latitude values
    :param list lon: Longitude values
    :param float elev: Elevation value
    :return dict:
    """
    logger_excel.info("enter compile_geometry")
    lat = _remove_geo_placeholders(lat)
    lon = _remove_geo_placeholders(lon)

    # 4 coordinate values
    if len(lat) == 2 and len(lon) == 2:
        logger_excel.info("found 4 coordinates")
        geo_dict = geometry_linestring(lat, lon, elev)
        # # 4 coordinate values
        # if (lat[0] != lat[1]) and (lon[0] != lon[1]):
        #     geo_dict = geometry_polygon(lat, lon)
        # # 3 unique coordinates
        # else:
        #     geo_dict = geometry_multipoint(lat, lon)
        #
    # 2 coordinate values
    elif len(lat) == 1 and len(lon) == 1:
        logger_excel.info("found 2 coordinates")
        geo_dict = geometry_point(lat, lon, elev)
    # coordinate range. one value given but not the other.
    elif (None in lon and None not in lat) or (len(lat) > 0 and len(lon) == 0):
        geo_dict = geometry_range(lat, elev, "lat")
    elif (None in lat and None not in lon) or (len(lon) > 0 and len(lat) == 0):
        geo_dict = geometry_range(lat, elev, "lon")
    # Too many points, or no points
    else:
        geo_dict = {}
        logger_excel.warn("compile_geometry: invalid coordinates: "
                          "lat: {}, lon: {}".format(lat, lon))

    logger_excel.info("exit compile_geometry")
    return geo_dict
python
def expand_entry(entry, ignore_xs=0x0):
    """Turn all Xs which are not marked in `ignore_xs` into ``0``\ s and
    ``1``\ s.

    The following will expand any Xs in bits ``1..3``\ ::

        >>> entry = RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1100)
        >>> list(expand_entry(entry, 0xfffffff1)) == [
        ...     RoutingTableEntry(set(), 0b0100, 0xfffffff0 | 0b1110),  # 010X
        ...     RoutingTableEntry(set(), 0b0110, 0xfffffff0 | 0b1110),  # 011X
        ... ]
        True

    Parameters
    ----------
    entry : :py:class:`~rig.routing_table.RoutingTableEntry` or similar
        The entry to expand.
    ignore_xs : int
        Bit-mask of Xs which should not be expanded.

    Yields
    ------
    :py:class:`~rig.routing_table.RoutingTableEntry`
        Routing table entries which represent the original entry but with
        all Xs not masked off by `ignore_xs` replaced with 1s and 0s.
    """
    # Get all the Xs in the entry that are not ignored
    xs = (~entry.key & ~entry.mask) & ~ignore_xs

    # Find the most significant X
    for bit in (1 << i for i in range(31, -1, -1)):
        if bit & xs:
            # Yield all the entries with this bit set as 0
            entry_0 = RoutingTableEntry(entry.route, entry.key,
                                        entry.mask | bit, entry.sources)
            for new_entry in expand_entry(entry_0, ignore_xs):
                yield new_entry

            # And yield all the entries with this bit set as 1
            entry_1 = RoutingTableEntry(entry.route, entry.key | bit,
                                        entry.mask | bit, entry.sources)
            for new_entry in expand_entry(entry_1, ignore_xs):
                yield new_entry

            # Stop looking for Xs
            break
    else:
        # If there are no Xs then yield the entry we were given.
        yield entry
python
def sync_state(self):
    """
    Syncs the internal Pybullet robot state to the joint positions of
    the robot being controlled.
    """
    # sync IK robot state to the current robot joint positions
    self.sync_ik_robot(self.robot_jpos_getter())

    # make sure target pose is up to date
    self.ik_robot_target_pos, self.ik_robot_target_orn = (
        self.ik_robot_eef_joint_cartesian_pose()
    )
java
public void setParseRobotsTxt(boolean parseRobotsTxt) { this.parseRobotsTxt = parseRobotsTxt; getConfig().setProperty(SPIDER_PARSE_ROBOTS_TXT, Boolean.toString(parseRobotsTxt)); }
java
public MethodHandle arrayAccess(VarHandle.AccessMode mode) { return invoke(MethodHandles.arrayElementVarHandle(type().parameterType(0)).toMethodHandle(mode)); }
python
def extract_string_pairs_in_directory(directory_path, extract_func, filter_func):
    """ Retrieves all string pairs in the directory

    Args:
        directory_path (str): The path of the directory containing the files
            to extract string pairs from.
        extract_func (function): Function for extracting the localization keys
            and comments from the files. The extract function receives 2
            parameters:
            - dict that the keys (a key in the dict) and comments (a value in
              the dict) are added to.
            - str representing file path
        filter_func (function): Function for filtering files in the directory.
            The filter function receives the file name and returns a bool
            representing the filter result. True if the file name passed the
            filter, False otherwise.

    Returns:
        dict: A mapping between string pairs first value (probably the key),
        and the second value (probably the comment).
    """
    result = {}
    for root, dirnames, filenames in os.walk(directory_path):
        for file_name in filenames:
            if filter_func(file_name):
                file_path = os.path.join(root, file_name)
                try:
                    extract_func(result, file_path)
                except Exception as e:
                    print "Error in file " + file_name
                    print e
    return result
python
def libvlc_media_save_meta(p_md):
    '''Save the meta previously set.
    @param p_md: the media descriptor.
    @return: true if the write operation was successful.
    '''
    f = _Cfunctions.get('libvlc_media_save_meta', None) or \
        _Cfunction('libvlc_media_save_meta', ((1,),), None,
                   ctypes.c_int, Media)
    return f(p_md)
java
public NotificationChain basicSetLeft(RuleElement newLeft, NotificationChain msgs) { RuleElement oldLeft = left; left = newLeft; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, SimpleAntlrPackage.UNTIL_ELEMENT__LEFT, oldLeft, newLeft); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; }
python
def serialize_payload(self):
    """
    Serializes the marker output data into a payload buffer.

    :return: The serialized payload.
    :rtype: bytes
    """
    with io.BytesIO() as stream:
        stream.write(self.OPEN_ASSETS_TAG)
        bitcoin.core.VarIntSerializer.stream_serialize(
            len(self.asset_quantities), stream)
        for asset_quantity in self.asset_quantities:
            stream.write(self.leb128_encode(asset_quantity))
        bitcoin.core.VarIntSerializer.stream_serialize(
            len(self.metadata), stream)
        stream.write(self.metadata)
        return stream.getvalue()
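For reference, an unsigned LEB128 encoder along the lines of the leb128_encode helper used above might look like this (a sketch, not necessarily the library's actual implementation):

def leb128_encode(value):
    """Encode a non-negative integer as unsigned LEB128."""
    out = bytearray()
    while True:
        byte = value & 0x7F          # low 7 bits
        value >>= 7
        if value:
            out.append(byte | 0x80)  # continuation bit: more bytes follow
        else:
            out.append(byte)
            return bytes(out)

assert leb128_encode(300) == b'\xac\x02'  # standard LEB128 test vector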
python
def warnify(self, message, duration=3000, notification_clicked_slot=None, **kwargs):
    """
    Displays an Application notification warning.

    :param message: Notification message.
    :type message: unicode
    :param duration: Notification display duration.
    :type duration: int
    :param notification_clicked_slot: Notification clicked slot.
    :type notification_clicked_slot: object
    :param \*\*kwargs: Keywords arguments.
    :type \*\*kwargs: \*\*
    :return: Method success.
    :rtype: bool
    """
    return self.notify(message,
                       duration,
                       notification_clicked_slot,
                       message_level="Warning",
                       color=QColor(220, 128, 64),
                       background_color=QColor(32, 32, 32),
                       border_color=QColor(220, 128, 64),
                       **kwargs)
python
def basic_cancel(self, consumer_tag, nowait=False):
    """End a queue consumer

    This method cancels a consumer.  This does not affect already
    delivered messages, but it does mean the server will not send
    any more messages for that consumer.  The client may receive an
    arbitrary number of messages in between sending the cancel
    method and receiving the cancel-ok reply.

    RULE:

        If the queue no longer exists when the client sends a
        cancel command, or the consumer has been cancelled for
        other reasons, this command has no effect.

    PARAMETERS:

        consumer_tag: shortstr

            consumer tag

            Identifier for the consumer, valid within the current
            connection.

            RULE:

                The consumer tag is valid only within the channel
                from which the consumer was created. I.e. a client
                MUST NOT create a consumer in one channel and then
                use it in another.

        nowait: boolean

            do not send a reply method

            If set, the server will not respond to the method. The
            client should not wait for a reply method.  If the
            server could not complete the method it will raise a
            channel or connection exception.

    """
    if self.connection is not None:
        self.no_ack_consumers.discard(consumer_tag)
        args = AMQPWriter()
        args.write_shortstr(consumer_tag)
        args.write_bit(nowait)
        self._send_method((60, 30), args)
        return self.wait(allowed_methods=[
            (60, 31),  # Channel.basic_cancel_ok
        ])
python
def get_historical_data(fsym, tsym, freq, info='full', e='all',
                        try_conversion=True, aggregate=1, limit=1440,
                        to_ts=False):
    """Get minute-by-minute historical price and volume information for
    the requested currency pair. Available data is limited to the last 7
    days.

    Args:
        fsym: FROM symbol.
        tsym: TO symbol.
        freq: Frequency of the data. Can be set to 'minute', 'hour' or
            'day'.
        info: Select price or volume information to return. Default of
            'full' returns all of them. Can be set to 'high', 'low',
            'open', 'close', 'volumefrom', and 'volumeto' or a list
            containing several of those values.
        e: Default returns average price across all exchanges. Can be set
            to the name of a single exchange.
        try_conversion: If the crypto does not trade directly into the
            toSymbol requested, BTC will be used for conversion. If set to
            false, it will try to get values without using any conversion
            at all.
        aggregate: Aggregates the minute prices into bins of the specified
            size.
        limit: Number of minute prices. The limit settings depend on the
            freq selected:
                minute: default = 1440, min = 1, max = 2000
                hour: default = 168, min = 1, max = 2000
                day: default = 30, min = 1, max = 2000
            Using aggregate reduces the maximum number of points that can
            be returned by a factor equal to the chosen bin size.
        to_ts: Unix timestamp of the last data point to return (optional).

    Returns:
        List of dictionaries containing the price and volume information
        for each requested tick.
        [{'time': ..., 'close': ..., 'high': ..., 'low': ..., 'open': ...,
          'volumefrom': ..., 'volumeto': ...},
         {...},
         ...]
    """

    # load data
    url = build_url(freq, fsym=fsym, tsym=tsym, freq=freq, e=e,
                    try_conversion=try_conversion, aggregate=aggregate,
                    limit=limit, to_ts=to_ts)
    data = load_data(url)
    data = data['Data']

    # convert timestamps to nice date format
    for d in data:
        d['time'] = timestamp_to_date(d['time'])

    # convert single input info to single element list
    if not isinstance(info, list):
        info = [info]

    # select information to return
    if info[0] == 'full':
        return data
    else:
        for d in data:
            for k, v in list(d.items()):
                if k not in info and k != 'time':
                    del d[k]
        return data
python
def retrieve(self, request, project, pk=None):
    """
    Retrieve a bug-job-map entry. pk is a composite key in the form
    bug_id-job_id
    """
    job_id, bug_id = map(int, pk.split("-"))
    job = Job.objects.get(repository__name=project, id=job_id)
    try:
        bug_job_map = BugJobMap.objects.get(job=job, bug_id=bug_id)
        serializer = BugJobMapSerializer(bug_job_map)
        return Response(serializer.data)
    except BugJobMap.DoesNotExist:
        return Response("Object not found", status=HTTP_404_NOT_FOUND)
python
def normalize(self, mag=1.):
    """Normalize a Channel, set `null` to 0 and the mag to given value.

    Parameters
    ----------
    mag : float (optional)
        New value of mag. Default is 1.
    """
    def f(dataset, s, null, mag):
        dataset[s] -= null
        dataset[s] /= mag

    if self.signed:
        mag = self.mag() / mag
    else:
        mag = self.max() / mag
    self.chunkwise(f, null=self.null, mag=mag)
    self._null = 0
java
public boolean cancelAll(boolean mayInterruptIfRunning) { boolean res = true; if (N.notNullOrEmpty(upFutures)) { for (ContinuableFuture<?> preFuture : upFutures) { res = res & preFuture.cancelAll(mayInterruptIfRunning); } } return cancel(mayInterruptIfRunning) && res; }
python
def copy(self):
    """
    Returns a "T" (tee) copy of the given stream, allowing the calling
    stream to continue being used.
    """
    a, b = it.tee(self._data)  # 2 generators, not thread-safe
    self._data = a
    return Stream(b)
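The underlying itertools.tee behaviour, shown independent of the Stream wrapper:

import itertools as it

gen = (x * x for x in range(5))
a, b = it.tee(gen)
print(list(a))  # [0, 1, 4, 9, 16]
print(list(b))  # [0, 1, 4, 9, 16] -- both tees replay the same items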
python
def add_marker_to_qtls(qtlfile, mapfile, outputfile='qtls_with_mk.csv'):
    """This function adds to a list of QTLs, the closest marker to the
    QTL peak.

    :arg qtlfile: a CSV list of all the QTLs found.
        The file should be structured as follow::

            Trait, Linkage group, position, other columns

        The other columns will not matter as long as the first three
        columns are as such.
    :arg mapfile: a CSV representation of the map used for the QTL
        mapping analysis.
        The file should be structured as follow::

            Marker, Linkage group, position

    :kwarg outputfile: the name of the output file in which the list of
        QTLs with their closest marker will be written.
    """
    qtl_list = read_input_file(qtlfile, ',')
    map_list = read_input_file(mapfile, ',')
    if not qtl_list or not map_list:  # pragma: no cover
        return
    qtl_list[0].append('Closest marker')
    qtls = []
    qtls.append(qtl_list[0])
    for qtl in qtl_list[1:]:
        qtl.append(add_marker_to_qtl(qtl, map_list))
        qtls.append(qtl)
    LOG.info('- %s QTLs processed in %s' % (len(qtls), qtlfile))
    write_matrix(outputfile, qtls)
python
def py3round(number):
    """Unified rounding in all python versions."""
    if abs(round(number) - number) == 0.5:
        return int(2.0 * round(number / 2.0))
    return int(round(number))
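A few illustrative calls, showing how halfway cases round to the nearest even integer on every Python version:

print(py3round(0.5))  # 0
print(py3round(1.5))  # 2
print(py3round(2.5))  # 2
print(py3round(2.4))  # 2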
java
private int adjustDstOffsetForInvalidWallClock( long standardTimeInZone, TimeZone zone, int dstOffset) { if (dstOffset != 0) { // If applying the DST offset produces a time that is outside DST then it must be // an invalid wall clock so clear the DST offset to avoid that happening. if (!zone.inDaylightTime(new Date(standardTimeInZone - dstOffset))) { dstOffset = 0; } } return dstOffset; }
python
def create_properties(self):  # pylint: disable=no-self-use
    """
    Format the properties with which to instantiate the connection.
    This acts like a user agent over HTTP.

    :rtype: dict
    """
    properties = {}
    properties["product"] = "eventhub.python"
    properties["version"] = __version__
    properties["framework"] = "Python {}.{}.{}".format(*sys.version_info[0:3])
    properties["platform"] = sys.platform
    return properties
java
private Object batchInsertAfterReordering(final String statementName, final Object parameterObject) { Set<String> keys = new HashSet<String>(); keys.add(getDefaultDataSourceName()); keys.addAll(getCobarDataSourceService().getDataSources().keySet()); final CobarMRBase mrbase = new CobarMRBase(keys); ExecutorService executor = createCustomExecutorService(Runtime.getRuntime() .availableProcessors(), "batchInsertAfterReordering"); try { final StringBuffer exceptionStaktrace = new StringBuffer(); Collection<?> paramCollection = ((BatchInsertTask) parameterObject).getEntities(); final CountDownLatch latch = new CountDownLatch(paramCollection.size()); Iterator<?> iter = paramCollection.iterator(); while (iter.hasNext()) { final Object entity = iter.next(); Runnable task = new Runnable() { public void run() { try { SortedMap<String, DataSource> dsMap = lookupDataSourcesByRouter( statementName, entity); if (MapUtils.isEmpty(dsMap)) { logger .info( "can't find routing rule for {} with parameter {}, so use default data source for it.", statementName, entity); mrbase.emit(getDefaultDataSourceName(), entity); } else { if (dsMap.size() > 1) { throw new IllegalArgumentException( "unexpected routing result, found more than 1 target data source for current entity:" + entity); } mrbase.emit(dsMap.firstKey(), entity); } } catch (Throwable t) { exceptionStaktrace.append(ExceptionUtils.getFullStackTrace(t)); } finally { latch.countDown(); } } }; executor.execute(task); } try { latch.await(); } catch (InterruptedException e) { throw new ConcurrencyFailureException( "unexpected interruption when re-arranging parameter collection into sub-collections ", e); } if (exceptionStaktrace.length() > 0) { throw new ConcurrencyFailureException( "unpected exception when re-arranging parameter collection, check previous log for details.\n" + exceptionStaktrace); } } finally { executor.shutdown(); } List<ConcurrentRequest> requests = new ArrayList<ConcurrentRequest>(); for (Map.Entry<String, List<Object>> entity : mrbase.getResources().entrySet()) { final List<Object> paramList = entity.getValue(); if (CollectionUtils.isEmpty(paramList)) { continue; } String identity = entity.getKey(); final DataSource dataSourceToUse = findDataSourceToUse(entity.getKey()); final SqlMapClientCallback callback = new SqlMapClientCallback() { public Object doInSqlMapClient(SqlMapExecutor executor) throws SQLException { return executor.insert(statementName, paramList); } }; ConcurrentRequest request = new ConcurrentRequest(); request.setDataSource(dataSourceToUse); request.setAction(callback); request.setExecutor(getDataSourceSpecificExecutors().get(identity)); requests.add(request); } return getConcurrentRequestProcessor().process(requests); }
java
public int getStartingID() { int iStartingID = 1; // (default) if (this.getProperty(STARTING_ID) != null) { try { iStartingID = Integer.parseInt(this.getProperty(STARTING_ID)); } catch (NumberFormatException e) { iStartingID = 1; } } return iStartingID; }
java
public List<CmsResource> getMatchedResources() { if (m_replace) { // re-read the resources to include changes List<CmsResource> result = new ArrayList<CmsResource>(); for (CmsResource resource : m_matchedResources) { try { result.add(getCms().readResource(resource.getStructureId())); } catch (CmsException e) { LOG.error(e.getLocalizedMessage(), e); } } return result; } else { return new ArrayList<CmsResource>(m_matchedResources); } }
java
public com.cloudant.client.api.model.Response save(Object object, int writeQuorum) {
        Response couchDbResponse = client.couchDbClient.put(getDBUri(), object, true, writeQuorum);
        return new com.cloudant.client.api.model.Response(couchDbResponse);
    }
python
def windowed(seq, n, fillvalue=None, step=1): """Return a sliding window of width *n* over the given iterable. >>> all_windows = windowed([1, 2, 3, 4, 5], 3) >>> list(all_windows) [(1, 2, 3), (2, 3, 4), (3, 4, 5)] When the window is larger than the iterable, *fillvalue* is used in place of missing values:: >>> list(windowed([1, 2, 3], 4)) [(1, 2, 3, None)] Each window will advance in increments of *step*: >>> list(windowed([1, 2, 3, 4, 5, 6], 3, fillvalue='!', step=2)) [(1, 2, 3), (3, 4, 5), (5, 6, '!')] To slide into the iterable's items, use :func:`chain` to add filler items to the left: >>> iterable = [1, 2, 3, 4] >>> n = 3 >>> padding = [None] * (n - 1) >>> list(windowed(chain(padding, iterable), 3)) [(None, None, 1), (None, 1, 2), (1, 2, 3), (2, 3, 4)] """ if n < 0: raise ValueError('n must be >= 0') if n == 0: yield tuple() return if step < 1: raise ValueError('step must be >= 1') it = iter(seq) window = deque([], n) append = window.append # Initial deque fill for _ in range(n): append(next(it, fillvalue)) yield tuple(window) # Appending new items to the right causes old items to fall off the left i = 0 for item in it: append(item) i = (i + 1) % step if i % step == 0: yield tuple(window) # If there are items from the iterable in the window, pad with the given # value and emit them. if (i % step) and (step - i < n): for _ in range(step - i): append(fillvalue) yield tuple(window)
java
@Override public View focusSearch(View focused, int direction) { View result = mLayout.onInterceptFocusSearch(focused, direction); if (result != null) { return result; } final FocusFinder ff = FocusFinder.getInstance(); result = ff.findNextFocus(this, focused, direction); if (result == null && mAdapter != null) { eatRequestLayout(); result = mLayout.onFocusSearchFailed(focused, direction, mRecycler, mState); resumeRequestLayout(false); } return result != null ? result : super.focusSearch(focused, direction); }
java
public static <T, R> Function<T, R> monitor(Function<T, R> function, AtomicLong calls) { return new MonitoringFunction<>(function, calls); }
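MonitoringFunction itself is not shown above; the following is a minimal sketch of what such a wrapper presumably looks like, assuming it only counts invocations before delegating (the class body below is hypothetical):

import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;

// Sketch only: count each call on the shared AtomicLong, then delegate.
final class MonitoringFunction<T, R> implements Function<T, R> {
    private final Function<T, R> delegate;
    private final AtomicLong calls;

    MonitoringFunction(Function<T, R> delegate, AtomicLong calls) {
        this.delegate = delegate;
        this.calls = calls;
    }

    @Override
    public R apply(T t) {
        calls.incrementAndGet(); // record the invocation
        return delegate.apply(t);
    }
}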
python
def export_text(self, filename=None):
        """
        Export in text format. Optionally provide a filename to save to.

        :param str filename: path including filename (optional)
        :return: the exported content when no filename is given, otherwise None
        """
        result = self.make_request(
            resource='export',
            params={'format': 'txt'},
            filename=filename,
            raw_result=True,
            headers={'accept': 'text/plain'})
        if not filename:
            return result.content
java
public static final byte[] toAsciiBytes(final String value) { byte[] result = new byte[value.length()]; for (int i = 0; i < value.length(); i++) { result[i] = (byte) value.charAt(i); } return result; }
java
public String format( String key, Object[] args ) { try { String pattern = getPatternString( key ); StringBuffer buff = new StringBuffer( key.length() + SPAN ); MessageFormat messFormat = new MessageFormat( pattern, m_locale ); messFormat.format( args, buff, null ); String result = buff.toString(); buff.setLength( 0 ); return result; } catch( MissingResourceException mre ) { StringBuffer sb = new StringBuffer(); sb.append( "Unknown resource. Bundle: '" ); sb.append( m_baseName ); sb.append( "' Key: '" ); sb.append( key ); sb.append( "' Args: '" ); for( int i = 0; i < args.length; i++ ) { if( 0 != i ) { sb.append( "', '" ); } sb.append( args[ i ] ); } sb.append( "' Reason: " ); sb.append( mre ); return sb.toString(); } }
java
public static List<String> getTypeParameters(final String type) { if (type.charAt(0) != 'L') return emptyList(); int lastStart = type.indexOf('<') + 1; final List<String> parameters = new ArrayList<>(); if (lastStart > 0) { int depth = 0; for (int i = lastStart; i < type.length() - 2; i++) { final char c = type.charAt(i); if (c == '<') depth++; else if (c == '>') depth--; else if (c == ';' && depth == 0) { parameters.add(type.substring(lastStart, i + 1)); lastStart = i + 1; } } } return parameters; }
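A hedged usage sketch of the method above (the enclosing class is not shown, and the descriptor below is an assumed example of a JVM-style generic signature):

// The region between '<' and the trailing '>;' is split at top-level ';' boundaries:
List<String> params = getTypeParameters("Ljava/util/Map<Ljava/lang/String;Ljava/lang/Integer;>;");
// params -> ["Ljava/lang/String;", "Ljava/lang/Integer;"]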
java
@SuppressWarnings("PMD.AvoidLiteralsInIfCondition") public static String toStringTypes(final Type[] types, final String separator, final Map<String, Type> generics) { final String res; if (types.length == 0) { res = ""; } else if (types.length == 1) { // only one argument res = toStringType(types[0], generics); } else { final StringBuilder buf = new StringBuilder(types.length * 20); boolean first = true; for (Type type : types) { if (!first) { buf.append(separator); } buf.append(toStringType(type, generics)); first = false; } res = buf.toString(); } return res; }
python
def QA_util_random_with_topic(topic='Acc', lens=8):
    """
    Generate a random id for the given topic (e.g. an account id):
    topic + '_' + `lens` characters sampled from upper/lowercase letters and digits.
    """
    _list = ([chr(i) for i in range(65, 91)]
             + [chr(i) for i in range(97, 123)]
             + [str(i) for i in range(10)])
    num = random.sample(_list, lens)
    return '{}_{}'.format(topic, ''.join(num))
python
def add_raw_data(self, data, attrs):
        """ Add raw data for a read.

        :param data: The raw data DAQ values (16 bit integers).
        :param attrs: Attributes to write to the Raw group.

        The read must already exist in the file and must not already have
        raw data.
        """
        self.assert_writeable()
        if "Raw" not in self.handle:
            self.handle.create_group("Raw")
        if "Signal" in self.handle['Raw']:
            msg = "Fast5 file already has raw data for read '{}' in: {}"
            raise KeyError(msg.format(self.read_id, self.filename))
        self.handle['Raw'].create_dataset('Signal', data=data, compression='gzip',
                                          shuffle=True, dtype='i2')
        self._add_attributes("Raw", attrs, clear=True)
python
def relative_deviation(h1, h2): # 18 us @array, 42 us @list w/ 100 bins
    r"""
    Calculate the deviation between two histograms.

    The relative deviation between two histograms :math:`H` and :math:`H'` of size
    :math:`M` is defined as:

    .. math::

        d_{rd}(H, H') =
            \frac{
                \sqrt{\sum_{m=1}^M(H_m - H'_m)^2}
            }{
                \frac{1}{2}
                \left(
                    \sqrt{\sum_{m=1}^M H_m^2} +
                    \sqrt{\sum_{m=1}^M {H'}_m^2}
                \right)
            }

    *Attributes:*

    - semimetric (triangle equation satisfied?)

    *Attributes for normalized histograms:*

    - :math:`d(H, H')\in[0, \sqrt{2}]`
    - :math:`d(H, H) = 0`
    - :math:`d(H, H') = d(H', H)`

    *Attributes for not-normalized histograms:*

    - :math:`d(H, H')\in[0, 2]`
    - :math:`d(H, H) = 0`
    - :math:`d(H, H') = d(H', H)`

    *Attributes for not-equal histograms:*

    - not applicable

    Parameters
    ----------
    h1 : sequence
        The first histogram.
    h2 : sequence
        The second histogram, same bins as ``h1``.

    Returns
    -------
    relative_deviation : float
        Relative deviation between the two histograms.
    """
    h1, h2 = __prepare_histogram(h1, h2)
    numerator = math.sqrt(scipy.sum(scipy.square(h1 - h2)))
    denominator = (math.sqrt(scipy.sum(scipy.square(h1))) +
                   math.sqrt(scipy.sum(scipy.square(h2)))) / 2.
    return numerator / denominator
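A minimal worked check of the formula above, using plain Python lists in place of the module's __prepare_histogram handling (this snippet is illustrative, not part of the original module):

import math

# Two disjoint normalized histograms should hit the stated upper bound sqrt(2).
h1 = [1.0, 0.0]
h2 = [0.0, 1.0]
numerator = math.sqrt(sum((a - b) ** 2 for a, b in zip(h1, h2)))
denominator = (math.sqrt(sum(a * a for a in h1))
               + math.sqrt(sum(b * b for b in h2))) / 2.0
assert abs(numerator / denominator - math.sqrt(2)) < 1e-12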
python
def save_all_figures_todir(self, dirname):
        """Save all figures to dirname and return the list of file names."""
        fignames = []
        for thumbnail in self._thumbnails:
            fig = thumbnail.canvas.fig
            fmt = thumbnail.canvas.fmt
            fext = {'image/png': '.png',
                    'image/jpeg': '.jpg',
                    'image/svg+xml': '.svg'}[fmt]
            figname = get_unique_figname(dirname, 'Figure', fext)
            save_figure_tofile(fig, fmt, figname)
            fignames.append(figname)
        return fignames
java
private void writeCompileOptions(final Writer writer, final boolean isDebug, final String baseDir, final CommandLineCompilerConfiguration compilerConfig) throws IOException { final StringBuffer baseOptions = new StringBuffer(50); baseOptions.append("# ADD BASE CPP"); final StringBuffer options = new StringBuffer(50); options.append("# ADD CPP"); final File[] includePath = compilerConfig.getIncludePath(); for (final File element : includePath) { options.append(" /I \""); final String relPath = CUtil.getRelativePath(baseDir, element); options.append(CUtil.toWindowsPath(relPath)); options.append('"'); } final Hashtable<String, String> optionMap = new Hashtable<>(); if (isDebug) { // // release options that should be mapped to debug counterparts // optionMap.put("/MT", "/MTd"); optionMap.put("/ML", "/MLd"); optionMap.put("/MD", "/MDd"); optionMap.put("/O2", "/Od"); optionMap.put("/O3", "/Od"); } else { // // debug options that should be mapped to release counterparts // optionMap.put("/MTD", "/MT"); optionMap.put("/MLD", "/ML"); optionMap.put("/MDD", "/MD"); optionMap.put("/GM", ""); optionMap.put("/ZI", ""); optionMap.put("/OD", "/O2"); optionMap.put("/GZ", ""); } final String[] preArgs = compilerConfig.getPreArguments(); for (final String preArg : preArgs) { if (preArg.startsWith("/D")) { options.append(" /D "); baseOptions.append(" /D "); final String body = preArg.substring(2); if (preArg.indexOf('=') >= 0) { options.append(body); baseOptions.append(body); } else { final StringBuffer buf = new StringBuffer("\""); if ("NDEBUG".equals(body) || "_DEBUG".equals(body)) { if (isDebug) { buf.append("_DEBUG"); } else { buf.append("NDEBUG"); } } else { buf.append(body); } buf.append("\""); options.append(buf); baseOptions.append(buf); } } else if (!preArg.startsWith("/I")) { String option = preArg; final String key = option.toUpperCase(Locale.US); if (optionMap.containsKey(key)) { option = optionMap.get(key); } options.append(" "); options.append(option); baseOptions.append(" "); baseOptions.append(option); } } baseOptions.append("\r\n"); options.append("\r\n"); writer.write(baseOptions.toString()); writer.write(options.toString()); }