language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def _set_zone(self, v, load=False):
  """
  Setter method for zone, mapped from YANG variable
  /zoning/defined_configuration/zone (list).

  If this variable is read-only (config: false) in the source YANG file,
  then _set_zone is considered as a private method. Backends looking to
  populate this variable should do so via calling thisObj._set_zone()
  directly.

  :param v: the new value for the zone list
  :param load: unused here; conventionally indicates the value is being
      loaded from a backing datastore rather than set by a user
  :raises ValueError: if ``v`` cannot be coerced to the generated YANG
      list type (the original TypeError/ValueError is swallowed and
      re-raised with a descriptive payload)
  """
  # Backends may pass a wrapped value; unwrap it to its user type first.
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    # Coerce the value into the generated dynamic YANG list class; a
    # failure here means the value is incompatible with the list schema.
    t = YANGDynClass(v,base=YANGListType("zone_name",zone.zone, yang_name="zone", rest_name="zone", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='zone-name', extensions={u'tailf-common': {u'info': u'List of defined Zones', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'zone_defined_zone'}}), is_container='list', yang_name="zone", rest_name="zone", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'List of defined Zones', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'zone_defined_zone'}}, namespace='urn:brocade.com:mgmt:brocade-zone', defining_module='brocade-zone', yang_type='list', is_config=True)
  except (TypeError, ValueError):
    # Re-raise with the full generated-type description so callers can see
    # exactly what schema the value failed to satisfy.
    raise ValueError({
        'error-string': """zone must be of a type compatible with list""",
        'defined-type': "list",
        'generated-type': """YANGDynClass(base=YANGListType("zone_name",zone.zone, yang_name="zone", rest_name="zone", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='zone-name', extensions={u'tailf-common': {u'info': u'List of defined Zones', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'zone_defined_zone'}}), is_container='list', yang_name="zone", rest_name="zone", parent=self, 
path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'List of defined Zones', u'cli-no-key-completion': None, u'cli-full-no': None, u'cli-suppress-key-abbreviation': None, u'cli-full-command': None, u'callpoint': u'zone_defined_zone'}}, namespace='urn:brocade.com:mgmt:brocade-zone', defining_module='brocade-zone', yang_type='list', is_config=True)""",
      })
  # Store under the name-mangled attribute used by the generated class.
  self.__zone = t
  # Notify the parent container (if it supports it) that a child changed.
  if hasattr(self, '_set'):
    self._set()
python
def unload_module(self, module_name):
    """Unload the specified module, if it is loaded.

    :param module_name: Name of the module to unload.
    :returns: True if the module was stopped and removed, False if no
        module by that name is currently loaded.
    """
    module = self.loaded_modules.get(module_name)
    if not module:
        # Fixed typo in the log message: "non-existant" -> "non-existent".
        _log.warning("Ignoring request to unload non-existent module '%s'",
                     module_name)
        return False
    # reloading=False signals a permanent shutdown rather than a hot reload.
    module.stop(reloading=False)
    del self.loaded_modules[module_name]
    self.module_ordering.remove(module_name)
    return True
python
async def next(self, count=None):
    """Load the next (newer) page/batch of events.

    :param count: A limit on the number of events to fetch. By default the
        limit is the same as that specified for this set of events.
    """
    # No "next" URI means there is nothing newer; return an empty slice of
    # this collection so callers can treat the result uniformly.
    if self._next_uri is None:
        return self[:0]
    return self._fetch(self._next_uri, count)
java
/**
 * Creates a module bound to the given client binding annotation.
 *
 * @param clientBindingAnnotation annotation type identifying the client;
 *        must be a valid binding annotation
 * @return a new {@code ObjectMapperClientFeatureModule} for that annotation
 */
public static ObjectMapperClientFeatureModule with(
        Class<? extends Annotation> clientBindingAnnotation) {
    // Fail fast on anything that is not a proper binding annotation.
    BindingAnnotations.checkIsBindingAnnotation(clientBindingAnnotation);
    final ObjectMapperClientFeatureModule module =
            new ObjectMapperClientFeatureModule(clientBindingAnnotation);
    return module;
}
java
/**
 * Collects the resource type configurations for this level, combining
 * copies of the parent configuration's types with this configuration's own
 * types, optionally rewriting base paths and filtering disabled entries.
 *
 * @param filterDisabled if true, disabled type configurations are removed
 *        from the result
 * @return the combined list of resource type configurations
 */
protected List<CmsResourceTypeConfig> internalGetResourceTypes(boolean filterDisabled) {

    CmsADEConfigData parentData = parent();
    // Start from (copies of) the parent's unfiltered types; empty when
    // there is no parent configuration.
    List<CmsResourceTypeConfig> inheritedTypes = Lists.newArrayList();
    if (parentData != null) {
        for (CmsResourceTypeConfig parentType : parentData.internalGetResourceTypes(false)) {
            inheritedTypes.add(parentType.copy(m_data.isDiscardInheritedTypes()));
        }
    }
    List<CmsResourceTypeConfig> result = combineConfigurationElements(
        inheritedTypes,
        m_data.getOwnResourceTypes(),
        true);
    if (m_data.isCreateContentsLocally()) {
        // Point every type at this configuration's local content folder.
        String localContentPath = CmsStringUtil.joinPaths(
            m_data.getBasePath(),
            CmsADEManager.CONTENT_FOLDER_NAME);
        for (CmsResourceTypeConfig typeConfig : result) {
            typeConfig.updateBasePath(localContentPath);
        }
    }
    if (filterDisabled) {
        for (Iterator<CmsResourceTypeConfig> it = result.iterator(); it.hasNext();) {
            if (it.next().isDisabled()) {
                it.remove();
            }
        }
    }
    return result;
}
python
def spawn_radia(job, rna_bam, tumor_bam, normal_bam, univ_options, radia_options):
    """
    Spawn a radia child job for each chromosome, on the RNA and DNA.

    :param job: Toil job object (implicit first argument).
    :param rna_bam: Dict of input STAR bams; the
        'rnaAligned.sortedByCoord.out.bam' entry holds
        'rna_fix_pg_sorted.bam' and 'rna_fix_pg_sorted.bam.bai' JSids.
    :param tumor_bam: Dict with 'tumor_dna_fix_pg_sorted.bam' and
        'tumor_dna_fix_pg_sorted.bam.bai' JSids.
    :param normal_bam: Dict with 'normal_dna_fix_pg_sorted.bam' and
        'normal_dna_fix_pg_sorted.bam.bai' JSids.
    :param univ_options: Dict of universal arguments used by almost all
        tools (must contain 'patient'; 'dockerhub' is used downstream).
    :param radia_options: Dict of parameters specific to radia
        ('genome_fasta' and 'genome_fai' JSids).
    :returns: Dict mapping each chromosome name ('chr1'..'chr22', 'chrX',
        'chrY') to the promised return value of its run_radia child job.

    This module corresponds to node 11 on the tree.
    """
    job.fileStore.logToMaster('Running spawn_radia on %s' % univ_options['patient'])
    rna_bam_key = 'rnaAligned.sortedByCoord.out.bam'  # to reduce next line size
    bams = {'tumor_rna': rna_bam[rna_bam_key]['rna_fix_pg_sorted.bam'],
            'tumor_rnai': rna_bam[rna_bam_key]['rna_fix_pg_sorted.bam.bai'],
            'tumor_dna': tumor_bam['tumor_dna_fix_pg_sorted.bam'],
            'tumor_dnai': tumor_bam['tumor_dna_fix_pg_sorted.bam.bai'],
            'normal_dna': normal_bam['normal_dna_fix_pg_sorted.bam'],
            'normal_dnai': normal_bam['normal_dna_fix_pg_sorted.bam.bai']}
    # Make a dict object to hold the return values for each of the chromosome
    # jobs. Then run radia on each chromosome.
    # BUGFIX: `range(1, 23) + ['X', 'Y']` raises TypeError on Python 3 because
    # range() no longer returns a list; materialize it first (works on 2 and 3).
    chromosomes = [''.join(['chr', str(x)]) for x in list(range(1, 23)) + ['X', 'Y']]
    perchrom_radia = defaultdict()
    for chrom in chromosomes:
        perchrom_radia[chrom] = job.addChildJobFn(run_radia, bams, univ_options,
                                                  radia_options, chrom,
                                                  disk='60G').rv()
    return perchrom_radia
java
public static Inet6Address getByName(CharSequence ip, boolean ipv4Mapped) { byte[] bytes = getIPv6ByName(ip, ipv4Mapped); if (bytes == null) { return null; } try { return Inet6Address.getByAddress(null, bytes, -1); } catch (UnknownHostException e) { throw new RuntimeException(e); // Should never happen } }
java
/**
 * Entry rule for AssignableTerminal (generated ANTLR parser code): parses
 * one ruleAssignableTerminal followed by EOF, firing the before/after
 * grammar-access callbacks around the rule invocation. Recognition errors
 * are reported and recovered from rather than propagated.
 */
public final void entryRuleAssignableTerminal() throws RecognitionException {
    try {
        // InternalXtext.g:921:1: ( ruleAssignableTerminal EOF )
        // InternalXtext.g:922:1: ruleAssignableTerminal EOF
        {
            before(grammarAccess.getAssignableTerminalRule());
            pushFollow(FollowSets000.FOLLOW_1);
            ruleAssignableTerminal();
            state._fsp--; // pop the follow-set stack frame pushed above
            after(grammarAccess.getAssignableTerminalRule());
            // The rule must consume the whole input: expect EOF next.
            match(input,EOF,FollowSets000.FOLLOW_2);
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
    }
    return ;
}
java
private boolean startPartition(PartitionPlanConfig config) { if (isStoppingStoppedOrFailed()) { return false; } if (isMultiJvm) { startPartitionRemote(config); } else { startPartitionLocal(config); } // TODO: should this message be issued from the partition? (won't go to top-level log if so?) // should we have a separate message issued from here, "partition.submitted" ? JoblogUtil.logToJobLogAndTraceOnly(Level.FINER, "partition.started", new Object[] { config.getPartitionNumber(), getStepName(), config.getTopLevelInstanceId(), config.getTopLevelExecutionId() }, logger); return true; }
java
/**
 * Opens (or creates) a zip/jar filesystem backed by the given file.
 *
 * @param file the archive file to open as a filesystem
 * @return the newly created {@link FileSystem}
 * @throws IOException if the filesystem cannot be created
 */
private FileSystem newZipFileSystem(final File file) throws IOException {
    // "create=true" asks the zip provider to create the archive if it does
    // not exist yet.
    final Map<String, String> env = new HashMap<>();
    env.put("create", "true");
    final URI zipUri = URI.create("jar:" + file.toURI());
    return newFileSystem(zipUri, env);
}
java
/**
 * Scatters column k of the current block into the dense work vector X,
 * splitting entries between the diagonal block and the off-diagonal part
 * (sparse LU support routine).
 *
 * Column Q[k+k1] of A is read; each entry whose permuted row index falls
 * inside the block (PSinv[row] - k1 >= 0) is scattered into X, while the
 * rest are appended to the off-diagonal column (Offi/Offx), with
 * Offp[kglobal+1] finalized to mark the start of the next column. When
 * scale > 0 each entry is divided by its row scale factor Rs[row].
 *
 * @param k      column within the block
 * @param Ap     column pointers of A
 * @param Ai     row indices of A
 * @param Ax     numerical values of A
 * @param Q      column permutation (block column -> column of A)
 * @param X      dense scatter target for in-block entries
 * @param k1     first global row/column of this block
 * @param PSinv  inverse row permutation (row of A -> permuted row)
 * @param Rs     row scale factors (used only when scale > 0)
 * @param scale  scaling mode; <= 0 means no scaling
 * @param Offp   off-diagonal column pointers (Offp[kglobal+1] is written)
 * @param Offi   off-diagonal row indices (appended)
 * @param Offx   off-diagonal values (appended)
 */
public static void construct_column(int k, int[] Ap, int[] Ai, double[] Ax,
    int[] Q, double[] X, int k1, int[] PSinv, double[] Rs, int scale,
    int[] Offp, int[] Offi, double[] Offx)
{
    double aik ;
    int i, p, pend, oldcol, kglobal, poff, oldrow ;

    /* ---------------------------------------------------------------------- */
    /* Scale and scatter the column into X. */
    /* ---------------------------------------------------------------------- */

    kglobal = k + k1 ;           /* column k of the block is col kglobal of A */
    poff = Offp [kglobal] ;      /* start of off-diagonal column */
    oldcol = Q [kglobal] ;
    pend = Ap [oldcol+1] ;

    if (scale <= 0)
    {
        /* no scaling */
        for (p = Ap [oldcol] ; p < pend ; p++)
        {
            oldrow = Ai [p] ;
            i = PSinv [oldrow] - k1 ;
            aik = Ax [p] ;
            if (i < 0)
            {
                /* this is an entry in the off-diagonal part */
                Offi [poff] = oldrow ;
                Offx [poff] = aik ;
                poff++ ;
            }
            else
            {
                /* (i,k) is an entry in the block. scatter into X */
                X [i] = aik ;
            }
        }
    }
    else
    {
        /* row scaling */
        for (p = Ap [oldcol] ; p < pend ; p++)
        {
            oldrow = Ai [p] ;
            i = PSinv [oldrow] - k1 ;
            aik = Ax [p] ;
            /* divide by the row scale factor before scattering */
            aik = SCALE_DIV (aik, Rs [oldrow]) ;
            if (i < 0)
            {
                /* this is an entry in the off-diagonal part */
                Offi [poff] = oldrow ;
                Offx [poff] = aik ;
                poff++ ;
            }
            else
            {
                /* (i,k) is an entry in the block. scatter into X */
                X [i] = aik ;
            }
        }
    }

    Offp [kglobal+1] = poff ;    /* start of the next col of off-diag part */
}
java
/**
 * Returns a function that converts a {@link Date} (or any subclass) into a
 * Joda-Time {@code MutableDateTime}, interpreting the instant with the
 * supplied {@link Chronology}.
 *
 * @param <T>        the concrete {@code Date} subtype accepted
 * @param chronology the chronology used for the conversion
 * @return a conversion function backed by {@code DateToMutableDateTime}
 */
public static final <T extends Date> Function<T, MutableDateTime> dateToMutableDateTime(Chronology chronology) {
    return new DateToMutableDateTime<T>(chronology);
}
java
/**
 * Creates a new {@code RedisClient} using the supplied shared client
 * resources and the given connection URI.
 *
 * @param clientResources shared client resources; must not be null
 * @param redisURI        target Redis URI; must not be null
 * @return a new {@code RedisClient}
 */
public static RedisClient create(ClientResources clientResources, RedisURI redisURI) {
    assertNotNull(clientResources);
    assertNotNull(redisURI);
    final RedisClient client = new RedisClient(clientResources, redisURI);
    return client;
}
python
def register(self, src, trg, trg_mask=None, src_mask=None):
    """ Pair-wise registration using the Enhanced Correlation Coefficient.

    Estimates an Euclidean transformation (x,y translation + rotation) from
    the intensities of the two images. The similarity metric is a modified
    cross-correlation that is invariant to contrast and brightness changes.

    :param src: 2D single channel source moving image
    :param trg: 2D single channel target reference image
    :param trg_mask: Mask of target image. Not used in this method.
    :param src_mask: Mask of source image. Not used in this method.
    :return: Estimated 2D transformation matrix of shape 2x3
    """
    # Euclidean motion model: translation + rotation only.
    motion_model = cv2.MOTION_EUCLIDEAN
    # Stop once the correlation coefficient improves by less than this
    # between two iterations.
    eps = 1e-10
    term_criteria = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
                     self.params['MaxIters'], eps)
    # Identity warp as the starting estimate.
    transform = np.eye(2, 3, dtype=np.float32)
    # ECC refines the estimate in place and returns it as the second value.
    _, transform = cv2.findTransformECC(src.astype(np.float32),
                                        trg.astype(np.float32),
                                        transform, motion_model,
                                        term_criteria)
    return transform
java
/**
 * Looks up or registers a {@link DistributionSummary} for the given id.
 * The per-meter distribution config is merged with the registry's default
 * histogram config before the summary is built; {@code
 * NoopDistributionSummary} is supplied as the fallback factory.
 *
 * @param id                          meter identifier
 * @param distributionStatisticConfig per-meter distribution configuration
 * @param scale                       scale applied to recorded amounts
 * @return the registered (or existing) distribution summary
 */
DistributionSummary summary(Meter.Id id, DistributionStatisticConfig distributionStatisticConfig, double scale) {
    return registerMeterIfNecessary(DistributionSummary.class, id, distributionStatisticConfig,
            (id2, filteredConfig) -> newDistributionSummary(id2,
                    filteredConfig.merge(defaultHistogramConfig()), scale),
            NoopDistributionSummary::new);
}
python
def _parse_textgroup(self, cts_file): """ Parses a textgroup from a cts file :param cts_file: Path to the CTS File :type cts_file: str :return: CtsTextgroupMetadata and Current file """ with io.open(cts_file) as __xml__: return self.classes["textgroup"].parse( resource=__xml__ ), cts_file
java
/**
 * Builds the WeChat Pay app-payment parameter object for the given prepay
 * id, signing the parameter set with the merchant key.
 *
 * @param prepay_id prepay id returned by the unified order API
 * @param appId     WeChat application id
 * @param partnerid merchant (partner) id
 * @param key       merchant API key used to sign the parameters
 * @return populated and signed {@code MchPayApp}
 */
public static MchPayApp generateMchAppData(String prepay_id, String appId, String partnerid, String key) {
    // Assemble the parameter set; insertion order is preserved for signing.
    final Map<String, String> signParams = new LinkedHashMap<String, String>();
    signParams.put("appid", appId);
    signParams.put("partnerid", partnerid);
    signParams.put("prepayid", prepay_id);
    signParams.put("package", "Sign=WXPay");
    signParams.put("noncestr", UUID.randomUUID().toString().replace("-", ""));
    signParams.put("timestamp", String.valueOf(System.currentTimeMillis() / 1000));
    final String sign = SignatureUtil.generateSign(signParams, key);

    final MchPayApp payApp = new MchPayApp();
    payApp.setAppid(appId);
    payApp.setPartnerid(partnerid);
    payApp.setPrepayid(prepay_id);
    payApp.setPackage_(signParams.get("package"));
    payApp.setNoncestr(signParams.get("noncestr"));
    payApp.setTimestamp(signParams.get("timestamp"));
    payApp.setSign(sign);
    return payApp;
}
java
/**
 * Generated ANTLR rule: key_section. Parses one-or-more key_sentence
 * occurrences and rewrites them into a single VT_ENTRY_KEY tree node whose
 * children are the parsed sentences. Recognition errors are reported,
 * recovered from, and turned into an error node on the return scope.
 */
public final DSLMapParser.key_section_return key_section() throws RecognitionException {
    DSLMapParser.key_section_return retval = new DSLMapParser.key_section_return();
    retval.start = input.LT(1);

    Object root_0 = null;

    ParserRuleReturnScope ks =null;

    RewriteRuleSubtreeStream stream_key_sentence=new RewriteRuleSubtreeStream(adaptor,"rule key_sentence");

    try {
        // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:150:5: ( (ks= key_sentence )+ -> ^( VT_ENTRY_KEY ( key_sentence )+ ) )
        // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:150:7: (ks= key_sentence )+
        {
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:150:9: (ks= key_sentence )+
            int cnt8=0;       // number of key_sentence matches so far
            loop8:
            while (true) {
                int alt8=2;
                int LA8_0 = input.LA(1);
                // Lookahead: these token types can start a key_sentence.
                if ( (LA8_0==COLON||(LA8_0 >= LEFT_CURLY && LA8_0 <= LITERAL)||LA8_0==RIGHT_SQUARE) ) {
                    alt8=1;
                }

                switch (alt8) {
                case 1 :
                    // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:150:9: ks= key_sentence
                    {
                    pushFollow(FOLLOW_key_sentence_in_key_section568);
                    ks=key_sentence();
                    state._fsp--;
                    if (state.failed) return retval;
                    if ( state.backtracking==0 ) stream_key_sentence.add(ks.getTree());
                    }
                    break;

                default :
                    // (+) loop requires at least one match; otherwise it is
                    // an early-exit parse error (unless we are backtracking).
                    if ( cnt8 >= 1 ) break loop8;
                    if (state.backtracking>0) {state.failed=true; return retval;}
                    EarlyExitException eee = new EarlyExitException(8, input);
                    throw eee;
                }
                cnt8++;
            }

            // AST REWRITE
            // elements: key_sentence
            // token labels:
            // rule labels: retval
            // token list labels:
            // rule list labels:
            // wildcard labels:
            if ( state.backtracking==0 ) {
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.getTree():null);

                root_0 = (Object)adaptor.nil();
                // 151:5: -> ^( VT_ENTRY_KEY ( key_sentence )+ )
                {
                    // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:151:8: ^( VT_ENTRY_KEY ( key_sentence )+ )
                    {
                    Object root_1 = (Object)adaptor.nil();
                    root_1 = (Object)adaptor.becomeRoot((Object)adaptor.create(VT_ENTRY_KEY, "VT_ENTRY_KEY"), root_1);
                    // The rewrite must have collected at least one sentence.
                    if ( !(stream_key_sentence.hasNext()) ) {
                        throw new RewriteEarlyExitException();
                    }
                    while ( stream_key_sentence.hasNext() ) {
                        adaptor.addChild(root_1, stream_key_sentence.nextTree());
                    }
                    stream_key_sentence.reset();

                    adaptor.addChild(root_0, root_1);
                    }
                }

                retval.tree = root_0;
            }
        }

        retval.stop = input.LT(-1);

        if ( state.backtracking==0 ) {
            retval.tree = (Object)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
        retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re);
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
python
def _init_pdf(style_path, header=None, footer=FOOTER):
    """
    Build a configured :class:`RstToPdf` renderer.

    Args:
        style_path (str): Path to the style for the PDF.
        header (str, default None): Header which will be rendered to each
            page.
        footer (str, default FOOTER): Footer, which will be rendered to each
            page. See :attr:`FOOTER` for details.

    Returns:
        obj: Initialized object.
    """
    # Directories searched for fonts referenced by the stylesheet.
    font_dirs = [
        "/usr/share/fonts",
        "/usr/share/fonts/truetype/",
        '.',
        '/usr/local/lib/python2.7/dist-packages/rst2pdf/fonts',
    ]
    return RstToPdf(
        language="cs",
        font_path=font_dirs,
        stylesheets=[style_path],
        breaklevel=0,
        splittables=True,
        header=header,
        footer=footer,
    )
java
/**
 * Walks up the parent chain and returns the top-most ancestor view (the
 * view itself when it has no parent).
 *
 * @return the root of this view's parent hierarchy
 */
public View<?, ?> getRootParentView() {
    View<?, ?> root = this;
    // Climb until we reach a view without a parent.
    while (root.hasParent()) {
        root = root.getParent();
    }
    return root;
}
java
@Override public int read() throws IOException { if (pos >= size) { // try to get more ... readMore(); if (pos >= size) return -1; // Still nothing } return buffer[pos++] & 0xFF; }
java
/**
 * Creates a status-update node with the given identifier and registers it
 * in the status-update index.
 *
 * @param statusUpdateId identifier for the new status update
 * @return the newly created node
 * @throws IllegalArgumentException if a node with this identifier already
 *         exists in the index
 */
public static Node createStatusUpdateNode(final String statusUpdateId) {
    // Enforce identifier uniqueness via the index before creating anything.
    if (INDEX_STATUS_UPDATES.get(IDENTIFIER, statusUpdateId).getSingle() != null) {
        throw new IllegalArgumentException(
                "status update node with identifier \"" + statusUpdateId
                        + "\" already existing!");
    }
    final Node statusUpdate = DATABASE.createNode();
    statusUpdate.setProperty(Properties.StatusUpdate.IDENTIFIER, statusUpdateId);
    INDEX_STATUS_UPDATES.add(statusUpdate, IDENTIFIER, statusUpdateId);
    return statusUpdate;
}
python
def from_sys_requirements(cls, system_requirements, _type='all'):
    """
    Build a SystemRequirementsDict from a systemRequirements mapping.

    Depending on ``_type``, either every entrypoint is kept unchanged
    ('all') or only the requested field ('clusterSpec' or 'instanceType')
    is extracted from each entrypoint that defines it.
    """
    if _type not in ('all', 'clusterSpec', 'instanceType'):
        raise DXError("Expected '_type' to be either 'all', 'clusterSpec', or 'instanceType'")

    if _type == 'all':
        return cls(system_requirements)

    # Keep only entrypoints that define the requested field.
    filtered = defaultdict(dict)
    for entrypoint, requirements in system_requirements.items():
        if _type in requirements:
            filtered[entrypoint][_type] = requirements[_type]
    return cls(dict(filtered))
python
def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True, extra_headers=None):
    """Helper method to fetch remote document.

    Must be given either the ``url`` or ``host``.
    If ``url`` is given, only that will be tried without falling back to
    http from https. If ``host`` given, `path` will be added to it. Will
    fall back to http on non-success status code.

    :arg url: Full url to fetch, including protocol
    :arg host: Domain part only without path or protocol
    :arg path: Path without domain (defaults to "/")
    :arg timeout: Seconds to wait for response (defaults to 10)
    :arg raise_ssl_errors: Pass False if you want to try HTTP even for
        sites with SSL errors (default True)
    :arg extra_headers: Optional dict of additional request headers, merged
        over the default user-agent header
    :returns: Tuple of document (str or None), status code (int or None)
        and error (an exception class instance or None)
    :raises ValueError: If neither url nor host are given as parameters
    """
    if not url and not host:
        raise ValueError("Need url or host.")

    logger.debug("fetch_document: url=%s, host=%s, path=%s, timeout=%s, raise_ssl_errors=%s",
                 url, host, path, timeout, raise_ssl_errors)
    headers = {'user-agent': USER_AGENT}
    if extra_headers:
        headers.update(extra_headers)
    if url:
        # Use url since it was given
        logger.debug("fetch_document: trying %s", url)
        try:
            response = requests.get(url, timeout=timeout, headers=headers)
            logger.debug("fetch_document: found document, code %s", response.status_code)
            # Explicit url: no fallback, return whatever we got.
            return response.text, response.status_code, None
        except RequestException as ex:
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
    # Build url with some little sanitizing
    host_string = host.replace("http://", "").replace("https://", "").strip("/")
    path_string = path if path.startswith("/") else "/%s" % path
    url = "https://%s%s" % (host_string, path_string)
    logger.debug("fetch_document: trying %s", url)
    try:
        response = requests.get(url, timeout=timeout, headers=headers)
        logger.debug("fetch_document: found document, code %s", response.status_code)
        # Non-2xx raises here so we can fall back to plain http below.
        response.raise_for_status()
        return response.text, response.status_code, None
    except (HTTPError, SSLError, ConnectionError) as ex:
        # SSL failures are only retried over http when the caller allows it.
        if isinstance(ex, SSLError) and raise_ssl_errors:
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
        # Try http then
        url = url.replace("https://", "http://")
        logger.debug("fetch_document: trying %s", url)
        try:
            response = requests.get(url, timeout=timeout, headers=headers)
            logger.debug("fetch_document: found document, code %s", response.status_code)
            response.raise_for_status()
            return response.text, response.status_code, None
        except RequestException as ex:
            logger.debug("fetch_document: exception %s", ex)
            return None, None, ex
    except RequestException as ex:
        # Any other request failure on the https attempt: report it as-is.
        logger.debug("fetch_document: exception %s", ex)
        return None, None, ex
java
/**
 * Iteratively normalized-lerps between this quaternion and {@code q} by
 * {@code alpha}: the interpolation interval is repeatedly bisected toward
 * the half containing {@code alpha} until the endpoints' |dot| exceeds
 * {@code dotThreshold}, after which a single nlerp step is taken and the
 * normalized result is stored in {@code dest}.
 *
 * @param q            the quaternion to interpolate toward
 * @param alpha        interpolation factor
 * @param dotThreshold minimum |dot| at which one nlerp step is considered
 *                     accurate enough
 * @param dest         destination quaternion receiving the result
 * @return dest
 */
public Quaterniond nlerpIterative(Quaterniondc q, double alpha, double dotThreshold, Quaterniond dest) {
    double q1x = x, q1y = y, q1z = z, q1w = w;
    double q2x = q.x(), q2y = q.y(), q2z = q.z(), q2w = q.w();
    double dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
    double absDot = Math.abs(dot);
    // Endpoints (anti)parallel: nothing meaningful to interpolate.
    if (1.0 - 1E-6 < absDot) {
        return dest.set(this);
    }
    double alphaN = alpha;
    // Bisect toward the half of the arc containing alphaN until the two
    // working endpoints are close enough for a plain nlerp.
    while (absDot < dotThreshold) {
        double scale0 = 0.5;
        // Negative dot: negate one endpoint to interpolate the short arc.
        double scale1 = dot >= 0.0 ? 0.5 : -0.5;
        if (alphaN < 0.5) {
            // Replace the far endpoint by the normalized midpoint.
            q2x = scale0 * q2x + scale1 * q1x;
            q2y = scale0 * q2y + scale1 * q1y;
            q2z = scale0 * q2z + scale1 * q1z;
            q2w = scale0 * q2w + scale1 * q1w;
            double s = 1.0 / Math.sqrt(q2x * q2x + q2y * q2y + q2z * q2z + q2w * q2w);
            q2x *= s;
            q2y *= s;
            q2z *= s;
            q2w *= s;
            alphaN = alphaN + alphaN;
        } else {
            // Replace the near endpoint by the normalized midpoint.
            q1x = scale0 * q1x + scale1 * q2x;
            q1y = scale0 * q1y + scale1 * q2y;
            q1z = scale0 * q1z + scale1 * q2z;
            q1w = scale0 * q1w + scale1 * q2w;
            double s = 1.0 / Math.sqrt(q1x * q1x + q1y * q1y + q1z * q1z + q1w * q1w);
            q1x *= s;
            q1y *= s;
            q1z *= s;
            q1w *= s;
            alphaN = alphaN + alphaN - 1.0;
        }
        dot = q1x * q2x + q1y * q2y + q1z * q2z + q1w * q2w;
        absDot = Math.abs(dot);
    }
    double scale0 = 1.0 - alphaN;
    double scale1 = dot >= 0.0 ? alphaN : -alphaN;
    double resX = scale0 * q1x + scale1 * q2x;
    double resY = scale0 * q1y + scale1 * q2y;
    double resZ = scale0 * q1z + scale1 * q2z;
    double resW = scale0 * q1w + scale1 * q2w;
    double s = 1.0 / Math.sqrt(resX * resX + resY * resY + resZ * resZ + resW * resW);
    // BUGFIX: the previous code scaled dest's pre-existing components
    // ("dest.x *= s") and discarded the interpolated result; store the
    // normalized interpolation explicitly instead.
    dest.x = resX * s;
    dest.y = resY * s;
    dest.z = resZ * s;
    dest.w = resW * s;
    return dest;
}
java
/**
 * Replaces the configured search locations: a fresh path-matching resolver
 * is created and each location is initialized.
 *
 * @param searchLocations the new set of search location patterns
 */
public void setSearchLocations(Collection<String> searchLocations) {
    // Fresh resolver so state from previously configured locations is dropped.
    patchMatchingResolver = new PathMatchingResourcePatternResolver(getDefaultResourceLoader());
    for (String location : searchLocations) {
        initializeForSearchLocation(location);
    }
}
java
/**
 * Reports a fatal parse error by throwing a {@link JspCoreException} built
 * from the given message id and single argument.
 *
 * @param msgId message key identifying the error
 * @param arg   single argument substituted into the message
 * @throws JspCoreException always
 */
private void reportFatalError(String msgId, String arg) throws JspCoreException {
    throw new JspCoreException(msgId, new Object[] { arg });
    //err.jspError(msgId, arg);
}
java
/**
 * Gets the tier value for this object.
 *
 * @return the {@code IncomeTier} currently held, or {@code null} if unset
 */
public com.google.api.ads.adwords.axis.v201809.cm.IncomeTier getTier() {
    return tier;
}
java
/**
 * Adapts a {@link Runnable} to a {@code Supplier<Void>}: invoking the
 * supplier runs the runnable and yields {@code null}.
 *
 * @param task the runnable to adapt; must not be null
 * @return a supplier that runs {@code task} and returns {@code null}
 */
public static Supplier<Void> supplier(Runnable task) {
    dbc.precondition(task != null, "cannot adapt a null runnable");
    return () -> {
        task.run();
        return null;
    };
}
python
def list_opts():
    """Returns a list of oslo_config options available in the library.

    The returned list includes all oslo_config options which may be
    registered at runtime by the library.

    Each element of the list is a tuple. The first element is the name of
    the group under which the list of elements in the second element will
    be registered. A group name of None corresponds to the [DEFAULT] group
    in config files.

    The purpose of this is to allow tools like the Oslo sample config file
    generator to discover the options exposed to users by this library.

    :returns: a list of (group_name, opts) tuples
    """
    # Imported here to avoid a circular import at module load time.
    from tvrenamer.common import tools

    option_groups = []
    # DEFAULT group options first, then the 'cache' group.
    option_groups.extend(
        tools.make_opt_list([CLI_OPTS, EPISODE_OPTS, FORMAT_OPTS], None))
    option_groups.extend(tools.make_opt_list([CACHE_OPTS], 'cache'))
    return option_groups
java
/**
 * Registers the destination block for a local block copy, preferring a
 * hard link to the source file when a suitable volume can be found.
 *
 * Under the write lock: verifies the destination block does not already
 * exist (neither finalized nor being created), validates the source file,
 * picks a destination volume (hard-link candidate first, otherwise the
 * next round-robin volume), allocates a temporary destination file and
 * records it in the volume map as an ongoing create.
 *
 * @param srcFileSystem  filesystem identifier of the source block's volume
 * @param srcBlockFile   on-disk file backing the source block
 * @param srcNamespaceId namespace of the source block
 * @param srcBlock       source block
 * @param dstNamespaceId namespace of the destination block
 * @param dstBlock       destination block to create
 * @return {@code true} if a hard link will be used, {@code false} if the
 *         copy falls back to a regular file copy
 * @throws IOException if the destination already exists, the source file
 *         is missing, or the temporary file cannot be allocated
 */
private boolean copyBlockLocalAdd(String srcFileSystem, File srcBlockFile,
    int srcNamespaceId, Block srcBlock, int dstNamespaceId, Block dstBlock)
    throws IOException {
  boolean hardlink = true;
  File dstBlockFile = null;
  lock.writeLock().lock();
  try {
    // Refuse to overwrite an existing or in-flight destination block.
    if (isValidBlock(dstNamespaceId, dstBlock, false) ||
        volumeMap.getOngoingCreates(dstNamespaceId, dstBlock) != null) {
      throw new BlockAlreadyExistsException("Block " + dstBlock
          + " already exists");
    }

    if (srcBlockFile == null || !srcBlockFile.exists()) {
      throw new IOException("Block " + srcBlock.getBlockName()
          + " is not valid or does not have a valid block file");
    }

    // The checksum layout is encoded in the source file's name.
    boolean inlineChecksum = Block.isInlineChecksumBlockFilename(srcBlockFile
        .getName());

    FSVolume dstVol = null;
    if (shouldHardLinkBlockCopy) {
      dstVol = findVolumeForHardLink(
          srcFileSystem, srcNamespaceId, srcBlock, srcBlockFile);
    }

    // Could not find a volume for a hard link, fall back to regular file
    // copy.
    if (dstVol == null) {
      dstVol = volumes.getNextVolume(srcBlock.getNumBytes());
      hardlink = false;
    }

    int checksumType = DataChecksum.CHECKSUM_UNKNOWN;
    int bytesPerChecksum = -1;
    if (inlineChecksum) {
      // Recover checksum parameters from the inline-checksum file name.
      GenStampAndChecksum sac = BlockInlineChecksumReader
          .getGenStampAndChecksumFromInlineChecksumFile(srcBlockFile
              .getName());
      checksumType = sac.checksumType;
      bytesPerChecksum = sac.bytesPerChecksum;
    }

    List<Thread> threads = null;
    // We do not want to create a BBW, hence treat this as a replication
    // request.
    dstBlockFile = createTmpFile(dstNamespaceId, dstVol, dstBlock, true,
        inlineChecksum, checksumType, bytesPerChecksum);
    DatanodeBlockInfo binfo = new DatanodeBlockInfo(dstVol, dstBlockFile,
        DatanodeBlockInfo.UNFINALIZED, true, inlineChecksum, checksumType,
        bytesPerChecksum, false, 0);
    volumeMap.add(dstNamespaceId, dstBlock, binfo);
    volumeMap.addOngoingCreates(dstNamespaceId, dstBlock, new ActiveFile(
        binfo, threads, ActiveFile.UNKNOWN_SIZE, false));
  } finally {
    lock.writeLock().unlock();
  }

  if (dstBlockFile == null) {
    throw new IOException("Could not allocate block file for : " +
        dstBlock.getBlockName());
  }
  return hardlink;
}
java
/**
 * Generated SDK operation dispatch for ModifyEndpoint: applies the
 * pre-execution request handler chain, then executes the call.
 *
 * @param request the ModifyEndpoint request
 * @return the service response
 */
@Override
public ModifyEndpointResult modifyEndpoint(ModifyEndpointRequest request) {
    request = beforeClientExecution(request);
    return executeModifyEndpoint(request);
}
java
/**
 * Returns the repository info for the given repository, wrapped so the
 * reported id matches the requested {@code repositoryId}.
 *
 * @param repositoryId id of the repository to describe
 * @param extension    CMIS extension data (unused here)
 * @return localized repository info
 */
@Override
public RepositoryInfo getRepositoryInfo(String repositoryId, ExtensionsData extension) {
    LOGGER.debug("-- getting repository info");
    // Fetch the underlying JCR repository's info using a fresh login.
    final RepositoryInfo baseInfo =
            jcrRepository(repositoryId).getRepositoryInfo(login(repositoryId));
    return new RepositoryInfoLocal(repositoryId, baseInfo);
}
python
def initialize(self):
    """ A reimplemented initializer.

    This method will add the include objects to the parent of the
    include and ensure that they are initialized.

    """
    super(Block, self).initialize()
    # If a target block was supplied, move this block's children into the
    # target block's parent, positioned relative to the target block.
    if self.block:
        self.block.parent.insert_children(self.block, self.children)
java
/**
 * Marks a currently-running simulated task attempt as finished, after
 * validating that {@code finalStatus} is a plausible completion of the
 * tracked attempt, then updates the attempt's status and releases its
 * map/reduce slots.
 *
 * @param finalStatus the completed status reported for the attempt; its
 *        run state must be SUCCEEDED, FAILED or KILLED and its immutable
 *        fields must match the tracked status
 * @param now current simulation time; must equal the status' finish time
 * @throws IllegalArgumentException if the attempt is unknown, not running,
 *         or {@code finalStatus} is inconsistent with the tracked state
 * @throws IllegalStateException if releasing slots would drive the slot
 *         count negative
 */
private void finishRunningTask(TaskStatus finalStatus, long now) {
    TaskAttemptID taskId = finalStatus.getTaskID();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Finishing running task id=" + taskId + ", now=" + now);
    }

    SimulatorTaskInProgress tip = tasks.get(taskId);
    if (tip == null) {
        throw new IllegalArgumentException("Unknown task attempt " + taskId
            + " completed");
    }
    TaskStatus currentStatus = tip.getTaskStatus();
    if (currentStatus.getRunState() != State.RUNNING) {
        throw new IllegalArgumentException(
            "Task attempt to finish is not running: " + tip);
    }

    // Check that finalStatus describes a task attempt that has just been
    // completed
    State finalRunState = finalStatus.getRunState();
    if (finalRunState != State.SUCCEEDED && finalRunState != State.FAILED
        && finalRunState != State.KILLED) {
        throw new IllegalArgumentException(
            "Final run state for completed task can't be : " + finalRunState
                + " " + tip);
    }

    if (now != finalStatus.getFinishTime()) {
        throw new IllegalArgumentException(
            "Current time does not match task finish time: now=" + now
                + ", finish=" + finalStatus.getFinishTime());
    }

    // Immutable attempt attributes must not have changed under us.
    if (currentStatus.getIsMap() != finalStatus.getIsMap()
        || currentStatus.getNumSlots() != finalStatus.getNumSlots()
        || currentStatus.getPhase() != finalStatus.getPhase()
        || currentStatus.getStartTime() != finalStatus.getStartTime()) {
        throw new IllegalArgumentException(
            "Current status does not match final status");
    }

    // We can't assert getShuffleFinishTime() and getSortFinishTime() for
    // reduces as those were unknown when the task attempt completion event
    // was created. We have not called setMapFinishTime() for maps either.
    // If we were really thorough we could update the progress of the task
    // and check if it is consistent with finalStatus.

    // If we've got this far it is safe to update the task status
    currentStatus.setRunState(finalStatus.getRunState());
    currentStatus.setFinishTime(finalStatus.getFinishTime());
    currentStatus.setProgress(finalStatus.getProgress());

    // and update the free slots
    int numSlots = currentStatus.getNumSlots();
    if (tip.isMapTask()) {
        usedMapSlots -= numSlots;
        if (usedMapSlots < 0) {
            throw new IllegalStateException(
                "TaskTracker reaches negative map slots: " + usedMapSlots);
        }
    } else {
        usedReduceSlots -= numSlots;
        if (usedReduceSlots < 0) {
            throw new IllegalStateException(
                "TaskTracker reaches negative reduce slots: " + usedReduceSlots);
        }
    }
}
python
def main(args=None):
    """ The main routine.

    Configures logging, wires the handlers, and starts the Flask app on
    the configured port.

    :param args: currently unused; accepted for entry-point compatibility
    """
    cfg.configureLogger()
    wireHandlers(cfg)
    # get config from a flask standard place not our config yml
    # NOTE(review): binds on all interfaces (0.0.0.0) — confirm intended
    # for this deployment.
    app.run(debug=cfg.runInDebug(), host='0.0.0.0', port=cfg.getPort())
java
/**
 * Builds the javascript {@code jsf.ajax.request(...)} call for the given
 * AJAX behavior: source id, event name, and an options object assembled
 * from execute/render lists, handlers and (JSF 2.2) delay/resetValues.
 *
 * NOTE: {@code paramBuffer} is a shared, repeatedly-reset scratch buffer;
 * every consumer must call {@code setLength(0)} before reuse, so statement
 * order here is significant.
 *
 * @param context  the behavior context (source component, event, params)
 * @param behavior the AJAX behavior being rendered
 * @return the script text as a StringBuilder
 */
private final StringBuilder makeAjax(ClientBehaviorContext context, AjaxBehavior behavior) {
    StringBuilder retVal = SharedStringBuilder.get(context.getFacesContext(), AJAX_SB, 60);
    StringBuilder paramBuffer = SharedStringBuilder.get(context.getFacesContext(), AJAX_PARAM_SB, 20);

    String executes = mapToString(context, paramBuffer, AJAX_KEY_EXECUTE, behavior.getExecute());
    String render = mapToString(context, paramBuffer, AJAX_KEY_RENDER, behavior.getRender());

    // Each optional handler becomes a "key:value" fragment, or null if unset.
    String onError = behavior.getOnerror();
    if (onError != null && !onError.trim().equals(EMPTY)) {
        //onError = AJAX_KEY_ONERROR + COLON + onError;
        paramBuffer.setLength(0);
        paramBuffer.append(AJAX_KEY_ONERROR);
        paramBuffer.append(COLON);
        paramBuffer.append(onError);
        onError = paramBuffer.toString();
    } else {
        onError = null;
    }

    String onEvent = behavior.getOnevent();
    if (onEvent != null && !onEvent.trim().equals(EMPTY)) {
        paramBuffer.setLength(0);
        paramBuffer.append(AJAX_KEY_ONEVENT);
        paramBuffer.append(COLON);
        paramBuffer.append(onEvent);
        onEvent = paramBuffer.toString();
    } else {
        onEvent = null;
    }

    /*
     * since version 2.2
     */
    String delay = behavior.getDelay();
    if (delay != null && !delay.trim().equals(EMPTY)) {
        paramBuffer.setLength(0);
        paramBuffer.append(AJAX_KEY_DELAY);
        paramBuffer.append(COLON);
        //RTC 168751 / JIRA MYFACES-3974
        // The literal "none" must be quoted so the client script receives
        // it as a string rather than an identifier.
        if ("none".equals(delay)) {
            paramBuffer.append('\'');
            paramBuffer.append(delay);
            paramBuffer.append('\'');
        } else {
            paramBuffer.append(delay);
        }
        delay = paramBuffer.toString();
    } else {
        delay = null;
    }

    /*
     * since version 2.2
     */
    String resetValues = Boolean.toString(behavior.isResetValues());
    if (resetValues.equals("true")) {
        paramBuffer.setLength(0);
        paramBuffer.append(AJAX_KEY_RESETVALUES);
        paramBuffer.append(COLON);
        paramBuffer.append(resetValues);
        resetValues = paramBuffer.toString();
    } else {
        resetValues = null;
    }

    // Source is either the literal 'this' or the quoted client id.
    String sourceId = null;
    if (context.getSourceId() == null) {
        sourceId = AJAX_VAL_THIS;
    } else {
        paramBuffer.setLength(0);
        paramBuffer.append('\'');
        paramBuffer.append(context.getSourceId());
        paramBuffer.append('\'');
        sourceId = paramBuffer.toString();
    }

    String event = context.getEventName();

    retVal.append(JS_AJAX_REQUEST);
    retVal.append(L_PAREN);
    retVal.append(sourceId);
    retVal.append(COMMA);
    retVal.append(AJAX_VAL_EVENT);
    retVal.append(COMMA);

    Collection<ClientBehaviorContext.Parameter> params = context.getParameters();
    int paramSize = (params != null) ? params.size() : 0;

    List<String> parameterList = new ArrayList<String>(paramSize + 2);
    if (executes != null) {
        parameterList.add(executes.toString());
    }
    if (render != null) {
        parameterList.add(render.toString());
    }
    if (onError != null) {
        parameterList.add(onError);
    }
    if (onEvent != null) {
        parameterList.add(onEvent);
    }
    /*
     * since version 2.2
     */
    if (delay != null) {
        parameterList.add(delay);
    }
    /*
     * since version 2.2
     */
    if (resetValues != null) {
        parameterList.add(resetValues);
    }
    if (paramSize > 0) {
        /**
         * see ClientBehaviorContext.html of the spec
         * the param list has to be added in the post back
         */
        // params are in 99% RamdonAccess instace created in
        // HtmlRendererUtils.getClientBehaviorContextParameters(Map<String, String>)
        if (params instanceof RandomAccess) {
            List<ClientBehaviorContext.Parameter> list = (List<ClientBehaviorContext.Parameter>) params;
            for (int i = 0, size = list.size(); i < size; i++) {
                ClientBehaviorContext.Parameter param = list.get(i);
                append(paramBuffer, parameterList, param);
            }
        } else {
            for (ClientBehaviorContext.Parameter param : params) {
                append(paramBuffer, parameterList, param);
            }
        }
    }

    //parameterList.add(QUOTE + BEHAVIOR_EVENT + QUOTE + COLON + QUOTE + event + QUOTE);
    paramBuffer.setLength(0);
    paramBuffer.append(QUOTE);
    paramBuffer.append(BEHAVIOR_EVENT);
    paramBuffer.append(QUOTE);
    paramBuffer.append(COLON);
    paramBuffer.append(QUOTE);
    paramBuffer.append(event);
    paramBuffer.append(QUOTE);
    parameterList.add(paramBuffer.toString());

    /**
     * I assume here for now that the options are the same which also
     * can be sent via the options attribute to javax.faces.ajax
     * this still needs further clarifications but I assume so for now
     */
    retVal.append(buildOptions(paramBuffer, parameterList));

    retVal.append(R_PAREN);

    return retVal;
}
python
def put_key(self, source, rel_path):
    '''Copy a file to the repository.

    Args:
        source: Absolute path to the source file, or a file-like object
        rel_path: path relative to the root of the repository
    '''
    k = self._get_boto_key(rel_path)
    try:
        # Assume a file-like object first (EAFP).
        k.set_contents_from_file(source)
    except AttributeError:
        # `source` has no file interface; treat it as a filesystem path.
        # 4.8 GiB guard — presumably chosen to stay under S3's 5 GB
        # single-PUT limit; TODO confirm against the storage backend.
        if os.path.getsize(source) > 4.8 * 1024 * 1024 * 1024:
            # Need to do multi-part uploads here
            # NOTE(review): both branches currently do the same thing;
            # multi-part upload is not yet implemented.
            k.set_contents_from_filename(source)
        else:
            k.set_contents_from_filename(source)
python
def fit(self):
    """Fit the linear model: ordinary least-squares estimate of B,
    then reconstruct the predicted waveforms."""
    n_obs, n_params = np.shape(self._X)
    # Residual degrees of freedom.
    self._df = float(n_obs - n_params)
    # Pseudo-inverse of the normal matrix X^T X.
    gram_inv = np.linalg.pinv(np.dot(self._X.T, self._X))
    self._Cx = gram_inv
    # Bhat = (X^T X)^+ X^T A  — the OLS estimator.
    projector = np.dot(gram_inv, self._X.T)
    self._Bhat = np.dot(projector, self._A)
    self._Y_rec = self._compute_prediction(self._X)
python
def _FormatOpaqueToken(self, token_data): """Formats an opaque token as a dictionary of values. Args: token_data (bsm_token_data_opaque): AUT_OPAQUE token data. Returns: dict[str, str]: token values. """ data = ''.join(['{0:02x}'.format(byte) for byte in token_data.data]) return {'data': data}
python
def merge_systems(sysa, sysb, bounding=0.2):
    '''Generate a system by merging *sysa* and *sysb*.

    Overlapping molecules are removed by cutting the molecules of *sysa*
    that have atoms near the atoms of *sysb*. The cutoff distance is
    defined by the *bounding* parameter.

    **Parameters**

    sysa: System
       First system
    sysb: System
       Second system
    bounding: float or False
       Extra space used when cutting molecules in *sysa* to make space
       for *sysb*. If it is False, no overlap handling will be performed.

    '''
    if bounding is not False:
        # Delete overlaps.
        if sysa.box_vectors is not None:
            periodicity = sysa.box_vectors.diagonal()
        else:
            periodicity = False

        p = overlapping_points(sysb.r_array, sysa.r_array,
                               cutoff=bounding, periodic=periodicity)

        # BUG FIX: np.bool was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin bool is the exact equivalent dtype.
        sel = np.ones(len(sysa.r_array), dtype=bool)
        sel[p] = False

        # Rebuild sysa without the overlapping molecules
        sysa = subsystem_from_atoms(sysa, sel)

    sysres = System.empty(sysa.n_mol + sysb.n_mol, sysa.n_atoms + sysb.n_atoms)

    # Assign the attributes
    for attr in type(sysa).attributes:
        attr.assign(sysres, attr.concatenate(sysa, sysb))

    # edit the mol_indices and n_mol: sysb's indices are shifted past the
    # last atom of sysa.
    offset = sysa.mol_indices[-1] + sysa.mol_n_atoms[-1]
    sysres.mol_indices[0:sysa.n_mol] = sysa.mol_indices.copy()
    sysres.mol_indices[sysa.n_mol:] = sysb.mol_indices.copy() + offset
    sysres.mol_n_atoms = np.concatenate([sysa.mol_n_atoms, sysb.mol_n_atoms])

    sysres.box_vectors = sysa.box_vectors

    return sysres
python
def inline_link(self, text, url):
    """*Generate a MMD-style inline link.*

    **Key Arguments:**
        - ``text`` -- the text to link from
        - ``url`` -- the url to link to

    **Return:**
        - the linked text, preserving any leading/trailing whitespace of
          the original ``text``

    **Usage:**

        .. code-block:: python

            text = md.inline_link(" google search engine ", " http://www.google.com ")
            # -> " [google search engine](http://www.google.com) "
    """
    # Split off surrounding whitespace so it can be re-attached outside
    # the link markup.
    match = self.reWS.match(text)
    prefix = match.group(1)
    core = match.group(2)
    suffix = match.group(3)
    return "{0}[{1}]({2}){3}".format(prefix, core, url.strip(), suffix)
python
def layer_description_extractor(layer, node_to_id):
    '''Serialize a layer into a flat description.

    Maps the layer's input/output node objects to their ids via
    ``node_to_id`` and returns the layer's class name plus the
    hyper-parameters relevant to that layer type.

    Args:
        layer: a Stub* layer instance.
        node_to_id: dict mapping node objects to integer ids.

    Returns:
        A tuple (or, for StubDense, a list — NOTE(review): inconsistent
        with the other branches; kept as-is since consumers may rely on
        it) of (class_name, input_id(s), output_id, *type-specific params).
    '''
    layer_input = layer.input
    layer_output = layer.output
    if layer_input is not None:
        # A layer may have one input node or an iterable of them.
        if isinstance(layer_input, Iterable):
            layer_input = list(map(lambda x: node_to_id[x], layer_input))
        else:
            layer_input = node_to_id[layer_input]

    if layer_output is not None:
        layer_output = node_to_id[layer_output]

    if isinstance(layer, StubConv):
        return (
            type(layer).__name__,
            layer_input,
            layer_output,
            layer.input_channel,
            layer.filters,
            layer.kernel_size,
            layer.stride,
            layer.padding,
        )
    elif isinstance(layer, (StubDense,)):
        return [
            type(layer).__name__,
            layer_input,
            layer_output,
            layer.input_units,
            layer.units,
        ]
    elif isinstance(layer, (StubBatchNormalization,)):
        return (type(layer).__name__, layer_input, layer_output, layer.num_features)
    elif isinstance(layer, (StubDropout,)):
        return (type(layer).__name__, layer_input, layer_output, layer.rate)
    elif isinstance(layer, StubPooling):
        return (
            type(layer).__name__,
            layer_input,
            layer_output,
            layer.kernel_size,
            layer.stride,
            layer.padding,
        )
    else:
        # Fallback for layer types without extra hyper-parameters.
        return (type(layer).__name__, layer_input, layer_output)
java
// ANTLR-generated lexer rule: matches the keyword 'val' and emits it as
// token T__44 on the default channel. Do not edit by hand.
public final void mT__44() throws RecognitionException {
    try {
        int _type = T__44;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // InternalXbaseWithAnnotations.g:42:7: ( 'val' )
        // InternalXbaseWithAnnotations.g:42:9: 'val'
        {
        match("val");

        }

        state.type = _type;
        state.channel = _channel;
    }
    finally {
    }
}
python
def set_options(self, **options):
    """Initialise instance state from the given options dict.

    Only ``verbosity`` is read from *options*; every other attribute is
    reset to a fixed default.
    """
    self.interactive = False
    self.verbosity = options['verbosity']
    self.symlink = ""
    self.clear = False
    # No ignore patterns are configurable yet; start empty.
    self.ignore_patterns = []
    self.page_themes_updated = 0
    self.skins_updated = 0
python
def display(self):
    """Print a formatted overview of descriptive stats for the Series:
    summary, annualized returns, periodic stats, drawdowns and misc.

    Purely a console report — reads precomputed attributes, returns None.
    """
    print('Stats for %s from %s - %s' % (self.name, self.start, self.end))
    # rf may be a float rate or something else (e.g. a series); only a
    # scalar rate is displayed here.
    if type(self.rf) is float:
        print('Annual risk-free rate considered: %s' % (fmtp(self.rf)))
    print('Summary:')
    data = [[fmtp(self.total_return), fmtn(self.daily_sharpe),
             fmtp(self.cagr), fmtp(self.max_drawdown)]]
    print(tabulate(data, headers=['Total Return', 'Sharpe',
                                  'CAGR', 'Max Drawdown']))

    print('\nAnnualized Returns:')
    data = [[fmtp(self.mtd), fmtp(self.three_month), fmtp(self.six_month),
             fmtp(self.ytd), fmtp(self.one_year), fmtp(self.three_year),
             fmtp(self.five_year), fmtp(self.ten_year),
             fmtp(self.incep)]]
    print(tabulate(data,
                   headers=['mtd', '3m', '6m', 'ytd', '1y',
                            '3y', '5y', '10y', 'incep.']))

    print('\nPeriodic:')
    # One row per statistic, one column per sampling frequency.
    data = [
        ['sharpe', fmtn(self.daily_sharpe), fmtn(self.monthly_sharpe),
         fmtn(self.yearly_sharpe)],
        ['mean', fmtp(self.daily_mean), fmtp(self.monthly_mean),
         fmtp(self.yearly_mean)],
        ['vol', fmtp(self.daily_vol), fmtp(self.monthly_vol),
         fmtp(self.yearly_vol)],
        ['skew', fmtn(self.daily_skew), fmtn(self.monthly_skew),
         fmtn(self.yearly_skew)],
        ['kurt', fmtn(self.daily_kurt), fmtn(self.monthly_kurt),
         fmtn(self.yearly_kurt)],
        ['best', fmtp(self.best_day), fmtp(self.best_month),
         fmtp(self.best_year)],
        ['worst', fmtp(self.worst_day), fmtp(self.worst_month),
         fmtp(self.worst_year)]]
    print(tabulate(data, headers=['daily', 'monthly', 'yearly']))

    print('\nDrawdowns:')
    data = [
        [fmtp(self.max_drawdown), fmtp(self.avg_drawdown),
         fmtn(self.avg_drawdown_days)]]
    print(tabulate(data, headers=['max', 'avg', '# days']))

    print('\nMisc:')
    data = [['avg. up month', fmtp(self.avg_up_month)],
            ['avg. down month', fmtp(self.avg_down_month)],
            ['up year %', fmtp(self.win_year_perc)],
            ['12m up %', fmtp(self.twelve_month_win_perc)]]
    print(tabulate(data))
python
def build(args):
    """Build a target and its dependencies.

    Args:
        args: command-line arguments; exactly one target spec is expected.

    Exits the app with status 1 on usage errors or build failures.
    """
    if len(args) != 1:
        log.error('One target required.')
        app.quit(1)
    target = address.new(args[0])
    log.info('Resolved target to: %s', target)
    try:
        bb = Butcher()
        bb.clean()
        bb.load_graph(target)
        bb.build(target)
    except (gitrepo.GitError, error.BrokenGraph, error.NoSuchTargetError) as err:
        log.fatal(err)
        app.quit(1)
    except error.OverallBuildFailure as err:
        log.fatal(err)
        log.fatal('Error list:')
        # Plain loop: the previous list comprehension built a throwaway
        # list purely for its logging side effects.
        for e in bb.failure_log:
            log.fatal(' [%s]: %s', e.node, e)
        app.quit(1)
python
def _maketicks(self, ax, units='THz'): """Utility method to add tick marks to a band structure.""" # set y-ticks ax.yaxis.set_major_locator(MaxNLocator(6)) ax.yaxis.set_minor_locator(AutoMinorLocator(2)) ax.xaxis.set_minor_locator(AutoMinorLocator(2)) # set x-ticks; only plot the unique tick labels ticks = self.get_ticks() unique_d = [] unique_l = [] if ticks['distance']: temp_ticks = list(zip(ticks['distance'], ticks['label'])) unique_d.append(temp_ticks[0][0]) unique_l.append(temp_ticks[0][1]) for i in range(1, len(temp_ticks)): if unique_l[-1] != temp_ticks[i][1]: unique_d.append(temp_ticks[i][0]) unique_l.append(temp_ticks[i][1]) logging.info('\nLabel positions:') for dist, label in list(zip(unique_d, unique_l)): logging.info('\t{:.4f}: {}'.format(dist, label)) ax.set_xticks(unique_d) ax.set_xticklabels(unique_l) ax.xaxis.grid(True, ls='-') trans_xdata_yaxes = blended_transform_factory(ax.transData, ax.transAxes) ax.vlines(unique_d, 0, 1, transform=trans_xdata_yaxes, colors=rcParams['grid.color'], linewidth=rcParams['grid.linewidth']) # Use a text hyphen instead of a minus sign because some nice fonts # like Whitney don't come with a real minus labels = {'thz': 'THz', 'cm-1': r'cm$^{\mathrm{-}\mathregular{1}}$', 'ev': 'eV', 'mev': 'meV'} ax.set_ylabel('Frequency ({0})'.format(labels[units.lower()]))
python
def get_uri_parts(self, value):
    """Split a URI into its (namespace, local-name) parts.

    args:
        value: a uri in any form pyuri, ttl or full IRI

    Returns a tuple ``(namespace, name)``; when the prefix is unknown the
    namespace slot is ``(None, prefix)``.
    """
    # Normalise pyuri-encoded values back to their http form first.
    if value.startswith('pyuri_'):
        value = self.rpyhttp(value)
    parts = self.parse_uri(value)
    prefix, local_name = parts[0], parts[1]
    # Try the prefix as-is, then lower-cased, before giving up.
    try:
        return (self.ns_dict[prefix], local_name)
    except KeyError:
        try:
            return (self.ns_dict[prefix.lower()], local_name)
        except KeyError:
            return ((None, prefix), local_name)
java
/**
 * Mojo entry point: runs the parent setup, then scans the configured
 * classpaths for entity classes and builds the graph.
 *
 * @throws MojoExecutionException if classpath processing fails
 * @throws MojoFailureException propagated from the parent execution
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    super.execute();

    // scan classes and build graph
    try {
        processClasspaths();
    } catch (Exception e) {
        throw new MojoExecutionException("Error processing entity classes", e);
    }
}
java
/**
 * Online-trains the model on the whole dataset, presenting the samples
 * in a random order.
 *
 * NOTE: {@code Math.permutate} here is the statically-imported smile
 * math utility (java.lang.Math has no such method).
 *
 * @param x training samples, one row per sample
 * @param y class labels, parallel to {@code x}
 */
public void learn(double[][] x, int[] y) {
    int n = x.length;
    // Random permutation so per-sample updates are order-independent.
    int[] index = Math.permutate(n);
    for (int i = 0; i < n; i++) {
        learn(x[index[i]], y[index[i]]);
    }
}
java
/**
 * Parses plugin coordinates out of a policy specification string.
 *
 * Expected shape (inferred from the offsets below — confirm against the
 * spec format): a 7-character prefix (presumably "plugin:"), then
 * colon-separated "groupId:artifactId:version[:classifier][:type]" up to
 * the first '/'.
 *
 * @param pluginPolicySpec the raw policy spec, may be null
 * @return the parsed coordinates, or null when the input is null
 */
public static final PluginCoordinates fromPolicySpec(String pluginPolicySpec) {
    if (pluginPolicySpec == null) {
        return null;
    }
    // startIdx 7 skips the fixed scheme prefix; endIdx stops at the path.
    int startIdx = 7;
    int endIdx = pluginPolicySpec.indexOf('/');
    String [] split = pluginPolicySpec.substring(startIdx, endIdx).split(":"); //$NON-NLS-1$
    String groupId = split[0];
    String artifactId = split[1];
    String version = split[2];
    String classifier = null;
    String type = null;
    // 4 segments: optional trailing type; 5 segments: classifier + type.
    if (split.length == 4) {
        type = split[3];
    }
    if (split.length == 5) {
        classifier = split[3];
        type = split[4];
    }
    PluginCoordinates rval = new PluginCoordinates(groupId, artifactId, version, classifier, type);
    return rval;
}
python
def validate_arguments_type_of_function(param_type=None):
    """
    Decorator factory that validates every argument passed to the
    decorated method is an instance of `param_type`.

    When `param_type` is None the owning instance's class is used, so
    this decorator is intended for methods of a class.
    """
    def inner(function):
        def wrapper(self, *args, **kwargs):
            expected = param_type or type(self)
            for value in args + tuple(kwargs.values()):
                if isinstance(value, expected):
                    continue
                raise TypeError(
                    (
                        'Invalid Type: {}.{}() accepts only the '
                        'arguments of type "<{}>"'
                    ).format(
                        type(self).__name__,
                        function.__name__,
                        expected.__name__,
                    )
                )
            return function(self, *args, **kwargs)

        return wrapper

    return inner
python
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
    """Return cookies as a string of "\\n"-separated "Set-Cookie3" headers.

    ignore_discard and ignore_expires: see docstring for FileCookieJar.save
    """
    now = time.time()
    lines = []
    for cookie in self:
        # Honour the discard/expiry filters unless explicitly disabled.
        if not ignore_discard and cookie.discard:
            continue
        if not ignore_expires and cookie.is_expired(now):
            continue
        lines.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
    # Trailing empty element yields a final newline, matching the old
    # "\n".join(r + [""]) behaviour.
    lines.append("")
    return "\n".join(lines)
java
/**
 * Precomputes a unit-cylinder viewing ray for every pixel of an
 * equirectangular-style panorama image.
 *
 * Columns map linearly to azimuth theta in [-pi, pi); rows map linearly
 * to height z in [-r, r] where r = tan(vfov/2).
 *
 * @param width  image width in pixels
 * @param height image height in pixels
 * @param vfov   vertical field of view in radians
 */
public void configure( int width , int height , float vfov ) {
    declareVectors( width, height );

    // Half-height of the cylinder for the requested vertical FOV.
    float r = (float)Math.tan(vfov/2.0f);

    for (int pixelY = 0; pixelY < height; pixelY++) {
        // z spans [-r, r] from the first to the last row.
        float z = 2*r*pixelY/(height-1) - r;
        for (int pixelX = 0; pixelX < width; pixelX++) {
            // theta spans [-pi, pi) across the image width.
            float theta = GrlConstants.F_PI2*pixelX/width - GrlConstants.F_PI;
            float x = (float)Math.cos(theta);
            float y = (float)Math.sin(theta);
            vectors[pixelY*width+pixelX].set(x,y,z);
        }
    }
}
python
def static_url(path, absolute=False):
    """
    Return a URL for the requested static file.

    Arguments:

    path -- the path to the file (relative to the static files directory)
    absolute -- whether the link should be absolute or relative
    """
    # Normalise platform path separators (e.g. Windows '\\') to URL '/'.
    normalized = path.replace(os.sep, '/') if os.sep != '/' else path
    return flask.url_for('static', filename=normalized, _external=absolute)
java
/**
 * Extracts the sub-linestring lying between two distances measured along
 * this line.
 *
 * The loop decrements startDistance/endDistance by each segment's length
 * as it walks the segments, so within the body both are relative to the
 * current segment's start.
 *
 * @param startDistance distance along the line where the part begins
 * @param endDistance   distance along the line where the part ends
 * @return a new LineString covering [startDistance, endDistance]
 */
public LineString extractPart(double startDistance, double endDistance) {
    LineString result = new LineString();

    for (int i = 0; i < this.segments.size(); startDistance -= this.segments.get(i).length(), endDistance -= this.segments.get(i).length(), i++) {
        LineSegment segment = this.segments.get(i);

        // Skip first segments that we don't need
        double length = segment.length();
        if (length < startDistance) {
            continue;
        }

        Point startPoint = null, endPoint = null;
        if (startDistance >= 0) {
            // This will be our starting point
            startPoint = segment.pointAlongLineSegment(startDistance);
        }
        if (endDistance < length) {
            // this will be our ending point
            endPoint = segment.pointAlongLineSegment(endDistance);
        }

        if (startPoint != null && endPoint == null) {
            // This ist the starting segment, end will come in a later segment
            result.segments.add(new LineSegment(startPoint, segment.end));
        } else if (startPoint == null && endPoint == null) {
            // Center segment between start and end segment, add completely
            result.segments.add(segment);
        } else if (startPoint == null && endPoint != null) {
            // End segment, start was in earlier segment
            result.segments.add(new LineSegment(segment.start, endPoint));
        } else if (startPoint != null && endPoint != null) {
            // Start and end on same segment
            result.segments.add(new LineSegment(startPoint, endPoint));
        }

        // Once the end point has been placed there is nothing left to add.
        if (endPoint != null)
            break;
    }
    return result;
}
java
/**
 * Verifies all symbols in the document against the namespace, collecting
 * every syntax warning and rethrowing them as a single aggregated
 * {@link SymbolWarning}.
 *
 * @param doc the document whose symbols are verified
 * @throws SymbolWarning if any symbol failed verification
 * @throws IndexingFailure propagated from namespace verification
 * @throws ResourceDownloadError propagated from namespace verification
 */
@Override
public void stage3SymbolVerification(final Document doc)
        throws SymbolWarning, IndexingFailure, ResourceDownloadError {
    final List<ResourceSyntaxWarning> exceptions = new ArrayList<ResourceSyntaxWarning>();
    try {
        namespace.verify(doc);
    } catch (SymbolWarning e) {
        // Collect rather than fail fast so the caller sees all warnings.
        exceptions.addAll(e.getResourceSyntaxWarnings());
    }

    if (exceptions.isEmpty()) {
        return;
    }

    final String name = doc.getName();
    final String fmt = INVALID_SYMBOLS;
    final String msg = format(fmt, exceptions.size());
    throw new SymbolWarning(name, msg, exceptions);
}
python
async def block(self):
    """|coro|

    Blocks the user.

    .. note::

        This only applies to non-bot accounts.

    Raises
    -------
    Forbidden
        Not allowed to block this user.
    HTTPException
        Blocking the user failed.
    """
    # Single HTTP call: create a relationship of type "blocked" for this
    # user's id; no local state is mutated here.
    await self._state.http.add_relationship(self.id, type=RelationshipType.blocked.value)
java
/**
 * Marshals an {@link EsamSignalProcessingNotification} into the wire
 * format via the given protocol marshaller.
 *
 * @param esamSignalProcessingNotification the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write into
 * @throws SdkClientException on null input or any marshalling failure
 */
public void marshall(EsamSignalProcessingNotification esamSignalProcessingNotification, ProtocolMarshaller protocolMarshaller) {
    if (esamSignalProcessingNotification == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Only the sccXml member exists on this shape.
        protocolMarshaller.marshall(esamSignalProcessingNotification.getSccXml(), SCCXML_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Reads the data stored at the given znode, retrying up to
 * {@code connectRetryTimes} additional times on failure.
 *
 * @param chronosClientWatcher client wrapper holding the ZooKeeper handle
 * @param znode                path of the znode to read
 * @return the raw bytes stored at the znode
 * @throws IOException if the read still fails after all retries
 */
public byte[] getData(ChronosClientWatcher chronosClientWatcher, String znode) throws IOException {
    byte[] data = null;
    for (int i = 0; i <= connectRetryTimes; i++) {
        try {
            data = chronosClientWatcher.getZooKeeper().getData(znode, null, null);
            break;
        } catch (Exception e) {
            // Typo fix: "Exceptioin" -> "Exception".
            LOG.info("Exception to get data from ZooKeeper, retry " + i + " times");
            if (i == connectRetryTimes) {
                // Preserve the underlying cause instead of discarding it.
                throw new IOException("Error when getting data from " + znode + " after retrying", e);
            }
        }
    }
    return data;
}
python
def to_dict(mapreduce_yaml):
    """Converts a MapReduceYaml file into a JSON-encodable dictionary.

    For use in user-visible UI and internal methods for interfacing with
    user code (like param validation).

    Args:
      mapreduce_yaml: The Python representation of the mapreduce.yaml
          document.

    Returns:
      A list of configuration dictionaries, one per mapreduce entry.
    """
    all_configs = []
    for config in mapreduce_yaml.mapreduce:
        out = {
            "name": config.name,
            "mapper_input_reader": config.mapper.input_reader,
            "mapper_handler": config.mapper.handler,
        }
        # Optional fields are only emitted when present in the yaml.
        if config.mapper.params_validator:
            out["mapper_params_validator"] = config.mapper.params_validator
        if config.mapper.params:
            out["mapper_params"] = {
                param.name: param.default or param.value
                for param in config.mapper.params
            }
        if config.params:
            out["params"] = {
                param.name: param.default or param.value
                for param in config.params
            }
        if config.mapper.output_writer:
            out["mapper_output_writer"] = config.mapper.output_writer
        all_configs.append(out)

    return all_configs
python
def _svd_step(self, X, shrinkage_value, max_rank=None): """ Returns reconstructed X from low-rank thresholded SVD and the rank achieved. """ if max_rank: # if we have a max rank then perform the faster randomized SVD (U, s, V) = randomized_svd( X, max_rank, n_iter=self.n_power_iterations) else: # perform a full rank SVD using ARPACK (U, s, V) = np.linalg.svd( X, full_matrices=False, compute_uv=True) s_thresh = np.maximum(s - shrinkage_value, 0) rank = (s_thresh > 0).sum() s_thresh = s_thresh[:rank] U_thresh = U[:, :rank] V_thresh = V[:rank, :] S_thresh = np.diag(s_thresh) X_reconstruction = np.dot(U_thresh, np.dot(S_thresh, V_thresh)) return X_reconstruction, rank
python
def rc_channels_scaled_encode(self, time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi):
    '''
    Build (but do not send) a MAVLink RC_CHANNELS_SCALED message.

    The scaled values of the RC channels received: (-100%) -10000,
    (0%) 0, (100%) 10000. Channels that are inactive should be set to
    UINT16_MAX.

    time_boot_ms  : Timestamp (milliseconds since system boot) (uint32_t)
    port          : Servo output port (set of 8 outputs = 1 port). Most
                    MAVs will just use one, but this allows for more than
                    8 servos. (uint8_t)
    chanN_scaled  : RC channel N value scaled: (-100%) -10000, (0%) 0,
                    (100%) 10000, (invalid) INT16_MAX. (int16_t, N=1..8)
    rssi          : Receive signal strength indicator, 0: 0%, 100: 100%,
                    255: invalid/unknown. (uint8_t)
    '''
    # Thin constructor wrapper; packing/sending happens in the caller.
    return MAVLink_rc_channels_scaled_message(time_boot_ms, port, chan1_scaled, chan2_scaled, chan3_scaled, chan4_scaled, chan5_scaled, chan6_scaled, chan7_scaled, chan8_scaled, rssi)
java
/**
 * Deletes every configured X-Pack monitoring index, continuing past
 * individual failures and collecting all responses/errors into one
 * newline-separated report.
 *
 * @return concatenated delete responses and/or error messages
 * @throws ElasticSearchException declared for API compatibility;
 *         per-index failures are captured in the returned text instead
 */
public String cleanAllXPackIndices() throws ElasticSearchException{
    StringBuilder ret = new StringBuilder();
    for(String monitor:monitorIndices) {
        try {
            // Index names may contain characters needing URL encoding.
            ret.append(this.client.executeHttp(java.net.URLEncoder.encode(monitor, "UTF-8") + "?pretty", HTTP_DELETE)).append("\n");
        }
        catch (Exception e){
            // Best-effort: record the failure and keep deleting the rest.
            ret.append(e.getMessage()).append("\n");
        }
    }
    return ret.toString();
}
java
@Override public Predicate ge(Expression<? extends Number> arg0, Number arg1) { // TODO Auto-generated method stub return new ComparisonPredicate(arg0, arg1, ConditionalOperator.GTE); }
python
def addTextErr(self, text):
    """Append *text* rendered in the error colour (red)."""
    # Switch the current pen colour before delegating to the generic
    # text writer.
    self._currentColor = self._red
    self.addText(text)
java
/**
 * Replaces the stored parts with a defensive copy of the given
 * collection.
 *
 * @param parts the parts to store; {@code null} clears the field
 */
public void setParts(java.util.Collection<PartListElement> parts) {
    this.parts = (parts == null) ? null : new java.util.ArrayList<PartListElement>(parts);
}
java
/**
 * Asynchronously binds the channel to the address configured via the
 * "localAddress" option.
 *
 * @return a future completing when the bind finishes
 * @throws IllegalStateException if no local address has been configured
 */
public ChannelFuture bindAsync() {
    SocketAddress localAddress = (SocketAddress) getOption("localAddress");
    if (localAddress == null) {
        throw new IllegalStateException("localAddress option is not set.");
    }
    return bindAsync(localAddress);
}
python
def query(self, query_samples):
    """
    Query docs with query_samples number of Gibbs sampling iterations.

    Fills ``self.sampled_topics`` (one row of per-token topic assignments
    per stored sample) and recomputes the document-topic matrix
    ``self.dt`` of shape (D, K, samples).
    """
    self.sampled_topics = np.zeros((self.samples, self.N),
                                   dtype=np.int)

    for s in range(self.samples):
        # Run the Gibbs sampler against the s-th stored topic-term
        # distribution (tt must be C-contiguous float for the sampler).
        self.sampled_topics[s, :] = \
            samplers_lda.sampler_query(self.docid, self.tokens,
                                       self.topic_seed,
                                       np.ascontiguousarray(
                                           self.tt[:, :, s],
                                           dtype=np.float),
                                       self.N, self.K, self.D,
                                       self.alpha, query_samples)

        print("Sample %d queried" % s)

    self.dt = np.zeros((self.D, self.K, self.samples))

    for s in range(self.samples):
        self.dt[:, :, s] = \
            samplers_lda.dt_comp(self.docid, self.sampled_topics[s, :],
                                 self.N, self.K, self.D, self.alpha)
java
/**
 * Recomputes whether this group may start the next task and propagates
 * the result upward: registers/unregisters this group with its parent's
 * eligible set, then asks the parent to re-evaluate itself.
 *
 * Caller must hold the lock on {@code root}.
 */
private void updateEligibility() {
    checkState(Thread.holdsLock(root), "Must hold lock to update eligibility");
    synchronized (root) {
        if (!parent.isPresent()) {
            // Root group: nothing above to notify.
            return;
        }
        if (isEligibleToStartNext()) {
            parent.get().addOrUpdateSubGroup(this);
        } else {
            parent.get().eligibleSubGroups.remove(this);
            lastStartMillis = 0;
        }
        // Recurse up the tree so ancestors see the new state.
        parent.get().updateEligibility();
    }
}
java
/**
 * Debug helper: dumps the headers, global request info and parameters of
 * the given request to the debug log. Best-effort only — any throwable
 * is deliberately swallowed so logging can never break request handling.
 *
 * @param req the request to dump
 */
public void dumpRequest(final HttpServletRequest req) {
    try {
        Enumeration names = req.getHeaderNames();

        String title = "Request headers";

        debug(title);

        while (names.hasMoreElements()) {
            String key = (String)names.nextElement();
            String val = req.getHeader(key);
            debug("  " + key + " = \"" + val + "\"");
        }

        // Re-point the enumeration at the parameters for the second pass.
        names = req.getParameterNames();

        title = "Request parameters";

        debug(title + " - global info and uris");
        debug("getRemoteAddr = " + req.getRemoteAddr());
        debug("getRequestURI = " + req.getRequestURI());
        debug("getRemoteUser = " + req.getRemoteUser());
        debug("getRequestedSessionId = " + req.getRequestedSessionId());
        debug("HttpUtils.getRequestURL(req) = " + req.getRequestURL());
        debug("contextPath=" + req.getContextPath());
        debug("query=" + req.getQueryString());
        debug("contentlen=" + req.getContentLength());
        debug("request=" + req);
        debug("parameters:");

        debug(title);

        while (names.hasMoreElements()) {
            String key = (String)names.nextElement();
            String val = req.getParameter(key);
            debug("  " + key + " = \"" + val + "\"");
        }
    } catch (Throwable t) {
        // Intentionally ignored: diagnostics must never fail the request.
    }
}
java
/**
 * Determines the verification status of an executed rule by delegating
 * to the analyzer context's verification strategy.
 *
 * @param executableRule the rule that was executed
 * @param columnNames    names of the result columns
 * @param rows           the result rows to verify
 * @param context        the analyzer context performing verification
 * @return the resulting status
 * @throws RuleException if verification fails
 */
protected <T extends ExecutableRule<?>> Status getStatus(T executableRule, List<String> columnNames, List<Map<String, Object>> rows, AnalyzerContext context) throws RuleException {
    return context.verify(executableRule, columnNames, rows);
}
python
def copy_directory_structure(destination_directory, relative_path):
    """Create all the intermediate directories required for relative_path
    to exist within destination_directory. This assumes that relative_path
    is a directory located within root_dir.

    Examples:
        destination_directory: /tmp/destination
        relative_path: test/unit/

        will create: /tmp/destination/test/unit

    Args:
        destination_directory (str): root of the destination directory where the
            directory structure will be created.
        relative_path (str): relative path that will be created within
            destination_directory
    """
    full_path = os.path.join(destination_directory, relative_path)
    if os.path.exists(full_path):
        return

    # BUG FIX: the previous call was os.makedirs(destination_directory,
    # relative_path), which passed relative_path as the *mode* argument
    # and never created the requested subtree.
    os.makedirs(full_path)
java
/**
 * Registers a message-selector factory, injecting the bean factory first
 * when the factory is Spring-aware.
 *
 * @param factory the factory to register
 */
public void addMessageSelectorFactory(MessageSelectorFactory<?> factory) {
    // Give Spring-aware factories access to the bean factory before use.
    if (factory instanceof BeanFactoryAware) {
        ((BeanFactoryAware) factory).setBeanFactory(beanFactory);
    }

    this.factories.add(factory);
}
java
/**
 * Begins creating or updating a subnet in the specified virtual network
 * and unwraps the service response to just the subnet body.
 *
 * @param resourceGroupName  the resource group name
 * @param virtualNetworkName the virtual network name
 * @param subnetName         the subnet name
 * @param subnetParameters   parameters for the create/update operation
 * @return an observable emitting the resulting {@link SubnetInner}
 */
public Observable<SubnetInner> beginCreateOrUpdateAsync(String resourceGroupName, String virtualNetworkName, String subnetName, SubnetInner subnetParameters) {
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, virtualNetworkName, subnetName, subnetParameters).map(new Func1<ServiceResponse<SubnetInner>, SubnetInner>() {
        @Override
        public SubnetInner call(ServiceResponse<SubnetInner> response) {
            // Strip the ServiceResponse wrapper; callers only need the body.
            return response.body();
        }
    });
}
python
def _path_to_value(cls, path, parent_dict): """Return a value from a dictionary at the given path""" keys = cls._path_to_keys(path) # Traverse to the tip of the path child_dict = parent_dict for key in keys[:-1]: child_dict = child_dict.get(key) if child_dict is None: return return child_dict.get(keys[-1])
python
def from_task(cls, task):
    """Create a new target representing a task and its parameters

    Args:
        task: Task instance to create target for; the task class has to
            inherit from :class:`ozelot.tasks.TaskBase`.

    Returns:
        ozelot.tasks.ORMTarget: a new target instance
    """
    # The target is keyed on the task's name plus its serialized params,
    # so identical tasks with different parameters map to distinct targets.
    target = cls(name=task.get_name(),
                 params=task.get_param_string())

    return target
python
def delete(self, **kwargs):
    """Delete an object from the facebook graph.

    Pass the id of the object to be deleted.  For a like — which has no
    id of its own in the facebook graph — pass ``cat="likes"`` together
    with the object id instead.
    """
    # pop() with a default replaces the old membership-test / read / del
    # triple in one step.
    cat = kwargs.pop('cat', '')
    return request.publish_cat1("DELETE", self.con, self.token, cat, kwargs)
python
def parse_list(text, off=0, trim=True):
    '''
    Parse a list (likely for comp type) coming from a command line input.

    The string elements within the list may optionally be quoted.

    Raises s_exc.BadSyntax if the text is not a well-formed
    paren-delimited, comma-separated list starting at *off*.
    '''
    if not nextchar(text, off, '('):
        raise s_exc.BadSyntax(at=off, mesg='expected open paren for list')

    off += 1

    valus = []
    while off < len(text):
        _, off = nom(text, off, whites)

        valu, off = parse_valu(text, off)

        _, off = nom(text, off, whites)

        # check for foo=bar kw tuple syntax
        if nextchar(text, off, '='):

            _, off = nom(text, off + 1, whites)

            vval, off = parse_valu(text, off)

            _, off = nom(text, off, whites)

            valu = (valu, vval)

        valus.append(valu)

        _, off = nom_whitespace(text, off)

        if nextchar(text, off, ')'):
            return valus, off + 1

        if not nextchar(text, off, ','):
            raise s_exc.BadSyntax(at=off, text=text, mesg='expected comma in list')

        off += 1

    # Typo fix in the error message: "and" -> "end".
    raise s_exc.BadSyntax(at=off, mesg='unexpected end of text during list')
java
/**
 * Converts a number to its Chinese-character representation.
 *
 * @param n the number to convert; {@code null} yields "零" (zero)
 * @return the Chinese representation of the number
 */
public static String digitToChinese(Number n) {
    if (n == null) {
        return "零";
    }
    return NumberChineseFormater.format(n.doubleValue(), true, true);
}
java
/**
 * Adds every authorizer in the given iterable, rejecting null iterables
 * and null elements.
 *
 * @param authorizers the authorizers to add; must not be or contain null
 * @return this builder, for chaining
 */
public HttpAuthServiceBuilder add(Iterable<? extends Authorizer<HttpRequest>> authorizers) {
    requireNonNull(authorizers, "authorizers");
    for (Authorizer<HttpRequest> authorizer : authorizers) {
        requireNonNull(authorizer, "authorizers contains null.");
        add(authorizer);
    }
    return this;
}
java
/**
 * Enables or disables background dimming, propagating the flag to the
 * touch listener when the list view is already attached.
 *
 * @param dimBackgrounds whether backgrounds should be dimmed
 * @return this adapter, for chaining
 */
@SuppressWarnings("unused")
public SwipeActionAdapter setDimBackgrounds(boolean dimBackgrounds){
    this.mDimBackgrounds = dimBackgrounds;
    if (mListView != null) {
        mTouchListener.setDimBackgrounds(dimBackgrounds);
    }
    return this;
}
java
/**
 * Sends a configuration request to the given member: marks the configure
 * as started, then asynchronously obtains a connection and either sends
 * the request or records the failure.
 *
 * @param member  the cluster member to configure
 * @param request the configure request to send
 */
protected void sendConfigureRequest(MemberState member, ConfigureRequest request) {
    logger.debug("{} - Configuring {}", context.getCluster().member().address(), member.getMember().address());

    // Start the configure to the member.
    member.startConfigure();

    context.getConnections().getConnection(member.getMember().serverAddress()).whenComplete((connection, error) -> {
        context.checkThread();

        // Ignore late completions after this server has been closed.
        if (open) {
            if (error == null) {
                sendConfigureRequest(connection, member, request);
            } else {
                // Complete the configure to the member.
                member.completeConfigure();

                // Trigger reactions to the request failure.
                handleConfigureRequestFailure(member, request, error);
            }
        }
    });
}
python
def set_settings(self, releases=None, default_release=None):
    """Record the release settings, resetting the storage cache whenever
    they change (or when no storage exists yet)."""
    storage_missing = self._storage is None
    releases_changed = getattr(self, 'releases', {}) != releases
    default_changed = getattr(self, 'default_release', '') != default_release
    if storage_missing or releases_changed or default_changed:
        # Invalidate the cache and remember the new settings.
        self._storage = {}
        self.releases = releases or {}
        self.default_release = default_release
java
/**
 * Initializes this worker: connects to ZooKeeper, builds the feed for the
 * plan, and claims an unprocessed feed partition by registering a worker
 * status for it.
 * <p>
 * NOTE(review): every failure path wraps the cause in RuntimeException,
 * aborting initialization.
 */
public void init(){
  try {
    // 30s session timeout; this object serves as the ZooKeeper watcher.
    zk = new ZooKeeper(parent.getProperties().get(TeknekDaemon.ZK_SERVER_LIST).toString(), 30000, this);
  } catch (IOException e1) {
    throw new RuntimeException(e1);
  }
  Feed feed = DriverFactory.buildFeed(plan.getFeedDesc());
  List<WorkerStatus> workerStatus;
  try {
    workerStatus = parent.getWorkerDao().findAllWorkerStatusForPlan(plan, otherWorkers);
  } catch (WorkerDaoException e1) {
    throw new RuntimeException(e1);
  }
  // Every partition already has a worker: the plan is over-subscribed.
  if (workerStatus.size() >= feed.getFeedPartitions().size()){
    throw new RuntimeException("Number of running workers " + workerStatus.size()+" >= feed partitions "
            + feed.getFeedPartitions().size() +" plan should be fixed " +plan.getName());
  }
  FeedPartition toProcess = findPartitionToProcess(workerStatus, feed.getFeedPartitions());
  if (toProcess != null){
    driver = DriverFactory.createDriver(toProcess, plan, parent.getMetricRegistry());
    driver.initialize();
    // Announce that this worker now owns the chosen partition.
    WorkerStatus iGotThis = new WorkerStatus(myId.toString(), toProcess.getPartitionId() , parent.getMyId());
    try {
      parent.getWorkerDao().registerWorkerStatus(zk, plan, iGotThis);
    } catch (WorkerDaoException e) {
      throw new RuntimeException(e);
    }
  } else {
    throw new RuntimeException("Could not start plan "+plan.getName());
  }
}
java
/**
 * Imports an RTF fragment from the given stream into the open document,
 * applying the supplied font/color mappings and notifying the given event
 * listeners during parsing.
 *
 * @throws DocumentException if the document is not open
 * @throws IOException on read errors from the document source
 */
public void importRtfFragment(InputStream documentSource, RtfImportMappings mappings, EventListener[] events) throws IOException, DocumentException {
    if (!this.open) {
        throw new DocumentException("The document must be open to import RTF fragments.");
    }
    RtfParser parser = new RtfParser(this.document);
    if (events != null) {
        for (EventListener event : events) {
            parser.addListener(event);
        }
    }
    parser.importRtfFragment(documentSource, this.rtfDoc, mappings);
}
java
/**
 * Returns the localized display name of this resource's state, using the
 * request locale when a request is present and the default bundle locale
 * otherwise.
 */
public String getStateName() {
    CmsResourceState state = m_resource.getState();
    String stateKey = org.opencms.workplace.explorer.Messages.getStateKey(state);
    if (m_request != null) {
        return org.opencms.workplace.explorer.Messages.get().getBundle(m_request.getLocale()).key(stateKey);
    }
    return org.opencms.workplace.explorer.Messages.get().getBundle().key(stateKey);
}
python
def make_serviceitem_servicedllsignatureexists(dll_sig_exists, condition='is', negate=False):
    """
    Create a node for ServiceItem/serviceDLLSignatureExists

    :return: A IndicatorItem represented as an Element node
    """
    return ioc_api.make_indicatoritem_node(
        condition,
        'ServiceItem',
        'ServiceItem/serviceDLLSignatureExists',
        'bool',
        dll_sig_exists,
        negate=negate,
    )
java
/**
 * Selects the token at {@code index} (clamped to the last array element)
 * as the matched token, and records the text of the {@code next - 1}
 * following tokens as "skipped tokens" when the match is configured to
 * include them.
 */
public final void setToken(AnalyzedTokenReadings[] tokens, int index, int next) {
  int idx = index;
  if (index >= tokens.length) {
    // TODO: hacky workaround, find a proper solution. See EnglishPatternRuleTest.testBug()
    idx = tokens.length - 1;
  }
  setToken(tokens[idx]);
  IncludeRange includeSkipped = match.getIncludeSkipped();
  if (next > 1 && includeSkipped != IncludeRange.NONE) {
    StringBuilder sb = new StringBuilder();
    if (includeSkipped == IncludeRange.FOLLOWING) {
      // FOLLOWING: discard the formatted token; only the skipped text is kept.
      formattedToken = null;
    }
    for (int k = index + 1; k < index + next; k++) {
      // Preserve whitespace between skipped tokens, except before the first
      // one in FOLLOWING mode.
      if (tokens[k].isWhitespaceBefore()
          && !(k == index + 1 && includeSkipped == IncludeRange.FOLLOWING)) {
        sb.append(' ');
      }
      sb.append(tokens[k].getToken());
    }
    skippedTokens = sb.toString();
  } else {
    skippedTokens = "";
  }
}
python
def get_node_by_name(graph, name):
    """Return a node ID given its name."""
    # Scan node attribute dicts for a matching 'n' entry; None when absent.
    matches = (node_id for node_id, attrs in graph.nodes(data=True)
               if attrs['n'] == name)
    return next(matches, None)
java
/**
 * Looks up the ID of the state whose LR(1) item set equals the target set.
 *
 * @param targetSet the item set to search for
 * @return the index of the matching state within the item set collection
 * @throws GrammarException if no state matches the target set
 */
public int getStateId(LR1ItemSet targetSet) throws GrammarException {
    int stateId = 0;
    while (stateId < itemSetCollection.size()) {
        if (itemSetCollection.get(stateId).equals(targetSet)) {
            return stateId;
        }
        stateId++;
    }
    throw new GrammarException("Target set '" + targetSet + "' was not found!");
}
java
/**
 * Runs the pre-execution hook on the request and then dispatches it.
 */
@Override
public CreatePullRequestResult createPullRequest(CreatePullRequestRequest request) {
    CreatePullRequestRequest prepared = beforeClientExecution(request);
    return executeCreatePullRequest(prepared);
}
java
/**
 * Returns the EClass for IfcFlowMovingDevice, resolving it lazily from the
 * registered Ifc2x3tc1 package on first access and caching the result.
 */
public EClass getIfcFlowMovingDevice() {
    if (ifcFlowMovingDeviceEClass != null) {
        return ifcFlowMovingDeviceEClass;
    }
    EPackage ifcPackage = EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI);
    // Classifier index 246 corresponds to IfcFlowMovingDevice in this package.
    ifcFlowMovingDeviceEClass = (EClass) ifcPackage.getEClassifiers().get(246);
    return ifcFlowMovingDeviceEClass;
}
python
def eratosthene(n):
    """Prime numbers by sieve of Eratosthene

    :param n: positive integer
    :assumes: n > 2
    :returns: list of prime numbers <n
    :complexity: O(n loglog n)
    """
    P = [True] * n
    answ = [2]
    for i in range(3, n, 2):
        if P[i]:
            answ.append(i)
            # Start crossing off at i*i: smaller odd multiples of i have a
            # smaller prime factor and were already crossed off. Step by
            # 2*i to skip even multiples, which are never inspected.
            for j in range(i * i, n, 2 * i):
                P[j] = False
    return answ
java
/**
 * Builds the API endpoint path for a shared file within a project.
 *
 * @param projectIdOrKey the project identifier or key
 * @param sharedFileId the shared file identifier
 * @return the full endpoint URL for the shared file
 */
public String getSharedFileEndpoint(Object projectIdOrKey, Object sharedFileId) throws BacklogException {
    String path = "projects/" + projectIdOrKey + "/files/" + sharedFileId;
    return buildEndpoint(path);
}
java
/**
 * Renews the session tokens with the server.
 * <p>
 * Skips the renewal when the current session token already differs from
 * {@code prevSessionToken} — another caller renewed it first. Otherwise
 * builds a login input from the current connection state, performs the
 * renewal, and stores the refreshed session and master tokens.
 */
synchronized void renewSession(String prevSessionToken)
    throws SFException, SnowflakeSQLException {
  if (sessionToken != null && !sessionToken.equals(prevSessionToken)) {
    logger.debug("not renew session because session token has not been updated.");
    return;
  }

  SessionUtil.LoginInput loginInput = new SessionUtil.LoginInput();
  loginInput.setServerUrl(
      (String) connectionPropertiesMap.get(SFSessionProperty.SERVER_URL))
      .setSessionToken(sessionToken)
      .setMasterToken(masterToken)
      .setIdToken(idToken)
      .setLoginTimeout(loginTimeout)
      .setDatabaseName(this.getDatabase())
      .setSchemaName(this.getSchema())
      .setRole(this.getRole())
      .setWarehouse(this.getWarehouse());

  SessionUtil.LoginOutput loginOutput = SessionUtil.renewSession(loginInput);

  // Refresh the cached session objects if the server resolved a token issue.
  if (loginOutput.isUpdatedByTokenRequestIssue()) {
    setCurrentObjects(loginInput, loginOutput);
  }
  sessionToken = loginOutput.getSessionToken();
  masterToken = loginOutput.getMasterToken();
}
java
@Nullable public static String getSessionID (@Nonnull final String sValue) { ValueEnforcer.notNull (sValue, "Value"); // Get the session ID parameter final int nIndex = sValue.indexOf (';'); return nIndex == -1 ? null : sValue.substring (nIndex + 1); }
python
def state(self, state):
    """Set enum metric state.

    Looks up *state* among the configured states and stores its index as
    the metric value, holding the lock for thread safety.
    """
    with self._lock:
        idx = self._states.index(state)
        self._value = idx
python
def _ItemsToUrns(self, items):
    """Converts collection items to aff4 urns suitable for downloading."""
    for item in items:
        # Best-effort: silently drop items that cannot be exported.
        try:
            urn = flow_export.CollectionItemToAff4Path(item, self.client_id)
        except flow_export.ItemNotExportableError:
            continue
        yield urn