language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
python
def update_metadata_filters(metadata, jupyter_md, cell_metadata):
    """Update or set the notebook and cell metadata filters"""
    cell_metadata = [m for m in cell_metadata if m not in ['language', 'magic_args']]
    if 'cell_metadata_filter' in metadata.get('jupytext', {}):
        metadata_filter = metadata_filter_as_dict(metadata.get('jupytext', {})['cell_metadata_filter'])
        if isinstance(metadata_filter.get('excluded'), list):
            metadata_filter['excluded'] = [key for key in metadata_filter['excluded'] if key not in cell_metadata]
        metadata_filter.setdefault('additional', [])
        if isinstance(metadata_filter.get('additional'), list):
            for key in cell_metadata:
                if key not in metadata_filter['additional']:
                    metadata_filter['additional'].append(key)
        metadata.setdefault('jupytext', {})['cell_metadata_filter'] = metadata_filter_as_string(metadata_filter)
    if not jupyter_md:
        # Set a metadata filter equal to the current metadata in script
        cell_metadata = {'additional': cell_metadata, 'excluded': 'all'}
        metadata.setdefault('jupytext', {})['notebook_metadata_filter'] = '-all'
        metadata.setdefault('jupytext', {})['cell_metadata_filter'] = metadata_filter_as_string(cell_metadata)
python
def parse(self, xml_file, view_name=None) -> XmlNode:
    """Parses xml file with xml_path and returns XmlNode"""
    self._setup_parser()
    try:
        self._view_name = view_name
        self._parser.ParseFile(xml_file)
    except ExpatError as error:
        # pylint: disable=E1101
        raise XmlError(errors.messages[error.code], ViewInfo(view_name, error.lineno))
    root = self._root
    self._reset()
    return root
java
protected PListAccessor getInfoPListAccessor(XCodeContext.SourceCodeLocation location, String configuration, String sdk)
        throws MojoExecutionException, XCodeException {
    File plistFile = getPListFile(location, configuration, sdk);
    if (!plistFile.isFile()) {
        throw new MojoExecutionException("The Xcode project refers to the Info.plist file '" + plistFile + "' that does not exist.");
    }
    return new PListAccessor(plistFile);
}
python
def send_notification(self, user, sender=None, **kwargs):
    """
    An intermediary function for sending a notification email informing
    a pre-existing, active user that they have been added to a new
    organization.
    """
    if not user.is_active:
        return False
    self.email_message(
        user, self.notification_subject, self.notification_body, sender, **kwargs
    ).send()
    return True
java
public static void main(String args[]) {
    // Here's an example of how to use JSONEmitter. This produces the following JSON:
    // {"doc": {
    //     "fields": [
    //         {"Title": "Zen and the Art of Motorcycle Maintenance"},
    //         {"Amazon-Link": "http://www.amazon.com/"},
    //         {"Author": "Robert Pirsig"},
    //         {"Publisher": "Harper Perennial Modern Classics"}
    //     ]
    // }}
    // Note: Not necessarily correct Doradus syntax!
    JSONEmitter json = new JSONEmitter();
    json.startDocument()
        .startGroup("doc")
        .startArray("fields")
        .addObject("Title", "Zen and the Art of Motorcycle Maintenance")
        .addObject("Amazon-Link", "http://www.amazon.com/")
        .addObject("Author", "Robert\tPirsig")
        .addObject("Publisher", "Harper Perennial Modern Classics\r\n")
        .endArray()     // fields
        .endGroup()     // doc
        .endDocument();
    String text = json.toString();
    System.out.println(text);
}
python
def base_ws_uri():
    """Base websocket URL that is advertised to external clients.

    Useful when the websocket URL advertised to the clients needs
    to be customized (typically when running behind NAT, firewall, etc.)
    """
    scheme = config['wsserver']['advertised_scheme']
    host = config['wsserver']['advertised_host']
    port = config['wsserver']['advertised_port']
    return '{}://{}:{}'.format(scheme, host, port)
python
def logout(request, next_page=None):
    """
    Redirects to CAS logout page

    :param: request RequestObj
    :param: next_page Page to redirect to
    """
    auth.logout(request)
    if not next_page:
        next_page = _redirect_url(request)
    if settings.CAS_LOGOUT_COMPLETELY:
        return HttpResponseRedirect(_logout_url(request, next_page))
    else:
        return HttpResponseRedirect(next_page)
java
public static <T> T findValue(ServiceRegistry registry, ServiceName name) {
    ServiceController<T> service = findService(registry, name);
    return ((service != null) && (service.getState() == State.UP)) ? service.getValue() : null;
}
python
def hash_edge(source, target, edge_data: EdgeData) -> str:
    """Convert an edge tuple to a SHA-512 hash.

    :param BaseEntity source: The source BEL node
    :param BaseEntity target: The target BEL node
    :param edge_data: The edge's data dictionary
    :return: A SHA-512 hash of the edge tuple, computed from the source, the target,
             and the edge data dictionary
    """
    edge_tuple = _get_edge_tuple(source, target, edge_data)
    return _hash_tuple(edge_tuple)
python
def _extract_attrs(x, n):
    """Extracts attributes for an image. n is the index where the
    attributes begin. Extracted elements are deleted from the element
    list x. Attrs are returned in pandoc format.
    """
    try:
        return extract_attrs(x, n)
    except (ValueError, IndexError):
        if PANDOCVERSION < '1.16':
            # Look for attributes attached to the image path, as occurs with
            # image references for pandoc < 1.16 (pandoc-fignos Issue #14).
            # See http://pandoc.org/MANUAL.html#images for the syntax.
            # Note: This code does not handle the "optional title" for
            # image references (search for link_attributes in pandoc's docs).
            assert x[n-1]['t'] == 'Image'
            image = x[n-1]
            s = image['c'][-1][0]
            if '%20%7B' in s:
                path = s[:s.index('%20%7B')]
                attrs = unquote(s[s.index('%7B'):])
                image['c'][-1][0] = path  # Remove attr string from the path
                return PandocAttributes(attrs.strip(), 'markdown').to_pandoc()
        raise
python
async def requirements(client: Client, search: str) -> dict:
    """
    GET list of requirements for a given UID/Public key

    :param client: Client to connect to the api
    :param search: UID or public key
    :return:
    """
    return await client.get(MODULE + '/requirements/%s' % search, schema=REQUIREMENTS_SCHEMA)
python
def normal_var(data, mean):
    """
    Creates a segment cost function for a time series with a
    Normal distribution with changing variance

    Args:
        data (:obj:`list` of float): 1D time series data
        mean (float): mean

    Returns:
        function: Function with signature
            (int, int) -> float
            where the first arg is the starting index, and the second
            is the ending index. Returns the cost of that segment
    """
    if not isinstance(data, np.ndarray):
        data = np.array(data)

    cumm = [0.0]
    cumm.extend(np.cumsum(np.power(np.abs(data - mean), 2)))

    def cost(s, t):
        """ Cost function for normal distribution with variable variance

        Args:
            s (int): start index
            t (int): end index

        Returns:
            float: Cost, from start to end
        """
        dist = float(t - s)
        diff = cumm[t] - cumm[s]
        return dist * np.log(diff/dist)

    return cost
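For illustration, a minimal usage sketch (assuming numpy is imported as np; the data and mean are made up): the returned cost function can then be handed to a changepoint search.

import numpy as np

data = np.concatenate([np.random.normal(0, 1.0, 100),
                       np.random.normal(0, 3.0, 100)])  # variance shift at index 100
cost = normal_var(data, mean=0.0)
# Compare one segment over [0, 200) against two homogeneous halves
print(cost(0, 200), cost(0, 100) + cost(100, 200))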
python
def view_history(name, gitref):
    """Serve a page name from git repo (an old version of a page).

    .. note:: this is a bottle view

    * this is a GET only method : you can not change a committed page

    Keyword Arguments:
        :name: (str) -- name of the rest file (without the .rst extension)
        :gitref: (str) -- hexsha of the git commit to look into

    Returns:
        bottle response object or 404 error page
    """
    response.set_header('Cache-control', 'no-cache')
    response.set_header('Pragma', 'no-cache')
    content = read_committed_file(gitref, name + '.rst')
    if content:
        html_body = publish_parts(content,
                                  writer=AttowikiWriter(),
                                  settings=None,
                                  settings_overrides=None)['html_body']
        history = commit_history(name + '.rst')
        return template('page',
                        type="history",
                        name=name,
                        extended_name=None,
                        is_repo=check_repo(),
                        history=history,
                        gitref=gitref,
                        content=html_body)
    else:
        return abort(404)
java
protected void validate(String operationType) throws Exception {
    super.validate(operationType);

    MPSInt ns_vlan_id_validator = new MPSInt();
    ns_vlan_id_validator.setConstraintMinValue(MPSConstants.GENERIC_CONSTRAINT, 0);
    ns_vlan_id_validator.setConstraintMaxValue(MPSConstants.GENERIC_CONSTRAINT, 4095);
    ns_vlan_id_validator.validate(operationType, ns_vlan_id, "\"ns_vlan_id\"");

    MPSString ns_vlan_ip_address_validator = new MPSString();
    ns_vlan_ip_address_validator.setConstraintMaxStrLen(MPSConstants.GENERIC_CONSTRAINT, 64);
    ns_vlan_ip_address_validator.setConstraintMinStrLen(MPSConstants.GENERIC_CONSTRAINT, 1);
    ns_vlan_ip_address_validator.validate(operationType, ns_vlan_ip_address, "\"ns_vlan_ip_address\"");

    MPSString if_ipv6_routing_validator = new MPSString();
    if_ipv6_routing_validator.validate(operationType, if_ipv6_routing, "\"if_ipv6_routing\"");
}
python
def stop(self):
    """Stops logging and closes the file."""
    self._flush()
    filesize = self.file.tell()
    super(BLFWriter, self).stop()

    # Write header in the beginning of the file
    header = [b"LOGG", FILE_HEADER_SIZE, APPLICATION_ID, 0, 0, 0, 2, 6, 8, 1]
    # The meaning of "count of objects read" is unknown
    header.extend([filesize, self.uncompressed_size, self.count_of_objects, 0])
    header.extend(timestamp_to_systemtime(self.start_timestamp))
    header.extend(timestamp_to_systemtime(self.stop_timestamp))
    with open(self.file.name, "r+b") as f:
        f.write(FILE_HEADER_STRUCT.pack(*header))
java
public List<String> getLoggerNames() {
    return Collections.list(java.util.logging.LogManager.getLogManager().getLoggerNames());
}
java
public com.google.api.ads.admanager.axis.v201811.DateTime getStartTime() {
    return startTime;
}
java
public static Integer valueOf(String s, int radix) throws NumberFormatException {
    return Integer.valueOf(parseInt(s, radix));
}
java
public static AccountRegisterResult accountRegister(String accessToken, AccountRegister accountRegister) {
    return accountRegister(accessToken, JsonUtil.toJSONString(accountRegister));
}
python
def create_intent(project_id, display_name, training_phrases_parts, message_texts):
    """Create an intent of the given intent type."""
    import dialogflow_v2 as dialogflow

    intents_client = dialogflow.IntentsClient()
    parent = intents_client.project_agent_path(project_id)

    training_phrases = []
    for training_phrases_part in training_phrases_parts:
        part = dialogflow.types.Intent.TrainingPhrase.Part(text=training_phrases_part)
        # Here we create a new training phrase for each provided part.
        training_phrase = dialogflow.types.Intent.TrainingPhrase(parts=[part])
        training_phrases.append(training_phrase)

    text = dialogflow.types.Intent.Message.Text(text=message_texts)
    message = dialogflow.types.Intent.Message(text=text)

    intent = dialogflow.types.Intent(
        display_name=display_name,
        training_phrases=training_phrases,
        messages=[message])

    response = intents_client.create_intent(parent, intent)
    print('Intent created: {}'.format(response))
java
public static void main(String[] args) throws Exception {
    int mb = 1024 * 1024;

    // Getting the runtime reference from system
    Runtime runtime = Runtime.getRuntime();

    System.out.println("##### Heap utilization statistics [MB] #####");
    // Print used memory
    System.out.println("Used Memory:" + (runtime.totalMemory() - runtime.freeMemory()) / mb);
    // Print free memory
    System.out.println("Free Memory:" + runtime.freeMemory() / mb);
    // Print total available memory
    System.out.println("Total Memory:" + runtime.totalMemory() / mb);
    // Print Maximum available memory
    System.out.println("Max Memory:" + runtime.maxMemory() / mb);

    if (args.length < 1) {
        System.err.println("First argument needs to be path to fasta file");
        return;
    }

    File f = new File(args[0]);
    if (!f.exists()) {
        System.err.println("File does not exist " + args[0]);
        return;
    }

    long timeS = System.currentTimeMillis();

    // automatically uncompress files using InputStreamProvider
    InputStreamProvider isp = new InputStreamProvider();
    InputStream inStream = isp.getInputStream(f);

    FastaReader<ProteinSequence, AminoAcidCompound> fastaReader =
            new FastaReader<ProteinSequence, AminoAcidCompound>(
                    inStream,
                    new GenericFastaHeaderParser<ProteinSequence, AminoAcidCompound>(),
                    new ProteinSequenceCreator(AminoAcidCompoundSet.getAminoAcidCompoundSet()));

    LinkedHashMap<String, ProteinSequence> b;

    int nrSeq = 0;
    while ((b = fastaReader.process(100)) != null) {
        for (String key : b.keySet()) {
            nrSeq++;
            System.out.println(nrSeq + " : " + key + " " + b.get(key));
            if (nrSeq % 100000 == 0)
                System.out.println(nrSeq);
        }
    }

    long timeE = System.currentTimeMillis();
    System.out.println("parsed a total of " + nrSeq + " TREMBL sequences! in " + (timeE - timeS));
}
java
public void xdsl_setting_POST(Boolean resellerFastModemShipping, Boolean resellerModemBasicConfig) throws IOException {
    String qPath = "/me/xdsl/setting";
    StringBuilder sb = path(qPath);
    HashMap<String, Object> o = new HashMap<String, Object>();
    addBody(o, "resellerFastModemShipping", resellerFastModemShipping);
    addBody(o, "resellerModemBasicConfig", resellerModemBasicConfig);
    exec(qPath, "POST", sb.toString(), o);
}
java
private TaskDef getTaskDefFromDB(String name) {
    final String READ_ONE_TASKDEF_QUERY = "SELECT json_data FROM meta_task_def WHERE name = ?";

    return queryWithTransaction(READ_ONE_TASKDEF_QUERY,
            q -> q.addParameter(name).executeAndFetchFirst(TaskDef.class));
}
java
public com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult getKMapEstimationResult() {
    if (resultCase_ == 7) {
        return (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult) result_;
    }
    return com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.getDefaultInstance();
}
python
def export(self, location):
    """Export the Bazaar repository at the url to the destination location"""
    temp_dir = tempfile.mkdtemp('-export', 'pip-')
    self.unpack(temp_dir)
    if os.path.exists(location):
        # Remove the location to make sure Bazaar can export it correctly
        rmtree(location)
    try:
        call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
                        filter_stdout=self._filter, show_stdout=False)
    finally:
        rmtree(temp_dir)
python
def __is_function_action(self, action_function):
    """ Detect if given function is really an action function.

    Args:
        action_function: Function to test.

    Note:
        We don't care if the variable refers to a function but rather
        if it is callable or not.
    """
    # test if function returns a pair of values
    is_function_action = True
    if not hasattr(action_function, '__call__'):
        return False

    # OK, callable. Do we receive the right arguments?
    try:
        for end_string, context in action_function():
            if not isinstance(end_string, basestring):
                self.log_error("Action function must return end of filename as a string as first argument")
            if not isinstance(context, dict):
                self.log_error("Action function must return context as a dict as second argument")
            break
    except Exception:
        is_function_action = False

    return is_function_action
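For illustration, a minimal sketch of a conforming action function (hypothetical names): the validator above only requires a callable that yields (string, dict) pairs.

def sample_action():
    # Yields (end-of-filename string, context dict) pairs,
    # which is exactly the shape __is_function_action checks for.
    yield "_backup.txt", {"compress": True}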
python
def get_output_stream(encoding=anytemplate.compat.ENCODING, ostream=sys.stdout):
    """
    Get an output stream that takes care of character encoding correctly.

    :param ostream: Output stream (file-like object); sys.stdout by default
    :param encoding: Characters set encoding, e.g. UTF-8
    :return: sys.stdout can output encoded strings

    >>> get_output_stream("UTF-8")  # doctest: +ELLIPSIS
    <encodings.utf_8.StreamWriter ... at 0x...>
    """
    return codecs.getwriter(encoding)(ostream)
java
@Nullable
public Capabilities getCapabilities() {
    MutableCapabilities capabilities = (MutableCapabilities) super.getCapabilities();
    if (capabilities != null) {
        capabilities.setCapability(PLATFORM_NAME, IOS_PLATFORM);
    }
    return capabilities;
}
python
def shared_prefix(args):
    """
    Find the shared prefix between the strings.

    For instance:

        shared_prefix(['blahblah', 'blahwhat']) returns 'blah'.
    """
    i = 0
    while i < min(map(len, args)):
        if len(set(map(operator.itemgetter(i), args))) != 1:
            break
        i += 1
    return args[0][:i]
java
public static Bitmap loadBitmapOptimized(Uri uri, Context context, int limit) throws ImageLoadException {
    return loadBitmapOptimized(new UriSource(uri, context) {
    }, limit);
}
java
@Override
public SIBusMessage receiveWithWait(SITransaction siTran, long timeout)
        throws SISessionUnavailableException, SISessionDroppedException,
               SIResourceException, SIConnectionLostException, SILimitExceededException,
               SIErrorException, SIIncorrectCallException {
    if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
        SibTr.entry(CoreSPIConsumerSession.tc, "receiveWithWait",
                new Object[] { this, new Long(timeout), siTran });

    checkTransaction(siTran, "TRANSACTION_RECEIVE_USAGE_ERROR_CWSIP0777");

    SIBusMessage jsMsg = null;
    boolean successful = false;

    // Try to receive a message from the LCP
    try {
        jsMsg = _localConsumerPoint.receive(timeout, (TransactionCommon) siTran);
        successful = true;
    } catch (SINotPossibleInCurrentConfigurationException e) {
        // No FFDC code needed.
        // This case indicates that the destination has changed in some way
        // Probably receiveAllowed = false.
        if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
            SibTr.exit(CoreSPIConsumerSession.tc, "receiveWithWait", "SISessionUnavailableException");
        throw new SISessionUnavailableException(
                nls.getFormattedMessage(
                        "CONSUMER_CLOSED_ERROR_CWSIP0177",
                        new Object[] { _destAddr.getDestinationName(),
                                       _messageProcessor.getMessagingEngineName() },
                        null));
    }

    if (TraceComponent.isAnyTracingEnabled() && UserTrace.tc_mt.isDebugEnabled())
        UserTrace.trace_Receive(siTran, (JsMessage) jsMsg, _destAddr, getIdInternal());

    if (TraceComponent.isAnyTracingEnabled() && CoreSPIConsumerSession.tc.isEntryEnabled())
        SibTr.exit(CoreSPIConsumerSession.tc, "receiveWithWait", jsMsg);

    // return the message or null
    return jsMsg;
}
java
private final void checkCompatibility(VariableNumMap other) {
    int i = 0, j = 0;
    int[] otherNums = other.nums;
    String[] otherNames = other.names;
    Variable[] otherVars = other.vars;
    while (i < nums.length && j < otherNums.length) {
        if (nums[i] < otherNums[j]) {
            i++;
        } else if (nums[i] > otherNums[j]) {
            j++;
        } else {
            // Equal
            Preconditions.checkArgument(names[i].equals(otherNames[j]));
            Preconditions.checkArgument(vars[i].getName().equals(otherVars[j].getName()));
            i++;
            j++;
        }
    }
}
java
@Override
public void clearCache(CPDefinitionVirtualSetting cpDefinitionVirtualSetting) {
    entityCache.removeResult(CPDefinitionVirtualSettingModelImpl.ENTITY_CACHE_ENABLED,
            CPDefinitionVirtualSettingImpl.class, cpDefinitionVirtualSetting.getPrimaryKey());

    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITH_PAGINATION);
    finderCache.clearCache(FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION);

    clearUniqueFindersCache((CPDefinitionVirtualSettingModelImpl) cpDefinitionVirtualSetting, true);
}
python
def generic_visit(self, node, *args, **kwargs):
    """Called if no explicit visitor function exists for a node."""
    for node in node.iter_child_nodes():
        self.visit(node, *args, **kwargs)
python
def lineReceived(self, line):
    """
    Called when a line is received.

    We expect a length in bytes or an empty line for keep-alive. If
    we got a length, switch to raw mode to receive that amount of bytes.
    """
    if line and line.isdigit():
        self._expectedLength = int(line)
        self._rawBuffer = []
        self._rawBufferLength = 0
        self.setRawMode()
    else:
        self.keepAliveReceived()
java
public void printStackTrace() {
    if (detail == null) {
        super.printStackTrace();
    } else {
        synchronized (System.err) {
            System.err.println(super.getMessage() + "; nested exception is:");
            detail.printStackTrace();
        }
    }
}
java
public void constrainViewport(float left, float top, float right, float bottom) {
    if (right - left < minViewportWidth) {
        // Minimum width - constrain horizontal zoom!
        right = left + minViewportWidth;
        if (left < maxViewport.left) {
            left = maxViewport.left;
            right = left + minViewportWidth;
        } else if (right > maxViewport.right) {
            right = maxViewport.right;
            left = right - minViewportWidth;
        }
    }

    if (top - bottom < minViewportHeight) {
        // Minimum height - constrain vertical zoom!
        bottom = top - minViewportHeight;
        if (top > maxViewport.top) {
            top = maxViewport.top;
            bottom = top - minViewportHeight;
        } else if (bottom < maxViewport.bottom) {
            bottom = maxViewport.bottom;
            top = bottom + minViewportHeight;
        }
    }

    currentViewport.left = Math.max(maxViewport.left, left);
    currentViewport.top = Math.min(maxViewport.top, top);
    currentViewport.right = Math.min(maxViewport.right, right);
    currentViewport.bottom = Math.max(maxViewport.bottom, bottom);
    viewportChangeListener.onViewportChanged(currentViewport);
}
python
def orbit_on_path(self, path=None, focus=None, step=0.5, viewup=None, bkg=True):
    """Orbit on the given path focusing on the focus point

    Parameters
    ----------
    path : vtki.PolyData
        Path of orbital points. The order in the points is the order of travel

    focus : list(float) of length 3, optional
        The point to focus the camera.

    step : float, optional
        The timestep between flying to each camera position

    viewup : list(float)
        the normal to the orbital plane

    bkg : bool, optional
        Run the orbit in a background thread.
    """
    if focus is None:
        focus = self.center
    if viewup is None:
        viewup = rcParams['camera']['viewup']
    if path is None:
        path = self.generate_orbital_path(viewup=viewup)
    if not is_vtki_obj(path):
        path = vtki.PolyData(path)
    points = path.points

    def orbit():
        """Internal thread for running the orbit"""
        for point in points:
            self.set_position(point)
            self.set_focus(focus)
            self.set_viewup(viewup)
            time.sleep(step)

    if bkg:
        thread = Thread(target=orbit)
        thread.start()
    else:
        orbit()
    return
java
private UnicodeSet applyFilter(Filter filter, int src) {
    // Logically, walk through all Unicode characters, noting the start
    // and end of each range for which filter.contain(c) is
    // true.  Add each range to a set.
    //
    // To improve performance, use an inclusions set which
    // encodes information about character ranges that are known
    // to have identical properties.
    // getInclusions(src) contains exactly the first characters of
    // same-value ranges for the given properties "source".
    clear();

    int startHasProperty = -1;
    UnicodeSet inclusions = getInclusions(src);
    int limitRange = inclusions.getRangeCount();

    for (int j = 0; j < limitRange; ++j) {
        // get current range
        int start = inclusions.getRangeStart(j);
        int end = inclusions.getRangeEnd(j);

        // for all the code points in the range, process
        for (int ch = start; ch <= end; ++ch) {
            // only add to the unicodeset on inflection points --
            // where the hasProperty value changes to false
            if (filter.contains(ch)) {
                if (startHasProperty < 0) {
                    startHasProperty = ch;
                }
            } else if (startHasProperty >= 0) {
                add_unchecked(startHasProperty, ch - 1);
                startHasProperty = -1;
            }
        }
    }
    if (startHasProperty >= 0) {
        add_unchecked(startHasProperty, 0x10FFFF);
    }

    return this;
}
python
def fix_bam_header(job, bamfile, sample_type, univ_options, samtools_options, retained_chroms=None):
    """
    Fix the bam header to remove the command line call.  Failing to do this causes Picard to
    reject the bam.

    :param dict bamfile: The input bam file
    :param str sample_type: Description of the sample to inject into the filename
    :param dict univ_options: Dict of universal options used by almost all tools
    :param dict samtools_options: Options specific to samtools
    :param list retained_chroms: A list of chromosomes to retain
    :return: fsID for the output bam
    :rtype: toil.fileStore.FileID
    """
    if retained_chroms is None:
        retained_chroms = []
    work_dir = os.getcwd()
    input_files = {
        sample_type + '.bam': bamfile}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    parameters = ['view',
                  '-H',
                  input_files[sample_type + '.bam']]
    with open('/'.join([work_dir, sample_type + '_input_bam.header']), 'w') as headerfile:
        docker_call(tool='samtools', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=headerfile,
                    tool_version=samtools_options['version'])
    with open(headerfile.name, 'r') as headerfile, \
            open('/'.join([work_dir, sample_type + '_output_bam.header']), 'w') as outheaderfile:
        for line in headerfile:
            if line.startswith('@PG'):
                line = '\t'.join([x for x in line.strip().split('\t') if not x.startswith('CL')])
            if retained_chroms and line.startswith('@SQ'):
                if line.strip().split()[1].lstrip('SN:') not in retained_chroms:
                    continue
            print(line.strip(), file=outheaderfile)
    parameters = ['reheader',
                  docker_path(outheaderfile.name),
                  input_files[sample_type + '.bam']]
    with open('/'.join([work_dir, sample_type + '_fixPG.bam']), 'w') as fixpg_bamfile:
        docker_call(tool='samtools', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=fixpg_bamfile,
                    tool_version=samtools_options['version'])
    output_file = job.fileStore.writeGlobalFile(fixpg_bamfile.name)
    # The old bam file is now useless.
    job.fileStore.deleteGlobalFile(bamfile)
    job.fileStore.logToMaster('Ran reheader on %s:%s successfully'
                              % (univ_options['patient'], sample_type))
    return output_file
python
def flatten(value):
    """value can be any nesting of tuples, arrays, dicts.
       returns 1D numpy array and an unflatten function."""
    if isinstance(value, np.ndarray):
        def unflatten(vector):
            return np.reshape(vector, value.shape)
        return np.ravel(value), unflatten
    elif isinstance(value, float):
        return np.array([value]), lambda x: x[0]
    elif isinstance(value, tuple):
        if not value:
            return np.array([]), lambda x: ()
        flattened_first, unflatten_first = flatten(value[0])
        flattened_rest, unflatten_rest = flatten(value[1:])
        def unflatten(vector):
            N = len(flattened_first)
            return (unflatten_first(vector[:N]),) + unflatten_rest(vector[N:])
        return np.concatenate((flattened_first, flattened_rest)), unflatten
    elif isinstance(value, list):
        if not value:
            return np.array([]), lambda x: []
        flattened_first, unflatten_first = flatten(value[0])
        flattened_rest, unflatten_rest = flatten(value[1:])
        def unflatten(vector):
            N = len(flattened_first)
            return [unflatten_first(vector[:N])] + unflatten_rest(vector[N:])
        return np.concatenate((flattened_first, flattened_rest)), unflatten
    elif isinstance(value, dict):
        flattened = []
        unflatteners = []
        lengths = []
        keys = []
        for k, v in sorted(value.items(), key=itemgetter(0)):
            cur_flattened, cur_unflatten = flatten(v)
            flattened.append(cur_flattened)
            unflatteners.append(cur_unflatten)
            lengths.append(len(cur_flattened))
            keys.append(k)

        def unflatten(vector):
            split_ixs = np.cumsum(lengths)
            pieces = np.split(vector, split_ixs)
            return {key: unflattener(piece)
                    for piece, unflattener, key in zip(pieces, unflatteners, keys)}

        return np.concatenate(flattened), unflatten
    else:
        raise Exception("Don't know how to flatten type {}".format(type(value)))
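A minimal roundtrip sketch (assuming numpy is imported as np): flatten a nested structure, then rebuild it from the 1D vector.

import numpy as np

value = {"w": np.ones((2, 2)), "b": 0.5}
vector, unflatten = flatten(value)
print(vector)               # [0.5 1.  1.  1.  1. ]  (dict keys sorted: 'b' before 'w')
restored = unflatten(vector)
print(restored["w"].shape)  # (2, 2)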
java
private JMenu getSuperMenu(String name, int index) {
    JMenu superMenu = superMenus.get(name);
    if (superMenu == null) {
        // Use an ExtensionPopupMenu so child menus are dismissed
        superMenu = new ExtensionPopupMenu(name) {
            private static final long serialVersionUID = 6825880451078204378L;

            @Override
            public boolean isEnableForComponent(Component invoker) {
                return true;
            }
        };
        superMenus.put(name, superMenu);
        addMenuItem(superMenu, index);
    }
    return superMenu;
}
java
public static void addProvidersToPathHandler(PathResourceProvider[] pathResourceProviders, PathHandler pathHandler) {
    if (pathResourceProviders != null && pathResourceProviders.length > 0) {
        for (PathResourceProvider pathResourceProvider : pathResourceProviders) {
            if (pathResourceProvider.isPrefixPath()) {
                pathHandler.addPrefixPath(pathResourceProvider.getPath(),
                        new ResourceHandler(pathResourceProvider.getResourceManager()));
            } else {
                pathHandler.addExactPath(pathResourceProvider.getPath(),
                        new ResourceHandler(pathResourceProvider.getResourceManager()));
            }
        }
    }
}
python
def get_analysis_intervals(data, vrn_file, base_dir):
    """Retrieve analysis regions for the current variant calling pipeline.
    """
    from bcbio.bam import callable
    if vrn_file and vcfutils.is_gvcf_file(vrn_file):
        callable_bed = _callable_from_gvcf(data, vrn_file, base_dir)
        if callable_bed:
            return callable_bed

    if data.get("ensemble_bed"):
        return data["ensemble_bed"]
    elif dd.get_sample_callable(data):
        return dd.get_sample_callable(data)
    elif data.get("align_bam"):
        return callable.sample_callable_bed(data["align_bam"], dd.get_ref_file(data), data)[0]
    elif data.get("work_bam"):
        return callable.sample_callable_bed(data["work_bam"], dd.get_ref_file(data), data)[0]
    elif data.get("work_bam_callable"):
        data = utils.deepish_copy(data)
        data["work_bam"] = data.pop("work_bam_callable")
        return callable.sample_callable_bed(data["work_bam"], dd.get_ref_file(data), data)[0]
    elif tz.get_in(["config", "algorithm", "callable_regions"], data):
        return tz.get_in(["config", "algorithm", "callable_regions"], data)
    elif tz.get_in(["config", "algorithm", "variant_regions"], data):
        return tz.get_in(["config", "algorithm", "variant_regions"], data)
java
public static PauseStatus determinePauseStatus(TransferState transferState, boolean forceCancel) {
    if (forceCancel) {
        if (transferState == TransferState.Waiting) {
            return PauseStatus.CANCELLED_BEFORE_START;
        } else if (transferState == TransferState.InProgress) {
            return PauseStatus.CANCELLED;
        }
    }
    if (transferState == TransferState.Waiting) {
        return PauseStatus.NOT_STARTED;
    }
    return PauseStatus.NO_EFFECT;
}
python
def run_command(nova_creds, nova_args, supernova_args):
    """
    Sets the environment variables for the executable, runs the executable,
    and handles the output.
    """
    nova_env = supernova_args['nova_env']
    # (gtmanfred) make a copy of this object. If we don't copy it, the insert
    # to 0 happens multiple times because it is the same object in memory.
    nova_args = copy.copy(nova_args)

    # Get the environment variables ready
    env_vars = os.environ.copy()
    env_vars.update(credentials.prep_shell_environment(nova_env, nova_creds))

    # BYPASS_URL is a weird one, so we need to send it as an argument,
    # not an environment variable.
    nova_args = check_for_bypass_url(nova_creds[nova_env], nova_args)

    # Check for OS_EXECUTABLE
    supernova_args = check_for_executable(supernova_args, env_vars)

    # Check for a debug override
    nova_args = check_for_debug(supernova_args, nova_args)

    # Print a small message for the user (very helpful for groups)
    msg = "Running %s against %s..." % (supernova_args.get('executable'), nova_env)
    if not supernova_args.get('quiet'):
        click.echo("[%s] %s " % (click.style('SUPERNOVA', fg='green'), msg))

    # Call executable and connect stdout to the current terminal
    # so that any unicode characters from the executable's list will be
    # displayed appropriately.
    #
    # In other news, I hate how python 2.6 does unicode.
    nova_args.insert(0, supernova_args['executable'])
    nova_args = [nova_arg.strip() for nova_arg in nova_args]
    process = execute_executable(nova_args, env_vars)

    # If the user asked us to be quiet, then let's not print stderr
    if not supernova_args.get('quiet'):
        handle_stderr(process.stderr)

    return process.returncode
python
def iau2000a(jd_tt):
    """Compute Earth nutation based on the IAU 2000A nutation model.

    `jd_tt` - Terrestrial Time: Julian date float, or NumPy array of floats

    Returns a tuple ``(delta_psi, delta_epsilon)`` measured in tenths of
    a micro-arcsecond.  Each value is either a float, or a NumPy array
    with the same dimensions as the input argument.
    """
    # Interval between fundamental epoch J2000.0 and given date.
    t = (jd_tt - T0) / 36525.0

    # Compute fundamental arguments from Simon et al. (1994), in radians.
    a = fundamental_arguments(t)

    # ** Luni-solar nutation **
    # Summation of luni-solar nutation series (in reverse order).
    arg = nals_t.dot(a)
    fmod(arg, tau, out=arg)

    sarg = sin(arg)
    carg = cos(arg)

    stsc = array((sarg, t * sarg, carg)).T
    ctcs = array((carg, t * carg, sarg)).T

    dpsi = tensordot(stsc, lunisolar_longitude_coefficients)
    deps = tensordot(ctcs, lunisolar_obliquity_coefficients)

    # Compute and add in planetary components.
    if getattr(t, 'shape', ()) == ():
        a = t * anomaly_coefficient + anomaly_constant
    else:
        a = (outer(anomaly_coefficient, t).T + anomaly_constant).T
    a[-1] *= t

    fmod(a, tau, out=a)
    arg = napl_t.dot(a)
    fmod(arg, tau, out=arg)
    sc = array((sin(arg), cos(arg))).T

    dpsi += tensordot(sc, nutation_coefficients_longitude)
    deps += tensordot(sc, nutation_coefficients_obliquity)

    return dpsi, deps
python
def do_some_work(self, can_start_more):
    '''Run one cycle of the main loop.

    If the log child has died, restart it.  If any of the worker
    children have died, collect their status codes and remove them
    from the child set.  If there is a worker slot available, start
    exactly one child.

    :param bool can_start_more: Allowed to start a child?
    :return: Time to wait before calling this function again

    '''
    any_happy_children = False
    any_sad_children = False
    any_bored_children = False

    self.debug('loop', 'starting work loop, can_start_more={0!r}'
               .format(can_start_more))

    # See if anyone has died
    while True:
        try:
            pid, status = os.waitpid(-1, os.WNOHANG)
        except OSError as e:
            if e.errno == errno.ECHILD:
                # No children at all
                pid = 0
            else:
                raise
        if pid == 0:
            break
        elif pid == self.log_child:
            self.debug('children',
                       'log child with pid {0} exited'.format(pid))
            self.start_log_child()
        elif pid in self.children:
            self.children.remove(pid)
            if os.WIFEXITED(status):
                code = os.WEXITSTATUS(status)
                self.debug('children',
                           'worker {0} exited with code {1}'
                           .format(pid, code))
                if code == SingleWorker.EXIT_SUCCESS:
                    any_happy_children = True
                elif code == SingleWorker.EXIT_EXCEPTION:
                    self.log(logging.WARNING,
                             'child {0} reported failure'.format(pid))
                    any_sad_children = True
                elif code == SingleWorker.EXIT_BORED:
                    any_bored_children = True
                else:
                    self.log(logging.WARNING,
                             'child {0} had odd exit code {1}'
                             .format(pid, code))
            elif os.WIFSIGNALED(status):
                self.log(logging.WARNING,
                         'child {0} exited with signal {1}'
                         .format(pid, os.WTERMSIG(status)))
                any_sad_children = True
            else:
                self.log(logging.WARNING,
                         'child {0} went away with unknown status {1}'
                         .format(pid, status))
                any_sad_children = True
        else:
            self.log(logging.WARNING,
                     'child {0} exited, but we don\'t recognize it'
                     .format(pid))

    # ...what next?
    # (Don't log anything here; either we logged a WARNING message
    # above when things went badly, or we're in a very normal flow
    # and don't want to spam the log)
    if any_sad_children:
        self.debug('loop', 'exit work loop with sad child')
        return self.poll_interval

    if any_bored_children:
        self.debug('loop', 'exit work loop with no work')
        return self.poll_interval

    # This means we get to start a child, maybe.
    if can_start_more and len(self.children) < self.num_workers:
        pid = os.fork()
        if pid == 0:
            # We are the child
            self.clear_signal_handlers()
            if self.log_fd:
                os.close(self.log_fd)
            LoopWorker.as_child(yakonfig.get_global_config(),
                                parent=self.worker_id)
            # This should never return, but just in case
            sys.exit(SingleWorker.EXIT_EXCEPTION)
        else:
            # We are the parent
            self.debug('children', 'new worker with pid {0}'.format(pid))
            self.children.add(pid)
            self.debug('loop', 'exit work loop with a new worker')
            return self.spawn_interval

    # Absolutely nothing is happening; which means we have all
    # of our potential workers and they're doing work
    self.debug('loop', 'exit work loop with full system')
    return self.poll_interval
java
public static double toGrowthRateFromAnnualReturn(double annualReturn, CalendarDateUnit growthRateUnit) {
    double tmpAnnualGrowthRate = PrimitiveMath.LOG1P.invoke(annualReturn);
    double tmpYearsPerGrowthRateUnit = CalendarDateUnit.YEAR.convert(growthRateUnit);
    return tmpAnnualGrowthRate * tmpYearsPerGrowthRateUnit;
}
java
public ConfigurationUpdate removePath(String path) {
    if (path == null || !path.startsWith("/")) {
        throw new IllegalArgumentException("Path must start with \"/\".");
    }
    paths.put(path, null);
    return this;
}
python
def compose(layers, bbox=None, layer_filter=None, color=None, **kwargs):
    """
    Compose layers to a single :py:class:`PIL.Image`.
    If the layers do not have visible pixels, the function returns `None`.

    Example::

        image = compose([layer1, layer2])

    In order to skip some layers, pass `layer_filter` function which
    should take `layer` as an argument and return `True` to keep the layer
    or return `False` to skip::

        image = compose(
            layers,
            layer_filter=lambda x: x.is_visible() and x.kind == 'type'
        )

    By default, visible layers are composed.

    .. note:: This function is experimental and does not guarantee
        Photoshop-quality rendering.

        Currently the following are ignored:

        - Adjustments layers
        - Layer effects
        - Blending mode (all blending modes become normal)

        Shape drawing is inaccurate if the PSD file is not saved with
        maximum compatibility.

    :param layers: a layer, or an iterable of layers.
    :param bbox: (left, top, right, bottom) tuple that specifies a region to
        compose. By default, all the visible area is composed. The origin
        is at the top-left corner of the PSD document.
    :param layer_filter: a callable that takes a layer and returns `bool`.
    :param color: background color in `int` or `tuple`.
    :return: :py:class:`PIL.Image` or `None`.
    """
    from PIL import Image

    if not hasattr(layers, '__iter__'):
        layers = [layers]

    def _default_filter(layer):
        return layer.is_visible()

    layer_filter = layer_filter or _default_filter
    valid_layers = [x for x in layers if layer_filter(x)]
    if len(valid_layers) == 0:
        return None

    if bbox is None:
        bbox = extract_bbox(valid_layers)
        if bbox == (0, 0, 0, 0):
            return None

    # Alpha must be forced to correctly blend.
    mode = get_pil_mode(valid_layers[0]._psd.color_mode, True)
    result = Image.new(
        mode,
        (bbox[2] - bbox[0], bbox[3] - bbox[1]),
        color=color if color is not None else 'white',
    )
    result.putalpha(0)

    for layer in valid_layers:
        if intersect(layer.bbox, bbox) == (0, 0, 0, 0):
            continue
        image = layer.compose(**kwargs)
        if image is None:
            continue
        logger.debug('Composing %s' % layer)
        offset = (layer.left - bbox[0], layer.top - bbox[1])
        result = _blend(result, image, offset)

    return result
java
List<ResourceSet> computeResourceSetList() {
    List<ResourceSet> sourceFolderSets = resSetSupplier.get();
    int size = sourceFolderSets.size() + 4;
    if (libraries != null) {
        size += libraries.getArtifacts().size();
    }

    List<ResourceSet> resourceSetList = Lists.newArrayListWithExpectedSize(size);

    // add at the beginning since the libraries are less important than the folder based
    // resource sets.
    // get the dependencies first
    if (libraries != null) {
        Set<ResolvedArtifactResult> libArtifacts = libraries.getArtifacts();
        // the order of the artifact is descending order, so we need to reverse it.
        for (ResolvedArtifactResult artifact : libArtifacts) {
            ResourceSet resourceSet = new ResourceSet(
                    MergeManifests.getArtifactName(artifact),
                    null,
                    null,
                    validateEnabled);
            resourceSet.setFromDependency(true);
            resourceSet.addSource(artifact.getFile());

            // add to 0 always, since we need to reverse the order.
            resourceSetList.add(0, resourceSet);
        }
    }

    // add the folder based next
    resourceSetList.addAll(sourceFolderSets);

    // We add the generated folders to the main set
    List<File> generatedResFolders = Lists.newArrayList();

    generatedResFolders.addAll(renderscriptResOutputDir.getFiles());
    generatedResFolders.addAll(generatedResOutputDir.getFiles());

    // add the generated files to the main set.
    final ResourceSet mainResourceSet = sourceFolderSets.get(0);
    assert mainResourceSet.getConfigName().equals(BuilderConstants.MAIN);
    mainResourceSet.addSources(generatedResFolders);

    return resourceSetList;
}
java
void completeMerge(SegmentMetadata sourceMetadata) {
    long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "completeMerge", sourceMetadata.getId());
    Exceptions.checkNotClosed(this.closed, this);
    Exceptions.checkArgument(sourceMetadata.isDeleted(), "sourceSegmentStreamId",
            "Given StreamSegmentReadIndex refers to a StreamSegment that has not been deleted yet.");

    if (sourceMetadata.getLength() == 0) {
        // beginMerge() does not take any action when the source Segment is empty, so there is nothing for us to do either.
        return;
    }

    // Find the appropriate redirect entry.
    RedirectIndexEntry redirectEntry;
    PendingMerge pendingMerge;
    synchronized (this.lock) {
        pendingMerge = this.pendingMergers.getOrDefault(sourceMetadata.getId(), null);
        Exceptions.checkArgument(pendingMerge != null, "sourceSegmentStreamId",
                "Given StreamSegmentReadIndex's merger with this one has not been initiated using beginMerge. Cannot finalize the merger.");

        // Get the RedirectIndexEntry. These types of entries are sticky in the cache and DO NOT contribute to the
        // cache Stats. They are already accounted for in the other Segment's ReadIndex.
        ReadIndexEntry indexEntry = this.indexEntries.get(pendingMerge.getMergeOffset());
        assert indexEntry != null && !indexEntry.isDataEntry() :
                String.format("pendingMergers points to a ReadIndexEntry that does not exist or is of the wrong type. sourceStreamSegmentId = %d, offset = %d, treeEntry = %s.",
                        sourceMetadata.getId(), pendingMerge.getMergeOffset(), indexEntry);
        redirectEntry = (RedirectIndexEntry) indexEntry;
    }

    StreamSegmentReadIndex sourceIndex = redirectEntry.getRedirectReadIndex();

    // Get all the entries from the source index and append them here.
    List<MergedIndexEntry> sourceEntries = sourceIndex.getAllEntries(redirectEntry.getStreamSegmentOffset());

    synchronized (this.lock) {
        // Remove redirect entry (again, no need to update the Cache Stats, as this is a RedirectIndexEntry).
        this.indexEntries.remove(pendingMerge.getMergeOffset());
        this.pendingMergers.remove(sourceMetadata.getId());
        sourceEntries.forEach(this::addToIndex);
    }

    List<FutureReadResultEntry> pendingReads = pendingMerge.seal();
    if (pendingReads.size() > 0) {
        log.debug("{}: triggerFutureReads for Pending Merge (Count = {}, MergeOffset = {}, MergeLength = {}).",
                this.traceObjectId, pendingReads.size(), pendingMerge.getMergeOffset(), sourceIndex.getSegmentLength());
        triggerFutureReads(pendingReads);
    }

    LoggerHelpers.traceLeave(log, this.traceObjectId, "completeMerge", traceId);
}
java
private void path(Attributes attributes) throws SVGParseException {
    debug("<path>");

    if (currentElement == null)
        throw new SVGParseException("Invalid document. Root element must be <svg>");

    SVG.Path obj = new SVG.Path();
    obj.document = svgDocument;
    obj.parent = currentElement;
    parseAttributesCore(obj, attributes);
    parseAttributesStyle(obj, attributes);
    parseAttributesTransform(obj, attributes);
    parseAttributesConditional(obj, attributes);
    parseAttributesPath(obj, attributes);
    currentElement.addChild(obj);
}
java
public static nspbr[] get(nitro_service service, nspbr_args args) throws Exception {
    nspbr obj = new nspbr();
    options option = new options();
    option.set_args(nitro_util.object_to_string_withoutquotes(args));
    nspbr[] response = (nspbr[]) obj.get_resources(service, option);
    return response;
}
python
def create(self, friendly_name=values.unset, apn_credential_sid=values.unset,
           gcm_credential_sid=values.unset, messaging_service_sid=values.unset,
           facebook_messenger_page_id=values.unset,
           default_apn_notification_protocol_version=values.unset,
           default_gcm_notification_protocol_version=values.unset,
           fcm_credential_sid=values.unset,
           default_fcm_notification_protocol_version=values.unset,
           log_enabled=values.unset, alexa_skill_id=values.unset,
           default_alexa_notification_protocol_version=values.unset):
    """
    Create a new ServiceInstance

    :param unicode friendly_name: A string to describe the resource
    :param unicode apn_credential_sid: The SID of the Credential to use for APN Bindings
    :param unicode gcm_credential_sid: The SID of the Credential to use for GCM Bindings
    :param unicode messaging_service_sid: The SID of the Messaging Service to use for SMS Bindings
    :param unicode facebook_messenger_page_id: Deprecated
    :param unicode default_apn_notification_protocol_version: The protocol version to use for sending APNS notifications
    :param unicode default_gcm_notification_protocol_version: The protocol version to use for sending GCM notifications
    :param unicode fcm_credential_sid: The SID of the Credential to use for FCM Bindings
    :param unicode default_fcm_notification_protocol_version: The protocol version to use for sending FCM notifications
    :param bool log_enabled: Whether to log notifications
    :param unicode alexa_skill_id: Deprecated
    :param unicode default_alexa_notification_protocol_version: Deprecated

    :returns: Newly created ServiceInstance
    :rtype: twilio.rest.notify.v1.service.ServiceInstance
    """
    data = values.of({
        'FriendlyName': friendly_name,
        'ApnCredentialSid': apn_credential_sid,
        'GcmCredentialSid': gcm_credential_sid,
        'MessagingServiceSid': messaging_service_sid,
        'FacebookMessengerPageId': facebook_messenger_page_id,
        'DefaultApnNotificationProtocolVersion': default_apn_notification_protocol_version,
        'DefaultGcmNotificationProtocolVersion': default_gcm_notification_protocol_version,
        'FcmCredentialSid': fcm_credential_sid,
        'DefaultFcmNotificationProtocolVersion': default_fcm_notification_protocol_version,
        'LogEnabled': log_enabled,
        'AlexaSkillId': alexa_skill_id,
        'DefaultAlexaNotificationProtocolVersion': default_alexa_notification_protocol_version,
    })

    payload = self._version.create(
        'POST',
        self._uri,
        data=data,
    )

    return ServiceInstance(self._version, payload, )
python
def rename_with_prefix(self, prefix="", new_path=None, in_place=True, remove_desc=True):
    """Rename every sequence based on a prefix."""
    # Temporary path #
    if new_path is None:
        prefixed = self.__class__(new_temp_path())
    else:
        prefixed = self.__class__(new_path)
    # Generator #
    def prefixed_iterator():
        for i, read in enumerate(self):
            read.id = prefix + read.id
            if remove_desc:
                read.description = ""
            yield read
    # Do it #
    prefixed.write(prefixed_iterator())
    prefixed.close()
    # Replace it #
    if in_place:
        os.remove(self.path)
        shutil.move(prefixed, self.path)
    return prefixed
java
private boolean setupPipelineForAppend(LocatedBlock lastBlock) throws IOException {
    if (nodes == null || nodes.length == 0) {
        String msg = "Could not get block locations. " + "Source file \""
                + src + "\" - Aborting...";
        DFSClient.LOG.warn(msg);
        setLastException(new IOException(msg));
        closed = true;
        if (streamer != null)
            streamer.close();
        return false;
    }
    boolean success = createBlockOutputStream(nodes, dfsClient.clientName, false, true);
    long oldGenerationStamp =
            ((LocatedBlockWithOldGS) lastBlock).getOldGenerationStamp();

    if (success) {
        // bump up the generation stamp in NN.
        Block newBlock = lastBlock.getBlock();
        Block oldBlock = new Block(newBlock.getBlockId(),
                newBlock.getNumBytes(), oldGenerationStamp);
        dfsClient.namenode.updatePipeline(dfsClient.clientName, oldBlock,
                newBlock, nodes);
    } else {
        DFSClient.LOG.warn("Fall back to block recovery process when trying"
                + " to setup the append pipeline for file " + src);
        // set the old generation stamp
        block.setGenerationStamp(oldGenerationStamp);
        // fall back the block recovery
        while (processDatanodeError(true, true)) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                lastException = new IOException(e);
                break;
            }
        }
    }
    return success;
}
python
def attach_volume(self, volume_id, instance_id, device):
    """
    Attach an EBS volume to an EC2 instance.

    :type volume_id: str
    :param volume_id: The ID of the EBS volume to be attached.

    :type instance_id: str
    :param instance_id: The ID of the EC2 instance to which it will
                        be attached.

    :type device: str
    :param device: The device on the instance through which the
                   volume will be exposed (e.g. /dev/sdh)

    :rtype: bool
    :return: True if successful
    """
    params = {'InstanceId': instance_id,
              'VolumeId': volume_id,
              'Device': device}
    return self.get_status('AttachVolume', params, verb='POST')
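A minimal usage sketch with hypothetical region and resource IDs (assumes a boto EC2 connection object, on which this method is defined):

import boto.ec2

# Hypothetical IDs, for illustration only.
conn = boto.ec2.connect_to_region('us-east-1')
if conn.attach_volume('vol-0123abcd', 'i-0456efgh', '/dev/sdh'):
    print('Volume attached.')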
python
def set_restart_delay(seconds):
    '''
    Set the number of seconds after which the computer will start up after a
    power failure.

    .. warning::
        This command fails with the following error:

        ``Error, IOServiceOpen returned 0x10000003``

        The setting is not updated. This is an apple bug. It seems like it may
        only work on certain versions of Mac Server X. This article explains
        the issue in more detail, though it is quite old.

        http://lists.apple.com/archives/macos-x-server/2006/Jul/msg00967.html

    :param int seconds: The number of seconds. Must be a multiple of 30

    :return: True if successful, False if not
    :rtype: bool

    CLI Example:

    .. code-block:: bash

        salt '*' system.set_restart_delay 180
    '''
    if seconds % 30 != 0:
        msg = 'Invalid value passed for seconds.\n' \
              'Must be a multiple of 30.\n' \
              'Passed: {0}'.format(seconds)
        raise SaltInvocationError(msg)

    cmd = 'systemsetup -setwaitforstartupafterpowerfailure {0}'.format(seconds)
    __utils__['mac_utils.execute_return_success'](cmd)

    return __utils__['mac_utils.confirm_updated'](
        seconds,
        get_restart_delay,
    )
python
def index_table(self, axis=None, baseline=None, prune=False):
    """Return index percentages for a given axis and baseline.

    The index values represent the difference of the percentages to the
    corresponding baseline values. The baseline values are the univariate
    percentages of the corresponding variable.
    """
    proportions = self.proportions(axis=axis)
    baseline = (
        baseline if baseline is not None else self._prepare_index_baseline(axis)
    )

    # Fix the shape to enable correct broadcasting
    if (
        axis == 0
        and len(baseline.shape) <= 1
        and self.ndim == len(self.get_shape())
    ):
        baseline = baseline[:, None]

    indexes = proportions / baseline * 100
    return self._apply_pruning_mask(indexes) if prune else indexes
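The core arithmetic is just proportions / baseline * 100; a standalone numeric sketch (assuming numpy, with made-up percentages):

import numpy as np

proportions = np.array([[0.30, 0.70],
                        [0.50, 0.50]])  # row percentages of a 2x2 table
baseline = np.array([0.40, 0.60])       # univariate column percentages
print(proportions / baseline * 100)
# [[ 75.    116.67]
#  [125.     83.33]]  (approximately; 100 means "same as baseline")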
python
def addPath(rel_path, prepend=False):
    """
    Adds a directory to the system python path, either by append (doesn't
    override default or globally installed package names) or by prepend
    (overrides default/global package names).
    """
    path = lambda *paths: os.path.abspath(
        os.path.join(os.path.dirname(__file__), *paths)) + '/'
    if prepend:
        return sys.path.insert(0, path(rel_path))
    return sys.path.append(path(rel_path))
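A minimal usage sketch (hypothetical directory names): prepend when local modules should shadow globally installed packages of the same name, append otherwise.

# Make ./vendored/ win over globally installed packages of the same name.
addPath('vendored', prepend=True)
# Append when local modules should *not* override global ones.
addPath('extras')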
python
def Get_RpRs(d, **kwargs):
    '''
    Returns the value of the planet radius over the stellar radius
    for a given depth :py:obj:`d`, given
    the :py:class:`everest.pysyzygy` transit :py:obj:`kwargs`.

    '''
    if ps is None:
        raise Exception("Unable to import `pysyzygy`.")

    def Depth(RpRs, **kwargs):
        return 1 - ps.Transit(RpRs=RpRs, **kwargs)([kwargs.get('t0', 0.)])

    def DiffSq(r):
        return 1.e10 * (d - Depth(r, **kwargs)) ** 2

    return fmin(DiffSq, [np.sqrt(d)], disp=False)
java
public Server create(int maxThreads, int minThreads, int threadTimeoutMillis) {
    Server server;

    if (maxThreads > 0) {
        int max = maxThreads;
        int min = (minThreads > 0) ? minThreads : 8;
        int idleTimeout = (threadTimeoutMillis > 0) ? threadTimeoutMillis : 60000;

        server = new Server(new QueuedThreadPool(max, min, idleTimeout));
    } else {
        server = new Server();
    }

    return server;
}
python
def com_google_fonts_check_family_equal_glyph_names(ttFonts):
    """Fonts have equal glyph names?"""
    fonts = list(ttFonts)

    all_glyphnames = set()
    for ttFont in fonts:
        all_glyphnames |= set(ttFont["glyf"].glyphs.keys())

    missing = {}
    available = {}
    for glyphname in all_glyphnames:
        missing[glyphname] = []
        available[glyphname] = []

    failed = False
    for ttFont in fonts:
        fontname = ttFont.reader.file.name
        these_ones = set(ttFont["glyf"].glyphs.keys())
        for glyphname in all_glyphnames:
            if glyphname not in these_ones:
                failed = True
                missing[glyphname].append(fontname)
            else:
                available[glyphname].append(fontname)

    for gn in missing.keys():
        if missing[gn]:
            available_styles = [style(k) for k in available[gn]]
            missing_styles = [style(k) for k in missing[gn]]
            if None not in available_styles + missing_styles:
                # if possible, use stylenames in the log messages.
                avail = ', '.join(available_styles)
                miss = ', '.join(missing_styles)
            else:
                # otherwise, print filenames:
                avail = ', '.join(available[gn])
                miss = ', '.join(missing[gn])
            yield FAIL, (f"Glyphname '{gn}' is defined on {avail}"
                         f" but is missing on {miss}.")

    if not failed:
        yield PASS, "All font files have identical glyph names."
java
private static void setOptional(Request req) {
    GregorianCalendar gc = new GregorianCalendar();
    req.setStart(Chrono.timeStamp(gc));
    gc.add(GregorianCalendar.MONTH, 6);
    req.setEnd(Chrono.timeStamp(gc));
    // req.setForce("false");
}
python
def notify_block_new(self, block):
    """A new block was received and passed initial consensus validation"""
    payload = block.SerializeToString()
    self._notify(
        "consensus_notifier_notify_block_new",
        payload,
        len(payload))
java
public static int search(String str, String keyw) {
    int strLen = str.length();
    int keywLen = keyw.length();
    int pos = 0;
    int cnt = 0;

    if (keywLen == 0) {
        return 0;
    }

    while ((pos = str.indexOf(keyw, pos)) != -1) {
        pos += keywLen;
        cnt++;
        if (pos >= strLen) {
            break;
        }
    }
    return cnt;
}
java
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
    Fragment frag = getSupportFragmentManager().findFragmentByTag(SAMPLES_FRAGMENT_TAG);
    if (frag == null) {
        return super.onKeyUp(keyCode, event);
    }
    if (!(frag instanceof BaseSampleFragment)) {
        return super.onKeyUp(keyCode, event);
    }
    MapView mMapView = ((BaseSampleFragment) frag).getmMapView();
    if (mMapView == null)
        return super.onKeyUp(keyCode, event);
    switch (keyCode) {
        case KeyEvent.KEYCODE_PAGE_DOWN:
            mMapView.getController().zoomIn();
            return true;
        case KeyEvent.KEYCODE_PAGE_UP:
            mMapView.getController().zoomOut();
            return true;
    }
    return super.onKeyUp(keyCode, event);
}
python
def plotPixel(self, x, y, color="black"):
    """
    Doesn't use coordinate system.
    """
    p = Point(x, y)
    p.fill(color)
    p.draw(self)
    p.t = lambda v: v
    p.tx = lambda v: v
    p.ty = lambda v: v
python
def get_list(self, input_string):
    """
    Return a list of user input

    :param input_string:
    :return:
    """
    if input_string in ('--ensemble_list', '--fpf'):
        # was the flag set?
        try:
            index_low = self.args.index(input_string) + 1
        except ValueError:
            if input_string in self.required:
                print("\n {flag} is required".format(flag=input_string))
                print_short_help()
                sys.exit(1)
            else:
                return None

        # the flag was set, so check if a value was set, otherwise exit
        try:
            if self.args[index_low] in self.flags:
                print("\n {flag} was set but a value was not specified".format(flag=input_string))
                print_short_help()
                sys.exit(1)
        except IndexError:
            print("\n {flag} was set but a value was not specified".format(flag=input_string))
            print_short_help()
            sys.exit(1)

        # at least one value was set
        index_high = index_low
        try:
            # if the flag wasn't the last argument specified
            while self.args[index_high] not in self.flags:
                index_high += 1
        except IndexError:
            # if it was, then handle it accordingly
            index_high = self.args.index(self.args[-1])
            return self.args[index_low:index_high + 1]

        # return a list of input files
        if index_low == index_high:
            inputList = []
            inputList.append(self.args[index_low])
            return inputList
        else:
            return self.args[index_low:index_high]
java
public PropertySourcePropertyResolver addPropertySource(@Nullable PropertySource propertySource) {
    if (propertySource != null) {
        propertySources.put(propertySource.getName(), propertySource);
        processPropertySource(propertySource, propertySource.getConvention());
    }
    return this;
}
java
private synchronized void pushRequestContext(ServletContext context, ServletRequest req, ServletResponse resp) {
    getRequestStack().push(new RequestContext(context, req, resp));
}
java
public static <K, V> ListMultimap<K, V> constrainedListMultimap(
        ListMultimap<K, V> multimap,
        MapConstraint<? super K, ? super V> constraint) {
    return new ConstrainedListMultimap<K, V>(multimap, constraint);
}
python
def crc16(cmd, use_byte=False):
    """
    CRC16 checksum. If ``use_byte`` is enabled, the result is returned
    as a bytes object.

    :param cmd: command without the CRC checksum
    :type cmd:
    :param use_byte: whether to return a bytes object
    :type use_byte:
    :return: the CRC value
    :rtype:
    """
    crc = 0xFFFF
    # The CRC16 calculation operates on bytes
    if hasattr(cmd, 'encode'):
        cmd = bytes.fromhex(cmd)
    for _ in cmd:
        c = _ & 0x00FF
        crc ^= c
        for i in range(8):
            if crc & 0x0001 > 0:
                crc >>= 1
                crc ^= 0xA001
            else:
                crc >>= 1
    # For Modbus CRC16, the high/low bytes must be swapped
    t = [(crc & 0x00FF), (crc >> 8 & 0xFF)]
    crc = '%02X%02X' % (t[0], t[1])
    if use_byte:
        crc = bytes.fromhex(crc)
    return crc
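A minimal usage sketch: computing the Modbus CRC of a typical read-holding-registers frame (hypothetical slave and register values):

# Request for slave 0x01, function 0x03 (read holding registers),
# starting register 0x0000, count 0x0001.
frame = '010300000001'
print(crc16(frame))                 # '840A' (low byte first, per Modbus)
print(crc16(frame, use_byte=True))  # same value as a bytes object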
java
@Nonnull
public <V1 extends T1, V2 extends T2> LBiFunctionBuilder<T1, T2, R> aCase(
        Class<V1> argC1, Class<V2> argC2, LBiFunction<V1, V2, R> function) {
    PartialCaseWithProduct.The pc = partialCaseFactoryMethod(
            (a1, a2) -> (argC1 == null || argC1.isInstance(a1)) && (argC2 == null || argC2.isInstance(a2)));
    pc.evaluate(function);
    return self();
}
python
def VFSOpen(pathspec, progress_callback=None):
    """Expands pathspec to return an expanded Path.

    A pathspec is a specification of how to access the file by recursively
    opening each part of the path by different drivers. For example the
    following pathspec:

    pathtype: OS
    path: "/dev/sda1"
    nested_path {
      pathtype: TSK
      path: "/home/image2.img"
      nested_path {
        pathtype: TSK
        path: "/home/a.txt"
      }
    }

    Instructs the system to:
    1) open /dev/sda1 using the OS driver.
    2) Pass the obtained filelike object to the TSK driver to open
    "/home/image2.img".
    3) The obtained filelike object should be passed to the TSK driver to open
    "/home/a.txt".

    The problem remains how to get to this expanded path specification. Since
    the server is not aware of all the files on the client, the server may
    request this:

    pathtype: OS
    path: "/dev/sda1"
    nested_path {
      pathtype: TSK
      path: "/home/image2.img/home/a.txt"
    }

    Or even this:

    pathtype: OS
    path: "/dev/sda1/home/image2.img/home/a.txt"

    This function converts the pathspec requested by the server into an
    expanded pathspec required to actually open the file. This is done by
    expanding each component of the pathspec in turn.

    Expanding the component is done by opening each leading directory in
    turn and checking if it is a directory or a file. If it's a file, we
    examine the file headers to determine the next appropriate driver to
    use, and create a nested pathspec.

    Note that for some clients there might be a virtual root specified. This
    is a directory that gets prepended to all pathspecs of a given
    pathtype. For example if there is a virtual root defined as
    ["os:/virtualroot"], a path specification like

    pathtype: OS
    path: "/home/user/*"

    will get translated into

    pathtype: OS
    path: "/virtualroot"
    is_virtualroot: True
    nested_path {
      pathtype: OS
      path: "/dev/sda1"
    }

    Args:
      pathspec: A Path() protobuf to normalize.
      progress_callback: A callback to indicate that the open call is still
                         working but needs more time.

    Returns:
      The open filelike object. This will contain the expanded Path() protobuf
      as the member fd.pathspec.

    Raises:
      IOError: if one of the path components can not be opened.
    """
    # Initialize the dictionary of VFS handlers lazily, if not yet done.
    if not VFS_HANDLERS:
        Init()

    fd = None

    # Adjust the pathspec in case we are using a vfs_virtualroot.
    vroot = _VFS_VIRTUALROOTS.get(pathspec.pathtype)

    # If we have a virtual root for this vfs handler, we need to prepend
    # it to the incoming pathspec except if the pathspec is explicitly
    # marked as containing a virtual root already or if it isn't marked but
    # the path already contains the virtual root.
    if (not vroot or pathspec.is_virtualroot or
            pathspec.CollapsePath().startswith(vroot.CollapsePath())):
        # No virtual root but opening changes the pathspec so we always work on a
        # copy.
        working_pathspec = pathspec.Copy()
    else:
        # We're in a virtual root, put the target pathspec inside the virtual root
        # as a nested path.
        working_pathspec = vroot.Copy()
        working_pathspec.last.nested_path = pathspec.Copy()

    # For each pathspec step, we get the handler for it and instantiate it with
    # the old object, and the current step.
    while working_pathspec:
        component = working_pathspec.Pop()
        try:
            handler = VFS_HANDLERS[component.pathtype]
        except KeyError:
            raise UnsupportedHandlerError(component.pathtype)

        # Open the component.
        fd = handler.Open(
            fd=fd,
            component=component,
            handlers=dict(VFS_HANDLERS),
            pathspec=working_pathspec,
            progress_callback=progress_callback)

    if fd is None:
        raise ValueError("VFSOpen cannot be called with empty PathSpec.")

    return fd
java
public void forward(final ExpectationForwardCallback expectationForwardCallback) { expectation.thenForward(new HttpObjectCallback().withClientId(registerWebSocketClient(expectationForwardCallback))); mockServerClient.sendExpectation(expectation); }
java
public void addInstance( String applicationName, String parentInstancePath, Instance instance ) throws ApplicationWsException { this.logger.finer( "Adding an instance to the application " + applicationName + "..." ); WebResource path = this.resource.path( UrlConstants.APP ).path( applicationName ).path( "instances" ); if( parentInstancePath != null ) path = path.queryParam( "instance-path", parentInstancePath ); ClientResponse response = this.wsClient.createBuilder( path ) .accept( MediaType.APPLICATION_JSON ).type( MediaType.APPLICATION_JSON ) .post( ClientResponse.class, instance ); handleResponse( response ); this.logger.finer( String.valueOf( response.getStatusInfo())); }
java
public static File leftShift(File file, InputStream data) throws IOException { append(file, data); return file; }
python
def read_constraints_from_config(cp, transforms=None, constraint_section='constraint'): """Loads parameter constraints from a configuration file. Parameters ---------- cp : WorkflowConfigParser An open config parser to read from. transforms : list, optional List of transforms to apply to parameters before applying constraints. constraint_section : str, optional The section to get the constraints from. Default is 'constraint'. Returns ------- list List of ``Constraint`` objects. Empty if no constraints were provided. """ cons = [] for subsection in cp.get_subsections(constraint_section): name = cp.get_opt_tag(constraint_section, "name", subsection) constraint_arg = cp.get_opt_tag( constraint_section, "constraint_arg", subsection) # get any other keyword arguments kwargs = {} section = constraint_section + "-" + subsection extra_opts = [key for key in cp.options(section) if key not in ["name", "constraint_arg"]] for key in extra_opts: val = cp.get(section, key) if key == "required_parameters": val = val.split(_VARARGS_DELIM) else: try: val = float(val) except ValueError: pass kwargs[key] = val cons.append(constraints.constraints[name](constraint_arg, transforms=transforms, **kwargs)) return cons
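A hedged sketch of the configuration layout this parser expects: one subsection per constraint, `name` and `constraint_arg` options, and any extra options passed through as keyword arguments, coerced to float where possible. The section contents below are illustrative guesses, not verified pycbc syntax.

# Hypothetical ini layout; option names follow the parsing logic above,
# but the constraint name and argument are invented for illustration.
example_ini = """
[constraint-mtotal]
name = custom
constraint_arg = mass1 + mass2 < 100
min_dist = 10.5
"""

# Extra options such as min_dist are coerced with float() when possible,
# exactly as in the loop above:
def coerce(val):
    try:
        return float(val)
    except ValueError:
        return val

assert coerce("10.5") == 10.5
assert coerce("mass1 + mass2 < 100") == "mass1 + mass2 < 100"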
java
public void registerMockResource(String location, String contents) { try { mockResources.put(location, new GrailsByteArrayResource(contents.getBytes("UTF-8"), location)); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } }
python
def should_series_dispatch(left, right, op):
    """
    Identify cases where a DataFrame operation should dispatch to its
    Series counterpart.

    Parameters
    ----------
    left : DataFrame
    right : DataFrame
    op : binary operator

    Returns
    -------
    override : bool
    """
    if left._is_mixed_type or right._is_mixed_type:
        return True

    if not len(left.columns) or not len(right.columns):
        # ensure obj.dtypes[0] exists for each obj
        return False

    ldtype = left.dtypes.iloc[0]
    rdtype = right.dtypes.iloc[0]

    if ((is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or
            (is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype))):
        # numpy casts integer dtypes to timedelta64 in this scenario, so
        # dispatch to the Series implementation for correct semantics
        return True

    if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype):
        # in particular the case where right is an array of DateOffsets
        return True

    return False
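A minimal sketch of the dispatch-worthy case the timedelta/integer branch covers (assumes a reasonably recent pandas; the element-wise result, not the exact repr, is the point):

import pandas as pd

left = pd.DataFrame({"a": pd.to_timedelta([1, 2], unit="s")})
right = pd.DataFrame({"a": [2, 3]})

# timedelta64 * integer is well-defined column-by-column, which is why the
# function above forces dispatch to the Series implementation here.
print(left * right)   # 0 days 00:00:02 and 0 days 00:00:06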
python
def _save_documentation(version, base_url="https://spark.apache.org/docs"): """ Write the spark property documentation to a file """ target_dir = join(dirname(__file__), 'spylon', 'spark') with open(join(target_dir, "spark_properties_{}.json".format(version)), 'w') as fp: all_props = _fetch_documentation(version=version, base_url=base_url) all_props = sorted(all_props, key=lambda x: x[0]) all_props_d = [{"property": p, "default": d, "description": desc} for p, d, desc in all_props] json.dump(all_props_d, fp, indent=2)
python
def get_term_by_name(self, name):
    """Get the GO term with the given GO term name.

    If the given name is not associated with any GO term, the function
    will search for it among synonyms.

    Parameters
    ----------
    name: str
        The name of the GO term.

    Returns
    -------
    GOTerm
        The GO term with the given name.

    Raises
    ------
    ValueError
        If the given name is found neither among the GO term names, nor
        among synonyms.
    """
    term = None
    func_name = 'get_term_by_name'
    try:
        term = self.terms[self._name2id[name]]
    except KeyError:
        try:
            term = self.terms[self._syn2id[name]]
        except KeyError:
            pass
        else:
            logger.warning(
                '%s: GO term name "%s" is a synonym for "%s".',
                func_name, name, term.name)
    if term is None:
        raise ValueError('%s: GO term name "%s" not found!'
                         % (func_name, name))
    return term
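A self-contained toy of the two-step lookup (primary names first, then synonyms) that the method implements; the dictionaries stand in for `self.terms`, `self._name2id`, and `self._syn2id`:

terms = {"GO:0005739": "mitochondrion term object"}
name2id = {"mitochondrion": "GO:0005739"}
syn2id = {"mitochondria": "GO:0005739"}

def lookup(name):
    # Try primary names first, then fall back to synonyms.
    term_id = name2id.get(name) or syn2id.get(name)
    if term_id is None:
        raise ValueError('GO term name "%s" not found!' % name)
    return terms[term_id]

assert lookup("mitochondria") == "mitochondrion term object"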
java
@Override public EClass getIfcElectricDistributionBoardType() { if (ifcElectricDistributionBoardTypeEClass == null) { ifcElectricDistributionBoardTypeEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(212); } return ifcElectricDistributionBoardTypeEClass; }
python
def no_empty_value(func):
    """Decorator that raises a ValueError if the function argument is empty."""
    @wraps(func)
    def wrapper(value):
        if not value:
            raise ValueError("Empty value not allowed")
        return func(value)
    return wrapper
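Usage sketch for the decorator above (the wrapped function is illustrative):

@no_empty_value
def shout(value):
    return value.upper()

assert shout("hello") == "HELLO"
try:
    shout("")
except ValueError as err:
    assert str(err) == "Empty value not allowed"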
java
@Override
public void addEntry(String logType, LogEntry entry) {
    if (!logTypesToInclude.contains(logType)) {
        return;
    }
    // computeIfAbsent collapses the contains/put branching into one step.
    localLogs.computeIfAbsent(logType, k -> new ArrayList<>()).add(entry);
}
python
def _update_file(self, seek_to_end=True):
    """Open the file for tailing"""
    try:
        self.close()
        self._file = self.open()
    except IOError:
        pass
    else:
        if not self._file:
            return
        self.active = True
        try:
            st = os.stat(self._filename)
        except EnvironmentError as err:
            if err.errno == errno.ENOENT:
                self._log_info('file removed')
                self.close()
            # without a stat result there is no file id to compare against
            return

        fid = self.get_file_id(st)
        if not self._fid:
            self._fid = fid

        if fid != self._fid:
            self._log_info('file rotated')
            self.close()
        elif seek_to_end:
            self._seek_to_end()
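`get_file_id` is not shown in this record; one common way to fingerprint a file so that rotation is detectable even when the path stays the same is to key on device and inode. This is an assumption about the implementation, not a quote of it:

import os

def get_file_id(st):
    # Device + inode identifies the underlying file, not its name, so a
    # rotated file at the same path produces a different id.
    return "%x_%x" % (st.st_dev, st.st_ino)

fid = get_file_id(os.stat(__file__))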
java
protected int addConfirmPopupMessage (SmartTable contents, int row) { if (_confirmHTML) { contents.setHTML(row, 0, _confirmMessage, 2, "Message"); } else { contents.setText(row, 0, _confirmMessage, 2, "Message"); } return row + 1; }
java
public String getRemoteUser() {
    if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE) {
        checkRequestObjectInUse();
    }
    String remoteUser = null;
    Principal principal = getUserPrincipal();
    if (principal == null) {
        if (_request != null) {
            remoteUser = _request.getRemoteUser();
        }
    } else {
        remoteUser = principal.getName();
        if (TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
            logger.logp(Level.FINE, CLASS_NAME, "getRemoteUser", "(security enabled)");
        }
    }
    if (TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) { // 306998.15
        logger.logp(Level.FINE, CLASS_NAME, "getRemoteUser", "user=" + remoteUser);
    }
    return remoteUser;
}
python
def check(predicate): r"""A decorator that adds a check to the :class:`.Command` or its subclasses. These checks could be accessed via :attr:`.Command.checks`. These checks should be predicates that take in a single parameter taking a :class:`.Context`. If the check returns a ``False``\-like value then during invocation a :exc:`.CheckFailure` exception is raised and sent to the :func:`.on_command_error` event. If an exception should be thrown in the predicate then it should be a subclass of :exc:`.CommandError`. Any exception not subclassed from it will be propagated while those subclassed will be sent to :func:`.on_command_error`. .. note:: These functions can either be regular functions or coroutines. Examples --------- Creating a basic check to see if the command invoker is you. .. code-block:: python3 def check_if_it_is_me(ctx): return ctx.message.author.id == 85309593344815104 @bot.command() @commands.check(check_if_it_is_me) async def only_for_me(ctx): await ctx.send('I know you!') Transforming common checks into its own decorator: .. code-block:: python3 def is_me(): def predicate(ctx): return ctx.message.author.id == 85309593344815104 return commands.check(predicate) @bot.command() @is_me() async def only_me(ctx): await ctx.send('Only you!') Parameters ----------- predicate: Callable[:class:`Context`, :class:`bool`] The predicate to check if the command should be invoked. """ def decorator(func): if isinstance(func, Command): func.checks.append(predicate) else: if not hasattr(func, '__commands_checks__'): func.__commands_checks__ = [] func.__commands_checks__.append(predicate) return func return decorator
python
def get_quoted_local_columns(self, platform):
    """
    Returns the quoted representation of the referencing table column names
    the foreign key constraint is associated with.

    Columns are only quoted if they were defined with an explicit quote
    character or if the column name is a keyword reserved by the platform.
    Otherwise the plain value as inserted is returned.

    :param platform: The platform to use for quotation.
    :type platform: Platform

    :rtype: list
    """
    columns = []

    for column in self._local_column_names.values():
        columns.append(column.get_quoted_name(platform))

    return columns
java
@Override public GedObject find(final FinderObject owner, final String str) { return owner.findInParent(str); }
java
public static File copy(String srcPath, String destPath, boolean isOverride) throws IORuntimeException { return copy(file(srcPath), file(destPath), isOverride); }
java
public Class<?> resolveClass(final String classname) throws ClassNotFoundException { LOGGER.trace("Try to resolve {} from {} resolvers", classname, resolvers.size()); for (IClassResolver resolver : resolvers) { try { Class<?> candidate = resolver.resolveClass(classname); if (candidate != null) { return candidate; } } catch (ClassNotFoundException e) { LOGGER.trace("ClassResolver {} could not find class: {}", resolver, classname); } catch (RuntimeException e) { LOGGER.warn("ClassResolver {} threw an unexpected exception.", resolver, e); } } throw new ClassNotFoundException(String.format("Class [%s] can't be resolved.", classname)); }
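The resolver loop above is a chain-of-responsibility: each resolver may return a result, signal not-found, or fail without breaking the chain. A compact Python rendering of the same control flow (names invented for illustration):

def resolve(name, resolvers):
    for resolver in resolvers:
        try:
            candidate = resolver(name)
            if candidate is not None:
                return candidate
        except KeyError:
            continue  # this resolver simply does not know the name
    raise LookupError("[%s] can't be resolved." % name)

builtin = {"int": int, "str": str}.get
assert resolve("int", [builtin]) is int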
java
public static KieScannerStatus mapScannerStatus(InternalKieScanner.Status status) { switch (status) { case STARTING: return KieScannerStatus.CREATED; case RUNNING: return KieScannerStatus.STARTED; case SCANNING: case UPDATING: return KieScannerStatus.SCANNING; case STOPPED: return KieScannerStatus.STOPPED; case SHUTDOWN: return KieScannerStatus.DISPOSED; default: return KieScannerStatus.UNKNOWN; } }
python
def equal(self, line1, line2):
    '''
    Return BooleanPlus(True, False) if the lines are exactly equal,
    BooleanPlus(True, True) if they are equal but modified (at least
    half of the words are unchanged), and BooleanPlus(False, False)
    otherwise.

    return type: BooleanPlus
    '''
    eqLine = line1 == line2
    if eqLine:
        return BooleanPlus(True, False)
    else:
        unchanged_count = self.count_similar_words(line1, line2)
        similarity_percent = (
            (2.0 * unchanged_count) /
            (len(line1.split()) + len(line2.split()))
        )
        if similarity_percent >= 0.50:
            return BooleanPlus(True, True)
        return BooleanPlus(False, False)
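A worked instance of the similarity ratio used above:

line1 = "the quick brown fox"
line2 = "the quick red fox"
unchanged_count = 3   # "the", "quick", "fox"
similarity = 2.0 * unchanged_count / (len(line1.split()) + len(line2.split()))
assert similarity == 0.75   # >= 0.50, so the lines count as equal-but-modified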
java
@Override public R visitModule(ModuleElement e, P p) { // Use implementation from interface default method return ElementVisitor.super.visitModule(e, p); }
java
public void setResponseCharacterEncoding(String encoding) { ExternalContext ctx = _MyFacesExternalContextHelper.firstInstance.get(); if (ctx == null) { throw new UnsupportedOperationException(); } ctx.setResponseCharacterEncoding(encoding); }