language: string, 2 values (java, python)
func_code_string: string, lengths 63 to 466k
java
ClientStream returnStream() { synchronized (lock) { if (returnedStream == null) { // apply() has not been called, needs to buffer the requests. delayedStream = new DelayedStream(); return returnedStream = delayedStream; } else { return returnedStream; } } }
java
public String getTwoButtonsHtml( String label1, String label2, String onClick1, String onClick2, boolean firstActive) { StringBuffer result = new StringBuffer(); if (firstActive) { result.append( A_CmsHtmlIconButton.defaultButtonHtml( CmsHtmlIconButtonStyleEnum.SMALL_ICON_TEXT, "id", label1, null, true, A_CmsListDialog.ICON_DETAILS_SHOW, null, onClick1)); result.append("  "); result.append(deactivatedEmphasizedButtonHtml(label2, A_CmsListDialog.ICON_DETAILS_HIDE)); } else { result.append(deactivatedEmphasizedButtonHtml(label1, A_CmsListDialog.ICON_DETAILS_HIDE)); result.append("  "); result.append( A_CmsHtmlIconButton.defaultButtonHtml( CmsHtmlIconButtonStyleEnum.SMALL_ICON_TEXT, "id", label2, null, true, A_CmsListDialog.ICON_DETAILS_SHOW, null, onClick2)); } result.append("  "); return result.toString(); }
java
public Observable<ServiceResponse<PatternAnyEntityExtractor>> getPatternAnyEntityInfoWithServiceResponseAsync(UUID appId, String versionId, UUID entityId) { if (this.client.endpoint() == null) { throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null."); } if (appId == null) { throw new IllegalArgumentException("Parameter appId is required and cannot be null."); } if (versionId == null) { throw new IllegalArgumentException("Parameter versionId is required and cannot be null."); } if (entityId == null) { throw new IllegalArgumentException("Parameter entityId is required and cannot be null."); } String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint()); return service.getPatternAnyEntityInfo(appId, versionId, entityId, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PatternAnyEntityExtractor>>>() { @Override public Observable<ServiceResponse<PatternAnyEntityExtractor>> call(Response<ResponseBody> response) { try { ServiceResponse<PatternAnyEntityExtractor> clientResponse = getPatternAnyEntityInfoDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); }
python
def refund(self, idempotency_key=None, **params): """Return a deferred.""" headers = populate_headers(idempotency_key) url = self.instance_url() + '/refund' d = self.request('post', url, params, headers) return d.addCallback(self.refresh_from).addCallback(lambda _: self)
python
def write_config_file(self, f, comments): """This method writes a sample file, with attributes, descriptions, sample values, required flags, using the configuration object properties. """ if len(self.elements) < 1: return super(_Section, self).write_config_file(f, comments) for e in self.elements.values(): e.write_config_file(f, comments) f.write("\n")
python
def check_covariance_Kgrad_x(covar, relchange=1E-5, threshold=1E-2, check_diag=True): """ check_covariance_Kgrad_x(ACovarianceFunction covar, limix::mfloat_t relchange=1E-5, limix::mfloat_t threshold=1E-2, bool check_diag=True) -> bool Parameters ---------- covar: limix::ACovarianceFunction & relchange: limix::mfloat_t threshold: limix::mfloat_t check_diag: bool check_covariance_Kgrad_x(ACovarianceFunction covar, limix::mfloat_t relchange=1E-5, limix::mfloat_t threshold=1E-2) -> bool Parameters ---------- covar: limix::ACovarianceFunction & relchange: limix::mfloat_t threshold: limix::mfloat_t check_covariance_Kgrad_x(ACovarianceFunction covar, limix::mfloat_t relchange=1E-5) -> bool Parameters ---------- covar: limix::ACovarianceFunction & relchange: limix::mfloat_t check_covariance_Kgrad_x(ACovarianceFunction covar) -> bool Parameters ---------- covar: limix::ACovarianceFunction & """ return _core.ACovarianceFunction_check_covariance_Kgrad_x(covar, relchange, threshold, check_diag)
python
def from_connection_string(cls, conn_str, eventhub=None, **kwargs): """Create an EventHubClient from a connection string. :param conn_str: The connection string. :type conn_str: str :param eventhub: The name of the EventHub, if the EntityName is not included in the connection string. :type eventhub: str :param debug: Whether to output network trace logs to the logger. Default is `False`. :type debug: bool :param http_proxy: HTTP proxy settings. This must be a dictionary with the following keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). Additionally the following keys may also be present: 'username', 'password'. :type http_proxy: dict[str, Any] :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. :type auth_timeout: int """ address, policy, key, entity = _parse_conn_str(conn_str) entity = eventhub or entity address = _build_uri(address, entity) return cls(address, username=policy, password=key, **kwargs)
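A minimal usage sketch for the factory above; the namespace, policy, key, and hub names are placeholder values for illustration, not real credentials:

conn_str = "Endpoint=sb://mynamespace.servicebus.windows.net/;SharedAccessKeyName=mypolicy;SharedAccessKey=mykey"
client = EventHubClient.from_connection_string(conn_str, eventhub="myhub", auth_timeout=60)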
java
public void addTangoUserListener(ITangoUserListener listener, String attrName, boolean stateless) throws DevFailed { addTangoUserListener(listener, attrName, new String[0], stateless); }
java
protected void initField(Element elem, CmsXmlContentDefinition contentDef) throws CmsXmlException { String nameVal = elem.elementText(CmsConfigurationReader.N_PROPERTY_NAME); if (nameVal == null) { throw new CmsXmlException(Messages.get().container(Messages.ERR_XMLCONTENT_BAD_FIELD_NAME_1, nameVal)); } final String name = nameVal.trim(); String ruleRegex = elem.elementText(CmsConfigurationReader.N_RULE_REGEX); String ruleType = elem.elementText(CmsConfigurationReader.N_RULE_TYPE); String error = elem.elementText(CmsConfigurationReader.N_ERROR); if (error == null) { error = ""; } if (!CmsStringUtil.isEmptyOrWhitespaceOnly(ruleRegex)) { addValidationRule(contentDef, name, ruleRegex, error, "warning".equalsIgnoreCase(ruleType)); } String defaultValue = elem.elementText(CmsConfigurationReader.N_DEFAULT); String defaultResolveMacros = elem.elementTextTrim(FieldSettingElems.DefaultResolveMacros.name()); if (!CmsStringUtil.isEmptyOrWhitespaceOnly(defaultValue)) { addDefault(contentDef, name, defaultValue, defaultResolveMacros); } String widget = elem.elementText(CmsConfigurationReader.N_WIDGET); String widgetConfig = elem.elementText(CmsConfigurationReader.N_WIDGET_CONFIG); if (widget != null) { addWidget(contentDef, name, widget); } if (widgetConfig != null) { widgetConfig = widgetConfig.trim(); addConfiguration(contentDef, name, widgetConfig); } String niceName = elem.elementText(CmsConfigurationReader.N_DISPLAY_NAME); if (niceName != null) { m_fieldNiceNames.put(name, niceName); } String description = elem.elementText(CmsConfigurationReader.N_DESCRIPTION); if (description != null) { m_fieldDescriptions.put(name, description); } for (Element mappingElem : elem.elements(FieldSettingElems.Mapping.name())) { String mapTo = mappingElem.elementText(FieldSettingElems.MapTo.name()); String useDefault = mappingElem.elementText(FieldSettingElems.UseDefault.name()); if (mapTo != null) { addMapping(contentDef, name, mapTo, useDefault); } } String display = elem.elementTextTrim(FieldSettingElems.Display.name()); if (!CmsStringUtil.isEmptyOrWhitespaceOnly(display)) { try { addDisplayType(contentDef, name, DisplayType.valueOf(display)); } catch (Exception e) { LOG.error(e.getLocalizedMessage(), e); } } String synchronization = elem.elementTextTrim(FieldSettingElems.Synchronization.name()); if (Boolean.parseBoolean(synchronization)) { m_synchronizations.add(name); } for (Element relElem : elem.elements(FieldSettingElems.Relation.name())) { String type = relElem.elementTextTrim(FieldSettingElems.Type.name()); String invalidate = relElem.elementTextTrim(FieldSettingElems.Invalidate.name()); if (type != null) { type = type.toLowerCase(); } if (invalidate != null) { invalidate = invalidate.toLowerCase(); } addCheckRule(contentDef, name, invalidate, type); } for (Element visElem : elem.elements(FieldSettingElems.Visibility.name())) { String params = visElem.getText(); VisibilityConfiguration visConfig = createVisibilityConfiguration(null, params); m_visibilityConfigurations.put(name, visConfig); } for (Element visElem : elem.elements(FieldSettingElems.FieldVisibility.name())) { String className = visElem.elementTextTrim(FieldSettingElems.Class.name()); String params = visElem.elementTextTrim(FieldSettingElems.Params.name()); VisibilityConfiguration visConfig = createVisibilityConfiguration(className, params); m_visibilityConfigurations.put(name, visConfig); } String nestedFormatter = elem.elementTextTrim(FieldSettingElems.NestedFormatter.name()); if (Boolean.parseBoolean(nestedFormatter)) { m_nestedFormatterElements.add(name); } String search = elem.elementTextTrim(FieldSettingElems.Search.name()); if (search != null) { addSimpleSearchSetting(contentDef, name, search); } }
java
public void marshall(MaintenanceWindowAutomationParameters maintenanceWindowAutomationParameters, ProtocolMarshaller protocolMarshaller) { if (maintenanceWindowAutomationParameters == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(maintenanceWindowAutomationParameters.getDocumentVersion(), DOCUMENTVERSION_BINDING); protocolMarshaller.marshall(maintenanceWindowAutomationParameters.getParameters(), PARAMETERS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
java
private ConcurrentHashMap<String, BaseMethodBinding<?>> getMapForOperation(RestOperationTypeEnum operationType) { ConcurrentHashMap<String, BaseMethodBinding<?>> result = operationBindings.get(operationType); if(result == null) { operationBindings.putIfAbsent(operationType, new ConcurrentHashMap<String, BaseMethodBinding<?>>()); return getMapForOperation(operationType); } else { return result; } }
python
def build_ricecooker_json_tree(args, options, metadata_provider, json_tree_path): """ Download all categories, subpages, modules, and resources from open.edu. """ LOGGER.info('Starting to build the ricecooker_json_tree') channeldir = args['channeldir'] if channeldir.endswith(os.path.sep): channeldir = channeldir.rstrip(os.path.sep) channelparentdir, channeldirname = os.path.split(channeldir) # Ricecooker tree channel_info = metadata_provider.get_channel_info() thumbnail_chan_path = channel_info.get('thumbnail_chan_path', None) if thumbnail_chan_path: thumbnail_rel_path = rel_path_from_chan_path(thumbnail_chan_path, metadata_provider.channeldir) else: thumbnail_rel_path = None ricecooker_json_tree = dict( dirname=channeldirname, title=channel_info['title'], description=channel_info['description'], source_domain=channel_info['source_domain'], source_id=channel_info['source_id'], language=channel_info['language'], thumbnail=thumbnail_rel_path, children=[], ) content_folders = sorted(os.walk(channeldir)) # MAIN PROCESSING OF os.walk OUTPUT ############################################################################ # TODO(ivan): figure out all the implications of the # _ = content_folders.pop(0) # Skip over channel folder because handled above for rel_path, _subfolders, filenames in content_folders: LOGGER.info('processing folder ' + str(rel_path)) # IMPLEMENTATION DETAIL: # - `filenames` contains real files in the `channeldir` folder # - `exercises_filenames` contains virtual files whose sole purpose is to set the # order of nodes within a given topic. Since alphabetical order is used to # walk the files in the `channeldir`, we must "splice in" the exercises here if metadata_provider.has_exercises(): dir_chan_path = chan_path_from_rel_path(rel_path, metadata_provider.channeldir) dir_path_tuple = path_to_tuple(dir_chan_path) exercises_filenames = metadata_provider.get_exercises_for_dir(dir_path_tuple) filenames.extend(exercises_filenames) sorted_filenames = sorted(filenames) process_folder(ricecooker_json_tree, rel_path, sorted_filenames, metadata_provider) # Write out ricecooker_json_tree.json write_tree_to_json_tree(json_tree_path, ricecooker_json_tree) LOGGER.info('Folder hierarchy walk result stored in ' + json_tree_path)
python
def get_filesnames(self): """Return a list of all Taskwarrior projects as virtual files in the data directory""" self._update() projects = set(list(self._tasks.keys()) + self._task_projects + ['all_projects', 'unaffiliated']) return [join(self._data_location, p.split()[0]) for p in projects]
python
def MultiOpen(self, urns, mode="rw", token=None, aff4_type=None, age=NEWEST_TIME, follow_symlinks=True): """Opens a bunch of urns efficiently.""" if not data_store.AFF4Enabled(): raise NotImplementedError("AFF4 data store has been disabled.") if token is None: token = data_store.default_token if mode not in ["w", "r", "rw"]: raise ValueError("Invalid mode %s" % mode) symlinks = {} _ValidateAFF4Type(aff4_type) for urn, values in self.GetAttributes(urns, age=age): try: obj = self.Open( urn, mode=mode, token=token, local_cache={urn: values}, age=age, follow_symlinks=False) # We can't pass aff4_type to Open since it will raise on AFF4Symlinks. # Setting it here, if needed, so that BadGetAttributeError checking # works. if aff4_type: obj.aff4_type = aff4_type if follow_symlinks and isinstance(obj, AFF4Symlink): target = obj.Get(obj.Schema.SYMLINK_TARGET) if target is not None: symlinks.setdefault(target, []).append(obj.urn) elif aff4_type: if isinstance(obj, aff4_type): yield obj else: yield obj except IOError: pass if symlinks: for obj in self.MultiOpen( symlinks, mode=mode, token=token, aff4_type=aff4_type, age=age): to_link = symlinks[obj.urn] for additional_symlink in to_link[1:]: clone = obj.__class__(obj.urn, clone=obj) clone.symlink_urn = additional_symlink yield clone obj.symlink_urn = symlinks[obj.urn][0] yield obj
python
def params_to_dict(params, dct): """ Updates the 'dct' dictionary with the 'params' dictionary, filtering out all those whose param value is None. """ for param, val in params.items(): if val is None: continue dct[param] = val return dct
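For example, only non-None values are merged into the target dict (which is also returned):

dct = {'a': 1}
params_to_dict({'b': 2, 'c': None}, dct)
# dct is now {'a': 1, 'b': 2}; 'c' was skipped because its value is None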
python
def __update_throughput(table_name, key_name, read_units, write_units): """ Update throughput on the DynamoDB table :type table_name: str :param table_name: Name of the DynamoDB table :type key_name: str :param key_name: Configuration option key name :type read_units: int :param read_units: New read unit provisioning :type write_units: int :param write_units: New write unit provisioning """ try: current_ru = dynamodb.get_provisioned_table_read_units(table_name) current_wu = dynamodb.get_provisioned_table_write_units(table_name) except JSONResponseError: raise # Check table status try: table_status = dynamodb.get_table_status(table_name) except JSONResponseError: raise logger.debug('{0} - Table status is {1}'.format(table_name, table_status)) if table_status != 'ACTIVE': logger.warning( '{0} - Not performing throughput changes when table ' 'is {1}'.format(table_name, table_status)) return # If this setting is True, we will only scale down when # BOTH reads AND writes are low if get_table_option(key_name, 'always_decrease_rw_together'): read_units, write_units = __calculate_always_decrease_rw_values( table_name, read_units, current_ru, write_units, current_wu) if read_units == current_ru and write_units == current_wu: logger.info('{0} - No changes to perform'.format(table_name)) return dynamodb.update_table_provisioning( table_name, key_name, int(read_units), int(write_units))
python
def collect(self): """ Collect process stat data """ if not os.access(self.PROC, os.R_OK): return False # Open PROC file file = open(self.PROC, 'r') # Get data for line in file: if line.startswith('slabinfo'): continue if line.startswith('#'): keys = line.split()[1:] continue data = line.split() for key in ['<active_objs>', '<num_objs>', '<objsize>', '<objperslab>', '<pagesperslab>']: i = keys.index(key) metric_name = data[0] + '.' + key.replace( '<', '').replace('>', '') metric_value = int(data[i]) self.publish(metric_name, metric_value) for key in ['<limit>', '<batchcount>', '<sharedfactor>']: i = keys.index(key) metric_name = data[0] + '.tunables.' + key.replace( '<', '').replace('>', '') metric_value = int(data[i]) self.publish(metric_name, metric_value) for key in ['<active_slabs>', '<num_slabs>', '<sharedavail>']: i = keys.index(key) metric_name = data[0] + '.slabdata.' + key.replace( '<', '').replace('>', '') metric_value = int(data[i]) self.publish(metric_name, metric_value) # Close file file.close()
python
def reverse_translate( protein_seq, template_dna=None, leading_seq=None, trailing_seq=None, forbidden_seqs=(), include_stop=True, manufacturer=None): """ Generate a well-behaved DNA sequence from the given protein sequence. If a template DNA sequence is specified, the returned DNA sequence will be as similar to it as possible. Any given restriction sites will not be present in the sequence. And finally, the given leading and trailing sequences will be appropriately concatenated. """ if manufacturer == 'gen9': forbidden_seqs += gen9.reserved_restriction_sites leading_seq = restriction_sites.get(leading_seq, leading_seq or '') trailing_seq = restriction_sites.get(trailing_seq, trailing_seq or '') codon_list = make_codon_list(protein_seq, template_dna, include_stop) sanitize_codon_list(codon_list, forbidden_seqs) dna_seq = leading_seq + ''.join(codon_list) + trailing_seq if manufacturer == 'gen9': gen9.apply_quality_control_checks(dna_seq) return dna_seq
python
def format_all(format_string, env): """ Format the input string using each possible combination of lists in the provided environment. Yields each formatted string. """ prepared_env = parse_pattern(format_string, env, lambda x, y: [FormatWrapper(x, z) for z in y]) # Generate each possible combination, format the string with it and yield # the resulting string: for field_values in product(*prepared_env.itervalues()): format_env = dict(izip(prepared_env.iterkeys(), field_values)) yield format_string.format(**format_env)
python
def cut_sequences_relative(records, slices, record_id): """ Cuts records to slices, indexed by non-gap positions in record_id """ with _record_buffer(records) as r: try: record = next(i for i in r() if i.id == record_id) except StopIteration: raise ValueError("Record with id {0} not found.".format(record_id)) new_slices = _update_slices(record, slices) for record in multi_cut_sequences(r(), new_slices): yield record
java
@Override protected List<? extends Object> getFieldValues() { return Arrays.asList(hue(), saturation(), brightness(), opacity()); }
java
@Override public void discard(Object o) { BeanO beanO = (BeanO) o; beanO.destroy(); }
python
def handleUpgradeTxn(self, txn) -> None: """ Handles transaction of type POOL_UPGRADE Can schedule or cancel upgrade to a newer version at specified time :param txn: """ FINALIZING_EVENT_TYPES = [UpgradeLog.Events.succeeded, UpgradeLog.Events.failed] if get_type(txn) != POOL_UPGRADE: return logger.info("Node '{}' handles upgrade txn {}".format(self.nodeName, txn)) txn_data = get_payload_data(txn) action = txn_data[ACTION] version = txn_data[VERSION] justification = txn_data.get(JUSTIFICATION) pkg_name = txn_data.get(PACKAGE, self.config.UPGRADE_ENTRY) upgrade_id = self.get_action_id(txn) # TODO test try: version = src_version_cls(pkg_name)(version) except InvalidVersionError as exc: logger.warning( "{} can't handle upgrade txn with version {} for package {}: {}" .format(self, version, pkg_name, exc) ) return if action == START: # forced txn could have partial schedule list if self.nodeId not in txn_data[SCHEDULE]: logger.info("Node '{}' disregards upgrade txn {}".format( self.nodeName, txn)) return last_event = self.lastActionEventInfo if last_event: if (last_event.data.upgrade_id == upgrade_id and last_event.ev_type in FINALIZING_EVENT_TYPES): logger.info( "Node '{}' has already performed an upgrade with upgrade_id {}. " "Last recorded event is {}" .format(self.nodeName, upgrade_id, last_event.data)) return when = txn_data[SCHEDULE][self.nodeId] failTimeout = txn_data.get(TIMEOUT, self.defaultActionTimeout) if isinstance(when, str): when = dateutil.parser.parse(when) new_ev_data = UpgradeLogData(when, version, upgrade_id, pkg_name) if self.scheduledAction: if self.scheduledAction == new_ev_data: logger.debug( "Node {} already scheduled upgrade to version '{}' " .format(self.nodeName, version)) return else: logger.info( "Node '{}' cancels previous upgrade and schedules a new one to {}" .format(self.nodeName, version)) self._cancelScheduledUpgrade(justification) logger.info("Node '{}' schedules upgrade to {}".format(self.nodeName, version)) self._scheduleUpgrade(new_ev_data, failTimeout) return if action == CANCEL: if (self.scheduledAction and self.scheduledAction.version == version): self._cancelScheduledUpgrade(justification) logger.info("Node '{}' cancels upgrade to {}".format( self.nodeName, version)) return logger.error( "Got {} transaction with unsupported action {}".format( POOL_UPGRADE, action))
python
def basic_logging(self) -> None: """ Call this on the client object to create log handlers to output request and response messages. """ # Request handler if len(request_log.handlers) == 0: request_handler = logging.StreamHandler() request_handler.setFormatter( logging.Formatter(fmt=self.DEFAULT_REQUEST_LOG_FORMAT) ) request_log.addHandler(request_handler) request_log.setLevel(logging.INFO) # Response handler if len(response_log.handlers) == 0: response_handler = logging.StreamHandler() response_handler.setFormatter( logging.Formatter(fmt=self.DEFAULT_RESPONSE_LOG_FORMAT) ) response_log.addHandler(response_handler) response_log.setLevel(logging.INFO)
java
protected void notFound(HttpServletRequest request, HttpServletResponse response) throws IOException { if(log.isDebugEnabled())log.debug("Not Found "+request.getRequestURI()); String method=request.getMethod(); // Not found special requests. if (method.equals(HttpRequest.__GET) || method.equals(HttpRequest.__HEAD) || method.equals(HttpRequest.__POST)) { response.sendError(HttpResponse.__404_Not_Found); } else if (method.equals(HttpRequest.__TRACE)) handleTrace(request,response); else if (method.equals(HttpRequest.__OPTIONS)) handleOptions(request,response); else { // Unknown METHOD response.setHeader(HttpFields.__Allow,__AllowString); response.sendError(HttpResponse.__405_Method_Not_Allowed); } }
python
def get_route_io_data_types(self): # type: () -> typing.List[UserDefined] """ Returns a list of all user-defined data types that are referenced as either an argument, result, or error of a route. If a List or Nullable data type is referenced, then the contained data type is returned assuming it's a user-defined type. """ data_types = set() # type: typing.Set[UserDefined] for route in self.routes: data_types |= self.get_route_io_data_types_for_route(route) return sorted(data_types, key=lambda dt: dt.name)
java
@Function public static byte[] wrapMessage(GSSContext context, MessageProp prop, byte[] message) { try { // wrap the data and return the encrypted token byte[] initialToken = context.wrap(message, 0, message.length, prop); return getTokenWithLengthPrefix(initialToken); } catch (GSSException ex) { throw new RuntimeException("Exception wrapping message", ex); } }
java
protected void addRow(final KeyValue row) { long last_ts = 0; if (rows.size() != 0) { // Verify that we have the same metric id and tags. final byte[] key = row.key(); final iRowSeq last = rows.get(rows.size() - 1); final short metric_width = tsdb.metrics.width(); final short tags_offset = (short) (Const.SALT_WIDTH() + metric_width + Const.TIMESTAMP_BYTES); final short tags_bytes = (short) (key.length - tags_offset); String error = null; if (key.length != last.key().length) { error = "row key length mismatch"; } else if ( Bytes.memcmp(key, last.key(), Const.SALT_WIDTH(), metric_width) != 0) { error = "metric ID mismatch"; } else if (Bytes.memcmp(key, last.key(), tags_offset, tags_bytes) != 0) { error = "tags mismatch"; } if (error != null) { throw new IllegalArgumentException(error + ". " + "This Span's last row key is " + Arrays.toString(last.key()) + " whereas the row key being added is " + Arrays.toString(key) + " and metric_width=" + metric_width); } last_ts = last.timestamp(last.size() - 1); // O(n) } final RowSeq rowseq = new RowSeq(tsdb); rowseq.setRow(row); sorted = false; if (last_ts >= rowseq.timestamp(0)) { // scan to see if we need to merge into an existing row for (final iRowSeq rs : rows) { if (Bytes.memcmp(rs.key(), row.key(), Const.SALT_WIDTH(), (rs.key().length - Const.SALT_WIDTH())) == 0) { rs.addRow(row); return; } } } rows.add(rowseq); }
java
public static String addNamedOutput(Job job, String namedOutput, OutputFormat outputFormat, Class<?> keyClass, Class<?> valueClass) throws FileNotFoundException, IOException, URISyntaxException { checkNamedOutputName(job, namedOutput, true); Configuration conf = job.getConfiguration(); String uniqueName = UUID.randomUUID().toString() + '.' + "out-format.dat"; InstancesDistributor.distribute(outputFormat, uniqueName, conf); conf.set(MULTIPLE_OUTPUTS, conf.get(MULTIPLE_OUTPUTS, "") + " " + namedOutput); conf.set(MO_PREFIX + namedOutput + FORMAT_INSTANCE_FILE, uniqueName); conf.setClass(MO_PREFIX + namedOutput + KEY, keyClass, Object.class); conf.setClass(MO_PREFIX + namedOutput + VALUE, valueClass, Object.class); return uniqueName; }
java
@Override public void stop() throws LifecycleException { super.stop(); /* * Service should be stopped and removed otherwise in between start/stop/start of the container, * javax.management.InstanceAlreadyExistsException: mss-tomcat-embedded-6:type=SipApplicationDispatcher * exception will occur, which is related to the JMX MBeanServer, when tomcat embedded tries to register the MBean, the * instance is already there. */ if(service != null) { try { service.stop(); service.destroy(); } catch (LifecycleException e) { log.error("service already stopped ", e); } /* * Issue: http://code.google.com/p/mobicents/issues/detail?id=3116 * * We need to force SipFactory to create a new SipStack each time it is asked. By default SipFactory will return the previously * created SipStack if it exists. This will create a problem in a scenario like: * * 1. container starts -> SipFactory creates sipStack * 2. System.setProperties such as as properties related to SSL * 3. container.restart -> SipFactory returns previously created SipStack that is not aware of the new System properties * * Thats why the SipFactory needs to be reset in order to create a new SipStack every time the * SipFactory.getInstance().createSipFactory (SipStandardService.initialize()) method is called. */ SipFactory.getInstance().resetFactory(); } }
python
def _readFile(self, fname, sldir): ''' private method that reads in and organizes the .DAT file Loads the data of the .DAT File into the variables cattrs and cols. In both these cases they are dictionaries, but in the case of cols, it is a dictionary of numpy arrays, except for element and element_name where it is just a list ''' cattrs=[] if sldir.endswith(os.sep): #Making sure fname will be formatted correctly fname = str(sldir)+str(fname) else: fname = str(sldir)+os.sep+str(fname) self.sldir+=os.sep f=open(fname,'r') lines=f.readlines() for i in range(len(lines)): lines[i]=lines[i].strip() cols=lines[0].strip('H') cols=cols.strip() cols=cols.split() for i in range(len(lines)): if lines[i].startswith('#'): # if it is a cycle attribute line lines[i]=lines[i].strip('#') tmp=lines[i].split() tmp1=[] for j in range(len(tmp)): if tmp[j] not in ('=', ''): tmp1.append(tmp[j]) tmp=tmp1 j=0 while j <len(tmp): cattrs.append(tmp[j]) j+=2 elif not lines[i].startswith('H'): index = i-1 break return cattrs,cols, index
java
public void deinit() { if (channel != null) try { channel.close(); } catch (IOException | TimeoutException e) { log.warn("Failed to close channel in RabbitMQ connector"); } if (connection != null) try { connection.close(); } catch (IOException e) { log.warn("Failed to close connection in RabbitMQ connector"); } }
python
def _makna(self): """Mengembalikan representasi string untuk semua makna entri ini. :returns: String representasi makna-makna :rtype: str """ if len(self.makna) > 1: return '\n'.join( str(i) + ". " + str(makna) for i, makna in enumerate(self.makna, 1) ) return str(self.makna[0])
java
@Nonnull public static <T> LConsumerBuilder<T> consumer(Consumer<LConsumer<T>> consumer) { return new LConsumerBuilder(consumer); }
java
private Method[] removeMessageEndpointMethods(Method[] listenerMethods) { ArrayList<Method> methods = new ArrayList<Method>(); for (Method method : listenerMethods) { String name = method.getName(); Class<?>[] params = method.getParameterTypes(); if ("afterDelivery".equals(name) && params.length == 0) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "removeMessageEndpointMethods: removing afterDelivery"); continue; } else if ("beforeDelivery".equals(name) && params.length == 1 && params[0] == Method.class) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "removeMessageEndpointMethods: removing beforeDelivery"); continue; } else if ("release".equals(name) && params.length == 0) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "removeMessageEndpointMethods: removing release"); continue; } methods.add(method); } if (methods.size() == listenerMethods.length) { return listenerMethods; } return methods.toArray(new Method[methods.size()]); }
python
def do_execute_direct(self, code: str, silent: bool = False) -> [str, dict]: """ This is the main method that takes code from the Jupyter cell and submits it to the SAS server :param code: code from the cell :param silent: :return: str with either the log or list """ if not code.strip(): return {'status': 'ok', 'execution_count': self.execution_count, 'payload': [], 'user_expressions': {}} if self.mva is None: self._allow_stdin = True self._start_sas() if self.lst_len < 0: self._get_lst_len() if code.startswith('Obfuscated SAS Code'): logger.debug("decoding string") tmp1 = code.split() decode = base64.b64decode(tmp1[-1]) code = decode.decode('utf-8') if code.startswith('showSASLog_11092015') == False and code.startswith("CompleteshowSASLog_11092015") == False: logger.debug("code type: " + str(type(code))) logger.debug("code length: " + str(len(code))) logger.debug("code string: " + code) if code.startswith("/*SASKernelTest*/"): res = self.mva.submit(code, "text") else: res = self.mva.submit(code, prompt=self.promptDict) self.promptDict = {} if res['LOG'].find("SAS process has terminated unexpectedly") > -1: print(res['LOG'], '\n' "Restarting SAS session on your behalf") self.do_shutdown(True) return res['LOG'] output = res['LST'] log = res['LOG'] return self._which_display(log, output) elif code.startswith("CompleteshowSASLog_11092015") == True and code.startswith('showSASLog_11092015') == False: full_log = highlight(self.mva.saslog(), SASLogLexer(), HtmlFormatter(full=True, style=SASLogStyle, lineseparator="<br>", title="Full SAS Log")) return full_log.replace('\n', ' ') else: return self.cachedlog.replace('\n', ' ')
python
def output_tree(self, t, seen): """Given a dependency tree of objects, output it in DFS order.""" if not t: return for c in t.children: self.output_tree(c, seen) if t.name in seen: return seen[t.name] = True if t.resource_name: output_class(t.name, t.props, self.override, t.resource_name) else: output_class(t.name, t.props, self.override)
python
def stop(self, timeout=None): """Stop the thread.""" logger.debug("ports plugin - Close thread for scan list {}".format(self._stats)) self._stopper.set()
java
public Stream tumblingWindow(int windowCount, WindowsStoreFactory windowStoreFactory, Fields inputFields, Aggregator aggregator, Fields functionFields) { return window(TumblingCountWindow.of(windowCount), windowStoreFactory, inputFields, aggregator, functionFields); }
java
private static Bundle _loadBundle(BundleContext context, String path, InputStream is, boolean closeStream) throws BundleException { log(Log.LEVEL_INFO, "add bundle:" + path); try { // we make this very simply so an old loader that is calling this still works return context.installBundle(path, is); } finally { // we make this very simply so an old loader that is calling this still works if (closeStream && is != null) { try { is.close(); } catch (Throwable t) { ExceptionUtil.rethrowIfNecessary(t); } } } }
java
public boolean eq(final Matrix B) { final Matrix A = this; if ((B.m_rows != A.m_rows) || (B.m_columns != A.m_columns)) { return false; } for (int i = 0; i < m_rows; i++) { for (int j = 0; j < m_columns; j++) { if (A.m_data[i][j] != B.m_data[i][j]) { return false; } } } return true; }
python
def _send_command(self, cmd_class, command, payload, timeout=3.0): """ Send a BGAPI packet to the dongle and return the response """ if len(payload) > 60: raise ValueError("Attempting to send a BGAPI packet with length > 60 is not allowed (actual_length=%d, command=%d, command_class=%d)" % (len(payload), command, cmd_class)) header = bytearray(4) header[0] = 0 header[1] = len(payload) header[2] = cmd_class header[3] = command packet = header + bytearray(payload) self._stream.write(bytes(packet)) #Every command has a response so wait for the response here response = self._receive_packet(timeout) return response
java
public String[] get_object_property_list(Database database, String objname, String wildcard) throws DevFailed { if (!database.isAccess_checked()) checkAccess(database); String[] array = new String[2]; array[0] = objname; array[1] = wildcard; DeviceData argIn = new DeviceData(); argIn.insert(array); DeviceData argOut = command_inout(database, "DbGetPropertyList", argIn); return argOut.extractStringArray(); }
java
public static <T> Filter<T> all(Collection<Filter<T>> filters) { return new MultiFilter<T>(filters) { @Override public T accept(T item) throws IOException { for (Filter<T> filter: filters) { filter.accept(item); } return item; } }; }
java
public void setTransformFromLinesSquare( QrCode qr ) { // clear old points storagePairs2D.reset(); storagePairs3D.reset(); // use 3 of the corners to set the coordinate system // set(0, 0, qr.ppCorner,0); <-- prone to damage. Significantly degrades results if used set(0, 7, qr.ppCorner,1); set(7, 7, qr.ppCorner,2); set(7, 0, qr.ppCorner,3); // Use 4 lines to make it more robust errors in these corners // We just need to get the direction right for the lines. the exact grid to image doesn't matter setLine(0,7,0,14,qr.ppCorner,1,qr.ppRight,0); setLine(7,7,7,14,qr.ppCorner,2,qr.ppRight,3); setLine(7,7,14,7,qr.ppCorner,2,qr.ppDown,1); setLine(7,0,14,0,qr.ppCorner,3,qr.ppDown,0); DMatrixRMaj HH = new DMatrixRMaj(3,3); dlt.process(storagePairs2D.toList(),storagePairs3D.toList(),null,HH); H.set(HH); H.invert(Hinv); ConvertFloatType.convert(Hinv, Hinv32); ConvertFloatType.convert(H, H32); }
python
def _evaluate_all_functions(self, xdata, p=None): """ This returns a list of function outputs given the stored data sets. This function relies on a previous call of set_data(). p=None means use the fit results """ if p is None: p = self.results[0] output = [] for n in range(len(self.f)): output.append(self._evaluate_f(n, self._xdata_massaged[n], p) ) return output
python
def training_loop_hparams_from_scoped_overrides(scoped_overrides, trial_id): """Create HParams suitable for training loop from scoped HParams. Args: scoped_overrides: HParams, with keys all scoped by one of HP_SCOPES. These parameters are overrides for the base HParams created by create_loop_hparams. trial_id: str, trial identifier. This is used to register unique HParams names for the underlying model and ppo HParams. Returns: HParams suitable for passing to training_loop. """ trial_hp_overrides = scoped_overrides.values() # Create loop, model, and ppo base HParams loop_hp = create_loop_hparams() model_hp_name = trial_hp_overrides.get( "loop.generative_model_params", loop_hp.generative_model_params) model_hp = registry.hparams(model_hp_name).parse(FLAGS.hparams) base_algo_params_name = trial_hp_overrides.get( "loop.base_algo_params", loop_hp.base_algo_params) algo_hp = registry.hparams(base_algo_params_name) # Merge them and then override with the scoped overrides combined_hp = merge_unscoped_hparams( zip(HP_SCOPES, [loop_hp, model_hp, algo_hp])) combined_hp.override_from_dict(trial_hp_overrides) # Split out the component hparams loop_hp, model_hp, algo_hp = ( split_scoped_hparams(HP_SCOPES, combined_hp)) # Dynamic register the model hp and set the new name in loop_hp model_hp_name = "model_hp_%s" % str(trial_id) dynamic_register_hparams(model_hp_name, model_hp) loop_hp.generative_model_params = model_hp_name # Dynamic register the algo hp and set the new name in loop_hp algo_hp_name = "algo_hp_%s" % str(trial_id) dynamic_register_hparams(algo_hp_name, algo_hp) loop_hp.base_algo_params = algo_hp_name return loop_hp
java
@Override public boolean eIsSet(int featureID) { switch (featureID) { case TypesPackage.JVM_UNKNOWN_TYPE_REFERENCE__QUALIFIED_NAME: return QUALIFIED_NAME_EDEFAULT == null ? qualifiedName != null : !QUALIFIED_NAME_EDEFAULT.equals(qualifiedName); } return super.eIsSet(featureID); }
java
protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception { br_disable_responses result = (br_disable_responses) service.get_payload_formatter().string_to_resource(br_disable_responses.class, response); if(result.errorcode != 0) { if (result.errorcode == SESSION_NOT_EXISTS) service.clear_session(); throw new nitro_exception(result.message, result.errorcode, (base_response [])result.br_disable_response_array); } br_disable[] result_br_disable = new br_disable[result.br_disable_response_array.length]; for(int i = 0; i < result.br_disable_response_array.length; i++) { result_br_disable[i] = result.br_disable_response_array[i].br_disable[0]; } return result_br_disable; }
java
String getOption(String[] options, int index) { if ((options != null) && (options.length > index)) { return options[index]; } return null; }
java
@Override public void eSet(int featureID, Object newValue) { switch (featureID) { case BpsimPackage.CALENDAR__VALUE: setValue((String)newValue); return; case BpsimPackage.CALENDAR__ID: setId((String)newValue); return; case BpsimPackage.CALENDAR__NAME: setName((String)newValue); return; } super.eSet(featureID, newValue); }
java
protected CmsResource getResource(CmsObject cms, I_CmsSearchDocument doc) { // check if the resource exits in the VFS, // this will implicitly check read permission and if the resource was deleted CmsResourceFilter filter = CmsResourceFilter.DEFAULT; if (isRequireViewPermission()) { filter = CmsResourceFilter.DEFAULT_ONLY_VISIBLE; } else if (isIgnoreExpiration()) { filter = CmsResourceFilter.IGNORE_EXPIRATION; } return getResource(cms, doc, filter); }
python
def toJulian(dt=None): """Converts a Python datetime to a Julian date, using the formula from Meeus (1991). This formula is reproduced in D.A. Vallado (2004). See: D.A. Vallado, Fundamentals of Astrodynamics and Applications, p. 187 http://books.google.com/books?id=PJLlWzMBKjkC&lpg=PA956&vq=187&pg=PA187 """ if dt is None: dt = datetime.datetime.utcnow() if dt.month < 3: year = dt.year - 1 month = dt.month + 12 else: year = dt.year month = dt.month A = int(year / 100.0) B = 2 - A + int(A / 4.0) C = ( (dt.second / 60.0 + dt.minute) / 60.0 + dt.hour ) / 24.0 jd = int(365.25 * (year + 4716)) jd += int(30.6001 * (month + 1)) + dt.day + B - 1524.5 + C return jd
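A quick sanity check of the formula: the J2000.0 epoch, 2000-01-01 12:00:00 UTC, is Julian date 2451545.0 exactly:

import datetime
assert toJulian(datetime.datetime(2000, 1, 1, 12, 0, 0)) == 2451545.0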
java
protected synchronized void makeIndex() throws Exception { File indexFile = getIndexFile(filename); if (indexFile.exists()) try { loadIndex(indexFile); setIndexCreated(true); return; } catch (Exception x) { logger.warn(x.getMessage()); } indexCreated = false; long now = System.currentTimeMillis(); int recordLength = 1000; int maxRecords = 1; int maxRecordLength = 0; maxRecords = (int) raFile.length() / recordLength; if (maxRecords == 0) maxRecords = 1; index = new long[maxRecords][3]; String s = null; long start = 0; long end = 0; raFile.seek(0); records = 0; recordLength = 0; while ((s = raFile.readLine()) != null) { if (start == -1) start = raFile.getFilePointer(); if (isRecordEnd(s)) { //fireFrameRead(); if (records >= maxRecords) { index = resize(index, records + (int) (records + (raFile.length() - records * raFile.getFilePointer()) / recordLength)); } end += 4; index[records][0] = start; index[records][1] = end - start; index[records][2] = -1; if (maxRecordLength < index[records][1]) maxRecordLength = (int) index[records][1]; records++; recordLength += end - start; start = raFile.getFilePointer(); } else { end = raFile.getFilePointer(); } } b = new byte[maxRecordLength]; //fireFrameRead(); logger.info("Index created in " + (System.currentTimeMillis() - now) + " ms."); try { saveIndex(indexFile); } catch (Exception x) { logger.error(x); } }
python
def v1_folder_list(request, kvlclient): '''Retrieves a list of folders for the current user. The route for this endpoint is: ``GET /dossier/v1/folder``. (Temporarily, the "current user" can be set via the ``annotator_id`` query parameter.) The payload returned is a list of folder identifiers. ''' return sorted(imap(attrgetter('name'), ifilter(lambda it: it.is_folder(), new_folders(kvlclient, request).list('/'))))
java
public void signal() { if (lockVar.isHeldByCurrentThread()) { throw new RuntimeException("signal() must not be called on same thread as await()"); } try { lockVar.lock(); LOG.log(Level.INFO, "Signalling sleeper..."); isSignal = true; conditionVar.signal(); } finally { lockVar.unlock(); } }
python
def balance(self, account: Address): """ Return the balance of the account of the given address. """ return self.web3.eth.getBalance(to_checksum_address(account), 'pending')
python
def to_date(timeobject): """ Returns the ``datetime.datetime`` object corresponding to the time value conveyed by the specified object, which can be either a UNIXtime, a ``datetime.datetime`` object or an ISO8601-formatted string in the format `YYYY-MM-DD HH:MM:SS+00``. :param timeobject: the object conveying the time value :type timeobject: int, ``datetime.datetime`` or ISO8601-formatted string :returns: a ``datetime.datetime`` object :raises: *TypeError* when bad argument types are provided, *ValueError* when negative UNIXtimes are provided """ if isinstance(timeobject, int): if timeobject < 0: raise ValueError("The time value is a negative number") return datetime.utcfromtimestamp(timeobject).replace(tzinfo=UTC()) elif isinstance(timeobject, datetime): return timeobject.replace(tzinfo=UTC()) elif isinstance(timeobject, str): return datetime.strptime(timeobject, '%Y-%m-%d %H:%M:%S+00').replace(tzinfo=UTC()) else: raise TypeError('The time value must be expressed either by an int ' \ 'UNIX time, a datetime.datetime object or an ' \ 'ISO8601-formatted string')
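All three accepted input types resolve to the same kind of timezone-aware datetime; for example:

from datetime import datetime
to_date(1577836800)                # UNIX time -> 2020-01-01 00:00:00 UTC
to_date(datetime(2020, 1, 1))      # datetime -> same instant with UTC tzinfo attached
to_date('2020-01-01 00:00:00+00')  # ISO8601 string in the one accepted format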
python
def make_benchark(n_train, n_test, n_dim=2): """ Compute the benchmarks for Ordinary Kriging Parameters ---------- n_train : int number of points in the training set n_test : int number of points in the test set n_dim : int number of dimensions (default=2) Returns ------- res : dict a dictionary with the timing results """ X_train = np.random.rand(n_train, n_dim) y_train = np.random.rand(n_train) X_test = np.random.rand(n_test, n_dim) res = {} for variogram_model in VARIOGRAM_MODELS: tic = time() OK = OrdinaryKriging(X_train[:, 0], X_train[:, 1], y_train, variogram_model=variogram_model, verbose=False, enable_plotting=False) res['t_train_{}'.format(variogram_model)] = time() - tic # The prediction tests below reuse the most recently fitted model for backend in BACKENDS: for n_closest_points in N_MOVING_WINDOW: if backend == 'vectorized' and n_closest_points is not None: continue # this is not supported tic = time() OK.execute('points', X_test[:, 0], X_test[:, 1], backend=backend, n_closest_points=n_closest_points) res['t_test_{}_{}'.format(backend, n_closest_points)] = time() - tic return res
python
def url(self): """The URL of the request (str).""" query = { 'key': self.api_key or default_api_key, 'person': self.person.to_json(), 'query_params_mode': self.query_params_mode, 'exact_name': self.exact_name, 'prioritize_records_by': ','.join(self._prioritize_records_by), 'filter_records_by': self._filter_records_by, } return SearchAPIRequest.BASE_URL + urllib.urlencode(query, doseq=True)
java
public void displayTitle() throws Exception { JspWriter out = getJsp().getJspContext().getOut(); out.println(htmlStart()); out.println(bodyStart(null)); out.println(dialogStart()); out.println(dialogContentStart(getParamTitle())); out.println(dialogContentEnd()); out.println(dialogEnd()); out.println(bodyEnd()); out.println(htmlEnd()); }
java
@Override public void setDescription(String description, java.util.Locale locale) { _cpSpecificationOption.setDescription(description, locale); }
python
def toggle_fold_trigger(self, block): """ Toggle a fold trigger block (expand or collapse it). :param block: The QTextBlock to expand/collapse """ if not TextBlockHelper.is_fold_trigger(block): return region = FoldScope(block) if region.collapsed: region.unfold() if self._mouse_over_line is not None: self._add_scope_decorations( region._trigger, *region.get_range()) else: region.fold() self._clear_scope_decos() self._refresh_editor_and_scrollbars() self.trigger_state_changed.emit(region._trigger, region.collapsed)
python
def write_validate(ctx, param, value): """ Validate the -w option. Purpose: Validates the `-w`|`--write` option. Two arguments are expected. | The first is the mode, which must be in ['s', 'single', 'm', | 'multiple']. The mode determines if we're writing to one file for | all device output, or to a separate file for each device being | handled. | | The second expected argument is the filepath of the desired | output file. This will automatically be prepended with the IP or | hostname of the device if we're writing to multiple files. @param ctx: The click context parameter, for receiving the object dictionary | being manipulated by other previous functions. Needed by any | function with the @click.pass_context decorator. Callback | functions such as this one receive this automatically. @type ctx: click.Context @param param: param is passed into a validation callback function by click. | We do not use it. @type param: None @param value: The value that the user supplied for the write option. @type value: str @returns: The value that the user supplied, if it passed validation. | Otherwise, raises click.BadParameter @rtype: str """ if value != ("default", "default"): try: mode, dest_file = (value[0], value[1]) except IndexError: raise click.BadParameter('Expecting two arguments, one for how to ' 'output (s, single, m, multiple), and ' 'the second is a filepath where to put' ' the output.') if mode.lower() not in ['s', 'single', 'm', 'multiple']: raise click.BadParameter('The first argument of the -w/--write ' 'option must specify whether to write' ' to one file per device, or all device' ' output to a single file. Valid options' ' are "s", "single", "m", and "multiple"') # we've passed the checks, so set the 'out' context variable to our # tuple of the mode, and the destination file. ctx.obj['out'] = (mode.lower(), dest_file) else: # they didn't use -w, so set the context variable accordingly. ctx.obj['out'] = None
java
public EntityNameAvailabilityCheckOutputInner checkNameAvailability(String locationName, CheckNameAvailabilityInput parameters) { return checkNameAvailabilityWithServiceResponseAsync(locationName, parameters).toBlocking().single().body(); }
python
def _new_object(self, objtype, name=None): r""" """ if objtype.startswith('net'): obj = openpnm.network.GenericNetwork(project=self, name=name) elif objtype.startswith('geo'): obj = openpnm.geometry.GenericGeometry(project=self, name=name) elif objtype.startswith('pha'): obj = openpnm.phases.GenericPhase(project=self, name=name) elif objtype.startswith('phy'): obj = openpnm.physics.GenericPhysics(project=self, name=name) elif objtype.startswith('alg'): obj = openpnm.algorithm.GenericAlgorithm(project=self, name=name) else: obj = openpnm.core.Base(project=self, name=name) return obj
python
def pause(self, uuid): """ Pause a kvm domain by uuid :param uuid: uuid of the kvm container (same as the used in create) :return: """ args = { 'uuid': uuid, } self._domain_action_chk.check(args) self._client.sync('kvm.pause', args)
python
def identify_repo(repo_url): """Determine if `repo_url` should be treated as a URL to a git or hg repo. Repos can be identified by prepending "hg+" or "git+" to the repo URL. :param repo_url: Repo URL of unknown type. :returns: ('git', repo_url), ('hg', repo_url), or None. """ repo_url_values = repo_url.split('+') if len(repo_url_values) == 2: repo_type = repo_url_values[0] if repo_type in ["git", "hg"]: return repo_type, repo_url_values[1] else: raise UnknownRepoType else: if 'git' in repo_url: return 'git', repo_url elif 'bitbucket' in repo_url: return 'hg', repo_url else: raise UnknownRepoType
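For instance, with illustrative URLs:

identify_repo('git+https://example.com/some/repo.git')  # -> ('git', 'https://example.com/some/repo.git')
identify_repo('https://bitbucket.org/user/repo')        # -> ('hg', 'https://bitbucket.org/user/repo')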
java
@Override public Type getType() throws EFapsException { final Attribute attr = this.type.getAttribute(this.attrName); if (attr == null) { LinkToSelectPart.LOG.error("Could not find an Attribute with name '{}' for type:{}", this.attrName, this.type); throw new EFapsException(LinkFromSelect.class, "NoAttribute"); } Type ret = null; try { ret = attr.getLink(); if (ret == null) { LinkToSelectPart.LOG.error("No link for Attribute '{}'", attr); } } catch (final CacheReloadException e) { LinkToSelectPart.LOG.error("Could not get Link for Attribute '{}'", attr); } return ret; }
java
private void updateDownloadStatus(S3Object result) { if (result == null) { download.setState(Transfer.TransferState.Canceled); download.setMonitor(new DownloadMonitor(download, null)); } else { download.setState(Transfer.TransferState.Completed); } }
java
public static Class<?> loadClass(String className) { try { return Class.forName(className); } catch(ClassNotFoundException e) { throw new IllegalArgumentException(e); } }
python
def plot_i1(self, colorbar=True, cb_orientation='vertical', cb_label=None, ax=None, show=True, fname=None, **kwargs): """ Plot the second invariant I1 of the tensor: I1 = vxx*vyy + vyy*vzz + vxx*vzz - vxy**2 - vyz**2 - vxz**2 Usage ----- x.plot_i1([tick_interval, xlabel, ylabel, ax, colorbar, cb_orientation, cb_label, show, fname]) Parameters ---------- tick_interval : list or tuple, optional, default = [30, 30] Intervals to use when plotting the x and y ticks. If set to None, ticks will not be plotted. xlabel : str, optional, default = 'longitude' Label for the longitude axis. ylabel : str, optional, default = 'latitude' Label for the latitude axis. ax : matplotlib axes object, optional, default = None A single matplotlib axes object where the plot will appear. colorbar : bool, optional, default = True If True, plot a colorbar. cb_orientation : str, optional, default = 'vertical' Orientation of the colorbar: either 'vertical' or 'horizontal'. cb_label : str, optional, default = '$I_1$' Text label for the colorbar. show : bool, optional, default = True If True, plot the image to the screen. fname : str, optional, default = None If present, and if axes is not specified, save the image to the specified file. kwargs : optional Keyword arguments that will be sent to the SHGrid.plot() and plt.imshow() methods. """ if cb_label is None: cb_label = self._i1_label if self.i1 is None: self.compute_invar() if ax is None: fig, axes = self.i1.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, show=False, **kwargs) if show: fig.show() if fname is not None: fig.savefig(fname) return fig, axes else: self.i1.plot(colorbar=colorbar, cb_orientation=cb_orientation, cb_label=cb_label, ax=ax, **kwargs)
java
public static void removeEntryFromSSLContextMap(String keyStorePath) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "removeEntryFromSSLContextMap: " + new Object[] { keyStorePath }); List<SSLConfig> removeList = new ArrayList<SSLConfig>(); for (Entry<SSLConfig, SSLContext> entry : sslContextCacheJAVAX.entrySet()) { SSLConfig cachedConfig = entry.getKey(); if (cachedConfig != null) { String ksPropValue = cachedConfig.getProperty(Constants.SSLPROP_KEY_STORE, null); boolean ksFileBased = Boolean.parseBoolean(cachedConfig.getProperty(Constants.SSLPROP_KEY_STORE_FILE_BASED)); String tsPropValue = cachedConfig.getProperty(Constants.SSLPROP_TRUST_STORE, null); boolean tsFileBased = Boolean.parseBoolean(cachedConfig.getProperty(Constants.SSLPROP_TRUST_STORE_FILE_BASED)); if ((ksPropValue != null && keyStorePath.equals(WSKeyStore.getCannonicalPath(ksPropValue, ksFileBased))) || (tsPropValue != null && keyStorePath.equals(WSKeyStore.getCannonicalPath(tsPropValue, tsFileBased)))) { removeList.add(cachedConfig); } } } if (!removeList.isEmpty()) { for (SSLConfig removeEntry : removeList) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "removing from sslContext cache: " + removeEntry.toString()); sslContextCacheJAVAX.remove(removeEntry); } } }
python
def _draw_rectangle(data, obj, draw_options): """Return the PGFPlots code for rectangles. """ # Objects with labels are plot objects (from bar charts, etc). Even those without # labels explicitly set have a label of "_nolegend_". Everything else should be # skipped because they likely correspond to axis/legend objects which are handled by # PGFPlots label = obj.get_label() if label == "": return data, [] # Get actual label, bar charts by default only give rectangles labels of # "_nolegend_". See <https://stackoverflow.com/q/35881290/353337>. handles, labels = obj.axes.get_legend_handles_labels() labelsFound = [ label for h, label in zip(handles, labels) if obj in h.get_children() ] if len(labelsFound) == 1: label = labelsFound[0] left_lower_x = obj.get_x() left_lower_y = obj.get_y() ff = data["float format"] cont = ( "\\draw[{}] (axis cs:" + ff + "," + ff + ") " "rectangle (axis cs:" + ff + "," + ff + ");\n" ).format( ",".join(draw_options), left_lower_x, left_lower_y, left_lower_x + obj.get_width(), left_lower_y + obj.get_height(), ) if label != "_nolegend_" and label not in data["rectangle_legends"]: data["rectangle_legends"].add(label) cont += "\\addlegendimage{{ybar,ybar legend,{}}};\n".format( ",".join(draw_options) ) cont += "\\addlegendentry{{{}}}\n\n".format(label) return data, cont
java
public Vector4d hermite(Vector4dc t0, Vector4dc v1, Vector4dc t1, double t, Vector4d dest) { double t2 = t * t; double t3 = t2 * t; dest.x = (x + x - v1.x() - v1.x() + t1.x() + t0.x()) * t3 + (3.0 * v1.x() - 3.0 * x - t0.x() - t0.x() - t1.x()) * t2 + x * t + x; dest.y = (y + y - v1.y() - v1.y() + t1.y() + t0.y()) * t3 + (3.0 * v1.y() - 3.0 * y - t0.y() - t0.y() - t1.y()) * t2 + y * t + y; dest.z = (z + z - v1.z() - v1.z() + t1.z() + t0.z()) * t3 + (3.0 * v1.z() - 3.0 * z - t0.z() - t0.z() - t1.z()) * t2 + z * t + z; dest.w = (w + w - v1.w() - v1.w() + t1.w() + t0.w()) * t3 + (3.0 * v1.w() - 3.0 * w - t0.w() - t0.w() - t1.w()) * t2 + w * t + w; return dest; }
java
@SuppressWarnings("unchecked") @Override public EList<String> getUKnotsAsString() { return (EList<String>) eGet(Ifc4Package.Literals.IFC_BSPLINE_SURFACE_WITH_KNOTS__UKNOTS_AS_STRING, true); }
python
def _full_keys(keys, ndim): """ Given keys such as those passed to ``__getitem__`` for an array of ndim, return a fully expanded tuple of keys. In all instances, the result of this operation should follow: array[keys] == array[_full_keys(keys, array.ndim)] """ if not isinstance(keys, tuple): keys = (keys,) # Make keys mutable, and take a copy. keys = list(keys) # Count the number of keys which actually slice a dimension. n_keys_non_newaxis = len([key for key in keys if key is not np.newaxis]) # Numpy allows an extra dimension to be an Ellipsis, we remove it here # if Ellipsis is in keys, if this doesn't trigger we will raise an # IndexError. is_ellipsis = [key is Ellipsis for key in keys] if n_keys_non_newaxis - 1 >= ndim and any(is_ellipsis): # Remove the left-most Ellipsis, as numpy does. keys.pop(is_ellipsis.index(True)) n_keys_non_newaxis -= 1 if n_keys_non_newaxis > ndim: raise IndexError('Dimensions are over specified for indexing.') lh_keys = [] # Keys, with the last key first. rh_keys = [] take_from_left = True while keys: if take_from_left: next_key = keys.pop(0) keys_list = lh_keys else: next_key = keys.pop(-1) keys_list = rh_keys if next_key is Ellipsis: next_key = slice(None) take_from_left = not take_from_left keys_list.append(next_key) middle = [slice(None)] * (ndim - n_keys_non_newaxis) return tuple(lh_keys + middle + rh_keys[::-1])
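Two illustrative expansions for a 3-D array (no np.newaxis involved):

_full_keys(0, 3)              # -> (0, slice(None, None, None), slice(None, None, None))
_full_keys((Ellipsis, 0), 3)  # -> (slice(None, None, None), slice(None, None, None), 0)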
python
def fetch(cls, channel, start, end, bits=None, host=None, port=None,
          verbose=False, connection=None, type=Nds2ChannelType.any()):
    """Fetch data from NDS into a `StateVector`.

    Parameters
    ----------
    channel : `str`, `~gwpy.detector.Channel`
        the name of the channel to read, or a `Channel` object.

    start : `~gwpy.time.LIGOTimeGPS`, `float`, `str`
        GPS start time of required data,
        any input parseable by `~gwpy.time.to_gps` is fine

    end : `~gwpy.time.LIGOTimeGPS`, `float`, `str`
        GPS end time of required data,
        any input parseable by `~gwpy.time.to_gps` is fine

    bits : `Bits`, `list`, optional
        definition of bits for this `StateVector`

    host : `str`, optional
        URL of NDS server to use, defaults to observatory site host

    port : `int`, optional
        port number for NDS server query, must be given with `host`

    connection : `nds2.connection`, optional
        open NDS connection to use

    verbose : `bool`, optional
        print verbose output about NDS progress

    type : `int`, optional
        NDS2 channel type integer
    """
    new = cls.DictClass.fetch(
        [channel], start, end, host=host, port=port,
        verbose=verbose, connection=connection)[channel]
    if bits:
        new.bits = bits
    return new
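A hedged usage sketch; the channel name, GPS span, bit labels, and host below are placeholders, and a reachable NDS2 server with credentials is assumed at runtime:

# All values here are hypothetical; a working NDS2 setup is required.
sv = StateVector.fetch('L1:GDS-CALIB_STATE_VECTOR', 1187008880, 1187008884,
                       bits=['HOFT_OK', 'OBS_INTENT'],
                       host='nds.ligo.caltech.edu')
print(sv.bits)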
java
public static base_response add(nitro_service client, dnspolicylabel resource) throws Exception { dnspolicylabel addresource = new dnspolicylabel(); addresource.labelname = resource.labelname; addresource.transform = resource.transform; return addresource.add_resource(client); }
java
@Override public void propagate(int evtmask) throws ContradictionException { for (int i = 0; i < n; i++) { dMax[i] = deg[i].getUB(); } super.propagate(evtmask); }
python
def self_inventory(self):
    """
    When a key is provided, the inventory output only contains the
    server name and the session ID. Provide the same format as the
    full inventory instead, for consistency.
    """
    if self.api_key is None:
        return {}

    if self._self_inventory:
        return self._self_inventory

    resp, self_inventory = self.get('Inventory?key=%s' % self.api_key)
    real_self_inventory = dict()
    for host in self_inventory:
        real_self_inventory[host[0]] = self.full_inventory[host[0]]

    self._self_inventory = real_self_inventory
    return self._self_inventory
java
private static Parameter[] getParameters(Constructor<?> constructor, Type target) { return buildParameters(target, constructor.getParameterTypes(), constructor.getGenericParameterTypes(), TypeHelper.toClass(target).getTypeParameters()); }
java
@Override public boolean passes(final String scope) { if (skipTestScope && SCOPE_TEST.equals(scope)) { return true; } if (skipProvidedScope && SCOPE_PROVIDED.equals(scope)) { return true; } if (skipSystemScope && SCOPE_SYSTEM.equals(scope)) { return true; } if (skipRuntimeScope && SCOPE_RUNTIME.equals(scope)) { return true; } if (skipRuntimeScope && skipSystemScope && SCOPE_COMPILE_PLUS_RUNTIME.equals(scope)) { return true; } return false; }
python
def password(
        self,
        length=10,
        special_chars=True,
        digits=True,
        upper_case=True,
        lower_case=True):
    """
    Generates a random password.
    @param length: Integer. Length of a password
    @param special_chars: Boolean. Whether to use special characters !@#$%^&*()_+
    @param digits: Boolean. Whether to use digits
    @param upper_case: Boolean. Whether to use upper case letters
    @param lower_case: Boolean. Whether to use lower case letters
    @return: String. Random password
    """
    choices = ""
    required_tokens = []
    if special_chars:
        required_tokens.append(
            self.generator.random.choice("!@#$%^&*()_+"))
        choices += "!@#$%^&*()_+"
    if digits:
        required_tokens.append(self.generator.random.choice(string.digits))
        choices += string.digits
    if upper_case:
        required_tokens.append(
            self.generator.random.choice(string.ascii_uppercase))
        choices += string.ascii_uppercase
    if lower_case:
        required_tokens.append(
            self.generator.random.choice(string.ascii_lowercase))
        choices += string.ascii_lowercase

    assert len(required_tokens) <= length, \
        "Number of required characters exceeds the requested password length"

    # Generate a first version of the password
    chars = self.random_choices(choices, length=length)

    # Pick some unique locations
    random_indexes = set()
    while len(random_indexes) < len(required_tokens):
        random_indexes.add(
            self.generator.random.randint(0, len(chars) - 1))

    # Replace them with the required characters
    for i, index in enumerate(random_indexes):
        chars[index] = required_tokens[i]

    return ''.join(chars)
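A usage sketch, assuming this provider is wired into a Faker instance as in the faker library; the asserted properties follow directly from the required_tokens logic above:

from faker import Faker

fake = Faker()
pw = fake.password(length=12, special_chars=True, digits=True,
                   upper_case=True, lower_case=True)
assert len(pw) == 12
assert any(c.isdigit() for c in pw) and any(c.isupper() for c in pw)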
python
def unregister_node_path(self, node): """ Unregisters given Node path from the **file_system_events_manager**. :param node: Node. :type node: FileNode or DirectoryNode or ProjectNode :return: Method success. :rtype: bool """ path = node.file if hasattr(node, "file") else node.path path = foundations.strings.to_string(path) return self.unregister_file(path)
java
@Override public void unlockSet(int requestNumber, SIMessageHandle[] msgHandles, boolean reply) // f199593, F219476.2 { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "unlockSet", new Object[] { requestNumber, msgHandles, reply }); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { SibTr.debug(this, tc, "Request to unlock " + msgHandles.length + " message(s)"); if (reply) SibTr.debug(this, tc, "The client is expecting a reply"); } try { bifSession.unlockSet(msgHandles); if (reply) { try { getConversation().send(poolManager.allocate(), JFapChannelConstants.SEG_UNLOCK_SET_R, requestNumber, JFapChannelConstants.PRIORITY_MEDIUM, true, ThrottlingPolicy.BLOCK_THREAD, null); } catch (SIException e) { FFDCFilter.processException(e, CLASS_NAME + ".unlockSet", CommsConstants.CATBIFCONSUMER_UNLOCKSET_01, this); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, e.getMessage(), e); SibTr.error(tc, "COMMUNICATION_ERROR_SICO2033", e); } } } catch (SIException e) { //No FFDC code needed //Only FFDC if we haven't received a meTerminated event. if (!((ConversationState) getConversation().getAttachment()).hasMETerminated()) { FFDCFilter.processException(e, CLASS_NAME + ".unlockSet", CommsConstants.CATBIFCONSUMER_UNLOCKSET_02, this); } if (reply) { StaticCATHelper.sendExceptionToClient(e, CommsConstants.CATBIFCONSUMER_UNLOCKSET_02, getConversation(), requestNumber); } else { SibTr.error(tc, "UNABLE_TO_UNLOCK_MSGS_SICO2032", e); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "unlockSet"); }
python
def _finalize_requires(self): """ Set `metadata.python_requires` and fix environment markers in `install_requires` and `extras_require`. """ if getattr(self, 'python_requires', None): self.metadata.python_requires = self.python_requires if getattr(self, 'extras_require', None): for extra in self.extras_require.keys(): # Since this gets called multiple times at points where the # keys have become 'converted' extras, ensure that we are only # truly adding extras we haven't seen before here. extra = extra.split(':')[0] if extra: self.metadata.provides_extras.add(extra) self._convert_extras_requirements() self._move_install_requirements_markers()
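To see why the keys are split on ':' above: extras_require keys may carry an environment marker after a colon, and only the part before the colon names an extra. A minimal sketch (the package names are illustrative):

# Keys of extras_require may embed an environment marker after ':'.
extras_require = {
    'tests': ['pytest'],
    'tests:python_version<"3.0"': ['mock'],  # marker-qualified key
    ':sys_platform=="win32"': ['colorama'],  # bare marker, no extra name
}
extras = {key.split(':')[0] for key in extras_require}
extras.discard('')  # bare-marker keys contribute no extra name
assert extras == {'tests'}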
java
public JSON with(PrettyPrinter pp) { if (_prettyPrinter == pp) { return this; } return _with(_features, _streamFactory, _treeCodec, _reader, _writer, pp); }
python
def update_cache_for_course(self, courseid):
    """
    Clean/update the cache of all the tasks for a given course (id)
    :param courseid: the id of the course whose cached tasks should be dropped
    """
    to_drop = []
    for (cid, tid) in self._cache:
        if cid == courseid:
            to_drop.append(tid)
    for tid in to_drop:
        del self._cache[(courseid, tid)]
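The two-pass shape (collect keys, then delete) avoids mutating the dict while iterating over it; a minimal sketch of the same pattern outside the class:

cache = {('course1', 'a'): 1, ('course1', 'b'): 2, ('course2', 'a'): 3}
to_drop = [tid for (cid, tid) in cache if cid == 'course1']
for tid in to_drop:
    del cache[('course1', tid)]   # safe: iteration is already finished
assert cache == {('course2', 'a'): 3}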
python
def addLocalCacheService(self):
    "adds a CacheService to the instantiated HendrixService"
    _cache = self.getCacheService()
    _cache.setName('cache_proxy')
    _cache.setServiceParent(self.hendrix)
java
public static <T1, T2> BiConsumer<T1, T2> softenBiConsumer(final CheckedBiConsumer<T1, T2> fn) { return (t1, t2) -> { try { fn.accept(t1, t2); } catch (final Throwable e) { throw throwSoftenedException(e); } }; }
java
@SuppressWarnings("checkstyle:hiddenfield") public <T, U extends T> void register(final Class<T> type, final Set<EventHandler<U>> handlers) { this.handlers.put(type, new ExceptionHandlingEventHandler<>( new BroadCastEventHandler<>(handlers), this.errorHandler)); }
python
def actions( self ):
    """
    Returns a list of actions that are associated with this shortcut edit.
    
    :return:    [<QAction>, ..]
    """
    output = []
    for i in range(self.uiActionTREE.topLevelItemCount()):
        output.append(self.uiActionTREE.topLevelItem(i).action())
    return output
java
public static scala.collection.Iterable linkedListToScalaIterable(LinkedList<?> linkedList) { return JavaConverters.asScalaIterableConverter(linkedList).asScala(); }
java
public ReadOnlyStyledDocument<PS, SEG, S> build() { ensureNotYetCreated(); if (paragraphList.isEmpty()) { throw new IllegalStateException("Cannot build a ReadOnlyStyledDocument with an empty list of paragraphs!"); } alreadyCreated = true; return new ReadOnlyStyledDocument<>(paragraphList); }
python
def network_from_array(self, array):
    """
    Defines a network from an array.

    Parameters
    ----------
    array : array
        3D numpy array.
    """
    if len(array.shape) == 2:
        array = np.array(array, ndmin=3).transpose([1, 2, 0])
    teneto.utils.check_TemporalNetwork_input(array, 'array')
    uvals = np.unique(array)
    if len(uvals) == 2 and 1 in uvals and 0 in uvals:
        i, j, t = np.where(array == 1)
        self.network = pd.DataFrame(data={'i': i, 'j': j, 't': t})
    else:
        i, j, t = np.where(array != 0)
        w = array[array != 0]
        self.network = pd.DataFrame(
            data={'i': i, 'j': j, 't': t, 'weight': w})
    self.N = int(array.shape[0])
    self.T = int(array.shape[-1])
    self._update_network()
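A sketch of the binary branch above, reproducing the edge-list extraction outside the class (the array contents are made up):

import numpy as np
import pandas as pd

arr = np.zeros((3, 3, 2))     # 3 nodes, 2 time points
arr[0, 1, 0] = 1
arr[1, 2, 1] = 1
i, j, t = np.where(arr == 1)
edges = pd.DataFrame({'i': i, 'j': j, 't': t})
print(edges)                  # two rows: (0, 1, 0) and (1, 2, 1)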
java
public AttackDetail withAttackCounters(SummarizedCounter... attackCounters) { if (this.attackCounters == null) { setAttackCounters(new java.util.ArrayList<SummarizedCounter>(attackCounters.length)); } for (SummarizedCounter ele : attackCounters) { this.attackCounters.add(ele); } return this; }
python
def pad_char(text: str, width: int, char: str = '\n') -> str:
    """Pads text with char until it reaches length width."""
    dis = width - len(text)
    if dis < 0:
        raise ValueError("text is longer than the requested width")
    if dis > 0:
        text += char * dis
    return text
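Usage is straightforward; assuming pad_char above is in scope:

assert pad_char("ab", 5, ".") == "ab..."
assert pad_char("abc", 3) == "abc"   # already at width: unchanged
# pad_char("abcd", 3) raises ValueError: text is longer than the width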
java
public static <T> T min(final T[] a, final Comparator<? super T> cmp) {
    N.checkArgNotNullOrEmpty(a, "The specified array 'a' cannot be null or empty");

    return min(a, 0, a.length, cmp);
}
python
def compute_uncertainty_reward(logits, predictions): """Uncertainty reward based on logits.""" # TODO(rsepassi): Add support for L1/L2 loss models. Current code only # works for softmax models. vocab_size = logits.shape[-1] assert vocab_size > 1 log_probs = common_layers.log_prob_from_logits(logits) max_log_probs = common_layers.index_last_dim_with_indices(log_probs, predictions) # Threshold neg_log_prob = tf.nn.relu(-max_log_probs - 0.02) # Sum across all but the batch dimension reduce_dims = list(range(len(neg_log_prob.shape)))[1:] summed = tf.reduce_sum(neg_log_prob, axis=reduce_dims) return summed / 10
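For intuition, a NumPy re-derivation of the same reward; np.take_along_axis stands in for index_last_dim_with_indices, and the 0.02 threshold and /10 scaling follow the function above:

import numpy as np

def uncertainty_reward_np(logits, predictions):
    """NumPy sketch of compute_uncertainty_reward (illustrative only)."""
    # Numerically stable log-softmax over the vocab dimension.
    z = logits - logits.max(axis=-1, keepdims=True)
    log_probs = z - np.log(np.exp(z).sum(axis=-1, keepdims=True))
    # Log-probability of each predicted token.
    max_log_probs = np.take_along_axis(
        log_probs, predictions[..., None], axis=-1)[..., 0]
    # Threshold, then sum across all but the batch dimension.
    neg_log_prob = np.maximum(-max_log_probs - 0.02, 0.0)
    axes = tuple(range(1, neg_log_prob.ndim))
    return neg_log_prob.sum(axis=axes) / 10

logits = np.random.randn(2, 5, 8)                  # batch=2, length=5, vocab=8
preds = np.random.randint(0, 8, size=(2, 5))
print(uncertainty_reward_np(logits, preds).shape)  # (2,)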