Dataset schema:
language: string, 2 classes (python, java)
func_code_string: string, lengths 63 to 466k
python
def _read_header(self):
    """Read the header info"""
    data = np.fromfile(self.filename, dtype=native_header, count=1)
    self.header.update(recarray2dict(data))

    data15hd = self.header['15_DATA_HEADER']
    sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER']

    # Set the list of available channels:
    self.mda['available_channels'] = get_available_channels(self.header)
    self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values()
                                if self.mda['available_channels'][i]]

    self.platform_id = data15hd[
        'SatelliteStatus']['SatelliteDefinition']['SatelliteId']
    self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id]

    equator_radius = data15hd['GeometricProcessing'][
        'EarthModel']['EquatorialRadius'] * 1000.
    north_polar_radius = data15hd[
        'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000.
    south_polar_radius = data15hd[
        'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000.
    polar_radius = (north_polar_radius + south_polar_radius) * 0.5
    ssp_lon = data15hd['ImageDescription'][
        'ProjectionDescription']['LongitudeOfSSP']

    self.mda['projection_parameters'] = {'a': equator_radius,
                                         'b': polar_radius,
                                         'h': 35785831.00,
                                         'ssp_longitude': ssp_lon}

    north = int(sec15hd['NorthLineSelectedRectangle']['Value'])
    east = int(sec15hd['EastColumnSelectedRectangle']['Value'])
    south = int(sec15hd['SouthLineSelectedRectangle']['Value'])
    west = int(sec15hd['WestColumnSelectedRectangle']['Value'])

    ncolumns = west - east + 1
    nrows = north - south + 1

    # check if the file has fewer rows or columns than
    # the maximum, if so it is an area of interest file
    if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS):
        self.mda['is_full_disk'] = False

    # If the number of columns in the file is not divisible by 4,
    # UMARF will add extra columns to the file
    modulo = ncolumns % 4
    padding = 0
    if modulo > 0:
        padding = 4 - modulo
    cols_visir = ncolumns + padding

    # Check the VISIR calculated column dimension against
    # the header information
    cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value'])
    if cols_visir_hdr != cols_visir:
        logger.warning("Number of VISIR columns from the header is incorrect!")
        logger.warning("Header: %d", cols_visir_hdr)
        logger.warning("Calculated: = %d", cols_visir)

    # HRV Channel - check if the area is reduced in east west
    # direction as this affects the number of columns in the file
    cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value'])
    if ncolumns < VISIR_NUM_COLUMNS:
        cols_hrv = cols_hrv_hdr
    else:
        cols_hrv = int(cols_hrv_hdr / 2)

    # self.mda represents the 16bit dimensions not 10bit
    self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value'])
    self.mda['number_of_columns'] = cols_visir
    self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value'])
    self.mda['hrv_number_of_columns'] = cols_hrv
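For illustration, here is the UMARF column-padding rule from the reader above in isolation — a minimal sketch with a hypothetical helper name; the outputs just follow from the modulo arithmetic:

def pad_to_multiple_of_4(ncolumns):
    # UMARF pads the column count up to the next multiple of 4.
    modulo = ncolumns % 4
    return ncolumns + (4 - modulo if modulo else 0)

print(pad_to_multiple_of_4(3710))  # 3712
print(pad_to_multiple_of_4(3712))  # 3712 (already aligned, no padding)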
java
public void forceDeleteModule(String moduleName) throws Exception {
    OpenCms.getModuleManager().deleteModule(
        m_cms,
        moduleName,
        true,
        new CmsShellReport(m_cms.getRequestContext().getLocale()));
}
python
def get_product_historic_rates(self, product_id, start=None, end=None,
                               granularity=None):
    """Historic rates for a product.

    Rates are returned in grouped buckets based on requested
    `granularity`. If start, end, and granularity aren't provided,
    the exchange will assume some (currently unknown) default values.

    Historical rate data may be incomplete. No data is published for
    intervals where there are no ticks.

    **Caution**: Historical rates should not be polled frequently.
    If you need real-time information, use the trade and book
    endpoints along with the websocket feed.

    The maximum number of data points for a single request is 200
    candles. If your selection of start/end time and granularity
    will result in more than 200 data points, your request will be
    rejected. If you wish to retrieve fine granularity data over a
    larger time range, you will need to make multiple requests with
    new start/end ranges.

    Args:
        product_id (str): Product
        start (Optional[str]): Start time in ISO 8601
        end (Optional[str]): End time in ISO 8601
        granularity (Optional[int]): Desired time slice in seconds

    Returns:
        list: Historic candle data. Example:
            [
                [ time, low, high, open, close, volume ],
                [ 1415398768, 0.32, 4.2, 0.35, 4.2, 12.3 ],
                ...
            ]

    """
    params = {}
    if start is not None:
        params['start'] = start
    if end is not None:
        params['end'] = end
    if granularity is not None:
        accepted_grans = [60, 300, 900, 3600, 21600, 86400]
        if granularity not in accepted_grans:
            raise ValueError(
                'Specified granularity is {}, must be in approved '
                'values: {}'.format(granularity, accepted_grans))
        params['granularity'] = granularity
    return self._send_message(
        'get', '/products/{}/candles'.format(product_id), params=params)
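The granularity check above is the only client-side validation; a self-contained sketch of the same rule (the constant name is ours, the accepted bucket sizes are taken from the code above):

ACCEPTED_GRANULARITIES = [60, 300, 900, 3600, 21600, 86400]

def validate_granularity(granularity):
    # Mirrors the method above: only the exchange's fixed bucket sizes pass.
    if granularity not in ACCEPTED_GRANULARITIES:
        raise ValueError(
            'Specified granularity is {}, must be in approved values: {}'
            .format(granularity, ACCEPTED_GRANULARITIES))
    return granularity

validate_granularity(3600)   # fine: one-hour candles
# validate_granularity(120)  # raises ValueError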
java
public static <E extends Enum<E>> StreamableEnumSet<E> of(E first, E... rest) {
    StreamableEnumSet<E> set = new StreamableEnumSet<E>(first.getDeclaringClass());
    set.add(first);
    for (E e : rest) {
        set.add(e);
    }
    return set;
}
java
public static DictionaryMaker combineWhenNotInclude(String[] pathArray) {
    DictionaryMaker dictionaryMaker = new DictionaryMaker();
    logger.info("Processing main dictionary " + pathArray[0]);
    dictionaryMaker.addAll(DictionaryMaker.loadAsItemList(pathArray[0]));
    for (int i = 1; i < pathArray.length; ++i) {
        logger.info("Processing secondary dictionary " + pathArray[i] + ", filtering entries already present");
        dictionaryMaker.addAllNotCombine(DictionaryMaker.normalizeFrequency(DictionaryMaker.loadAsItemList(pathArray[i])));
    }
    return dictionaryMaker;
}
java
public static void removeConnectionData(Profile profile, String providerId) {
    Map<String, List<Map<String, Object>>> allConnections =
        profile.getAttribute(CONNECTIONS_ATTRIBUTE_NAME);
    if (MapUtils.isNotEmpty(allConnections)) {
        allConnections.remove(providerId);
    }
}
python
def reset(self):
    """Reset accumulated components and metric values"""
    if self.parallel:
        from pyannote.metrics import manager_
        self.accumulated_ = manager_.dict()
        self.results_ = manager_.list()
        self.uris_ = manager_.dict()
    else:
        self.accumulated_ = dict()
        self.results_ = list()
        self.uris_ = dict()

    for value in self.components_:
        self.accumulated_[value] = 0.
java
public Duration getWork(Date startDate, Date endDate, TimeUnit format) {
    DateRange range = new DateRange(startDate, endDate);
    Long cachedResult = m_workingDateCache.get(range);
    long totalTime = 0;

    if (cachedResult == null) {
        //
        // We want the start date to be the earliest date, and the end date
        // to be the latest date. Set a flag here to indicate if we have swapped
        // the order of the supplied date.
        //
        boolean invert = false;
        if (startDate.getTime() > endDate.getTime()) {
            invert = true;
            Date temp = startDate;
            startDate = endDate;
            endDate = temp;
        }

        Date canonicalStartDate = DateHelper.getDayStartDate(startDate);
        Date canonicalEndDate = DateHelper.getDayStartDate(endDate);

        if (canonicalStartDate.getTime() == canonicalEndDate.getTime()) {
            ProjectCalendarDateRanges ranges = getRanges(startDate, null, null);
            if (ranges.getRangeCount() != 0) {
                totalTime = getTotalTime(ranges, startDate, endDate);
            }
        } else {
            //
            // Find the first working day in the range
            //
            Date currentDate = startDate;
            Calendar cal = Calendar.getInstance();
            cal.setTime(startDate);
            Day day = Day.getInstance(cal.get(Calendar.DAY_OF_WEEK));
            while (isWorkingDate(currentDate, day) == false && currentDate.getTime() < canonicalEndDate.getTime()) {
                cal.add(Calendar.DAY_OF_YEAR, 1);
                currentDate = cal.getTime();
                day = day.getNextDay();
            }

            if (currentDate.getTime() < canonicalEndDate.getTime()) {
                //
                // Calculate the amount of working time for this day
                //
                totalTime += getTotalTime(getRanges(currentDate, null, day), currentDate, true);

                //
                // Process each working day until we reach the last day
                //
                while (true) {
                    cal.add(Calendar.DAY_OF_YEAR, 1);
                    currentDate = cal.getTime();
                    day = day.getNextDay();

                    //
                    // We have reached the last day
                    //
                    if (currentDate.getTime() >= canonicalEndDate.getTime()) {
                        break;
                    }

                    //
                    // Skip this day if it has no working time
                    //
                    ProjectCalendarDateRanges ranges = getRanges(currentDate, null, day);
                    if (ranges.getRangeCount() == 0) {
                        continue;
                    }

                    //
                    // Add the working time for the whole day
                    //
                    totalTime += getTotalTime(ranges);
                }
            }

            //
            // We are now at the last day
            //
            ProjectCalendarDateRanges ranges = getRanges(endDate, null, day);
            if (ranges.getRangeCount() != 0) {
                totalTime += getTotalTime(ranges, DateHelper.getDayStartDate(endDate), endDate);
            }
        }

        if (invert) {
            totalTime = -totalTime;
        }

        m_workingDateCache.put(range, Long.valueOf(totalTime));
    } else {
        totalTime = cachedResult.longValue();
    }

    return convertFormat(totalTime, format);
}
python
async def get_identity_document(client: Client, current_block: dict, pubkey: str) -> Identity:
    """
    Get the identity document of the pubkey

    :param client: Client to connect to the api
    :param current_block: Current block data
    :param pubkey: UID/Public key

    :rtype: Identity
    """
    # Here we request for the path wot/lookup/pubkey
    lookup_data = await client(bma.wot.lookup, pubkey)

    # init vars
    uid = None
    timestamp = BlockUID.empty()
    signature = None

    # parse results
    for result in lookup_data['results']:
        if result["pubkey"] == pubkey:
            uids = result['uids']
            for uid_data in uids:
                # capture data
                timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
                uid = uid_data["uid"]
                signature = uid_data["self"]

    # return self-certification document
    return Identity(
        version=10,
        currency=current_block['currency'],
        pubkey=pubkey,
        uid=uid,
        ts=timestamp,
        signature=signature
    )
python
def detectAndroid(self):
    """Return detection of an Android device

    Detects *any* Android OS-based device: phone, tablet,
    and multi-media player. Also detects Google TV.
    """
    if UAgentInfo.deviceAndroid in self.__userAgent \
            or self.detectGoogleTV():
        return True
    return False
python
def from_region(region, mesh_in, save_edges=False, save_faces=False,
                localize=False, is_surface=False):
    """
    Create a mesh corresponding to a given region.
    """
    mesh = Mesh(mesh_in.name + "_reg")
    mesh.coors = mesh_in.coors.copy()
    mesh.ngroups = mesh_in.ngroups.copy()

    mesh.conns = []
    mesh.descs = []
    mesh.mat_ids = []

    if not is_surface:
        if region.has_cells():
            for ig in region.igs:
                mesh.descs.append(mesh_in.descs[ig])
                els = region.get_cells(ig)
                mesh.mat_ids.append(mesh_in.mat_ids[ig][els, :].copy())
                mesh.conns.append(mesh_in.conns[ig][els, :].copy())

        if save_edges:
            ed = region.domain.ed
            for ig in region.igs:
                edges = region.get_edges(ig)
                mesh.descs.append('1_2')
                mesh.mat_ids.append(ed.data[edges, 0] + 1)
                mesh.conns.append(ed.data[edges, -2:].copy())

        if save_faces:
            mesh._append_region_faces(region)

        if save_edges or save_faces:
            mesh.descs.append({2: '2_3', 3: '3_4'}[mesh_in.dim])
            mesh.mat_ids.append(-nm.ones_like(region.all_vertices))
            mesh.conns.append(make_point_cells(region.all_vertices, mesh_in.dim))
    else:
        mesh._append_region_faces(region, force_faces=True)

    mesh._set_shape_info()
    if localize:
        mesh.localize(region.all_vertices)

    return mesh
java
@Override
public void channelHandlerContext(ChannelHandlerContext ctx) throws Http2Exception {
    this.ctx = checkNotNull(ctx, "ctx");

    // Writing the pending bytes will not check writability change and instead a writability change notification
    // to be provided by an explicit call.
    channelWritabilityChanged();

    // Don't worry about cleaning up queued frames here if ctx is null. It is expected that all streams will be
    // closed and the queue cleanup will occur when the stream state transitions occur.

    // If any frames have been queued up, we should send them now that we have a channel context.
    if (isChannelWritable()) {
        writePendingBytes();
    }
}
python
def add_configuration(options):
    """ interactively add a new configuration """
    if options.username is not None:
        username = options.username
    else:
        username = prompt('Username: ')

    if options.password is not None:
        password = options.password
    else:
        password = prompt('Password: ', hide_input=not options.show_password)

    if options.app_url is not None:
        app_url = options.app_url
    else:
        app_url = prompt('App URL (default: https://app.jut.io just hit enter): ')
        if app_url.strip() == '':
            app_url = 'https://app.jut.io'

    section = '%s@%s' % (username, app_url)

    if config.exists(section):
        raise JutException('Configuration for "%s" already exists' % section)

    token_manager = auth.TokenManager(username=username,
                                      password=password,
                                      app_url=app_url)

    authorization = authorizations.get_authorization(token_manager,
                                                     app_url=app_url)

    client_id = authorization['client_id']
    client_secret = authorization['client_secret']
    deployment_name = default_deployment(app_url, client_id, client_secret)

    config.add(section, **{
        'app_url': app_url,
        'deployment_name': deployment_name,
        'username': username,
        'client_id': client_id,
        'client_secret': client_secret
    })

    if options.default:
        config.set_default(name=section)
    else:
        default_configuration(interactive=False)
java
private Map<String, Object> toMap(CompositeData value) {
    Map<String, Object> data = new HashMap<String, Object>();
    for (String key : value.getCompositeType().keySet()) {
        data.put(key, value.get(key));
    }
    return data;
}
java
private String getVersion(MigrationModel migrationModel) {
    String version = migrationConfig.getVersion();
    if (version == null) {
        version = migrationModel.getNextVersion(initialVersion);
    }
    return version;
}
python
def add_conversion_step(self, converter: Converter[S, T], inplace: bool = False):
    """
    Utility method to add a converter to this chain. If inplace is True, this object is
    modified and None is returned. Otherwise, a copy is returned.

    :param converter: the converter to add
    :param inplace: boolean indicating whether to modify this object (True) or return a copy (False)
    :return: None or a copy with the converter added
    """
    # if the current chain is generic, raise an error
    if self.is_generic() and converter.is_generic():
        raise ValueError('Cannot chain this generic converter chain to the provided converter : '
                         'it is generic too!')

    # if the current chain is able to transform its input into a valid input for the new converter
    elif converter.can_be_appended_to(self, self.strict):
        if inplace:
            self._converters_list.append(converter)
            # update the current destination type
            self.to_type = converter.to_type
            return
        else:
            new = copy(self)
            new._converters_list.append(converter)
            # update the current destination type
            new.to_type = converter.to_type
            return new
    else:
        raise TypeError('Cannot register a converter on this conversion chain : source type \''
                        + get_pretty_type_str(converter.from_type)
                        + '\' is not compliant with current destination type of the chain : \''
                        + get_pretty_type_str(self.to_type) + ' (this chain performs '
                        + ('' if self.strict else 'non-') + 'strict mode matching)')
python
def exit_enable_mode(self, exit_command=""):
    """Exit enable mode.

    :param exit_command: Command that exits the session from privileged mode
    :type exit_command: str
    """
    output = ""
    if self.check_enable_mode():
        self.write_channel(self.normalize_cmd(exit_command))
        output += self.read_until_prompt()
        if self.check_enable_mode():
            raise ValueError("Failed to exit enable mode.")
    return output
python
def convertVariable(self, key, varName, varValue):
    """Puts the function in the globals() of the main module."""
    if isinstance(varValue, encapsulation.FunctionEncapsulation):
        result = varValue.getFunction()
        # Update the global scope of the function to match the current module
        mainModule = sys.modules["__main__"]
        result.__name__ = varName
        result.__globals__.update(mainModule.__dict__)
        setattr(mainModule, varName, result)
        shared.elements[key].update({
            varName: result,
        })
python
def _gotitem(self, key, ndim, subset=None):
    """
    Sub-classes to define. Return a sliced object.

    Parameters
    ----------
    key : string / list of selections
    ndim : 1,2
        requested ndim of result
    subset : object, default None
        subset to act on
    """
    # create a new object to prevent aliasing
    if subset is None:
        subset = self.obj

    # we need to make a shallow copy of ourselves
    # with the same groupby
    kwargs = {attr: getattr(self, attr) for attr in self._attributes}

    # Try to select from a DataFrame, falling back to a Series
    try:
        groupby = self._groupby[key]
    except IndexError:
        groupby = self._groupby

    self = self.__class__(subset,
                          groupby=groupby,
                          parent=self,
                          **kwargs)
    self._reset_cache()
    if subset.ndim == 2:
        if is_scalar(key) and key in subset or is_list_like(key):
            self._selection = key
    return self
python
def xmlprint(element):
    """
    pretty prints an ElementTree (or an Element of it), or the XML
    representation of a SaltDocument (or an element thereof, e.g. a node,
    edge, layer etc.)
    """
    if isinstance(element, (etree._Element, etree._ElementTree)):
        print etree.tostring(element, pretty_print=True)
    else:
        if hasattr(element, 'xml'):
            print etree.tostring(element.xml, pretty_print=True)
python
def _check_overlap(self, fragment):
    """
    Check that the interval of the given fragment does not overlap
    any existing interval in the list (except at its boundaries).
    Raises an error if not OK.
    """
    #
    # NOTE bisect does not work if there is a configuration like:
    #
    #      *********** <- existing interval
    #           ***    <- query interval
    #
    # TODO one should probably check this by doing bisect
    #      over the begin and end lists separately
    #
    for existing_fragment in self.fragments:
        if existing_fragment.interval.relative_position_of(fragment.interval) not in self.ALLOWED_POSITIONS:
            self.log_exc(u"interval overlaps another already present interval", None, True, ValueError)
python
def get_color(self, value):
    """Helper method to validate and map values used in the instantiation
    of the Color object to the correct unicode value.
    """
    if value in COLOR_SET:
        value = COLOR_MAP[value]
    else:
        try:
            value = int(value)
            if value >= 8:
                raise ValueError()
        except ValueError as exc:
            raise ValueError(
                'Colors should either be a member of: {} or a positive '
                'integer below 8'.format(', '.join(COLOR_NAMES)))
    return '{}{}'.format(self.PREFIX, value)
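The same lookup-then-validate pattern, reduced to a runnable sketch; COLOR_MAP, COLOR_SET and PREFIX here are stand-in assumptions, not the library's actual tables:

COLOR_MAP = {'red': 1, 'green': 2, 'blue': 4}
COLOR_SET = set(COLOR_MAP)
PREFIX = '\x1b[3'

def get_color(value):
    # Named colors map to their code; bare integers must stay below 8.
    if value in COLOR_SET:
        value = COLOR_MAP[value]
    else:
        value = int(value)
        if value >= 8:
            raise ValueError('expected a color name or an integer below 8')
    return '{}{}'.format(PREFIX, value)

print(repr(get_color('red')))  # '\x1b[31'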
java
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
    switch (featureID) {
        case AfplibPackage.MEDIUM_ORIENTATION__MED_ORIENT:
            return getMedOrient();
    }
    return super.eGet(featureID, resolve, coreType);
}
python
def connect(self, server):
    "Connects to a server and return a connection id."
    if 'connections' not in session:
        session['connections'] = {}
        session.save()
    conns = session['connections']
    id = str(len(conns))
    conn = Connection(server)
    conns[id] = conn
    yield request.environ['cogen.core'].events.AddCoro(conn.pull)
    yield id
java
public static <K, V> V first(NavigableMap<K, V> map) {
    return map.firstEntry().getValue();
}
java
public static <T> T handleExceptionAndThrowRuntimeEx(Logger log, Throwable throwable, String method, Object... params) {
    handleException(log, throwable, method, params);
    throw new RuntimeException(throwable);
}
java
private void add2MBR(int[] entrySorting, List<Heap<DoubleIntPair>> pqUB, List<Heap<DoubleIntPair>> pqLB, int index) {
    SpatialComparable currMBR = node.getEntry(entrySorting[index]);
    for (int d = 0; d < currMBR.getDimensionality(); d++) {
        double max = currMBR.getMax(d);
        pqUB.get(d).add(new DoubleIntPair(max, index));
        double min = currMBR.getMin(d);
        pqLB.get(d).add(new DoubleIntPair(min, index));
    }
}
java
public Collection<? extends CRL> engineGetCRLs(CRLSelector selector) throws CertStoreException {
    if (selector != null && !(selector instanceof X509CRLSelector)) {
        throw new IllegalArgumentException();
    }
    if (crlDelegate.getCollection() == null) {
        return new Vector<X509CRL>();
    }
    // Given that we always only use subject, how can we improve performance
    // here. Custom
    if (selector == null) {
        return crlDelegate.getCollection();
    } else {
        Vector<X509CRL> certSet = new Vector<X509CRL>();
        for (X509CRL crl : crlDelegate.getCollection()) {
            if (selector.match(crl)) {
                certSet.add(crl);
            }
        }
        return certSet;
    }
}
java
public boolean addEventExecution(EventExecution eventExecution) {
    boolean added = executionDAO.addEventExecution(eventExecution);
    if (added) {
        indexDAO.addEventExecution(eventExecution);
    }
    return added;
}
java
public List<DynamoDBMapper.FailedBatch> batchSave(Iterable<T> objectsToSave) {
    return mapper.batchWrite(objectsToSave, (Iterable<T>) Collections.<T>emptyList());
}
python
def generate_supplied_diff_subparser(subparsers):
    """Adds a sub-command parser to `subparsers` to run a diff query using
    the supplied results sets."""
    parser = subparsers.add_parser(
        'sdiff', description=constants.SUPPLIED_DIFF_DESCRIPTION,
        epilog=constants.SUPPLIED_DIFF_EPILOG,
        formatter_class=ParagraphFormatter, help=constants.SUPPLIED_DIFF_HELP)
    parser.set_defaults(func=supplied_diff)
    utils.add_common_arguments(parser)
    utils.add_tokenizer_argument(parser)
    utils.add_db_arguments(parser, True)
    utils.add_supplied_query_arguments(parser)
java
public Observable<ServiceResponse<DatabaseVulnerabilityAssessmentRuleBaselineInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String managedInstanceName, String databaseName, String ruleId, VulnerabilityAssessmentPolicyBaselineName baselineName, List<DatabaseVulnerabilityAssessmentRuleBaselineItem> baselineResults) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (managedInstanceName == null) {
        throw new IllegalArgumentException("Parameter managedInstanceName is required and cannot be null.");
    }
    if (databaseName == null) {
        throw new IllegalArgumentException("Parameter databaseName is required and cannot be null.");
    }
    if (ruleId == null) {
        throw new IllegalArgumentException("Parameter ruleId is required and cannot be null.");
    }
    if (baselineName == null) {
        throw new IllegalArgumentException("Parameter baselineName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    if (baselineResults == null) {
        throw new IllegalArgumentException("Parameter baselineResults is required and cannot be null.");
    }
    Validator.validate(baselineResults);
    final String vulnerabilityAssessmentName = "default";
    DatabaseVulnerabilityAssessmentRuleBaselineInner parameters = new DatabaseVulnerabilityAssessmentRuleBaselineInner();
    parameters.withBaselineResults(baselineResults);
    return service.createOrUpdate(resourceGroupName, managedInstanceName, databaseName, vulnerabilityAssessmentName, ruleId, baselineName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), parameters, this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<DatabaseVulnerabilityAssessmentRuleBaselineInner>>>() {
            @Override
            public Observable<ServiceResponse<DatabaseVulnerabilityAssessmentRuleBaselineInner>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<DatabaseVulnerabilityAssessmentRuleBaselineInner> clientResponse = createOrUpdateDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
python
def script(self, **kwargs):
    """
    Define update action to take:
    https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html
    for more details.

    Note: the API only accepts a single script, so calling the script
    multiple times will overwrite.

    Example::

        ubq = Search()
        ubq = ubq.script(source="ctx._source.likes++")

        ubq = ubq.script(source="ctx._source.likes += params.f",
                         lang="expression",
                         params={'f': 3})
    """
    ubq = self._clone()
    if ubq._script:
        ubq._script = {}
    ubq._script.update(kwargs)
    return ubq
python
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'debug') and self.debug is not None:
        _dict['debug'] = self.debug
    if hasattr(self, 'restart') and self.restart is not None:
        _dict['restart'] = self.restart
    if hasattr(self, 'alternate_intents') and self.alternate_intents is not None:
        _dict['alternate_intents'] = self.alternate_intents
    if hasattr(self, 'return_context') and self.return_context is not None:
        _dict['return_context'] = self.return_context
    return _dict
python
def check_backend() -> bool:
    """Check if the backend is available."""
    try:
        import bluepy.btle  # noqa: F401 # pylint: disable=unused-import
        return True
    except ImportError as importerror:
        _LOGGER.error('bluepy not found: %s', str(importerror))
    return False
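The try-import probe above generalizes to any optional dependency; here is a minimal, self-contained variant using importlib (the function name is ours):

import importlib
import logging

_LOGGER = logging.getLogger(__name__)

def backend_available(module_name):
    # Probe for an optional dependency without hard-failing at import time.
    try:
        importlib.import_module(module_name)
        return True
    except ImportError as err:
        _LOGGER.error('%s not found: %s', module_name, err)
        return False

print(backend_available('json'))          # True: stdlib is always there
print(backend_available('no_such_mod'))   # False, plus an error log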
java
private Future<Channel> acquireHealthyFromPoolOrNew(final Promise<Channel> promise) {
    try {
        final Channel ch = pollChannel();
        if (ch == null) {
            // No Channel left in the pool bootstrap a new Channel
            Bootstrap bs = bootstrap.clone();
            bs.attr(POOL_KEY, this);
            ChannelFuture f = connectChannel(bs);
            if (f.isDone()) {
                notifyConnect(f, promise);
            } else {
                f.addListener(new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture future) throws Exception {
                        notifyConnect(future, promise);
                    }
                });
            }
            return promise;
        }
        EventLoop loop = ch.eventLoop();
        if (loop.inEventLoop()) {
            doHealthCheck(ch, promise);
        } else {
            loop.execute(new Runnable() {
                @Override
                public void run() {
                    doHealthCheck(ch, promise);
                }
            });
        }
    } catch (Throwable cause) {
        promise.tryFailure(cause);
    }
    return promise;
}
java
public final void synpred12_DRL6Expressions_fragment() throws RecognitionException {
    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:438:8: ( squareArguments shiftExpression )
    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:438:9: squareArguments shiftExpression
    {
        pushFollow(FOLLOW_squareArguments_in_synpred12_DRL6Expressions2066);
        squareArguments();
        state._fsp--;
        if (state.failed) return;

        pushFollow(FOLLOW_shiftExpression_in_synpred12_DRL6Expressions2068);
        shiftExpression();
        state._fsp--;
        if (state.failed) return;
    }
}
python
def _format_json(data, theme):
    """Pretty print a dict as a JSON, with colors if pygments is present."""
    output = json.dumps(data, indent=2, sort_keys=True)

    if pygments and sys.stdout.isatty():
        style = get_style_by_name(theme)
        formatter = Terminal256Formatter(style=style)
        return pygments.highlight(output, JsonLexer(), formatter)

    return output
python
def shape(self) -> Tuple[int, ...]:
    """Required shape of |NetCDFVariableDeep.array|.

    For the default configuration, the first axis corresponds to the
    number of devices, and the second one to the number of timesteps.
    We show this for the 0-dimensional input sequence |lland_inputs.Nied|:

    >>> from hydpy.core.examples import prepare_io_example_1
    >>> nodes, elements = prepare_io_example_1()
    >>> from hydpy.core.netcdftools import NetCDFVariableDeep
    >>> ncvar = NetCDFVariableDeep('input_nied', isolate=False, timeaxis=1)
    >>> for element in elements:
    ...     ncvar.log(element.model.sequences.inputs.nied, None)
    >>> ncvar.shape
    (3, 4)

    For higher dimensional sequences, each new entry corresponds
    to the maximum number of fields the respective sequences require.
    In the next example, we select the 1-dimensional sequence
    |lland_fluxes.NKor|.  The maximum number 3 (last value of the
    returned |tuple|) is due to the third element defining three
    hydrological response units:

    >>> ncvar = NetCDFVariableDeep('flux_nkor', isolate=False, timeaxis=1)
    >>> for element in elements:
    ...     ncvar.log(element.model.sequences.fluxes.nkor, None)
    >>> ncvar.shape
    (3, 4, 3)

    When using the first axis for time (`timeaxis=0`) the order of the
    first two |tuple| entries turns:

    >>> ncvar = NetCDFVariableDeep('flux_nkor', isolate=False, timeaxis=0)
    >>> for element in elements:
    ...     ncvar.log(element.model.sequences.fluxes.nkor, None)
    >>> ncvar.shape
    (4, 3, 3)
    """
    nmb_place = len(self.sequences)
    nmb_time = len(hydpy.pub.timegrids.init)
    nmb_others = collections.deque()
    for sequence in self.sequences.values():
        nmb_others.append(sequence.shape)
    nmb_others_max = tuple(numpy.max(nmb_others, axis=0))
    return self.sort_timeplaceentries(nmb_time, nmb_place) + nmb_others_max
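The timeaxis behaviour the docstring describes boils down to swapping the two leading axes; a tiny numpy illustration (the data values are made up):

import numpy as np

data = np.arange(12).reshape(3, 4)    # 3 devices x 4 timesteps (timeaxis=1)
print(data.shape)                     # (3, 4)
print(np.swapaxes(data, 0, 1).shape)  # (4, 3) -- the timeaxis=0 layout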
java
public DirectionsApiRequest waypointsFromPlaceIds(String... waypoints) {
    Waypoint[] objWaypoints = new Waypoint[waypoints.length];
    for (int i = 0; i < waypoints.length; i++) {
        objWaypoints[i] = new Waypoint(prefixPlaceId(waypoints[i]));
    }
    return waypoints(objWaypoints);
}
python
def createMemoryParserCtxt(buffer, size):
    """Create a parser context for an XML in-memory document. """
    ret = libxml2mod.xmlCreateMemoryParserCtxt(buffer, size)
    if ret is None:
        raise parserError('xmlCreateMemoryParserCtxt() failed')
    return parserCtxt(_obj=ret)
java
@GuardedBy("evictionLock")
boolean admit(K candidateKey, K victimKey) {
    int victimFreq = frequencySketch().frequency(victimKey);
    int candidateFreq = frequencySketch().frequency(candidateKey);
    if (candidateFreq > victimFreq) {
        return true;
    } else if (candidateFreq <= 5) {
        // The maximum frequency is 15 and halved to 7 after a reset to age the history. An attack
        // exploits that a hot candidate is rejected in favor of a hot victim. The threshold of a warm
        // candidate reduces the number of random acceptances to minimize the impact on the hit rate.
        return false;
    }
    int random = ThreadLocalRandom.current().nextInt();
    return ((random & 127) == 0);
}
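The admission policy above (TinyLFU-style) can be sketched with a plain dict standing in for the frequency sketch; this is an approximation of the logic, not the cache's actual implementation:

import random

def admit(freq, candidate_key, victim_key):
    candidate = freq.get(candidate_key, 0)
    victim = freq.get(victim_key, 0)
    if candidate > victim:
        return True
    if candidate <= 5:
        # Warm-candidate threshold guards against hash-flooding attacks.
        return False
    # Rare random acceptance (about 1 in 128) breaks hot-vs-hot ties.
    return (random.getrandbits(32) & 127) == 0

print(admit({'a': 9, 'b': 3}, 'a', 'b'))  # True: candidate clearly hotter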
python
def add_device(self, model, serial):
    """ Returns 'device object' of newly created device.

    http://docs.exosite.com/portals/#create-device
    http://docs.exosite.com/portals/#device-object
    """
    device = {
        'model': model,
        'vendor': self.vendor(),
        'sn': serial,
        'type': 'vendor'
    }
    headers = {
        'User-Agent': self.user_agent(),
    }
    headers.update(self.headers())
    r = requests.post(
        self.portals_url() + '/portals/' + self.portal_id() + '/devices',
        data=json.dumps(device),
        headers=headers,
        auth=self.auth())
    if HTTP_STATUS.ADDED == r.status_code:
        # fix the 'meta' to be dictionary instead of string
        device_obj = r.json()
        return dictify_device_meta(device_obj)
    else:
        print("add_device: Something went wrong: <{0}>: {1}".format(
            r.status_code, r.reason))
        r.raise_for_status()
python
def add_context(request):
    """ Add variables to all dictionaries passed to templates. """
    # Whether the user has president privileges
    try:
        PRESIDENT = Manager.objects.filter(
            incumbent__user=request.user,
            president=True,
        ).count() > 0
    except TypeError:
        PRESIDENT = False

    # If the user is logged in as an anonymous user
    if request.user.username == ANONYMOUS_USERNAME:
        request.session["ANONYMOUS_SESSION"] = True
    ANONYMOUS_SESSION = request.session.get("ANONYMOUS_SESSION", False)

    # A list with items of form (RequestType, number_of_open_requests)
    request_types = list()
    if request.user.is_authenticated():
        for request_type in RequestType.objects.filter(enabled=True):
            requests = Request.objects.filter(
                request_type=request_type,
                status=Request.OPEN,
            )
            if not request_type.managers.filter(incumbent__user=request.user):
                requests = requests.exclude(
                    ~Q(owner__user=request.user),
                    private=True,
                )
            request_types.append((request_type, requests.count()))

    profile_requests_count = ProfileRequest.objects.all().count()
    admin_unread_count = profile_requests_count

    return {
        "REQUEST_TYPES": request_types,
        "HOUSE": settings.HOUSE_NAME,
        "ANONYMOUS_USERNAME": ANONYMOUS_USERNAME,
        "SHORT_HOUSE": settings.SHORT_HOUSE_NAME,
        "ADMIN": settings.ADMINS[0],
        "NUM_OF_PROFILE_REQUESTS": profile_requests_count,
        "ADMIN_UNREAD_COUNT": admin_unread_count,
        "ANONYMOUS_SESSION": ANONYMOUS_SESSION,
        "PRESIDENT": PRESIDENT,
    }
python
def _set_upload_url(self):
    """Generate the full URL for a POST."""
    # pylint: disable=protected-access
    self._upload_url = "/".join([
        self.jss._url, self._url, self.resource_type, self.id_type,
        str(self._id)])
java
public static StorageVol findVolume(Connect connection, String path) throws LibvirtException {
    log.debug("Looking up StorageVolume for path '{}'", path);
    for (String s : connection.listStoragePools()) {
        StoragePool sp = connection.storagePoolLookupByName(s);
        for (String v : sp.listVolumes()) {
            StorageVol vol = sp.storageVolLookupByName(v);
            if (vol.getPath().equals(path)) {
                log.debug("Found volume '{}' for path '{}'", vol.getName(), path);
                return vol;
            }
        }
    }
    throw new LibvirtRuntimeException("no volume found for path " + path);
}
python
def csv_to_list(csv_file):
    """
    Open and transform a CSV file into a matrix (list of lists).

    .. code:: python

        reusables.csv_to_list("example.csv")
        # [['Name', 'Location'],
        #  ['Chris', 'South Pole'],
        #  ['Harry', 'Depth of Winter'],
        #  ['Bob', 'Skull']]

    :param csv_file: Path to CSV file as str
    :return: list
    """
    with open(csv_file, 'r' if PY3 else 'rb') as f:
        return list(csv.reader(f))
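A quick round-trip check of the helper above (Python 3 path; the file name is arbitrary):

import csv

with open('example.csv', 'w', newline='') as f:
    csv.writer(f).writerows([['Name', 'Location'], ['Chris', 'South Pole']])

with open('example.csv', 'r', newline='') as f:
    print(list(csv.reader(f)))  # [['Name', 'Location'], ['Chris', 'South Pole']]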
python
def eventgroup_delete(self, event_group_id="0.0.0", account=None, **kwargs):
    """ Delete an eventgroup. This needs to be **proposed**.

    :param str event_group_id: ID of the event group to be deleted
    :param str account: (optional) Account used to verify the operation
    """
    if not account:
        if "default_account" in config:
            account = config["default_account"]
    if not account:
        raise ValueError("You need to provide an Account")
    account = Account(account)
    eventgroup = EventGroup(event_group_id)

    op = operations.Event_group_delete(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "event_group_id": eventgroup["id"],
            "prefix": self.prefix,
        }
    )
    return self.finalizeOp(op, account["name"], "active", **kwargs)
java
public static boolean validOptions(String[][] options, DocErrorReporter reporter) {
    return SARL_DOCLET.configuration.validOptions(options, reporter);
}
java
public IndexFacesResult withFaceRecords(FaceRecord... faceRecords) {
    if (this.faceRecords == null) {
        setFaceRecords(new java.util.ArrayList<FaceRecord>(faceRecords.length));
    }
    for (FaceRecord ele : faceRecords) {
        this.faceRecords.add(ele);
    }
    return this;
}
java
public CreateApiKeyRequest withStageKeys(StageKey... stageKeys) {
    if (this.stageKeys == null) {
        setStageKeys(new java.util.ArrayList<StageKey>(stageKeys.length));
    }
    for (StageKey ele : stageKeys) {
        this.stageKeys.add(ele);
    }
    return this;
}
java
public Iterable<Di18n> queryByUpdatedDate(java.util.Date updatedDate) {
    return queryByField(null, Di18nMapper.Field.UPDATEDDATE.getFieldName(), updatedDate);
}
java
protected void updateText(PageElement pageElement, String textOrKey, CharSequence keysToSend, Object... args) throws TechnicalException, FailureException {
    String value = getTextOrKey(textOrKey);
    if (!"".equals(value)) {
        try {
            final WebElement element = Context.waitUntil(ExpectedConditions.elementToBeClickable(Utilities.getLocator(pageElement, args)));
            element.clear();
            if (DriverFactory.IE.equals(Context.getBrowser())) {
                final String javascript = "arguments[0].value='" + value + "';";
                ((JavascriptExecutor) getDriver()).executeScript(javascript, element);
            } else {
                element.sendKeys(value);
            }
            if (keysToSend != null) {
                element.sendKeys(keysToSend);
            }
        } catch (final Exception e) {
            new Result.Failure<>(e.getMessage(), Messages.format(Messages.getMessage(Messages.FAIL_MESSAGE_ERROR_ON_INPUT), pageElement, pageElement.getPage().getApplication()), true, pageElement.getPage().getCallBack());
        }
    } else {
        logger.debug("Empty data provided. No need to update text. If you want clear data, you need use: \"I clear text in ...\"");
    }
}
java
@Deprecated
public static JpaModule newServerModule(EntityManagerFactory emFactory, EntityManager em, TransactionRunner transactionRunner) {
    JpaModuleConfig config = new JpaModuleConfig();
    config.exposeAllEntities(emFactory);
    return new JpaModule(config, emFactory, () -> em, transactionRunner);
}
java
static LHS getLHSObjectField(Object object, String fieldName) throws UtilEvalError, ReflectError {
    if (object instanceof This)
        return new LHS(((This) object).namespace, fieldName, false);

    try {
        Invocable f = resolveExpectedJavaField(object.getClass(), fieldName, false/*staticOnly*/);
        return new LHS(object, f);
    } catch (ReflectError e) {
        NameSpace ns = getThisNS(object);
        if (isGeneratedClass(object.getClass()) && null != ns && ns.isClass) {
            Variable var = ns.getVariableImpl(fieldName, true);
            if (null != var && (!var.hasModifier("private") || haveAccessibility()))
                return new LHS(ns, fieldName);
        }
        // not a field, try property access
        if (hasObjectPropertySetter(object.getClass(), fieldName))
            return new LHS(object, fieldName);
        else
            throw e;
    }
}
python
def set_logscale(self, t=True):
    """
    - set_logscale(): If M is the matrix of the image, it defines the
      image M as log10(M+1).
    """
    if t == self.get_logscale():
        return
    else:
        if t:
            self.__image = np.log10(self.__image + 1)
            self.__logscale_flag = True
        else:
            self.__image = 10**self.__image - 1.
            self.__logscale_flag = False
python
def _set_bearer_user_vars_local(token, allowed_client_ids, scopes):
    """Validate the oauth bearer token on the dev server.

    Since the functions in the oauth module return only example results in
    local development, this hits the tokeninfo endpoint and attempts to
    validate the token.  If it's valid, we'll set _ENV_AUTH_EMAIL and
    _ENV_AUTH_DOMAIN so we can get the user from the token.

    Args:
        token: String with the oauth token to validate.
        allowed_client_ids: List of client IDs that are acceptable.
        scopes: List of acceptable scopes.
    """
    # Get token info from the tokeninfo endpoint.
    result = urlfetch.fetch(
        '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token})))
    if result.status_code != 200:
        try:
            error_description = json.loads(result.content)['error_description']
        except (ValueError, KeyError):
            error_description = ''
        _logger.error('Token info endpoint returned status %s: %s',
                      result.status_code, error_description)
        return
    token_info = json.loads(result.content)

    # Validate email.
    if 'email' not in token_info:
        _logger.warning('Oauth token doesn\'t include an email address.')
        return
    if token_info.get('email_verified') != 'true':
        _logger.warning('Oauth token email isn\'t verified.')
        return

    # Validate client ID.
    client_id = token_info.get('azp')
    if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and
            client_id not in allowed_client_ids):
        _logger.warning('Client ID is not allowed: %s', client_id)
        return

    # Verify at least one of the scopes matches.
    _, sufficient_scopes = _process_scopes(scopes)
    authorized_scopes = token_info.get('scope', '').split(' ')
    if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes):
        _logger.warning('Oauth token scopes don\'t match any acceptable scopes.')
        return

    os.environ[_ENV_AUTH_EMAIL] = token_info['email']
    os.environ[_ENV_AUTH_DOMAIN] = ''
    _logger.debug('Local dev returning user from token.')
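One plausible reading of the final scope test, as a standalone sketch — `_are_scopes_sufficient` is internal to the framework, so treat this as an assumption about its behaviour:

def scopes_sufficient(authorized, sufficient_scope_sets):
    # Any one fully-covered scope set is enough to accept the token.
    return any(set(s) <= set(authorized) for s in sufficient_scope_sets)

print(scopes_sufficient(['email', 'profile'], [{'email'}]))          # True
print(scopes_sufficient(['email'], [{'email', 'openid'}, {'cal'}]))  # False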
java
public void marshall(AddIpRoutesRequest addIpRoutesRequest, ProtocolMarshaller protocolMarshaller) {
    if (addIpRoutesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(addIpRoutesRequest.getDirectoryId(), DIRECTORYID_BINDING);
        protocolMarshaller.marshall(addIpRoutesRequest.getIpRoutes(), IPROUTES_BINDING);
        protocolMarshaller.marshall(addIpRoutesRequest.getUpdateSecurityGroupForDirectoryControllers(), UPDATESECURITYGROUPFORDIRECTORYCONTROLLERS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
protected final void setRequiresCheckpoint() {
    final String methodName = "setRequiresCheckpoint";
    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
        trace.entry(this, cclass, methodName, new Object[] { new Boolean(requiresPersistentCheckpoint) });

    // The states for which a checkpoint log record must be written to the log unless this
    // transaction ends first. Has any logging been done? If the transaction enters one of these
    // states after this call then all of its state will be in the log after CheckpointStart.
    final boolean checkpointRequired[] = {
        false,
        false,
        false,
        true,   // ActivePersistent.
        false,
        false,
        true,   // PrePreparedPersistent.
        false,
        false,
        true,   // PreparedPersistent.
        false,
        false,
        true,   // CommitingPersistent. Not needed because of synchronize in commit.
        false,
        false,
        true,   // BackingOutPersistent. Not needed because of synchronize in commit.
        false
    };
    requiresPersistentCheckpoint = checkpointRequired[state];

    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
        trace.exit(this, cclass, methodName, new Object[] { new Boolean(requiresPersistentCheckpoint) });
}
python
def template(key, books=None, optional=False):
    """Starts a Pretty Tensor graph template.

    ## Template Mode

    Templates allow you to define a graph with some unknown values. The most
    common use case is to leave the input undefined and then define a graph
    normally. The variables are only defined once the first time the graph is
    constructed.  For example:

        template = (pretty_tensor.template('input')
                    .fully_connected(200, name='l1')
                    .fully_connected(200, name='l2'))
        train_output = template.construct(input=train_data)

        # All parameters are reused when the same template object is called again.
        test_output = template.construct(input=test_data)

    Any argument to a pretty tensor method can be substituted by using an
    `UnboundVariable`. This allows you to parameterize a graph in arbitrary
    ways. The most canonical usage would be to substitute a phase variable.

        with pretty_tensor.defaults_scope(phase=UnboundVariable('train')):
            # dropout uses train to optionally disable itself.
            template = (pretty_tensor.template('input')
                        .fully_connected(200, name='l1')
                        .fully_connected(200, name='l2')
                        .dropout(.8))
        train_output = template.construct(input=train_data, train=True)
        test_output = template.construct(input=test_data, train=False)

    You should use caution because if a template is called with incompatible
    values (e.g. train and test using different widths), then it will break.
    This is because we guarantee variable reuse across instantiations.

        template = (pretty_tensor.template('input')
                    .fully_connected(200, name='l1')
                    .fully_connected(
                        pretty_tensor.UnboundVariable('width'), name='l2'))
        train_output = template.construct(input=train_data, width=200)

        # The following line will die because the shared parameter is the
        # wrong size.
        test_output = template.construct(input=test_data, width=100)

    A Layer in the resulting graph can be realized by calling
    `bind(key=value)` and then `construct`.

    Args:
      key: A key for this template, used for assigning the correct substitution.
      books: The bookkeeper.
      optional: If this template is an optional value.

    Returns:
      A template that can be constructed or attached to other layers and that
      guarantees parameter reuse when constructed/attached multiple times.
    """
    if books is None:
        books = bookkeeper.for_default_graph()

    def set_input_from_unbound_var(data):
        """Sets the input from the given unbound_var."""
        if data is not None:
            return wrap(data, books)
        else:
            return None

    if optional:
        data = UnboundVariable(key=key, default=None)
    else:
        data = UnboundVariable(key=key)
    return _DeferredLayer(books, set_input_from_unbound_var, [data], {})
java
public String getAttribute(final DirContext ctx, final String dn, final String attributeName) throws NamingException {
    final Attributes attributes = ctx.getAttributes(dn);
    return getAttribute(attributes, attributeName);
}
python
def hplogistic(self, data: ['SASdata', str] = None,
               by: str = None,
               cls: [str, list] = None,
               code: str = None,
               freq: str = None,
               id: str = None,
               model: str = None,
               out: [str, bool, 'SASdata'] = None,
               partition: str = None,
               score: [str, bool, 'SASdata'] = True,
               selection: str = None,
               weight: str = None,
               procopts: str = None,
               stmtpassthrough: str = None,
               **kwargs: dict) -> 'SASresults':
    """
    Python method to call the HPLOGISTIC procedure.

    Documentation link:
    https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=stathpug&docsetTarget=stathpug_hplogistic_toc.htm&locale=en

    :param data: SASdata object or string. This parameter is required.
    :param by: The by variable can only be a string type.
    :param cls: The cls variable can be a string or list type. It refers to the categorical, or nominal variables.
    :param code: The code variable can only be a string type.
    :param freq: The freq variable can only be a string type.
    :param id: The id variable can only be a string type.
    :param model: The model variable can only be a string type.
    :param out: The out variable can be a string, boolean or SASdata type. The member name for a boolean is "_output".
    :param partition: The partition variable can only be a string type.
    :param score: The score variable can only be a string type.
    :param selection: The selection variable can only be a string type.
    :param weight: The weight variable can only be a string type.
    :param procopts: The procopts variable is a generic option available for advanced use. It can only be a string type.
    :param stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type.
    :return: SAS Result Object
    """
java
SubscriptionMessageHandler removeRemoteSubscription(SIBUuid12 topicSpace,
                                                    String topic,
                                                    SubscriptionMessageHandler messageHandler,
                                                    boolean sendProxy) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "removeRemoteSubscription", new Object[] { topicSpace, topic, messageHandler });

    messageHandler = removeSubscription(topicSpace, topic, messageHandler, iRemoteSubscriptions, sendProxy);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "removeRemoteSubscription", messageHandler);

    return messageHandler;
}
java
public FleetLaunchTemplateConfig withOverrides(FleetLaunchTemplateOverrides... overrides) {
    if (this.overrides == null) {
        setOverrides(new com.amazonaws.internal.SdkInternalList<FleetLaunchTemplateOverrides>(overrides.length));
    }
    for (FleetLaunchTemplateOverrides ele : overrides) {
        this.overrides.add(ele);
    }
    return this;
}
python
def api_handler(input_data, cloud, api, url_params=None, batch_size=None, **kwargs):
    """
    Sends finalized request data to ML server and receives response.
    If a batch_size is specified, breaks down a request into smaller
    component requests and aggregates the results.
    """
    url_params = url_params or {}
    input_data = standardize_input_data(input_data)
    cloud = cloud or config.cloud
    host = "%s.indico.domains" % cloud if cloud else config.host

    # LOCAL DEPLOYMENTS
    if not (host.endswith('indico.domains') or host.endswith('indico.io')):
        url_protocol = "http"
    else:
        url_protocol = config.url_protocol

    headers = dict(JSON_HEADERS)
    headers["X-ApiKey"] = url_params.get("api_key") or config.api_key
    url = create_url(url_protocol, host, api, dict(kwargs, **url_params))
    return collect_api_results(input_data, url, headers, api, batch_size, kwargs)
python
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    if not os.path.isdir(path):
        return False
    setup_py = os.path.join(path, "setup.py")
    if os.path.isfile(setup_py):
        return True
    return False
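A throwaway-directory check of the predicate above; this assumes `is_installable_dir` from the snippet is in scope:

import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    print(is_installable_dir(d))              # False: no setup.py yet
    open(os.path.join(d, 'setup.py'), 'w').close()
    print(is_installable_dir(d))              # True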
python
def call(method, path, ret_type, ret_is_list=False, data=None, params=None,
         api_version=1):
    """
    Generic function for calling a resource method and automatically dealing
    with serialization of parameters and deserialization of return values.

    @param method: method to call (must be bound to a resource; e.g.,
                   "resource_root.get").
    @param path: the full path of the API method to call.
    @param ret_type: return type of the call.
    @param ret_is_list: whether the return type is an ApiList.
    @param data: Optional data to send as payload to the call.
    @param params: Optional query parameters for the call.
    @param api_version: minimum API version for the call.
    """
    check_api_version(method.im_self, api_version)
    if data is not None:
        data = json.dumps(Attr(is_api_list=True).to_json(data, False))
        ret = method(path, data=data, params=params)
    else:
        ret = method(path, params=params)
    if ret_type is None:
        return
    elif ret_is_list:
        return ApiList.from_json_dict(ret, method.im_self, ret_type)
    elif isinstance(ret, list):
        return [ret_type.from_json_dict(x, method.im_self) for x in ret]
    else:
        return ret_type.from_json_dict(ret, method.im_self)
python
def command_schema(self, name=None):
    '''
    Prints current database schema (according to the sqlalchemy database model)::

        ./manage.py sqla:schema [name]
    '''
    meta_name = table_name = None
    if name:
        if isinstance(self.metadata, MetaData):
            table_name = name
        elif '.' in name:
            meta_name, table_name = name.split('.', 1)
        else:
            meta_name = name

    def _print_metadata_schema(metadata):
        if table_name is None:
            for table in metadata.sorted_tables:
                print(self._schema(table))
        else:
            try:
                table = metadata.tables[table_name]
            except KeyError:
                sys.exit('Table {} is not found'.format(name))
            print(self._schema(table))

    if isinstance(self.metadata, MetaData):
        _print_metadata_schema(self.metadata)
    else:
        for current_meta_name, metadata in self.metadata.items():
            if meta_name not in (current_meta_name, None):
                continue
            _print_metadata_schema(metadata)
python
def WriteFlowLogEntries(self, entries):
    """Writes flow output plugin log entries for a given flow."""
    flow_ids = [(e.client_id, e.flow_id) for e in entries]
    for f in flow_ids:
        if f not in self.flows:
            raise db.AtLeastOneUnknownFlowError(flow_ids)

    for e in entries:
        dest = self.flow_log_entries.setdefault((e.client_id, e.flow_id), [])
        to_write = e.Copy()
        to_write.timestamp = rdfvalue.RDFDatetime.Now()
        dest.append(to_write)
java
byte[] readBinary() throws IOException {
    TempOutputStream tos = new TempOutputStream();

    while (true) {
        int ch = read();
        int len;

        switch (ch) {
        default:
            _peek = ch;
            return tos.toByteArray();

        case 0x20: case 0x21: case 0x22: case 0x23:
        case 0x24: case 0x25: case 0x26: case 0x27:
        case 0x28: case 0x29: case 0x2a: case 0x2b:
        case 0x2c: case 0x2d: case 0x2e: case 0x2f:
            len = ch - 0x20;
            readBinary(tos, len);
            return tos.toByteArray();

        case 0x34: case 0x35: case 0x36: case 0x37:
            len = ((ch - 0x34) << 8) + read();
            readBinary(tos, len);
            return tos.toByteArray();

        case 'B':
            len = readShort();
            readBinary(tos, len);
            return tos.toByteArray();

        case 'A':
            len = readShort();
            readBinary(tos, len);
            break;
        }
    }
}
java
@BetaApi
public final Operation deleteTargetHttpsProxy(ProjectGlobalTargetHttpsProxyName targetHttpsProxy) {
    DeleteTargetHttpsProxyHttpRequest request =
        DeleteTargetHttpsProxyHttpRequest.newBuilder()
            .setTargetHttpsProxy(targetHttpsProxy == null ? null : targetHttpsProxy.toString())
            .build();
    return deleteTargetHttpsProxy(request);
}
python
def _replace_numeric_markers(operation, string_parameters):
    """
    Replaces qmark, format, and numeric markers in the given operation, from
    the string_parameters list.

    Raises ProgrammingError on wrong number of parameters or bindings
    when using qmark. There is no error checking on numeric parameters.
    """
    def replace_markers(marker, op, parameters):
        param_count = len(parameters)
        marker_index = 0
        start_offset = 0
        while True:
            found_offset = op.find(marker, start_offset)
            if not found_offset > -1:
                break
            if marker_index < param_count:
                op = op[:found_offset] + op[found_offset:].replace(
                    marker, parameters[marker_index], 1)
                start_offset = found_offset + len(parameters[marker_index])
                marker_index += 1
            else:
                raise ProgrammingError("Incorrect number of bindings "
                                       "supplied. The current statement uses "
                                       "%d or more, and there are %d "
                                       "supplied." % (marker_index + 1,
                                                      param_count))
        if marker_index != 0 and marker_index != param_count:
            raise ProgrammingError("Incorrect number of bindings "
                                   "supplied. The current statement uses "
                                   "%d or more, and there are %d supplied."
                                   % (marker_index + 1, param_count))
        return op

    # replace qmark parameters and format parameters
    operation = replace_markers('?', operation, string_parameters)
    operation = replace_markers(r'%s', operation, string_parameters)

    # replace numbered parameters
    # Go through them backwards so smaller numbers don't replace
    # parts of larger ones
    for index in range(len(string_parameters), 0, -1):
        operation = operation.replace(':' + str(index),
                                      string_parameters[index - 1])
    return operation
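Why the numbered markers are replaced from highest index to lowest, shown on a contrived statement: replacing ':1' first would also clobber the prefix of ':10', ':11', and so on.

params = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k']
op = 'SELECT :1, :11'
for index in range(len(params), 0, -1):
    op = op.replace(':' + str(index), params[index - 1])
print(op)  # SELECT a, k  (ascending order would have produced 'SELECT a, a1')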
python
def show_image(kwargs, call=None):
    '''
    Show the details from aliyun image
    '''
    if call != 'function':
        raise SaltCloudSystemExit(
            'The show_images function must be called with '
            '-f or --function'
        )

    if not isinstance(kwargs, dict):
        kwargs = {}

    location = get_location()
    if 'location' in kwargs:
        location = kwargs['location']

    params = {
        'Action': 'DescribeImages',
        'RegionId': location,
        'ImageId': kwargs['image']
    }

    ret = {}
    items = query(params=params)
    # DescribeImages so far support input multi-image. And
    # if not found certain image, the response will include
    # blank image list other than 'not found' error message
    if 'Code' in items or not items['Images']['Image']:
        raise SaltCloudNotFound('The specified image could not be found.')

    log.debug(
        'Total %s image found in Region %s',
        items['TotalCount'], location
    )

    for image in items['Images']['Image']:
        ret[image['ImageId']] = {}
        for item in image:
            ret[image['ImageId']][item] = six.text_type(image[item])

    return ret
java
public Selection restrictArgumentType(int arg, Class type) {
    List<Function> lst;
    int i;
    Class tmp;
    Class[] paras;

    lst = new ArrayList<Function>();
    for (i = 0; i < functions.length; i++) {
        paras = functions[i].getParameterTypes();
        if (arg < paras.length) {
            tmp = paras[arg];
            tmp = ClassRef.wrappedType(tmp);
            if (tmp.isAssignableFrom(type)) {
                lst.add(functions[i]);
            }
        }
    }
    return new Selection(lst);
}
python
def mouseReleaseEvent(self, event):
    """
    Overloads the base QGraphicsScene method to reset the selection \
    signals and finish the connection.

    :param event <QMouseReleaseEvent>
    """
    if event.button() == Qt.MidButton:
        self.setViewMode(False)
        event.accept()
        return

    super(XNodeScene, self).mouseReleaseEvent(event)

    # reset the selection blocked signals
    self.blockSelectionSignals(False)

    # finish the connection
    if self.isConnecting():
        self.finishConnection()

    # emit a menu request signal if necessary
    elif not event.isAccepted() and event.button() == Qt.RightButton:
        item = self.itemAt(event.scenePos())
        if isinstance(item, XNode):
            self.emitNodeMenuRequested(item)
        else:
            self.emitMenuRequested()
        event.accept()
python
def fit(self, X, y=None):
    """Fit X into an embedded space.

    Optionally use y for supervised dimension reduction.

    Parameters
    ----------
    X : array, shape (n_samples, n_features) or (n_samples, n_samples)
        If the metric is 'precomputed' X must be a square distance
        matrix. Otherwise it contains a sample per row. If the method
        is 'exact', X may be a sparse matrix of type 'csr', 'csc'
        or 'coo'.

    y : array, shape (n_samples)
        A target array for supervised dimension reduction. How this is
        handled is determined by parameters UMAP was instantiated with.
        The relevant attributes are ``target_metric`` and
        ``target_metric_kwds``.
    """

    X = check_array(X, dtype=np.float32, accept_sparse="csr")
    self._raw_data = X

    # Handle all the optional arguments, setting default
    if self.a is None or self.b is None:
        self._a, self._b = find_ab_params(self.spread, self.min_dist)
    else:
        self._a = self.a
        self._b = self.b

    if self.metric_kwds is not None:
        self._metric_kwds = self.metric_kwds
    else:
        self._metric_kwds = {}

    if self.target_metric_kwds is not None:
        self._target_metric_kwds = self.target_metric_kwds
    else:
        self._target_metric_kwds = {}

    if isinstance(self.init, np.ndarray):
        init = check_array(self.init, dtype=np.float32, accept_sparse=False)
    else:
        init = self.init

    self._initial_alpha = self.learning_rate

    self._validate_parameters()

    if self.verbose:
        print(str(self))

    # Error check n_neighbors based on data size
    if X.shape[0] <= self.n_neighbors:
        if X.shape[0] == 1:
            self.embedding_ = np.zeros((1, self.n_components))  # needed for sklearn compatibility
            return self

        warn(
            "n_neighbors is larger than the dataset size; truncating to "
            "X.shape[0] - 1"
        )
        self._n_neighbors = X.shape[0] - 1
    else:
        self._n_neighbors = self.n_neighbors

    if scipy.sparse.isspmatrix_csr(X):
        if not X.has_sorted_indices:
            X.sort_indices()
        self._sparse_data = True
    else:
        self._sparse_data = False

    random_state = check_random_state(self.random_state)

    if self.verbose:
        print("Construct fuzzy simplicial set")

    # Handle small cases efficiently by computing all distances
    if X.shape[0] < 4096:
        self._small_data = True
        dmat = pairwise_distances(X, metric=self.metric, **self._metric_kwds)
        self.graph_ = fuzzy_simplicial_set(
            dmat,
            self._n_neighbors,
            random_state,
            "precomputed",
            self._metric_kwds,
            None,
            None,
            self.angular_rp_forest,
            self.set_op_mix_ratio,
            self.local_connectivity,
            self.verbose,
        )
    else:
        self._small_data = False
        # Standard case
        (self._knn_indices, self._knn_dists, self._rp_forest) = nearest_neighbors(
            X,
            self._n_neighbors,
            self.metric,
            self._metric_kwds,
            self.angular_rp_forest,
            random_state,
            self.verbose,
        )

        # Use the truncated neighbor count here as well, so tiny datasets
        # stay consistent with the nearest_neighbors call above
        self.graph_ = fuzzy_simplicial_set(
            X,
            self._n_neighbors,
            random_state,
            self.metric,
            self._metric_kwds,
            self._knn_indices,
            self._knn_dists,
            self.angular_rp_forest,
            self.set_op_mix_ratio,
            self.local_connectivity,
            self.verbose,
        )

        self._search_graph = scipy.sparse.lil_matrix(
            (X.shape[0], X.shape[0]), dtype=np.int8
        )
        self._search_graph.rows = self._knn_indices
        self._search_graph.data = (self._knn_dists != 0).astype(np.int8)
        self._search_graph = self._search_graph.maximum(
            self._search_graph.transpose()
        ).tocsr()

        if callable(self.metric):
            self._distance_func = self.metric
        elif self.metric in dist.named_distances:
            self._distance_func = dist.named_distances[self.metric]
        elif self.metric == 'precomputed':
            warn('Using precomputed metric; transform will be unavailable for new data')
        else:
            raise ValueError(
                "Metric is neither callable, " + "nor a recognised string"
            )

        if self.metric != 'precomputed':
            self._dist_args = tuple(self._metric_kwds.values())

            self._random_init, self._tree_init = make_initialisations(
                self._distance_func, self._dist_args
            )
            self._search = make_initialized_nnd_search(
                self._distance_func, self._dist_args
            )

    if y is not None:
        if len(X) != len(y):
            raise ValueError(
                "Length of x = {len_x}, length of y = {len_y}, while they must be equal.".format(
                    len_x=len(X), len_y=len(y)
                )
            )
        y_ = check_array(y, ensure_2d=False)
        if self.target_metric == "categorical":
            if self.target_weight < 1.0:
                far_dist = 2.5 * (1.0 / (1.0 - self.target_weight))
            else:
                far_dist = 1.0e12
            self.graph_ = categorical_simplicial_set_intersection(
                self.graph_, y_, far_dist=far_dist
            )
        else:
            if self.target_n_neighbors == -1:
                target_n_neighbors = self._n_neighbors
            else:
                target_n_neighbors = self.target_n_neighbors

            # Handle the small case as precomputed as before
            if y_.shape[0] < 4096:
                ydmat = pairwise_distances(
                    y_[np.newaxis, :].T,
                    metric=self.target_metric,
                    **self._target_metric_kwds
                )
                target_graph = fuzzy_simplicial_set(
                    ydmat,
                    target_n_neighbors,
                    random_state,
                    "precomputed",
                    self._target_metric_kwds,
                    None,
                    None,
                    False,
                    1.0,
                    1.0,
                    False,
                )
            else:
                # Standard case
                target_graph = fuzzy_simplicial_set(
                    y_[np.newaxis, :].T,
                    target_n_neighbors,
                    random_state,
                    self.target_metric,
                    self._target_metric_kwds,
                    None,
                    None,
                    False,
                    1.0,
                    1.0,
                    False,
                )
            self.graph_ = general_simplicial_set_intersection(
                self.graph_, target_graph, self.target_weight
            )
            self.graph_ = reset_local_connectivity(self.graph_)

    if self.n_epochs is None:
        n_epochs = 0
    else:
        n_epochs = self.n_epochs

    if self.verbose:
        print(ts(), "Construct embedding")

    self.embedding_ = simplicial_set_embedding(
        self._raw_data,
        self.graph_,
        self.n_components,
        self._initial_alpha,
        self._a,
        self._b,
        self.repulsion_strength,
        self.negative_sample_rate,
        n_epochs,
        init,
        random_state,
        self.metric,
        self._metric_kwds,
        self.verbose,
    )

    if self.verbose:
        print(ts() + " Finished embedding")

    self._input_hash = joblib.hash(self._raw_data)

    return self
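A minimal usage sketch for the fit() method above, assuming the public umap-learn API; the data shapes and parameter values here are made up:

import numpy as np
import umap

X = np.random.rand(500, 10).astype(np.float32)
y = np.random.randint(0, 3, size=500)          # categorical targets

reducer = umap.UMAP(n_neighbors=15, min_dist=0.1, n_components=2)
embedding = reducer.fit(X, y).embedding_       # passing y takes the supervised path
print(embedding.shape)                         # (500, 2)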
java
@Nonnull public MockHttpServletRequest setParameter (@Nonnull final String sName, @Nullable final String [] aValues) { m_aParameters.remove (sName); m_aParameters.addAll (sName, aValues); return this; }
python
def _listen_commands(self):
    """Monitor new updates and pass them on to self._respond_commands,
    which decides the bot's actions.
    """
    self._last_update = None
    update_body = {'timeout': 2}
    while True:
        latest = self._last_update
        # increase offset to filter out older updates; compare against None
        # explicitly so a legitimate update id of 0 is not skipped
        update_body.update({'offset': latest + 1} if latest is not None else {})
        update_resp = self.client.get_updates(update_body)
        update_resp.add_done_callback(self._respond_commands)
        yield gen.sleep(5)
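For reference, the same offset-based polling pattern as a plain synchronous loop; get_updates, the update schema, and handle() are illustrative assumptions, not part of the class above:

import time

def poll_updates(client, interval=5):
    # Track the newest update id we have seen so the server can drop
    # anything older (the classic long-poll offset trick).
    last_update = None
    while True:
        body = {'timeout': 2}
        if last_update is not None:
            body['offset'] = last_update + 1
        for update in client.get_updates(body):   # hypothetical blocking call
            last_update = update['update_id']     # assumed response schema
            handle(update)                        # hypothetical dispatcher
        time.sleep(interval)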
java
@Override public String validate(DTDValidatorBase v, char[] cbuf, int start, int end, boolean normalize) throws XMLStreamException { String ok = validateEnumValue(cbuf, start, end, normalize, mEnumValues); if (ok == null) { String val = new String(cbuf, start, (end-start)); return reportValidationProblem(v, "Invalid enumerated value '"+val+"': has to be one of (" +mEnumValues+")"); } return ok; }
python
def connection_made(self, address): """When the connection is made, send something.""" logger.info("connection made to {}".format(address)) self.count = 0 self.connected = True self.transport.write(b'Echo Me')
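The snippet above uses a custom framework hook; for comparison, the stock asyncio.Protocol callback receives the transport rather than the peer address. A minimal analogue in that style:

import asyncio
import logging

logger = logging.getLogger(__name__)

class EchoClient(asyncio.Protocol):
    def connection_made(self, transport):
        # asyncio hands us the transport; the address comes from extra info
        peer = transport.get_extra_info('peername')
        logger.info("connection made to %s", peer)
        self.transport = transport
        self.transport.write(b'Echo Me')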
python
def remove_event_detect(channel): """ :param channel: the channel based on the numbering system you have specified (:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`). """ _check_configured(channel, direction=IN) pin = get_gpio_pin(_mode, channel) event.remove_edge_detect(pin)
java
@Override public GetSnapshotLimitsResult getSnapshotLimits(GetSnapshotLimitsRequest request) { request = beforeClientExecution(request); return executeGetSnapshotLimits(request); }
python
def n2l(c, l): "network to host long" l = U32(c[0] << 24) l = l | (U32(c[1]) << 16) l = l | (U32(c[2]) << 8) l = l | (U32(c[3])) return l
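The unpacking above can be sanity-checked against the standard library, which provides the same big-endian conversion:

import struct

buf = bytes([0x12, 0x34, 0x56, 0x78])
assert struct.unpack('>I', buf)[0] == 0x12345678   # '>I' = big-endian uint32
assert int.from_bytes(buf, 'big') == 0x12345678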
java
public INDArray getArr(boolean enforceExistence){
    if(sameDiff.arrayAlreadyExistsForVarName(getVarName()))
        return sameDiff.getArrForVarName(getVarName());

    //initialize value if it's actually a scalar constant (zero or 1 typically...)
    if(getScalarValue() != null && ArrayUtil.prod(getShape()) == 1) {
        INDArray arr = Nd4j.valueArrayOf(getShape(), getScalarValue().getDouble(0));
        sameDiff.associateArrayWithVariable(arr, this);
        if(log.isTraceEnabled()){
            log.trace("getArr() for variable \"{}\" allocated new scalar array: shape {}", getVarName(), Arrays.toString(getShape()));
        }
    } else if(variableType == VariableType.VARIABLE && weightInitScheme != null && shape != null){
        INDArray arr = weightInitScheme.create(dataType, shape);
        sameDiff.associateArrayWithVariable(arr, this);
        if(log.isTraceEnabled()){
            log.trace("getArr() for variable \"{}\" allocated new array: shape {}", getVarName(), Arrays.toString(getShape()));
        }
        return arr;
    } else if(sameDiff.getShapeForVarName(getVarName()) == null) {
        if (enforceExistence) {
            throw new IllegalStateException("Cannot get array for SDVariable \"" + getVarName() + "\": no array has"
                    + " been defined, and array shape cannot be calculated");
        }
        if(log.isTraceEnabled()){
            log.trace("SDVariable.getArr(): could not get array for variable {}: shape is null", getVarName());
        }
        return null;
    }
    return sameDiff.getArrForVarName(getVarName());
}
java
public void enableMvc(ConfigurableListableBeanFactory factory, BundleContext context) { if (factory == null) { throw new IllegalArgumentException("Method argument factory must not be null."); } if (context == null) { throw new IllegalArgumentException("Method argument context must not be null."); } BundleAwareServletConfig servletConfig = new BundleAwareServletConfig(context); factory.addBeanPostProcessor(new ServletContextAwareProcessor(this.servletContext, servletConfig)); factory.ignoreDependencyInterface(ServletContextAware.class); factory.ignoreDependencyInterface(ServletConfigAware.class); final BundleSpecificDispatcherServlet dispatcherServlet = createBundleSpecificDispatcherServlet(factory, servletConfig); factory.registerSingleton(generateNameFor(BundleSpecificDispatcherServlet.class), dispatcherServlet); this.mvcCapableBundles.put(context.getBundle(), dispatcherServlet); }
python
async def multipart_parser(request, file_handler=default_file_handler):
    """
    :param file_handler: callable to save a file; this should always return the file path
    :return: dictionary containing files and data
    """
    multipart_data = {
        'files': {},
        'data': {}
    }
    if request.content_type == 'multipart/form-data':
        reader = MultipartReader.from_response(request)
        while True:
            part = await reader.next()
            if part is None:
                break
            if isinstance(part, BodyPartReader):
                if part.filename:
                    # body is a binary file
                    if file_handler:
                        # file_handler may be None when we only want to parse
                        # the data without actually saving files, e.g. in a validator
                        file_data = await part.read(decode=True)
                        file_data = part.decode(file_data)
                        file_path = await file_handler(part.filename, file_data, part.headers[CONTENT_TYPE])
                    else:
                        file_path = part.filename
                    multipart_data['files'][part.name] = file_path
                elif part.headers.get(CONTENT_TYPE, '').startswith('text/'):
                    # body is text; note that part.text() is a coroutine, so it
                    # cannot be used directly as the branch condition
                    text = await part.text()
                    multipart_data['data'][part.name] = text
                else:
                    # body is json or form (not text); not handling this
                    continue
            else:
                # nested multipart parts are not handled right now
                # TODO: recurse to handle them
                continue
    else:
        try:
            multipart_data['data'] = await request.json()
        except json.JSONDecodeError:
            pass
    return multipart_data
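A hypothetical file_handler and call site showing how the parser above might be wired up; save_to_disk and the /tmp destination are illustrative, not part of any real API:

import os

async def save_to_disk(filename, data, content_type):
    # Naive handler: write the decoded part to /tmp and return the path.
    path = os.path.join('/tmp', os.path.basename(filename))
    with open(path, 'wb') as f:
        f.write(data)
    return path

# Inside a request handler:
#     parsed = await multipart_parser(request, file_handler=save_to_disk)
#     parsed['files']  -> {field_name: '/tmp/<filename>', ...}
#     parsed['data']   -> text/json form fields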
java
@Override void addEntry(int hash, K key, V value, int bucketIndex) { super.addEntry(hash, key, value, bucketIndex); // Remove eldest entry if instructed Entry<K, V> eldest = header.after; if (removeEldestEntry(eldest)) { removeEntryForKey(eldest.key); } }
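The same eldest-entry eviction policy sketched in Python with collections.OrderedDict; the capacity default is arbitrary:

from collections import OrderedDict

class LRUCache(OrderedDict):
    def __init__(self, capacity=128):
        super().__init__()
        self.capacity = capacity

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self.move_to_end(key)            # newest entries live at the end
        if len(self) > self.capacity:    # analogue of removeEldestEntry
            self.popitem(last=False)     # evict the eldest (front) entry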
python
def PartitioningQueryIterable(cls, client, query, options, database_link, partition_key): """ Represents a client side partitioning query iterable. This constructor instantiates a QueryIterable for client side partitioning queries, and sets _MultiCollectionQueryExecutionContext as the internal execution context. :param CosmosClient client: Instance of document client :param (str or dict) options: :param dict options: The request options for the request. :param str database_link: Database self link or ID based link :param str partition_key: Partition key for the query """ # This will call the base constructor(__init__ method above) self = cls(client, query, options, None, None) self._database_link = database_link self._partition_key = partition_key return self
python
def is_phy_iface(interface): """Returns True if interface is not virtual, otherwise False.""" if interface: sys_net = '/sys/class/net' if os.path.isdir(sys_net): for iface in glob.glob(os.path.join(sys_net, '*')): if '/virtual/' in os.path.realpath(iface): continue if interface == os.path.basename(iface): return True return False
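Illustrative results on a typical Linux host (actual values depend on the machine):

# is_phy_iface('eth0')  -> True   (realpath sits under /sys/devices/pci.../net/)
# is_phy_iface('lo')    -> False  (realpath resolves under /sys/devices/virtual/net/)
# is_phy_iface(None)    -> False  (falsy input falls through to the final return)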
python
def update(self): """Called before the listing renders """ super(BatchFolderContentsView, self).update() if self.on_batch_folder() and self.can_add_batches(): self.context_actions[_("Add")] = { "url": "createObject?type_name=Batch", "permission": "Add portal content", "icon": "++resource++bika.lims.images/add.png"}
python
def thread_lock(lock): """Return the thread lock for *lock*.""" if hasattr(lock, '_lock'): return lock._lock elif hasattr(lock, 'acquire'): return lock else: raise TypeError('expecting Lock/RLock')
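A quick sanity check of the unwrapping logic using only stdlib threading types:

import threading

plain = threading.Lock()
assert thread_lock(plain) is plain       # has acquire() -> returned as-is

class Wrapper:
    def __init__(self):
        self._lock = threading.Lock()

w = Wrapper()
assert thread_lock(w) is w._lock         # _lock attribute wins, inner lock returned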
java
public int[] getOccurrences() { int[] res = new int[this.occurrences.size()]; for (int i = 0; i < this.occurrences.size(); i++) { res[i] = this.occurrences.get(i); } return res; }
python
def eval_to_ast(self, e, n, extra_constraints=(), exact=None): """ Evaluate an expression, using the solver if necessary. Returns AST objects. :param e: the expression :param n: the number of desired solutions :param extra_constraints: extra constraints to apply to the solver :param exact: if False, returns approximate solutions :return: a tuple of the solutions, in the form of claripy AST nodes :rtype: tuple """ return self._solver.eval_to_ast(e, n, extra_constraints=self._adjust_constraint_list(extra_constraints), exact=exact)
python
def connect_inputs(self, datas):
    """
    Connects input ``Pipers`` to "datas" input data in the correct order,
    determined by the ``Piper.ornament`` attribute and the ``Dagger._cmp``
    function. It is assumed that the input data is in the form of an iterator
    and that all inputs have the same number of input items. A pipeline will
    **deadlock** otherwise.

    Arguments:

        - datas (sequence of sequences) An ordered sequence of inputs for
          all input ``Pipers``.
    """
    start_pipers = self.get_inputs()
    self.log.debug('%s trying to connect inputs in the order %s' % \
                   (repr(self), repr(start_pipers)))
    for piper, data in izip(start_pipers, datas):
        piper.connect([data])
    self.log.debug('%s successfully connected inputs' % repr(self))
java
public void marshall(ActivityTaskFailedEventAttributes activityTaskFailedEventAttributes, ProtocolMarshaller protocolMarshaller) { if (activityTaskFailedEventAttributes == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(activityTaskFailedEventAttributes.getReason(), REASON_BINDING); protocolMarshaller.marshall(activityTaskFailedEventAttributes.getDetails(), DETAILS_BINDING); protocolMarshaller.marshall(activityTaskFailedEventAttributes.getScheduledEventId(), SCHEDULEDEVENTID_BINDING); protocolMarshaller.marshall(activityTaskFailedEventAttributes.getStartedEventId(), STARTEDEVENTID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def pipe_item_split(tokens, loc): """Process a pipe item, which could be a partial, an attribute access, a method call, or an expression. Return (type, split) where split is - (expr,) for expression, - (func, pos_args, kwd_args) for partial, - (name, args) for attr/method, and - (op, args) for itemgetter.""" # list implies artificial tokens, which must be expr if isinstance(tokens, list) or "expr" in tokens: internal_assert(len(tokens) == 1, "invalid expr pipe item tokens", tokens) return "expr", (tokens[0],) elif "partial" in tokens: func, args = tokens pos_args, star_args, kwd_args, dubstar_args = split_function_call(args, loc) return "partial", (func, join_args(pos_args, star_args), join_args(kwd_args, dubstar_args)) elif "attrgetter" in tokens: name, args = attrgetter_atom_split(tokens) return "attrgetter", (name, args) elif "itemgetter" in tokens: op, args = tokens return "itemgetter", (op, args) else: raise CoconutInternalException("invalid pipe item tokens", tokens)
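Schematic view of the four return shapes, written as comments since the token objects only exist inside the Coconut parser:

# "expr"       -> ("expr",       (tokens[0],))
# "partial"    -> ("partial",    (func, "pos, *star", "kwd=..., **dubstar"))
# "attrgetter" -> ("attrgetter", (name, args))
# "itemgetter" -> ("itemgetter", (op, args))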
java
public Set<Reference> getNonCopytoResult() { final Set<Reference> nonCopytoSet = new LinkedHashSet<>(128); nonCopytoSet.addAll(nonConrefCopytoTargets); for (final URI f : conrefTargets) { nonCopytoSet.add(new Reference(stripFragment(f), currentFileFormat())); } for (final URI f : copytoMap.values()) { nonCopytoSet.add(new Reference(stripFragment(f))); } for (final URI f : ignoredCopytoSourceSet) { nonCopytoSet.add(new Reference(stripFragment(f))); } for (final URI filename : coderefTargetSet) { nonCopytoSet.add(new Reference(stripFragment(filename))); } return nonCopytoSet; }
java
@Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { if (changed || mNeedsResize) { int widthLimit = (right - left) - getCompoundPaddingLeft() - getCompoundPaddingRight(); int heightLimit = (bottom - top) - getCompoundPaddingBottom() - getCompoundPaddingTop(); resizeText(widthLimit, heightLimit); } super.onLayout(changed, left, top, right, bottom); }
java
static float coslook(float a) { double d = a * (.31830989 * (float) COS_LOOKUP_SZ); int i = (int) d; return COS_LOOKUP[i] + ((float) (d - i)) * (COS_LOOKUP[i + 1] - COS_LOOKUP[i]); }
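The same table-plus-linear-interpolation idea in Python; the resolution N is an arbitrary choice, and the 1/pi factor mirrors the .31830989 constant above (the Java table evidently spans [0, pi]):

import math

N = 128  # lookup resolution (arbitrary for this sketch)
# N + 2 entries: one guard slot past the end keeps the i + 1 read below
# in range when a == pi exactly.
TABLE = [math.cos(math.pi * min(i, N) / N) for i in range(N + 2)]

def coslook(a):
    # Map a in [0, pi] to a fractional table index, then lerp neighbours.
    d = a * (N / math.pi)
    i = int(d)
    return TABLE[i] + (d - i) * (TABLE[i + 1] - TABLE[i])

print(coslook(1.0), math.cos(1.0))   # agree to ~1e-4 at this resolution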
python
def get_remote_evb_mode(self, tlv_data): """Returns the EVB mode in the TLV. """ ret, parsed_val = self._check_common_tlv_format( tlv_data, "mode:", "EVB Configuration TLV") if not ret: return None mode_val = parsed_val[1].split()[0].strip() return mode_val