language: string (2 classes: java, python)
func_code_string: string (length 63 to 466k characters)
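For reference, a minimal sketch of reading rows with this two-column schema via the Hugging Face `datasets` library. The dataset path "your-org/your-code-dataset" below is a hypothetical placeholder, not given by this listing; substitute the actual repository id or local data files.

from collections import Counter
from datasets import load_dataset

# Hypothetical dataset path -- replace with the real repository id or data files.
ds = load_dataset("your-org/your-code-dataset", split="train")

# "language" has two classes (java, python); "func_code_string" holds one whole
# function per row as a single string.
print(Counter(ds["language"]))
for row in ds.select(range(3)):
    print(row["language"], len(row["func_code_string"]), "chars")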
java
@Override public void write (@Nonnull final byte [] aBuf, final int nOfs, final int nLen) { // Disable because this can have a performance impact! if (false) ValueEnforcer.isArrayOfsLen (aBuf, nOfs, nLen); if (nLen > 0) { final int nNewCount = m_nCount + nLen; if (nNewCount > m_aBuf.length) m_aBuf = _enlarge (m_aBuf, Math.max (m_aBuf.length << 1, nNewCount)); System.arraycopy (aBuf, nOfs, m_aBuf, m_nCount, nLen); m_nCount = nNewCount; } }
python
def _resample_samplerate(samples, sr, newsr): # type: (np.ndarray, int, int) -> np.ndarray """ Uses https://github.com/tuxu/python-samplerate """ try: from samplerate import resample except ImportError: return None ratio = newsr/sr return _applyMultichan(samples, lambda S: resample(S, ratio, 'sinc_best'))
java
@NotNull public static Stream<Integer> range(final int from, final int to) { return IntStream.range(from, to).boxed(); }
java
public Observable<ServiceResponse<KeyBundle>> updateKeyWithServiceResponseAsync(String vaultBaseUrl, String keyName, String keyVersion, List<JsonWebKeyOperation> keyOps, KeyAttributes keyAttributes, Map<String, String> tags) { if (vaultBaseUrl == null) { throw new IllegalArgumentException("Parameter vaultBaseUrl is required and cannot be null."); } if (keyName == null) { throw new IllegalArgumentException("Parameter keyName is required and cannot be null."); } if (keyVersion == null) { throw new IllegalArgumentException("Parameter keyVersion is required and cannot be null."); } if (this.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.apiVersion() is required and cannot be null."); } Validator.validate(keyOps); Validator.validate(keyAttributes); Validator.validate(tags); KeyUpdateParameters parameters = new KeyUpdateParameters(); parameters.withKeyOps(keyOps); parameters.withKeyAttributes(keyAttributes); parameters.withTags(tags); String parameterizedHost = Joiner.on(", ").join("{vaultBaseUrl}", vaultBaseUrl); return service.updateKey(keyName, keyVersion, this.apiVersion(), this.acceptLanguage(), parameters, parameterizedHost, this.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<KeyBundle>>>() { @Override public Observable<ServiceResponse<KeyBundle>> call(Response<ResponseBody> response) { try { ServiceResponse<KeyBundle> clientResponse = updateKeyDelegate(response); return Observable.just(clientResponse); } catch (Throwable t) { return Observable.error(t); } } }); }
python
def update_index(self): """ Regenerates the permission index for this set Called every time a rule is added / removed / modified in the set """ # update index idx = {} for _, p in sorted(self.permissions.items(), key=lambda x: str(x[0])): branch = idx parent_p = const.PERM_DENY for k in p.namespace.keys: if k not in branch: branch[k] = {"__": parent_p} branch[k].update(__implicit=True) branch = branch[k] parent_p = branch["__"] branch["__"] = p.value branch["__implicit"] = False self.index = idx # update read access map ramap = {} def update_ramap(branch_idx): r = {"__": False} for k, v in list(branch_idx.items()): if k != "__" and k != "__implicit": r[k] = update_ramap(v) if branch_idx["__"] is not None and (branch_idx["__"] & const.PERM_READ) != 0: r["__"] = True return r for k, v in list(idx.items()): ramap[k] = update_ramap(v) self.read_access_map = ramap return self.index
java
private static void buildHttpClient() { RequestConfig globalConfig = RequestConfig.custom().setConnectTimeout(5000) .setSocketTimeout(5000).build(); CloseableHttpClient httpclient = HttpClients.custom().setKeepAliveStrategy(new HttpClientKeepAliveStrategy()) .setDefaultRequestConfig(globalConfig).build(); HttpClientUtil.httpclient = httpclient; }
java
@Benchmark String charsToBitSet(int reps) { /* * This benchmark now measures the complete parsing of a char[] rather than * a single invocation of {@link BitSet#set}. However this is fine because * it is intended to be a comparative benchmark. */ for (int i = 0; i < reps; i++) { for (int n = 0; n < bitString.length; n++) { bitSet.set(n, bitString[n] == '1'); } } return bitSet.toString(); }
java
public static void retrieveSnapshotFiles( File directory, Map<String, Snapshot> namedSnapshotMap, FileFilter filter, boolean validate, SnapshotPathType stype, VoltLogger logger) { NamedSnapshots namedSnapshots = new NamedSnapshots(namedSnapshotMap, stype); retrieveSnapshotFilesInternal(directory, namedSnapshots, filter, validate, stype, logger, 0); }
java
public Datatype.Builder setNullableBuilderFactory(BuilderFactory builderFactory) { if (builderFactory != null) { return setBuilderFactory(builderFactory); } else { return clearBuilderFactory(); } }
python
def store_config(func): """Decorator for storing the configuration in the project's builddir.""" from functools import wraps @wraps(func) def wrap_store_config(self, *args, **kwargs): """Wrapper that contains the actual storage call for the config.""" CFG.store(local.path(self.builddir) / ".benchbuild.yml") return func(self, *args, **kwargs) return wrap_store_config
java
protected SibRaStringGenerator getStringGenerator() { final SibRaStringGenerator generator = new SibRaStringGenerator(this); generator.addParent("connection", _connection); generator.addField("session", _session); return generator; }
java
public TransactionInfoBaseMessage nextTask() throws IOException { if (!hasMoreEntries()) { return null; } ByteBuffer bb = m_container.b(); int position = bb.position(); int length = bb.getInt(); long sourceHSId = bb.getLong(); VoltDbMessageFactory factory = new VoltDbMessageFactory(); /* * create a new buffer that just contains the message, deserialization * of the message may assert on the capacity of the buffer */ final int oldLimit = bb.limit(); bb.limit(bb.position() + length); ByteBuffer slice = bb.slice(); bb.limit(oldLimit); VoltMessage msg = factory.createMessageFromBuffer(slice, sourceHSId); // createMessageFromBuffer() doesn't move the position pointer, set it here bb.position(position + length + 8 + 4); // sourceHSId + buf len return (TransactionInfoBaseMessage) msg; }
java
@POST @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) @Path("/{alertId}/notifications") @Description("Creates new notifications for the given alert ID.") public List<NotificationDto> addNotification(@Context HttpServletRequest req, @PathParam("alertId") BigInteger alertId, NotificationDto notificationDto) { if (alertId == null || alertId.compareTo(BigInteger.ZERO) < 1) { throw new WebApplicationException("Alert Id cannot be null and must be a positive non-zero number.", Status.BAD_REQUEST); } if (notificationDto == null) { throw new WebApplicationException("Null notification object cannot be created.", Status.BAD_REQUEST); } Alert alert = alertService.findAlertByPrimaryKey(alertId); if (alert != null) { validateResourceAuthorization(req, alert.getOwner(), getRemoteUser(req)); Notification notification = new Notification(notificationDto.getName(), alert, notificationDto.getNotifierName(), notificationDto.getSubscriptions(), notificationDto.getCooldownPeriod()); notification.setSRActionable(notificationDto.getSRActionable()); notification.setSeverityLevel(notificationDto.getSeverityLevel()); notification.setCustomText(notificationDto.getCustomText()); // TODO: 14.12.16 validateAuthorizationRequest notification notification.setMetricsToAnnotate(new ArrayList<>(notificationDto.getMetricsToAnnotate())); List<Notification> notifications = new ArrayList<Notification>(alert.getNotifications()); notifications.add(notification); alert.setNotifications(notifications); alert.setModifiedBy(getRemoteUser(req)); return NotificationDto.transformToDto(alertService.updateAlert(alert).getNotifications()); } throw new WebApplicationException(Response.Status.NOT_FOUND.getReasonPhrase(), Response.Status.NOT_FOUND); }
python
def sync(self, json_obj=None): """ synchronize this transport with the Ariane server transport :return: """ LOGGER.debug("Transport.sync") if json_obj is None: params = None if self.id is not None: params = SessionService.complete_transactional_req({'ID': self.id}) if params is not None: if MappingService.driver_type != DriverFactory.DRIVER_REST: params['OPERATION'] = 'getTransport' args = {'properties': params} else: args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params} response = TransportService.requester.call(args) if MappingService.driver_type != DriverFactory.DRIVER_REST: response = response.get() if response.rc == 0: json_obj = response.response_content else: err_msg = 'Transport.sync - Problem while syncing transport (id: ' + str(self.id) + '). ' \ 'Reason: ' + str(response.response_content) + ' - ' + str(response.error_message) + \ " (" + str(response.rc) + ")" LOGGER.warning(err_msg) if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message: raise ArianeMappingOverloadError("Transport.sync", ArianeMappingOverloadError.ERROR_MSG) # traceback.print_stack() elif 'transportID' not in json_obj: err_msg = 'Transport.sync - Problem while syncing transport (id: ' + str(self.id) + '). ' \ 'Reason: inconsistent json_obj' + str(json_obj) + " from : \n" LOGGER.warning(err_msg) # traceback.print_stack() if json_obj is not None: self.id = json_obj['transportID'] self.name = json_obj['transportName'] if MappingService.driver_type != DriverFactory.DRIVER_REST: if 'transportProperties' in json_obj: self.properties = DriverTools.json2properties(json_obj['transportProperties']) else: self.properties = None else: self.properties = json_obj['transportProperties'] if 'transportProperties' in json_obj else None
java
public static com.liferay.commerce.model.CommerceOrder fetchCommerceOrderByUuidAndGroupId( String uuid, long groupId) { return getService().fetchCommerceOrderByUuidAndGroupId(uuid, groupId); }
python
def boolean_sparse(a, b, operation=np.logical_and): """ Find common rows between two arrays very quickly using 3D boolean sparse matrices. Parameters ----------- a: (n, d) int, coordinates in space b: (m, d) int, coordinates in space operation: numpy operation function, ie: np.logical_and np.logical_or Returns ----------- coords: (q, d) int, coordinates in space """ # 3D sparse arrays, using wrapped scipy.sparse # pip install sparse import sparse # find the bounding box of both arrays extrema = np.array([a.min(axis=0), a.max(axis=0), b.min(axis=0), b.max(axis=0)]) origin = extrema.min(axis=0) - 1 size = tuple(extrema.ptp(axis=0) + 2) # put nearby voxel arrays into same shape sparse array sp_a = sparse.COO((a - origin).T, data=np.ones(len(a), dtype=np.bool), shape=size) sp_b = sparse.COO((b - origin).T, data=np.ones(len(b), dtype=np.bool), shape=size) # apply the logical operation # get a sparse matrix out applied = operation(sp_a, sp_b) # reconstruct the original coordinates coords = np.column_stack(applied.coords) + origin return coords
python
def create_branch_and_checkout(self, branch_name: str): """ Creates a new branch if it doesn't exist Args: branch_name: branch name """ self.create_branch(branch_name) self.checkout(branch_name)
java
static void setAgreeShowDialog(Context context, boolean isAgree) { SharedPreferences.Editor editor = getPreferencesEditor(context); editor.putBoolean(PREF_KEY_IS_AGREE_SHOW_DIALOG, isAgree); editor.apply(); }
python
def infer_dtype_from_object(dtype): """ Get a numpy dtype.type-style object for a dtype object. This methods also includes handling of the datetime64[ns] and datetime64[ns, TZ] objects. If no dtype can be found, we return ``object``. Parameters ---------- dtype : dtype, type The dtype object whose numpy dtype.type-style object we want to extract. Returns ------- dtype_object : The extracted numpy dtype.type-style object. """ if isinstance(dtype, type) and issubclass(dtype, np.generic): # Type object from a dtype return dtype elif isinstance(dtype, (np.dtype, PandasExtensionDtype, ExtensionDtype)): # dtype object try: _validate_date_like_dtype(dtype) except TypeError: # Should still pass if we don't have a date-like pass return dtype.type try: dtype = pandas_dtype(dtype) except TypeError: pass if is_extension_array_dtype(dtype): return dtype.type elif isinstance(dtype, str): # TODO(jreback) # should deprecate these if dtype in ['datetimetz', 'datetime64tz']: return DatetimeTZDtype.type elif dtype in ['period']: raise NotImplementedError if dtype == 'datetime' or dtype == 'timedelta': dtype += '64' try: return infer_dtype_from_object(getattr(np, dtype)) except (AttributeError, TypeError): # Handles cases like _get_dtype(int) i.e., # Python objects that are valid dtypes # (unlike user-defined types, in general) # # TypeError handles the float16 type code of 'e' # further handle internal types pass return infer_dtype_from_object(np.dtype(dtype))
python
def calc_parent(self, i, j, h): """ Returns start and end of span of parent sequence that contains given child. """ N = self.repo.array_size c_i = i c_j = j c_h = h # Calculate the number of the sequence in its row (sequences # with same height), from left to right, starting from 0. c_n = c_i // (N ** c_h) p_n = c_n // N # Position of the child ID in the parent array. p_p = c_n % N # Parent height is child height plus one. p_h = c_h + 1 # Span of sequences in parent row is max size N, to the power of the height. span = N ** p_h # Calculate parent i and j. p_i = p_n * span p_j = p_i + span # Check the parent i,j bounds the child i,j, ie child span is contained by parent span. assert p_i <= c_i, 'i greater on parent than child: {} {}'.format(p_i, p_j) assert p_j >= c_j, 'j less on parent than child: {} {}'.format(p_i, p_j) # Return parent i, j, h, p. return p_i, p_j, p_h, p_p
java
@Override public Map<String, String> getValues() { HashMap<String, String> result = new HashMap<>(); if (status != null && status != MandateStatus.NotSpecified) { result.put("status", status.name()); } if (beforeDate != null) result.put("beforeDate", Long.toString(beforeDate)); if (afterDate != null) result.put("afterDate", Long.toString(afterDate)); return result; }
python
def _normalize_items( ctx, items, str_to_node=False, node_to_str=False, allow_task=False, ): """ Normalize given items. Do several things: - Ignore None. - Flatten list. - Unwrap wrapped item in `_ItemWrapper`. :param ctx: BuildContext object. :param items: Items list to normalize. :param str_to_node: Convert string to node. :param node_to_str: Convert node to absolute path. :param allow_task: Whether allow task item. :return: Normalized tuples list. Tuple format is: :: ( normalized_item, # Normalized item. wrapper_type, # Original `_ItemWrapper` type. ) """ # Ensure given context object is BuildContext object _ensure_build_context(ctx) # Normalized tuples list norm_tuple_s = [] # If given items list is empty if not items: # Return empty list return norm_tuple_s # If given items list is not empty. # For given items list's each item for item in items: # If the item is item wrapper if isinstance(item, _ItemWrapper): # Get wrapper type wrapper_type = item.type() # Get real item item = item.item() # If the item is not item wrapper else: # Set wrapper type be None wrapper_type = None # Use the item as real item item = item # If the real item is list if isinstance(item, list): # Use the real item as real items list real_item_s = item # If the real item is not list else: # Create real items list containing the real item real_item_s = [item] # For each real item for real_item in real_item_s: # If the real item is None if real_item is None: # Ignore None continue # If the real item is not None. # If the real item is string elif isinstance(real_item, str): # If need convert string to node if (wrapper_type is not None) or str_to_node: # If the path string is absolute path if os.path.isabs(real_item): # Get error message msg = ( 'Error (7MWU9): Given path is not relative path:' ' {0}.' ).format(real_item) # Raise error raise ValueError(msg) # If the path string is not absolute path. # Create node as normalized item norm_item = create_node(ctx, real_item) # If need convert node to absolute path if node_to_str: # Convert the node to absolute path norm_item = norm_item.abspath() # If not need convert string to node else: # Use the string as normalized item norm_item = real_item # Create normalized tuple norm_tuple = (norm_item, wrapper_type) # If the real item is not string. # If the real item is node elif isinstance(real_item, Node): # If need convert node to absolute path if node_to_str: # Convert the node to absolute path real_item = real_item.abspath() # Create normalized tuple norm_tuple = (real_item, wrapper_type) # If the real item is not node. # If the real item is task elif isinstance(real_item, Task): # If allow task item if allow_task: # Create normalized tuple norm_tuple = (real_item, wrapper_type) # If not allow task item else: # Get error message msg = 'Error (6PVMG): Item type is not valid: {0}.'.format( real_item ) # Raise error raise ValueError(msg) # If the real item is not task. # If the real item is not None, string, node, or task else: # Get error message msg = 'Error (63KUG): Item type is not valid: {0}.'.format( real_item ) # Raise error raise ValueError(msg) # Add the normalized tuple to list norm_tuple_s.append(norm_tuple) # Return the normalized tuples list return norm_tuple_s
java
@Override public UpdateRuleGroupResult updateRuleGroup(UpdateRuleGroupRequest request) { request = beforeClientExecution(request); return executeUpdateRuleGroup(request); }
python
def filter(self, query, output_fields=None): """Query the database and return subject information for those who match the query logic Parameters ---------- query: Query or QueryGroup Query(Group) object to process output_fields: list The fields desired for matching subjects Returns ------- A list of dictionaries whose keys contains at least the default field and at most each key passed in with output_fields, each dictionary representing a surviving row in the database. """ query_keys = query.fields() if not set(query_keys).issubset(set(self.field_names)): raise ValueError("One or more query keys not in project keys") query_keys.append(self.def_field) data = self.export_records(fields=query_keys) matches = query.filter(data, self.def_field) if matches: # if output_fields is empty, we'll download all fields, which is # not desired, so we limit download to def_field if not output_fields: output_fields = [self.def_field] # But if caller passed a string and not list, we need to listify if isinstance(output_fields, basestring): output_fields = [output_fields] return self.export_records(records=matches, fields=output_fields) else: # If there are no matches, then sending an empty list to # export_records will actually return all rows, which is not # what we want return []
python
def _add_supplemental(data): """Add additional supplemental files to CWL sv output, give useful names. """ if "supplemental" not in data["sv"]: data["sv"]["supplemental"] = [] if data["sv"].get("variantcaller"): cur_name = _useful_basename(data) for k in ["cns", "vrn_bed"]: if data["sv"].get(k) and os.path.exists(data["sv"][k]): dname, orig = os.path.split(data["sv"][k]) orig_base, orig_ext = utils.splitext_plus(orig) orig_base = _clean_name(orig_base, data) if orig_base: fname = "%s-%s%s" % (cur_name, orig_base, orig_ext) else: fname = "%s%s" % (cur_name, orig_ext) sup_out_file = os.path.join(dname, fname) utils.symlink_plus(data["sv"][k], sup_out_file) data["sv"]["supplemental"].append(sup_out_file) return data
java
@Override public Collection<V> values() { List<V> values = new ArrayList<>(); valuesR(root.getLeft(), -1, values); return values; }
java
public EEnum getIfcConnectionTypeEnum() { if (ifcConnectionTypeEnumEEnum == null) { ifcConnectionTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(798); } return ifcConnectionTypeEnumEEnum; }
java
public static FunctionalType unaryOperator(TypeMirror type) { checkArgument(!type.getKind().isPrimitive(), "Unexpected primitive type %s", type); return new FunctionalType( QualifiedName.of(UnaryOperator.class).withParameters(type), "apply", ImmutableList.of(type), type); }
python
def parsed_whois(self, query, **kwargs): """Pass in a domain name""" return self._results('parsed-whois', '/v1/{0}/whois/parsed'.format(query), cls=ParsedWhois, **kwargs)
java
public String tokenizeToCoNLLOffsets() { final StringBuilder sb = new StringBuilder(); final String[] sentences = segmenter.segmentSentence(); final List<List<Token>> tokens = tokenizer.tokenize(sentences); for (final List<Token> tokSentence : tokens) { for (final Token token : tokSentence) { String tokenValue = token.getTokenValue(); if (tokenValue.equals(RuleBasedSegmenter.PARAGRAPH)) { tokenValue = DEFAULT_TOKEN_VALUE; } sb.append(tokenValue.trim()).append(DELIMITER).append(token.startOffset()) .append(DELIMITER).append(token.tokenLength()).append(LINE_BREAK); } sb.append(LINE_BREAK); } return sb.toString(); }
python
def load_mpower_data(filename, convert_times=1000000000.0): """ This method loads data in the `mpower <https://www.synapse.org/#!Synapse:syn4993293/wiki/247859>`_ format The format is like: .. code-block:: json [ { "timestamp":19298.67999479167, "x": ... , "y": ..., "z": ..., }, {...}, {...} ] :param filename: The path to load data from :type filename: string :param convert_times: Convert times. The default converts from nanoseconds to seconds. :type convert_times: float """ raw_data = pd.read_json(filename) date_times = pd.to_datetime(raw_data.timestamp * convert_times - raw_data.timestamp[0] * convert_times) time_difference = (raw_data.timestamp - raw_data.timestamp[0]) time_difference = time_difference.values magnitude_sum_acceleration = \ np.sqrt(raw_data.x.values ** 2 + raw_data.y.values ** 2 + raw_data.z.values ** 2) data = {'td': time_difference, 'x': raw_data.x.values, 'y': raw_data.y.values, 'z': raw_data.z.values, 'mag_sum_acc': magnitude_sum_acceleration} data_frame = pd.DataFrame(data, index=date_times, columns=['td', 'x', 'y', 'z', 'mag_sum_acc']) return data_frame
java
public byte[] toBytes() throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); // 0. write command number out.write(getCommand()); // 1. write 4 bytes bin-log position to start at ByteHelper.writeUnsignedIntLittleEndian(binlogPosition, out); // 2. write 2 bytes bin-log flags int binlog_flags = 0; binlog_flags |= BINLOG_SEND_ANNOTATE_ROWS_EVENT; out.write(binlog_flags); out.write(0x00); // 3. write 4 bytes server id of the slave ByteHelper.writeUnsignedIntLittleEndian(this.slaveServerId, out); // 4. write bin-log file name if necessary if (StringUtils.isNotEmpty(this.binlogFileName)) { out.write(this.binlogFileName.getBytes()); } return out.toByteArray(); }
python
def update(self): """Read raw data and calculate temperature and humidity.""" if not self._ok: self.log_error("Trying to restore OK mode w/ soft reset") self._ok = self._soft_reset() try: self._bus.write_byte(self._i2c_add, CMD_READ_TEMP_NOHOLD) sleep(MEASUREMENT_WAIT_TIME) buf_t = self._bus.read_i2c_block_data( self._i2c_add, CMD_READ_TEMP_HOLD, 3) self._bus.write_byte(self._i2c_add, CMD_READ_HUM_NOHOLD) sleep(MEASUREMENT_WAIT_TIME) buf_h = self._bus.read_i2c_block_data( self._i2c_add, CMD_READ_HUM_HOLD, 3) except OSError as exc: self._ok = False self.log_error("Bad reading: %s", exc) return if self._crc8check(buf_t): temp = (buf_t[0] << 8 | buf_t[1]) & 0xFFFC self._temperature = self._calc_temp(temp) if self._crc8check(buf_h): humid = (buf_h[0] << 8 | buf_h[1]) & 0xFFFC rh_actual = self._calc_humid(humid) # For temperature coefficient compensation rh_final = self._temp_coefficient(rh_actual, self._temperature) rh_final = 100.0 if rh_final > 100 else rh_final # Clamp > 100 rh_final = 0.0 if rh_final < 0 else rh_final # Clamp < 0 self._humidity = rh_final else: self._humidity = -255 self._ok = False self.log_error("Bad CRC error with humidity") else: self._temperature = -255 self._ok = False self.log_error("Bad CRC error with temperature")
python
def remove(self, identifier: Union[DataObjectReplica, int]): """ Removes a data object from this collection that has the given unique identifier. A `ValueError` will be raised if a data object with the given identifier does not exist. :param identifier: the identifier of the data object """ if isinstance(identifier, int): self._remove_by_number(identifier) elif isinstance(identifier, DataObjectReplica): self._remove_by_object(identifier) else: raise TypeError("Can only remove by number or by object reference: `%s` given" % type(identifier))
java
public void update(String url, int additionalHeight) { if (iframe.getSrc() != null && url != null && iframe.getSrc().equals(url)) { return; } if (additionalHeight > -1) { this.additionalHeight = additionalHeight; } final Style style = iframe.getStyle(); style.setWidth( 100, Style.Unit.PCT); if (url != null) { //VConsole.log("iframe is updated with url " + url ); iframe.setSrc(url); } }
java
Observable<ChatConversationBase> getConversation(@NonNull String conversationId) { return Observable.create(emitter -> storeFactory.execute(new StoreTransaction<ChatStore>() { @Override protected void execute(ChatStore store) { store.open(); ChatConversationBase c = store.getConversation(conversationId); store.close(); emitter.onNext(c); emitter.onCompleted(); } }), Emitter.BackpressureMode.BUFFER); }
java
public static void katakanaToRomaji(Appendable builder, CharSequence s) throws IOException { ToStringUtil.getRomanization(builder, s); }
java
@Throws(IllegalNullArgumentException.class) public static <T> T notNull(@Nonnull final T reference, @Nullable final String name) { if (reference == null) { throw new IllegalNullArgumentException(name); } return reference; }
java
@SuppressWarnings("unchecked") public static <T> T convertValue(JsonNode node, Class<T> clazz) { if (node instanceof POJONode) { return convertValue(DPathUtils.extractValue((POJONode) node), clazz); } if (clazz == null) { throw new NullPointerException("Class parameter is null!"); } if (node == null || node instanceof NullNode || node instanceof MissingNode) { return null; } if (node instanceof POJONode) { return convertValue(DPathUtils.extractValue((POJONode) node), clazz); } if (Number.class.isAssignableFrom(clazz) || byte.class == clazz || short.class == clazz || int.class == clazz || long.class == clazz || float.class == clazz || double.class == clazz) { return convertNumber(node, clazz); } if (clazz == Boolean.class || clazz == boolean.class) { return (T) convertBoolean(node); } if (clazz == Character.class || clazz == char.class) { return (T) convertChar(node); } if (Date.class.isAssignableFrom(clazz)) { return (T) convertDate(node); } if (Object[].class.isAssignableFrom(clazz) || List.class.isAssignableFrom(clazz)) { return (T) convertArrayOrList(node); } if (clazz.isAssignableFrom(node.getClass())) { return (T) node; } if (clazz == String.class) { return (T) (node.isTextual() ? node.asText() : node.toString()); } throw new IllegalArgumentException( "Cannot convert an object of type [" + node.getClass() + "] to [" + clazz + "]!"); }
java
public void create(final DbProduct dbProduct) { if(repositoryHandler.getProduct(dbProduct.getName()) != null){ throw new WebApplicationException(Response.status(Response.Status.CONFLICT).entity("Product already exist!").build()); } repositoryHandler.store(dbProduct); }
java
public static Map<String, EntryValue> convertBeanToEntryMap(Object bean) { Map<String, Object> buildAttributeValueMap = BeanUtilsExtended.buildObjectAttributeMap(bean); return Maps.transformEntries(buildAttributeValueMap, new ObjectToEntryValueTransformer()); }
java
public static long checkPreconditionL( final long value, final boolean condition, final LongFunction<String> describer) { return innerCheckL(value, condition, describer); }
python
def _main(): """Command line interface to the module. """ from argparse import ArgumentParser, FileType from gutenberg import Error from gutenberg._util.os import reopen_encoded parser = ArgumentParser(description='Remove headers and footers from a ' 'Project Gutenberg text') parser.add_argument('infile', type=FileType('r')) parser.add_argument('outfile', type=FileType('w')) args = parser.parse_args() try: with reopen_encoded(args.infile, 'r', 'utf8') as infile: text = infile.read() clean_text = strip_headers(text) with reopen_encoded(args.outfile, 'w', 'utf8') as outfile: outfile.write(clean_text) except Error as error: parser.error(str(error))
python
def CheckTemplates(self, base_dir, version): """Verify we have at least one template that matches maj.minor version.""" major_minor = ".".join(version.split(".")[0:2]) templates = glob.glob( os.path.join(base_dir, "templates/*%s*.zip" % major_minor)) required_templates = set( [x.replace("maj.minor", major_minor) for x in self.REQUIRED_TEMPLATES]) # Client templates have an extra version digit, e.g. 3.1.0.0 templates_present = set([ re.sub(r"_%s[^_]+_" % major_minor, "_%s_" % major_minor, os.path.basename(x)) for x in templates ]) difference = required_templates - templates_present if difference: raise RuntimeError("Missing templates %s" % difference)
java
public void away (ClientObject caller, String message) { BodyObject body = _locator.forClient(caller); // we modify this field via an invocation service request because a body object is not // modifiable by the client body.setAwayMessage(message); }
python
def put_text(img, text, org, font_face, font_scale, color, thickness=1, line_type=8, bottom_left_origin=False): """Utility for drawing text with line breaks :param img: Image. :param text: Text string to be drawn. :param org: Bottom-left corner of the first line of the text string in the image. :param font_face: Font type. One of FONT_HERSHEY_SIMPLEX, FONT_HERSHEY_PLAIN, FONT_HERSHEY_DUPLEX, FONT_HERSHEY_COMPLEX, FONT_HERSHEY_TRIPLEX, FONT_HERSHEY_COMPLEX_SMALL, FONT_HERSHEY_SCRIPT_SIMPLEX, or FONT_HERSHEY_SCRIPT_COMPLEX, where each of the font ID’s can be combined with FONT_ITALIC to get the slanted letters. :param font_scale: Font scale factor that is multiplied by the font-specific base size. :param color: Text color. :param thickness: Thickness of the lines used to draw a text. :param line_type: Line type. See the line for details. :param bottom_left_origin: When true, the image data origin is at the bottom-left corner. Otherwise, it is at the top-left corner. :return: None; image is modified in place """ # Break out drawing coords x, y = org # Break text into list of text lines text_lines = text.split('\n') # Get height of text lines in pixels (height of all lines is the same) _, line_height = cv2.getTextSize('', font_face, font_scale, thickness)[0] # Set distance between lines in pixels line_gap = line_height // 3 for i, text_line in enumerate(text_lines): # Find total size of text block before this line line_y_adjustment = i * (line_gap + line_height) # Move text down from original line based on line number if not bottom_left_origin: line_y = y + line_y_adjustment else: line_y = y - line_y_adjustment # Draw text cv2.putText(img, text=text_lines[i], org=(x, line_y), fontFace=font_face, fontScale=font_scale, color=color, thickness=thickness, lineType=line_type, bottomLeftOrigin=bottom_left_origin)
java
public static sslservicegroup_sslcertkey_binding[] get(nitro_service service, String servicegroupname) throws Exception{ sslservicegroup_sslcertkey_binding obj = new sslservicegroup_sslcertkey_binding(); obj.set_servicegroupname(servicegroupname); sslservicegroup_sslcertkey_binding response[] = (sslservicegroup_sslcertkey_binding[]) obj.get_resources(service); return response; }
python
def run_bootstrap_post_init(self, config): """ runs a script after initdb or custom bootstrap script is called and waits until completion. """ cmd = config.get('post_bootstrap') or config.get('post_init') if cmd: r = self._local_connect_kwargs if 'host' in r: # '/tmp' => '%2Ftmp' for unix socket path host = quote_plus(r['host']) if r['host'].startswith('/') else r['host'] else: host = '' # https://www.postgresql.org/docs/current/static/libpq-pgpass.html # A host name of localhost matches both TCP (host name localhost) and Unix domain socket # (pghost empty or the default socket directory) connections coming from the local machine. r['host'] = 'localhost' # set it to localhost to write into pgpass if 'user' in r: user = r['user'] + '@' else: user = '' if 'password' in r: import getpass r.setdefault('user', os.environ.get('PGUSER', getpass.getuser())) connstring = 'postgres://{0}{1}:{2}/{3}'.format(user, host, r['port'], r['database']) env = self.write_pgpass(r) if 'password' in r else None try: ret = self.cancellable_subprocess_call(shlex.split(cmd) + [connstring], env=env) except OSError: logger.error('post_init script %s failed', cmd) return False if ret != 0: logger.error('post_init script %s returned non-zero code %d', cmd, ret) return False return True
java
public RunAsType<ServletType<T>> getOrCreateRunAs() { Node node = childNode.getOrCreate("run-as"); RunAsType<ServletType<T>> runAs = new RunAsTypeImpl<ServletType<T>>(this, "run-as", childNode, node); return runAs; }
java
private Runnable decorateTask(Runnable task, boolean isRepeatingTask) { Runnable result = TaskUtils.decorateTaskWithErrorHandler(task, this.errorHandler, isRepeatingTask); if (this.enterpriseConcurrentScheduler) { result = ManagedTaskBuilder.buildManagedTask(result, task.toString()); } return result; }
python
def pad_to_multiple(self, factor): """ Pad the pianoroll with zeros at the end along the time axis with the minimum length that makes the resulting pianoroll length a multiple of `factor`. Parameters ---------- factor : int The value which the length of the resulting pianoroll will be a multiple of. """ remainder = self.pianoroll.shape[0] % factor if remainder: pad_width = ((0, (factor - remainder)), (0, 0)) self.pianoroll = np.pad(self.pianoroll, pad_width, 'constant')
java
private double checkThreshold(int value) { double threshold = (double) value; try { if (threshold < 0 || threshold > 100) { throw new NumberFormatException(); } LOG.info("Using a threshold of " + threshold); } catch (NumberFormatException e) { System.err.println("Expect a double parameter in the range of [0, 100]: " + value); throw e; } return threshold; }
java
public ServiceEndpointPolicyInner beginCreateOrUpdate(String resourceGroupName, String serviceEndpointPolicyName, ServiceEndpointPolicyInner parameters) { return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, serviceEndpointPolicyName, parameters).toBlocking().single().body(); }
java
public static void cleanupActiveSession() { Session session = AbstractHandler.ACTIVE_SESSION.get(); if (session != null) { try { AbstractHandler.ACTIVE_SESSION.remove(); session.logout(); LOGGER.debug("Logged out REST service session"); } catch (Exception e) { LOGGER.warn(e, "Error while trying to logout REST service session"); } } }
java
public NfsRmdirResponse rmdir() throws IOException { NfsRmdirResponse response = getNfs().wrapped_sendRmdir(makeRmdirRequest()); setFileHandle(null); return response; }
python
def read_config(ctx, param, config_path): """Callback that is used whenever --config is passed.""" if sys.argv[1] == 'init': return cfg = ctx.ensure_object(Config) if config_path is None: config_path = path.join(sys.path[0], 'v2ex_config.json') if not path.exists(config_path): sys.exit("Can't find config file at {0}.\nPlease read " "https://github.com/lord63/v2ex_daily_mission " "to follow the guide.".format(config_path)) cfg.load_config(config_path) return config_path
java
protected MemoryBlock allocatePage(long required) { MemoryBlock page = taskMemoryManager.allocatePage(Math.max(pageSize, required), this); if (page == null || page.size() < required) { throwOom(page, required); } used += page.size(); return page; }
java
public static RgbaColor fromRgba(String rgba) { if (rgba.length() == 0) return getDefaultColor(); String[] parts = getRgbaParts(rgba).split(","); if (parts.length == 4) { return new RgbaColor(parseInt(parts[0]), parseInt(parts[1]), parseInt(parts[2]), parseFloat(parts[3])); } else { return getDefaultColor(); } }
java
public static String getMapParam(Map<String,String[]> queryMap, String field) { String arr[] = queryMap.get(field); if (arr == null || arr.length == 0) { return null; } return arr[0]; }
java
public EClass getIfcTextStyleWithBoxCharacteristics() { if (ifcTextStyleWithBoxCharacteristicsEClass == null) { ifcTextStyleWithBoxCharacteristicsEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(607); } return ifcTextStyleWithBoxCharacteristicsEClass; }
java
private static final HttpMessageConverter<?> getHalConverter(List<MediaType> halFlavours) { ObjectMapper mapper = new ObjectMapper(); mapper.registerModule(new Jackson2HalModule()); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); MappingJackson2HttpMessageConverter converter = new MappingJackson2HttpMessageConverter(); converter.setObjectMapper(mapper); converter.setSupportedMediaTypes(halFlavours); return converter; }
python
def reportFailed(self, *args, **kwargs): """ Report Run Failed Report a run failed, resolving the run as `failed`. Use this to resolve a run that failed because the task specific code behaved unexpectedly. For example the task exited non-zero, or didn't produce expected output. Do not use this if the task couldn't be run because of malformed payload, or other unexpected condition. In these cases we have a task exception, which should be reported with `reportException`. This method gives output: ``v1/task-status-response.json#`` This method is ``stable`` """ return self._makeApiCall(self.funcinfo["reportFailed"], *args, **kwargs)
java
public String getDocid() { if (Document_Type.featOkTst && ((Document_Type)jcasType).casFeat_docid == null) jcasType.jcas.throwFeatMissing("docid", "de.julielab.jules.types.ace.Document"); return jcasType.ll_cas.ll_getStringValue(addr, ((Document_Type)jcasType).casFeatCode_docid);}
python
def check(self, *exc_classes): """Check if any of ``exc_classes`` caused the failure. Arguments of this method can be exception types or type names (strings **fully qualified**). If captured exception is an instance of exception of given type, the corresponding argument is returned, otherwise ``None`` is returned. """ for cls in exc_classes: cls_name = utils.cls_to_cls_name(cls) if cls_name in self._exc_type_names: return cls return None
python
def solve(self,b): """ Solves system Ax=b. Parameters ---------- b : ndarray Returns ------- x : ndarray """ x = b.copy() self.mumps.set_rhs(x) self.mumps.run(job=3) return x
python
def List(column_names, title=None, boolstyle=None, editable=False, select_col=None, sep='|', data=[], **kwargs): """Present a list of items to select. This will raise a Zenity List Dialog populated with the colomns and rows specified and return either the cell or row that was selected or None if the user hit cancel. column_names - A tuple or list containing the names of the columns. title - The title of the dialog box. boolstyle - Whether the first columns should be a bool option ("checklist", "radiolist") or None if it should be a text field. editable - True if the user can edit the cells. select_col - The column number of the selected cell to return or "ALL" to return the entire row. sep - Token to use as the row separator when parsing Zenity's return. Cells should not contain this token. data - A list or tuple of tuples that contain the cells in the row. The size of the row's tuple must be equal to the number of columns. kwargs - Optional command line parameters for Zenity such as height, width, etc.""" args = [] for column in column_names: args.append('--column=%s' % column) if title: args.append('--title=%s' % title) if boolstyle: if not (boolstyle == 'checklist' or boolstyle == 'radiolist'): raise ValueError('"%s" is not a proper boolean column style.' % boolstyle) args.append('--' + boolstyle) if editable: args.append('--editable') if select_col: args.append('--print-column=%s' % select_col) if sep != '|': args.append('--separator=%s' % sep) for generic_args in kwargs_helper(kwargs): args.append('--%s=%s' % generic_args) for datum in chain(*data): args.append(str(datum)) p = run_zenity('--list', *args) if p.wait() == 0: return p.stdout.read().strip().split(sep)
python
def getArticles(self, page=1, count=100, sortBy = "rel", sortByAsc = False, returnInfo=ReturnInfo()): """ return a list of articles that match the topic page @param page: which page of the results to return (default: 1) @param count: number of articles to return (default: 100) @param sortBy: how are articles sorted. Options: id (internal id), date (publishing date), cosSim (closeness to the event centroid), rel (relevance to the query), sourceImportance (manually curated score of source importance - high value, high importance), sourceImportanceRank (reverse of sourceImportance), sourceAlexaGlobalRank (global rank of the news source), sourceAlexaCountryRank (country rank of the news source), socialScore (total shares on social media), facebookShares (shares on Facebook only) @param sortByAsc: should the results be sorted in ascending order (True) or descending (False) @param returnInfo: what details should be included in the returned information """ assert page >= 1 assert count <= 100 params = { "action": "getArticlesForTopicPage", "resultType": "articles", "dataType": self.topicPage["dataType"], "articlesCount": count, "articlesSortBy": sortBy, "articlesSortByAsc": sortByAsc, "page": page, "topicPage": json.dumps(self.topicPage) } params.update(returnInfo.getParams("articles")) return self.eventRegistry.jsonRequest("/json/article", params)
python
def main(): """ Run through simple demonstration of alarm concept """ alarm = XBeeAlarm('/dev/ttyUSB0', '\x56\x78') routine = SimpleWakeupRoutine(alarm) from time import sleep while True: """ Run the routine with 5 second delays """ try: print "Waiting 5 seconds..." sleep(5) print "Firing" routine.trigger() except KeyboardInterrupt: break
java
public void setMaxSize(long maxSize) { // if the new maxSize is <= 0, clear keyLRUList if (maxSize <= 0) { keyLRUList.clear(); } else if (maxSize > 0 && this.maxSize <= 0) { // if the new maxSize > 0 and the old is <= 0, fill in LRU list - // order will be meaningless for now Iterator keys = cacheLineTable.keySet().iterator(); while (keys.hasNext()) { keyLRUList.add(keys.next()); } } // if the new maxSize is less than the current cache size, shrink the // cache. if (maxSize > 0 && cacheLineTable.size() > maxSize) { while (cacheLineTable.size() > maxSize) { Object lastKey = keyLRUList.getLast(); removeObject(lastKey); } } this.maxSize = maxSize; }
java
public ValueSetContactComponent addContact() { //3 ValueSetContactComponent t = new ValueSetContactComponent(); if (this.contact == null) this.contact = new ArrayList<ValueSetContactComponent>(); this.contact.add(t); return t; }
java
public RandomVariable getImpliedBachelierATMOptionVolatility(RandomVariable optionValue, double optionMaturity, double swapAnnuity){ return optionValue.average().mult(Math.sqrt(2.0 * Math.PI / optionMaturity) / swapAnnuity); }
java
public List<List<String>> decodeLabeledSequences(List<List<SeqElement>> seqs) { List<List<List<String>>> n1 = new ArrayList<List<List<String>>>(); for (List<SeqElement> seq : seqs) { List<List<String>> n2 = new ArrayList<List<String>>(); for (SeqElement el : seq) { n2.add(el.getFeatures()); } n1.add(n2); } return decoder.decodeFromJava(n1); }
python
def worker(self): """ Returns the worker object for loading records for this record box. :return <XOrbLookupWorker> """ if self._worker is None: self._worker = XOrbLookupWorker(self.isThreadEnabled()) self._worker.setBatchSize(self._batchSize) self._worker.setBatched(not self.isThreadEnabled()) # connect the worker self.loadRequested.connect(self._worker.loadRecords) self._worker.loadingStarted.connect(self.markLoadingStarted) self._worker.loadingFinished.connect(self.markLoadingFinished) self._worker.loadedRecords.connect(self.addRecordsFromThread) return self._worker
python
def drawCircle(self, center, radius): """Draw a circle given its center and radius. """ if not radius > 1e-5: raise ValueError("radius must be positive") center = Point(center) p1 = center - (radius, 0) return self.drawSector(center, p1, 360, fullSector=False)
python
def bind(topic, signal=None, kind=MIDDLE, nice=-1): """ This is a decorator function, so you should use it as: @bind('init') def process_init(a, b): ... """ def f(func): if not topic in _receivers: receivers = _receivers[topic] = [] else: receivers = _receivers[topic] if nice == -1: if kind == MIDDLE: n = 500 elif kind == HIGH: n = 100 else: n = 900 else: n = nice if callable(func): func_name = func.__module__ + '.' + func.__name__ func = func else: func_name = func func = None _f = (n, {'func':func, 'signal':signal, 'func_name':func_name}) receivers.append(_f) return func return f
python
def _sort_layers(self): """Sort the layers by depth.""" self._layers = OrderedDict(sorted(self._layers.items(), key=lambda t: t[0]))
python
def output(self): """ Return a 20-byte hash corresponding to this script (or None if not applicable). """ hash160 = self._script_info.get("hash160", None) if hash160: yield ("hash160", b2h(hash160), None) address = self.address() yield ("address", address, "%s address" % self._network.network_name) yield ("%s_address" % self._network.symbol, address, "legacy")
python
def has_value(self): """return True if value has been set""" try: if isinstance(self.__value, Expression): return self.__value.has_value return True except AttributeError: return False
java
private static byte[] getIPv6ByName(CharSequence ip, boolean ipv4Mapped) { final byte[] bytes = new byte[IPV6_BYTE_COUNT]; final int ipLength = ip.length(); int compressBegin = 0; int compressLength = 0; int currentIndex = 0; int value = 0; int begin = -1; int i = 0; int ipv6Separators = 0; int ipv4Separators = 0; int tmp; boolean needsShift = false; for (; i < ipLength; ++i) { final char c = ip.charAt(i); switch (c) { case ':': ++ipv6Separators; if (i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR || ipv4Separators > 0 || ipv6Separators > IPV6_MAX_SEPARATORS || currentIndex + 1 >= bytes.length) { return null; } value <<= (IPV6_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2; if (compressLength > 0) { compressLength -= 2; } // The value integer holds at most 4 bytes from right (most significant) to left (least significant). // The following bit shifting is used to extract and re-order the individual bytes to achieve a // left (most significant) to right (least significant) ordering. bytes[currentIndex++] = (byte) (((value & 0xf) << 4) | ((value >> 4) & 0xf)); bytes[currentIndex++] = (byte) ((((value >> 8) & 0xf) << 4) | ((value >> 12) & 0xf)); tmp = i + 1; if (tmp < ipLength && ip.charAt(tmp) == ':') { ++tmp; if (compressBegin != 0 || (tmp < ipLength && ip.charAt(tmp) == ':')) { return null; } ++ipv6Separators; needsShift = ipv6Separators == 2 && value == 0; compressBegin = currentIndex; compressLength = bytes.length - compressBegin - 2; ++i; } value = 0; begin = -1; break; case '.': ++ipv4Separators; tmp = i - begin; // tmp is the length of the current segment. if (tmp > IPV4_MAX_CHAR_BETWEEN_SEPARATOR || begin < 0 || ipv4Separators > IPV4_SEPARATORS || (ipv6Separators > 0 && (currentIndex + compressLength < 12)) || i + 1 >= ipLength || currentIndex >= bytes.length || ipv4Separators == 1 && // We also parse pure IPv4 addresses as IPv4-Mapped for ease of use. ((!ipv4Mapped || currentIndex != 0 && !isValidIPv4Mapped(bytes, currentIndex, compressBegin, compressLength)) || (tmp == 3 && (!isValidNumericChar(ip.charAt(i - 1)) || !isValidNumericChar(ip.charAt(i - 2)) || !isValidNumericChar(ip.charAt(i - 3))) || tmp == 2 && (!isValidNumericChar(ip.charAt(i - 1)) || !isValidNumericChar(ip.charAt(i - 2))) || tmp == 1 && !isValidNumericChar(ip.charAt(i - 1))))) { return null; } value <<= (IPV4_MAX_CHAR_BETWEEN_SEPARATOR - tmp) << 2; // The value integer holds at most 3 bytes from right (most significant) to left (least significant). // The following bit shifting is to restructure the bytes to be left (most significant) to // right (least significant) while also accounting for each IPv4 digit is base 10. begin = (value & 0xf) * 100 + ((value >> 4) & 0xf) * 10 + ((value >> 8) & 0xf); if (begin < 0 || begin > 255) { return null; } bytes[currentIndex++] = (byte) begin; value = 0; begin = -1; break; default: if (!isValidHexChar(c) || (ipv4Separators > 0 && !isValidNumericChar(c))) { return null; } if (begin < 0) { begin = i; } else if (i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR) { return null; } // The value is treated as a sort of array of numbers because we are dealing with // at most 4 consecutive bytes we can use bit shifting to accomplish this. 
// The most significant byte will be encountered first, and reside in the right most // position of the following integer value += StringUtil.decodeHexNibble(c) << ((i - begin) << 2); break; } } final boolean isCompressed = compressBegin > 0; // Finish up last set of data that was accumulated in the loop (or before the loop) if (ipv4Separators > 0) { if (begin > 0 && i - begin > IPV4_MAX_CHAR_BETWEEN_SEPARATOR || ipv4Separators != IPV4_SEPARATORS || currentIndex >= bytes.length) { return null; } if (ipv6Separators == 0) { compressLength = 12; } else if (ipv6Separators >= IPV6_MIN_SEPARATORS && (!isCompressed && (ipv6Separators == 6 && ip.charAt(0) != ':') || isCompressed && (ipv6Separators < IPV6_MAX_SEPARATORS && (ip.charAt(0) != ':' || compressBegin <= 2)))) { compressLength -= 2; } else { return null; } value <<= (IPV4_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2; // The value integer holds at most 3 bytes from right (most significant) to left (least significant). // The following bit shifting is to restructure the bytes to be left (most significant) to // right (least significant) while also accounting for each IPv4 digit is base 10. begin = (value & 0xf) * 100 + ((value >> 4) & 0xf) * 10 + ((value >> 8) & 0xf); if (begin < 0 || begin > 255) { return null; } bytes[currentIndex++] = (byte) begin; } else { tmp = ipLength - 1; if (begin > 0 && i - begin > IPV6_MAX_CHAR_BETWEEN_SEPARATOR || ipv6Separators < IPV6_MIN_SEPARATORS || !isCompressed && (ipv6Separators + 1 != IPV6_MAX_SEPARATORS || ip.charAt(0) == ':' || ip.charAt(tmp) == ':') || isCompressed && (ipv6Separators > IPV6_MAX_SEPARATORS || (ipv6Separators == IPV6_MAX_SEPARATORS && (compressBegin <= 2 && ip.charAt(0) != ':' || compressBegin >= 14 && ip.charAt(tmp) != ':'))) || currentIndex + 1 >= bytes.length || begin < 0 && ip.charAt(tmp - 1) != ':' || compressBegin > 2 && ip.charAt(0) == ':') { return null; } if (begin >= 0 && i - begin <= IPV6_MAX_CHAR_BETWEEN_SEPARATOR) { value <<= (IPV6_MAX_CHAR_BETWEEN_SEPARATOR - (i - begin)) << 2; } // The value integer holds at most 4 bytes from right (most significant) to left (least significant). // The following bit shifting is used to extract and re-order the individual bytes to achieve a // left (most significant) to right (least significant) ordering. bytes[currentIndex++] = (byte) (((value & 0xf) << 4) | ((value >> 4) & 0xf)); bytes[currentIndex++] = (byte) ((((value >> 8) & 0xf) << 4) | ((value >> 12) & 0xf)); } i = currentIndex + compressLength; if (needsShift || i >= bytes.length) { // Right shift array if (i >= bytes.length) { ++compressBegin; } for (i = currentIndex; i < bytes.length; ++i) { for (begin = bytes.length - 1; begin >= compressBegin; --begin) { bytes[begin] = bytes[begin - 1]; } bytes[begin] = 0; ++compressBegin; } } else { // Selectively move elements for (i = 0; i < compressLength; ++i) { begin = i + compressBegin; currentIndex = begin + compressLength; if (currentIndex < bytes.length) { bytes[currentIndex] = bytes[begin]; bytes[begin] = 0; } else { break; } } } if (ipv4Separators > 0) { // We only support IPv4-Mapped addresses [1] because IPv4-Compatible addresses are deprecated [2]. // [1] https://tools.ietf.org/html/rfc4291#section-2.5.5.2 // [2] https://tools.ietf.org/html/rfc4291#section-2.5.5.1 bytes[10] = bytes[11] = (byte) 0xff; } return bytes; }
python
async def getitem(self, url: str, url_vars: Dict[str, str] = {}, *, accept: str = sansio.accept_format(), jwt: Opt[str] = None, oauth_token: Opt[str] = None ) -> Any: """Send a GET request for a single item to the specified endpoint.""" data, _ = await self._make_request("GET", url, url_vars, b"", accept, jwt=jwt, oauth_token=oauth_token) return data
java
@Override public <T> T convertValue(String rawValue, Class<T> type) { assertNotClosed(); @SuppressWarnings("unchecked") T value = (T) getConversionManager().convert(rawValue, type); return value; }
java
public static HELM2Notation readRNA(String notation) throws FastaFormatException, NotationException, ChemistryException, NucleotideLoadingException { HELM2Notation helm2notation = new HELM2Notation(); PolymerNotation polymer = new PolymerNotation("RNA1"); if (!(FastaFormat.isNormalDirection(notation))) { String annotation = "3'-5'"; helm2notation.addPolymer(new PolymerNotation(polymer.getPolymerID(), FastaFormat.generateElementsforRNA(notation, polymer.getPolymerID()), annotation)); } else { helm2notation.addPolymer(new PolymerNotation(polymer.getPolymerID(), FastaFormat.generateElementsforRNA(notation, polymer.getPolymerID()))); } return helm2notation; }
java
public static String replaceHtmlEntities(String content, Map<String, Character> map) { for (Entry<String, Character> entry : map.entrySet()) { if (content.indexOf(entry.getKey()) != -1) { content = content.replace(entry.getKey(), String.valueOf(entry.getValue())); } } return content; }
python
def glossary(term=None, return_format=None): """List of glossary terms and definitions. :param term: a whole or partial word to "search" in the API """ uri = 'glossary' if term: uri = '/'.join([uri, term]) return _get(uri, return_format)
java
public static void toJson(Object object, JsonBuffer output) { if (object == null) { return; } for (final Method method : object.getClass().getMethods()) { try { if (!method.isSynthetic() && !Modifier.isStatic(method.getModifiers()) && method.getParameterCount() == 0 && (method.getReturnType().isPrimitive() || String.class.equals(method.getReturnType()) || method.getReturnType().isEnum())) { final String name = method.getName(); if (name.startsWith("get") && name.length() > 3) { //$NON-NLS-1$ output.add(makeName(name, 3), method.invoke(object)); } else if (name.startsWith("is") && name.length() > 2) { //$NON-NLS-1$ output.add(makeName(name, 2), method.invoke(object)); } } } catch (Exception e) { // } } }
java
public JsonPropertyBuilder<T, P> router(JsonCoderRouter<P> router) { return new JsonPropertyBuilder<T, P>(coderClass, name, null, router); }
java
public static void setCurrentPayload(Variables stack, String name, WindupVertexFrame frame) throws IllegalArgumentException { Map<String, Iterable<? extends WindupVertexFrame>> vars = stack.peek(); Iterable<? extends WindupVertexFrame> existingValue = vars.get(name); if (!(existingValue == null || existingValue instanceof IterationPayload)) { throw new IllegalArgumentException("Variable \"" + name + "\" has already been assigned and cannot be used as an " + Iteration.class.getSimpleName() + " variable."); } vars.put(name, new IterationPayload<>(frame)); }
python
def pause(self, pause=0): """Insert a `pause`, in seconds.""" fr = self.frames[-1] n = int(self.fps * pause) for i in range(n): fr2 = "/tmp/vpvid/" + str(len(self.frames)) + ".png" self.frames.append(fr2) os.system("cp -f %s %s" % (fr, fr2))
java
public static int nextClearBit(long v, int start) { if(start >= Long.SIZE) { return -1; } start = start < 0 ? 0 : start; long cur = ~v & (LONG_ALL_BITS << start); return cur == 0 ? -1 : Long.numberOfTrailingZeros(cur); }
java
public void marshall(TargetLocation targetLocation, ProtocolMarshaller protocolMarshaller) { if (targetLocation == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(targetLocation.getAccounts(), ACCOUNTS_BINDING); protocolMarshaller.marshall(targetLocation.getRegions(), REGIONS_BINDING); protocolMarshaller.marshall(targetLocation.getTargetLocationMaxConcurrency(), TARGETLOCATIONMAXCONCURRENCY_BINDING); protocolMarshaller.marshall(targetLocation.getTargetLocationMaxErrors(), TARGETLOCATIONMAXERRORS_BINDING); protocolMarshaller.marshall(targetLocation.getExecutionRoleName(), EXECUTIONROLENAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
python
def _get_view_result(view, raw_result, **kwargs): """ Get view results helper. """ if raw_result: return view(**kwargs) if kwargs: return Result(view, **kwargs) return view.result
python
def get(self, variable_path: str,
        default: t.Optional[t.Any] = None,
        coerce_type: t.Optional[t.Type] = None,
        coercer: t.Optional[t.Callable] = None,
        **kwargs):
    """
    Reads a value of ``variable_path`` from consul kv storage.

    :param variable_path: a delimiter-separated path to a nested value
    :param default: default value if there's no object by specified path
    :param coerce_type: cast a type of a value to a specified one
    :param coercer: perform a type casting with specified callback
    :param kwargs: additional arguments inherited parser may need
    :return: value or default
    :raises config.exceptions.KVStorageKeyDoestNotExist: if specified
        ``endpoint`` does not exist
    :raises config.exceptions.KVStorageValueIsEmpty: if specified
        ``endpoint`` does not contain a config
    """
    return self.inner_parser.get(
        variable_path,
        default=default,
        coerce_type=coerce_type,
        coercer=coercer,
        **kwargs,
    )
python
def setup_new_conf(self):  # pylint: disable=too-many-branches
    """Setup the new configuration received from Arbiter

    This function calls the base satellite treatment and manages the configuration needed
    for a simple satellite daemon that executes some actions (eg. poller or reactionner):
    - configure the passive mode
    - configure the workers
    - configure the tags
    - configure the modules

    :return: None
    """
    # Execute the base class treatment...
    super(Satellite, self).setup_new_conf()

    # ...then our own specific treatment!
    with self.conf_lock:
        logger.info("Received a new configuration")

        # self_conf is our own configuration from the alignak environment
        # self_conf = self.cur_conf['self_conf']

        # Now manage modules
        if not self.have_modules:
            try:
                self.modules = unserialize(self.cur_conf['modules'], no_load=True)
            except AlignakClassLookupException as exp:  # pragma: no cover, simple protection
                logger.error('Cannot un-serialize modules configuration '
                             'received from arbiter: %s', exp)
            if self.modules:
                logger.info("I received some modules configuration: %s", self.modules)
                self.have_modules = True

                for module in self.modules:
                    if module.name not in self.q_by_mod:
                        self.q_by_mod[module.name] = {}

                self.do_load_modules(self.modules)
                # and start external modules too
                self.modules_manager.start_external_instances()
            else:
                logger.info("I do not have modules")

        # Initialize connection with all our satellites
        logger.info("Initializing connection with my satellites:")
        my_satellites = self.get_links_of_type(s_type='')
        for satellite in list(my_satellites.values()):
            logger.info("- : %s/%s", satellite.type, satellite.name)
            if not self.daemon_connection_init(satellite):
                logger.error("Satellite connection failed: %s", satellite)

    # Now I have a configuration!
    self.have_conf = True
python
def capture_delete_records(records):
    """Writes all of our delete events to DynamoDB."""
    for record in records:
        model = create_delete_model(record)
        if model:
            try:
                model.delete(condition=(CurrentVPCModel.eventTime <= record['detail']['eventTime']))
            except DeleteError:
                LOG.warning(f'[?] Unable to delete VPC. VPC does not exist. Record: {record}')
        else:
            LOG.warning(f'[?] Unable to delete VPC. VPC does not exist. Record: {record}')
java
public SecurityGroup withIpPermissionsEgress(IpPermission... ipPermissionsEgress) {
    if (this.ipPermissionsEgress == null) {
        setIpPermissionsEgress(new com.amazonaws.internal.SdkInternalList<IpPermission>(ipPermissionsEgress.length));
    }
    for (IpPermission ele : ipPermissionsEgress) {
        this.ipPermissionsEgress.add(ele);
    }
    return this;
}
java
public DBCluster withReadReplicaIdentifiers(String... readReplicaIdentifiers) {
    if (this.readReplicaIdentifiers == null) {
        setReadReplicaIdentifiers(new com.amazonaws.internal.SdkInternalList<String>(readReplicaIdentifiers.length));
    }
    for (String ele : readReplicaIdentifiers) {
        this.readReplicaIdentifiers.add(ele);
    }
    return this;
}
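The two with-ers above follow the AWS SDK for Java v1 fluent varargs convention. Assuming that convention and the usual EC2/RDS model packages (an assumption, since the snippets do not show their package declarations), a call site might look like the sketch below; the identifiers and ports are made up.

import com.amazonaws.services.ec2.model.IpPermission;
import com.amazonaws.services.ec2.model.SecurityGroup;
import com.amazonaws.services.rds.model.DBCluster;

public class WitherDemo {
    public static void main(String[] args) {
        // Hypothetical values purely for illustration.
        DBCluster cluster = new DBCluster()
                .withReadReplicaIdentifiers("replica-1", "replica-2");

        SecurityGroup group = new SecurityGroup()
                .withIpPermissionsEgress(new IpPermission()
                        .withIpProtocol("tcp")
                        .withFromPort(443)
                        .withToPort(443));

        System.out.println(cluster.getReadReplicaIdentifiers());
        System.out.println(group.getIpPermissionsEgress());
    }
}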
java
private double phi(long timestampMillis) {
    long timeDiffMillis;
    double meanMillis;
    double stdDeviationMillis;

    synchronized (heartbeatHistory) {
        long lastTimestampMillis = lastHeartbeatMillis;
        if (lastTimestampMillis == NO_HEARTBEAT_TIMESTAMP) {
            return 0.0;
        }

        timeDiffMillis = timestampMillis - lastTimestampMillis;
        meanMillis = heartbeatHistory.mean();
        stdDeviationMillis = ensureValidStdDeviation(heartbeatHistory.stdDeviation());
    }

    return phi(timeDiffMillis, meanMillis + acceptableHeartbeatPauseMillis, stdDeviationMillis);
}
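The single-argument phi above delegates to a two-argument overload that is not shown. One widely used formulation (e.g. in Akka-style accrual failure detectors) scores the elapsed time against a normal distribution and returns -log10 of the tail probability via a logistic approximation of the normal CDF; the sketch below assumes that formulation and is not taken from this snippet's source.

public class PhiSketch {
    // Assumed phi(timeDiff, mean, stdDev) overload using a logistic approximation of the normal CDF.
    static double phi(double timeDiffMillis, double meanMillis, double stdDeviationMillis) {
        double y = (timeDiffMillis - meanMillis) / stdDeviationMillis;
        double e = Math.exp(-y * (1.5976 + 0.070566 * y * y));
        if (timeDiffMillis > meanMillis) {
            return -Math.log10(e / (1.0 + e));          // small tail probability -> large phi
        }
        return -Math.log10(1.0 - 1.0 / (1.0 + e));      // early/on-time heartbeats -> phi near 0
    }

    public static void main(String[] args) {
        // With a 1000 ms mean and 100 ms std deviation, suspicion grows as silence lengthens.
        System.out.println(phi(1000, 1000, 100)); // ~0.3
        System.out.println(phi(1300, 1000, 100)); // noticeably higher
        System.out.println(phi(2000, 1000, 100)); // very high -> node likely down
    }
}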
java
public ServerGroup withServerList(Server... serverList) {
    if (this.serverList == null) {
        setServerList(new java.util.ArrayList<Server>(serverList.length));
    }
    for (Server ele : serverList) {
        this.serverList.add(ele);
    }
    return this;
}
java
private void drawLegend(Graphics g) {
    Graphics2D g2 = (Graphics2D) g;
    setRenderingHints(g2);
    g2.setColor(Color.BLACK);
    Font font = g2.getFont();
    Font f = new Font(font.getFontName(), Font.BOLD, font.getSize());
    g2.setFont(f);
    fm = getFontMetrics(f);
    int fontHeight = fm.getHeight();

    for (int i = 0; i < title.size(); i++) {
        if (fm.stringWidth(title.get(i)) > .8 * this.getWidth()) {
            f = new Font(font.getFontName(), Font.BOLD, 10);
            g2.setFont(f);
            fm = getFontMetrics(f);
        }
        g2.drawString(title.get(i), (getSize().width - fm.stringWidth(title.get(i))) / 2, ((i + 1) * fontHeight));
        // g2.setFont(font);
    }

    // draw the maxPercentage and minPercentage values
    String label = df.format(minPercentage);
    g2.drawString(label, left - 5 - (fm.stringWidth(label)), bottom + titleHeight / 6);
    g2.drawLine(left - 5, bottom, left, bottom);

    double d = minPercentage + kmfi.yaxisPercentIncrement;
    // double graphHeight = top - bottom;
    while (d < maxPercentage) {
        int yvalue = bottom - (int) (d * (bottom - top));
        label = df.format(d * 100);
        g2.drawString(label, left - 5 - (fm.stringWidth(label)), yvalue + titleHeight / 6);
        // g2.drawLine(left - 5, yvalue, left, yvalue);
        d = d + kmfi.yaxisPercentIncrement;
    }

    label = df.format(maxPercentage * 100);
    g2.drawString(label, left - 5 - (fm.stringWidth(label)), top + (titleHeight) / 6);
    g2.drawLine(left - 5, top, left, top);

    // Create a rotation transformation for the font.
    AffineTransform fontAT = new AffineTransform();
    // Derive a new font using a rotation transform
    fontAT.rotate(270 * java.lang.Math.PI / 180);
    Font theDerivedFont = f.deriveFont(fontAT);
    // set the derived font in the Graphics2D context
    g2.setFont(theDerivedFont);
    // Render a string using the derived font
    int yaxisHeight = fm.stringWidth(kmfi.yAxisLegend);
    g2.drawString(kmfi.yAxisLegend, yaxisLabel, (bottom - (int) (.5 * (bottom - top))) + yaxisHeight / 2);
    // put the original font back
    g2.setFont(f);

    double timeDistance = maxTime - minTime;
    double timeIncrement = timeDistance * kmfi.xaxisPercentIncrement;
    double timeInt = (int) Math.floor(timeIncrement);
    if (timeInt < 1.0) {
        timeInt = 1.0;
    }
    adjustedPercentIncrement = timeInt / timeDistance;
    d = adjustedPercentIncrement; // kmfi.xaxisPercentIncrement;

    xAxisTimeValues.clear();
    xAxisTimeCoordinates.clear();

    // if we don't have time values then use percentage to set time.
    // Not perfect but allows different tics
    if (kmfi.xAxisLabels.isEmpty()) {
        xAxisTimeValues.add(minTime);
        xAxisTimeCoordinates.add(left);
        while (d <= 1.0) {
            double xaxisTime = ((minTime * kmfi.timeScale) + d * ((maxTime - minTime) * kmfi.timeScale));
            xAxisTimeValues.add(xaxisTime);
            Integer coordinate = left + (int) (d * (right - left));
            xAxisTimeCoordinates.add(coordinate);
            // System.out.println(d + " " + left + " " + right + " " + coordinate + " " + minTime + " " + maxTime);
            d = d + adjustedPercentIncrement; // kmfi.xaxisPercentIncrement;
        }
    } else {
        minTime = kmfi.xAxisLabels.get(0);
        maxTime = kmfi.xAxisLabels.get(kmfi.xAxisLabels.size() - 1);
        for (Double xaxisTime : kmfi.xAxisLabels) {
            xAxisTimeValues.add(xaxisTime);
            d = (xaxisTime - minTime) / (maxTime - minTime);
            Integer coordinate = left + (int) (d * (right - left));
            xAxisTimeCoordinates.add(coordinate);
        }
    }

    for (int i = 0; i < xAxisTimeValues.size(); i++) {
        Double xaxisTime = xAxisTimeValues.get(i);
        Integer xCoordinate = xAxisTimeCoordinates.get(i);
        label = df.format(xaxisTime);
        if (i == xAxisTimeValues.size() - 1) {
            g2.drawString(label, xCoordinate - (fm.stringWidth(label)), bottom + fm.getHeight() + 5);
        } else {
            g2.drawString(label, xCoordinate - (fm.stringWidth(label) / 2), bottom + fm.getHeight() + 5);
        }
        g2.drawLine(xCoordinate, bottom, xCoordinate, bottom + 5);
    }

    // draw the vertical and horizontal lines
    g2.setStroke(kmfi.axisStroke);
    g2.drawLine(left, top, left, bottom);
    g2.drawLine(left, bottom, right, bottom);

    // draw xAxis legend
    g2.drawString(kmfi.xAxisLegend, getSize().width / 2 - (fm.stringWidth(kmfi.xAxisLegend) / 2), bottom + 2 * fm.getHeight() + 10);
}
python
def update(self, data):
    """TODO"""
    self.debug(data)
    url = "{base}/{uuid}".format(
        base=self.local_base_url,
        uuid=data.get('uuid')
    )
    return self.core.update(url, data)