language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
python
def write_crc32(fo, bytes): """A 4-byte, big-endian CRC32 checksum""" data = crc32(bytes) & 0xFFFFFFFF fo.write(pack('>I', data))
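The row above omits its imports. A minimal usage sketch, assuming crc32 comes from zlib and pack from struct, restating the helper so it runs standalone (the parameter is renamed to avoid shadowing the builtin bytes):

import io
from struct import pack
from zlib import crc32

def write_crc32(fo, data):
    """A 4-byte, big-endian CRC32 checksum."""
    fo.write(pack('>I', crc32(data) & 0xFFFFFFFF))

buf = io.BytesIO()
write_crc32(buf, b'hello')
assert len(buf.getvalue()) == 4  # the checksum is always exactly four bytes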
java
private int matchAnyToOne(final int wordIndex, final int candIndex) { if (replacementsAnyToOne.containsKey(candidate[candIndex])) { for (final char[] rep : replacementsAnyToOne.get(candidate[candIndex])) { int i = 0; while (i < rep.length && (wordIndex + i) < wordLen && rep[i] == wordProcessed[wordIndex + i]) { i++; } if (i == rep.length) { return i; } } } return 0; }
python
def GetName(self): """ Get the asset name based on its type. Returns: str: 'NEO', 'NEOGas', or the decoded asset name for other types """ if self.AssetType == AssetType.GoverningToken: return "NEO" elif self.AssetType == AssetType.UtilityToken: return "NEOGas" if type(self.Name) is bytes: return self.Name.decode('utf-8') return self.Name
java
public NumberExpression<Integer> indexOf(Expression<String> str, int i) { return Expressions.numberOperation(Integer.class, Ops.INDEX_OF_2ARGS, mixin, str, ConstantImpl.create(i)); }
python
def disable_device(self, token, email=None, user_id=None): """ This request manually disables pushes to a device until it comes online again. """ call = "/api/users/disableDevice" payload = {} payload["token"] = str(token) if email is not None: payload["email"] = str(email) if user_id is not None: payload["userId"] = str(user_id) return self.api_call(call=call, method="POST", json=payload)
python
def extract_hash(hash_fn, hash_type='sha256', file_name='', source='', source_hash_name=None): ''' .. versionchanged:: 2016.3.5 Prior to this version, only the ``file_name`` argument was considered for filename matches in the hash file. This would be problematic for cases in which the user was relying on a remote checksum file that they do not control, and they wished to use a different name for that file on the minion from the filename on the remote server (and in the checksum file). For example, managing ``/tmp/myfile.tar.gz`` when the remote file was at ``https://mydomain.tld/different_name.tar.gz``. The :py:func:`file.managed <salt.states.file.managed>` state now also passes this function the source URI as well as the ``source_hash_name`` (if specified). In cases where ``source_hash_name`` is specified, it takes precedence over both the ``file_name`` and ``source``. When it is not specified, ``file_name`` takes precedence over ``source``. This allows for better capability for matching hashes. .. versionchanged:: 2016.11.0 File name and source URI matches are no longer disregarded when ``source_hash_name`` is specified. They will be used as fallback matches if there is no match to the ``source_hash_name`` value. This routine is called from the :mod:`file.managed <salt.states.file.managed>` state to pull a hash from a remote file. Regular expressions are used line by line on the ``source_hash`` file, to find a potential candidate of the indicated hash type. This avoids many problems of arbitrary file layout rules. It specifically permits pulling hash codes from debian ``*.dsc`` files. If no exact match of a hash and filename are found, then the first hash found (if any) will be returned. If no hashes at all are found, then ``None`` will be returned. For example: .. code-block:: yaml openerp_7.0-latest-1.tar.gz: file.managed: - name: /tmp/openerp_7.0-20121227-075624-1_all.deb - source: http://nightly.openerp.com/7.0/nightly/deb/openerp_7.0-20121227-075624-1.tar.gz - source_hash: http://nightly.openerp.com/7.0/nightly/deb/openerp_7.0-20121227-075624-1.dsc CLI Example: .. code-block:: bash salt '*' file.extract_hash /path/to/hash/file sha512 /etc/foo ''' hash_len = HASHES.get(hash_type) if hash_len is None: if hash_type: log.warning( 'file.extract_hash: Unsupported hash_type \'%s\', falling ' 'back to matching any supported hash_type', hash_type ) hash_type = '' hash_len_expr = '{0},{1}'.format(min(HASHES_REVMAP), max(HASHES_REVMAP)) else: hash_len_expr = six.text_type(hash_len) filename_separators = string.whitespace + r'\/' if source_hash_name: if not isinstance(source_hash_name, six.string_types): source_hash_name = six.text_type(source_hash_name) source_hash_name_idx = (len(source_hash_name) + 1) * -1 log.debug( 'file.extract_hash: Extracting %s hash for file matching ' 'source_hash_name \'%s\'', 'any supported' if not hash_type else hash_type, source_hash_name ) if file_name: if not isinstance(file_name, six.string_types): file_name = six.text_type(file_name) file_name_basename = os.path.basename(file_name) file_name_idx = (len(file_name_basename) + 1) * -1 if source: if not isinstance(source, six.string_types): source = six.text_type(source) urlparsed_source = _urlparse(source) source_basename = os.path.basename( urlparsed_source.path or urlparsed_source.netloc ) source_idx = (len(source_basename) + 1) * -1 basename_searches = [x for x in (file_name, source) if x] if basename_searches: log.debug( 'file.extract_hash: %s %s hash for file matching%s: %s', 'If no source_hash_name match found, will extract' if source_hash_name else 'Extracting', 'any supported' if not hash_type else hash_type, '' if len(basename_searches) == 1 else ' either of the following', ', '.join(basename_searches) ) partial = None found = {} with salt.utils.files.fopen(hash_fn, 'r') as fp_: for line in fp_: line = salt.utils.stringutils.to_unicode(line.strip()) hash_re = r'(?i)(?<![a-z0-9])([a-f0-9]{' + hash_len_expr + '})(?![a-z0-9])' hash_match = re.search(hash_re, line) matched = None if hash_match: matched_hsum = hash_match.group(1) if matched_hsum is not None: matched_type = HASHES_REVMAP.get(len(matched_hsum)) if matched_type is None: # There was a match, but it's not of the correct length # to match one of the supported hash types. matched = None else: matched = {'hsum': matched_hsum, 'hash_type': matched_type} if matched is None: log.debug( 'file.extract_hash: In line \'%s\', no %shash found', line, '' if not hash_type else hash_type + ' ' ) continue if partial is None: partial = matched def _add_to_matches(found, line, match_type, value, matched): log.debug( 'file.extract_hash: Line \'%s\' matches %s \'%s\'', line, match_type, value ) found.setdefault(match_type, []).append(matched) hash_matched = False if source_hash_name: if line.endswith(source_hash_name): # Checking the character before where the basename # should start for either whitespace or a path # separator. We can't just rsplit on spaces/whitespace, # because the filename may contain spaces. try: if line[source_hash_name_idx] in string.whitespace: _add_to_matches(found, line, 'source_hash_name', source_hash_name, matched) hash_matched = True except IndexError: pass elif re.match(re.escape(source_hash_name) + r'\s+', line): _add_to_matches(found, line, 'source_hash_name', source_hash_name, matched) hash_matched = True if file_name: if line.endswith(file_name_basename): # Checking the character before where the basename # should start for either whitespace or a path # separator. We can't just rsplit on spaces/whitespace, # because the filename may contain spaces. try: if line[file_name_idx] in filename_separators: _add_to_matches(found, line, 'file_name', file_name, matched) hash_matched = True except IndexError: pass elif re.match(re.escape(file_name) + r'\s+', line): _add_to_matches(found, line, 'file_name', file_name, matched) hash_matched = True if source: if line.endswith(source_basename): # Same as above, we can't just do an rsplit here. try: if line[source_idx] in filename_separators: _add_to_matches(found, line, 'source', source, matched) hash_matched = True except IndexError: pass elif re.match(re.escape(source) + r'\s+', line): _add_to_matches(found, line, 'source', source, matched) hash_matched = True if not hash_matched: log.debug( 'file.extract_hash: Line \'%s\' contains %s hash ' '\'%s\', but line did not meet the search criteria', line, matched['hash_type'], matched['hsum'] ) for found_type, found_str in (('source_hash_name', source_hash_name), ('file_name', file_name), ('source', source)): if found_type in found: if len(found[found_type]) > 1: log.debug( 'file.extract_hash: Multiple %s matches for %s: %s', found_type, found_str, ', '.join( ['{0} ({1})'.format(x['hsum'], x['hash_type']) for x in found[found_type]] ) ) ret = found[found_type][0] log.debug( 'file.extract_hash: Returning %s hash \'%s\' as a match of %s', ret['hash_type'], ret['hsum'], found_str ) return ret if partial: log.debug( 'file.extract_hash: Returning the partially identified %s hash ' '\'%s\'', partial['hash_type'], partial['hsum'] ) return partial log.debug('file.extract_hash: No matches, returning None') return None
java
public IntStream stream() { IntStream.Builder builder = IntStream.builder(); forEach(builder); return builder.build(); }
java
@Override public com.liferay.commerce.product.model.CPOption deleteCPOption( com.liferay.commerce.product.model.CPOption cpOption) throws com.liferay.portal.kernel.exception.PortalException { return _cpOptionLocalService.deleteCPOption(cpOption); }
java
private boolean isUniqueFileResolved(final Set<String> alreadyHandled, final String s) { return this.uniqueFiles && alreadyHandled.contains(s); }
python
def simxSetObjectPosition(clientID, objectHandle, relativeToObjectHandle, position, operationMode): ''' Please have a look at the function description/documentation in the V-REP user manual ''' c_position = (ct.c_float*3)(*position) return c_SetObjectPosition(clientID, objectHandle, relativeToObjectHandle, c_position, operationMode)
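The only non-trivial step above is the ctypes conversion; in isolation it looks like this (values are illustrative):

import ctypes as ct

position = [0.5, 1.0, -0.25]
c_position = (ct.c_float * 3)(*position)  # fixed-size C float array for the remote API call
print(list(c_position))  # [0.5, 1.0, -0.25]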
java
public Column set(Option option, boolean set) { if (set) { options.add(option); } else { options.remove(option); } return this; }
java
public static String getPropertyNameConvention(Object object, String suffix) { if (object != null) { Class<?> type = object.getClass(); if (type.isArray()) { return getPropertyName(type.getComponentType()) + suffix + "Array"; } if (object instanceof Collection) { Collection coll = (Collection) object; if (coll.isEmpty()) { return "emptyCollection"; } Object first = coll.iterator().next(); if (coll instanceof List) { return getPropertyName(first.getClass()) + suffix + "List"; } if (coll instanceof Set) { return getPropertyName(first.getClass()) + suffix + "Set"; } return getPropertyName(first.getClass()) + suffix + "Collection"; } if (object instanceof Map) { Map map = (Map) object; if (map.isEmpty()) { return "emptyMap"; } Object entry = map.values().iterator().next(); if (entry != null) { return getPropertyName(entry.getClass()) + suffix + "Map"; } } else { return getPropertyName(object.getClass()) + suffix; } } return null; }
java
public static boolean isMultiNamedOutput(JobConf conf, String namedOutput) { checkNamedOutput(conf, namedOutput, false); return conf.getBoolean(MO_PREFIX + namedOutput + MULTI, false); }
python
def assume(self, cond): """ Optimizer hint: assume *cond* is always true. """ fn = self.module.declare_intrinsic("llvm.assume") return self.call(fn, [cond])
java
public static String yyyy_MM_dd(String strDate) { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); ParsePosition pos = new ParsePosition(0); Date strtodate = formatter.parse(strDate, pos); SimpleDateFormat formatter2 = new SimpleDateFormat(dateFormat); return formatter2.format(strtodate); }
java
public ArrayList<InetAddress> getNaturalEndpoints(RingPosition searchPosition) { Token searchToken = searchPosition.getToken(); Token keyToken = TokenMetadata.firstToken(tokenMetadata.sortedTokens(), searchToken); ArrayList<InetAddress> endpoints = getCachedEndpoints(keyToken); if (endpoints == null) { TokenMetadata tm = tokenMetadata.cachedOnlyTokenMap(); // if our cache got invalidated, it's possible there is a new token to account for too keyToken = TokenMetadata.firstToken(tm.sortedTokens(), searchToken); endpoints = new ArrayList<InetAddress>(calculateNaturalEndpoints(searchToken, tm)); cachedEndpoints.put(keyToken, endpoints); } return new ArrayList<InetAddress>(endpoints); }
java
public static final <T> int compare(T o1, T o2, Comparator<T> comp) { if (comp != null) { return comp.compare(o1, o2); } else { return ((Comparable<T>)o1).compareTo(o2); } }
python
def _value_tomof( value, type, indent=0, maxline=MAX_MOF_LINE, line_pos=0, end_space=0, avoid_splits=False): # pylint: disable=redefined-builtin """ Return a MOF string representing a CIM-typed value (scalar or array). In case of an array, the array items are separated by comma, but the surrounding curly braces are not added. Parameters: value (CIM-typed value or list of CIM-typed values): The value. indent (:term:`integer`): Number of spaces to indent any new lines that are generated. maxline (:term:`integer`): Maximum line length for the generated MOF. line_pos (:term:`integer`): Length of content already on the current line. end_space (:term:`integer`): Length of space to be left free on the last line. avoid_splits (bool): Avoid splits at the price of starting a new line instead of using the current line. Returns: tuple of * :term:`unicode string`: MOF string. * new line_pos """ if isinstance(value, list): mof = [] for i, v in enumerate(value): if i > 0: # Assume we would add comma and space as separator line_pos += 2 val_str, line_pos = _scalar_value_tomof( v, type, indent, maxline, line_pos, end_space + 2, avoid_splits) if i > 0: # Add the actual separator mof.append(u',') if val_str[0] != '\n': mof.append(u' ') else: # Adjust by the space we did not need line_pos -= 1 mof.append(val_str) mof_str = u''.join(mof) else: mof_str, line_pos = _scalar_value_tomof( value, type, indent, maxline, line_pos, end_space, avoid_splits) return mof_str, line_pos
python
def url_for(self, attr=None, filter_value=None, service_type=None, endpoint_type="publicURL", service_name=None, volume_service_name=None): """Fetches the public URL from the given service for a particular endpoint attribute. If none given, returns the first. See tests for sample service catalog.""" matching_endpoints = [] # We don't always get a service catalog back ... if "serviceCatalog" not in self.catalog["access"]: return None # Full catalog ... catalog = self.catalog["access"]["serviceCatalog"] for service in catalog: if service.get("type") != service_type: continue endpoints = service["endpoints"] for endpoint in endpoints: if not filter_value or endpoint.get(attr) == filter_value: endpoint["serviceName"] = service.get("name") matching_endpoints.append(endpoint) if not matching_endpoints: raise exc.EndpointNotFound() elif len(matching_endpoints) > 1: raise exc.AmbiguousEndpoints(endpoints=matching_endpoints) else: return matching_endpoints[0][endpoint_type]
python
def save_config(self, cmd="save config", confirm=False, confirm_response=""): """Save Config""" return super(ExtremeErsSSH, self).save_config( cmd=cmd, confirm=confirm, confirm_response=confirm_response )
java
public static Chart getMSDLineWithPowerModelChart(Trajectory t, int lagMin, int lagMax, double timelag, double a, double D) { double[] xData = new double[lagMax - lagMin + 1]; double[] yData = new double[lagMax - lagMin + 1]; double[] modelData = new double[lagMax - lagMin + 1]; MeanSquaredDisplacmentFeature msdeval = new MeanSquaredDisplacmentFeature( t, lagMin); msdeval.setTrajectory(t); msdeval.setTimelag(lagMin); for (int i = lagMin; i < lagMax + 1; i++) { msdeval.setTimelag(i); double msdhelp = msdeval.evaluate()[0]; xData[i - lagMin] = i; yData[i - lagMin] = msdhelp; modelData[i - lagMin] = 4 * D * Math.pow(i * timelag, a); } // Create Chart Chart chart = QuickChart.getChart("MSD Line", "LAG", "MSD", "MSD", xData, yData); chart.addSeries("y=4*D*t^alpha", xData, modelData); // Show it //new SwingWrapper(chart).displayChart(); return chart; }
java
@Bean @ConditionalOnMissingBean AutoAssignChecker autoAssignChecker(final TargetFilterQueryManagement targetFilterQueryManagement, final TargetManagement targetManagement, final DeploymentManagement deploymentManagement, final PlatformTransactionManager transactionManager) { return new AutoAssignChecker(targetFilterQueryManagement, targetManagement, deploymentManagement, transactionManager); }
java
public static void accept(SARLQuickfixProvider provider, Issue issue, IssueResolutionAcceptor acceptor) { final CapacityReferenceRemoveModification modification = new CapacityReferenceRemoveModification(); modification.setIssue(issue); modification.setTools(provider); acceptor.accept(issue, Messages.SARLQuickfixProvider_0, Messages.SARLQuickfixProvider_5, JavaPluginImages.IMG_CORRECTION_REMOVE, modification, IProposalRelevance.REMOVE_ARGUMENTS); }
java
public void setAttribute(ObjectName name, String attrName, Object value) throws Exception { checkClientConnected(); Attribute attribute = new Attribute(attrName, value); mbeanConn.setAttribute(name, attribute); }
python
def berksfile(self): """Return this cookbook's Berksfile instance.""" self.berks_path = os.path.join(self.path, 'Berksfile') if not self._berksfile: if not os.path.isfile(self.berks_path): raise ValueError("No Berksfile found at %s" % self.berks_path) self._berksfile = Berksfile(open(self.berks_path, 'r+')) return self._berksfile
java
public static Locale localeForString(String localeString) { if (localeString == null || localeString.isEmpty()) { return null; } Locale locale; String[] localeParts = localeString.split("_"); switch (localeParts.length) { case 1: locale = new Locale(localeParts[0]); break; case 2: locale = new Locale(localeParts[0], localeParts[1]); break; default: // Use default for 3 and above, merge the parts back and put them all in the variant StringBuilder variant = new StringBuilder(localeParts[2]); for (int i = 3; i < localeParts.length; i++) { variant.append("_"); variant.append(localeParts[i]); } locale = new Locale(localeParts[0], localeParts[1], variant.toString()); break; } return locale; }
python
def list_devices(names=None, continue_from=None, **kwargs): """List devices in settings file and print versions""" if not names: names = [device for device, _type in settings.GOLDEN_DEVICES if _type == 'OpenThread'] if continue_from: continue_from = names.index(continue_from) else: continue_from = 0 for port in names[continue_from:]: try: with OpenThreadController(port) as otc: print('%s: %s' % (port, otc.version)) except: logger.exception('failed to get version of %s' % port)
python
def create_flavor(self, nova, name, ram, vcpus, disk, flavorid="auto", ephemeral=0, swap=0, rxtx_factor=1.0, is_public=True): """Create the specified flavor.""" try: nova.flavors.find(name=name) except (exceptions.NotFound, exceptions.NoUniqueMatch): self.log.debug('Creating flavor ({})'.format(name)) nova.flavors.create(name, ram, vcpus, disk, flavorid, ephemeral, swap, rxtx_factor, is_public)
python
def build_statusbar(self): """construct and return statusbar widget""" info = {} cb = self.current_buffer btype = None if cb is not None: info = cb.get_info() btype = cb.modename info['buffer_no'] = self.buffers.index(cb) info['buffer_type'] = btype info['total_messages'] = self.dbman.count_messages('*') info['pending_writes'] = len(self.dbman.writequeue) info['input_queue'] = ' '.join(self.input_queue) lefttxt = righttxt = u'' if cb is not None: lefttxt, righttxt = settings.get(btype + '_statusbar', (u'', u'')) lefttxt = string_decode(lefttxt, 'UTF-8') lefttxt = lefttxt.format(**info) righttxt = string_decode(righttxt, 'UTF-8') righttxt = righttxt.format(**info) footerleft = urwid.Text(lefttxt, align='left') pending_writes = len(self.dbman.writequeue) if pending_writes > 0: righttxt = ('|' * pending_writes) + ' ' + righttxt footerright = urwid.Text(righttxt, align='right') columns = urwid.Columns([ footerleft, ('pack', footerright)]) footer_att = settings.get_theming_attribute('global', 'footer') return urwid.AttrMap(columns, footer_att)
java
@Override public RebuildWorkspacesResult rebuildWorkspaces(RebuildWorkspacesRequest request) { request = beforeClientExecution(request); return executeRebuildWorkspaces(request); }
python
def _fake_openassociatorinstances(self, namespace, **params): """ Implements WBEM server responder for WBEMConnection.OpenAssociatorInstances with data from the instance repository. """ self._validate_namespace(namespace) self._validate_open_params(**params) params['ObjectName'] = params['InstanceName'] del params['InstanceName'] result = self._fake_associators(namespace, **params) objects = [] if result is None else [x[2] for x in result[0][2]] return self._open_response(objects, namespace, 'PullInstancesWithPath', **params)
python
def _update_with_like_args(ctx, _, value): """Update arguments with options taken from a currently running VS.""" if value is None: return env = ctx.ensure_object(environment.Environment) vsi = SoftLayer.VSManager(env.client) vs_id = helpers.resolve_id(vsi.resolve_ids, value, 'VS') like_details = vsi.get_instance(vs_id) like_args = { 'hostname': like_details['hostname'], 'domain': like_details['domain'], 'hourly': like_details['hourlyBillingFlag'], 'datacenter': like_details['datacenter']['name'], 'network': like_details['networkComponents'][0]['maxSpeed'], 'userdata': like_details['userData'] or None, 'postinstall': like_details.get('postInstallScriptUri'), 'dedicated': like_details['dedicatedAccountHostOnlyFlag'], 'private': like_details['privateNetworkOnlyFlag'], 'placement_id': like_details.get('placementGroupId', None), } like_args['flavor'] = utils.lookup(like_details, 'billingItem', 'orderItem', 'preset', 'keyName') if not like_args['flavor']: like_args['cpu'] = like_details['maxCpu'] like_args['memory'] = '%smb' % like_details['maxMemory'] tag_refs = like_details.get('tagReferences', None) if tag_refs is not None and len(tag_refs) > 0: like_args['tag'] = [t['tag']['name'] for t in tag_refs] # Handle mutually exclusive options like_image = utils.lookup(like_details, 'blockDeviceTemplateGroup', 'globalIdentifier') like_os = utils.lookup(like_details, 'operatingSystem', 'softwareLicense', 'softwareDescription', 'referenceCode') if like_image: like_args['image'] = like_image elif like_os: like_args['os'] = like_os if ctx.default_map is None: ctx.default_map = {} ctx.default_map.update(like_args)
java
private void flushHotCacheSlot(SlotReference slot) { // Iterate over a copy to avoid concurrent modification issues for (Map.Entry<DeckReference, TrackMetadata> entry : new HashMap<DeckReference,TrackMetadata>(hotCache).entrySet()) { if (slot == SlotReference.getSlotReference(entry.getValue().trackReference)) { logger.debug("Evicting cached metadata in response to unmount report {}", entry.getValue()); hotCache.remove(entry.getKey()); } } }
python
def create_widget(self): """ Create the underlying widget. """ d = self.declaration self.widget = TimePicker(self.get_context(), None, d.style or '@attr/timePickerStyle')
python
def getstats(self, names, default=numpy.nan): """Get the requested stats as a tuple. If a requested stat is not an attribute (implying it hasn't been stored), then the default value is returned for that stat. Parameters ---------- names : list of str The names of the stats to get. default : float, optional What to return if a requested stat is not an attribute of self. Default is ``numpy.nan``. Returns ------- tuple A tuple of the requested stats. """ return tuple(getattr(self, n, default) for n in names)
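A minimal sketch of the fallback behaviour described in the docstring; the Stats class and its mean attribute are illustrative, not from the source:

import numpy

class Stats:
    mean = 1.5

    def getstats(self, names, default=numpy.nan):
        return tuple(getattr(self, n, default) for n in names)

print(Stats().getstats(['mean', 'stddev']))  # (1.5, nan): missing stats fall back to the default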
java
public ExpectedCondition<?> clickAndExpectOneOf(final List<ExpectedCondition<?>> conditions) { dispatcher.beforeClick(this, conditions); getElement().click(); if (Boolean.parseBoolean(Config.getConfigProperty(ConfigProperty.ENABLE_GUI_LOGGING))) { logUIAction(UIActions.CLICKED); } // If there are no expected objects, then it means user wants this method // to behave as a clickonly. So lets skip processing of alerts and leave // that to the user. if (conditions == null || conditions.size() <= 0) { return null; } if (parent != null) { WebDriverWaitUtils.waitUntilPageIsLoaded(parent.getCurrentPage()); } validatePresenceOfAlert(); long timeout = Grid.getExecutionTimeoutValue() / 1000; try { WebDriverWait wait = new WebDriverWait(Grid.driver(), timeout); wait.ignoring(NoSuchElementException.class); wait.ignoring(ExpectOneOfException.class); ExpectedCondition<?> matchedCondition = wait.until(new Function<WebDriver, ExpectedCondition<?>>() { // find the first condition that matches and return it @Override public ExpectedCondition<?> apply(WebDriver webDriver) { StringBuilder sb = new StringBuilder(); int i = 1; for (final ExpectedCondition<?> condition : conditions) { try { Object value = condition.apply(webDriver); if (value instanceof Boolean) { if (Boolean.TRUE.equals(value)) { return condition; } } else if (value != null) { return condition; } } catch (WebDriverException e) { sb.append("\n\tObject " + i + ":\n"); sb.append("\t" + ExceptionUtils.getRootCauseMessage(e).split("\n")[0] + "\n"); sb.append("\t\t" + StringUtils.substringBetween(ExceptionUtils.getStackTrace(e), "\n")); } i++; } throw new ExpectOneOfException(sb.toString()); } }); return matchedCondition; } finally { // Attempt at taking screenshots even when there are time-outs triggered from the wait* methods. processScreenShot(); dispatcher.afterClick(this, conditions); } }
java
public static Event constructEvent(String payload, String sigHeader, String secret, long tolerance) throws SignatureVerificationException { Event event = ApiResource.GSON.fromJson(payload, Event.class); Signature.verifyHeader(payload, sigHeader, secret, tolerance); return event; }
java
private static int parseLimits(int i, int end, char[] data, int[] limits) throws PatternSyntaxException { if (limits.length != LIMITS_LENGTH) throw new IllegalArgumentException("limits.length=" + limits.length + ", should be " + LIMITS_LENGTH); limits[LIMITS_PARSE_RESULT_INDEX] = LIMITS_OK; int ind = 0; int v = 0; char c; while (i < end) { c = data[i++]; switch (c) { case ' ': continue; case ',': if (ind > 0) throw new PatternSyntaxException("illegal construction: {.. , , ..}"); limits[ind++] = v; v = -1; continue; case '}': limits[ind] = v; if (ind == 0) limits[1] = v; return i; default: if (c > '9' || c < '0') { //throw new PatternSyntaxException("illegal symbol in iterator: '{"+c+"}'"); limits[LIMITS_PARSE_RESULT_INDEX] = LIMITS_FAILURE; return i; } if (v < 0) v = 0; v = v * 10 + (c - '0'); } } throw new PatternSyntaxException("malformed quantifier"); }
java
@Override public FastJsonWriter value( String value ) { if (value == null) { return nullValue(); } writeDeferredName(); beforeValue(false); string(value); return this; }
python
def help_center_user_segment_topics(self, user_segment_id, **kwargs): "https://developer.zendesk.com/rest_api/docs/help_center/user_segments#list-topics-using-a-user-segment" api_path = "/api/v2/help_center/user_segments/{user_segment_id}/topics.json" api_path = api_path.format(user_segment_id=user_segment_id) return self.call(api_path, **kwargs)
python
def delete_node(self, loadbalancer, node): """Removes the node from its load balancer.""" lb = node.parent if not lb: raise exc.UnattachedNode("No parent Load Balancer for this node " "could be determined.") resp, body = self.api.method_delete("/loadbalancers/%s/nodes/%s" % (lb.id, node.id)) return resp, body
java
private static void heapify(int[] arr) { int n = arr.length; for (int i = n / 2 - 1; i >= 0; i--) SortUtils.siftDown(arr, i, n - 1); }
python
def alterar(self, id_user_group, name, read, write, edit, remove): """Edit user group data from its identifier. :param id_user_group: User group id. :param name: User group name. :param read: If user group has read permission ('S' or 'N'). :param write: If user group has write permission ('S' or 'N'). :param edit: If user group has edit permission ('S' or 'N'). :param remove: If user group has remove permission ('S' or 'N'). :return: None :raise NomeGrupoUsuarioDuplicadoError: User group name already exists. :raise ValorIndicacaoPermissaoInvalidoError: Read, write, edit or remove value is invalid. :raise GrupoUsuarioNaoExisteError: User Group not found. :raise InvalidParameterError: At least one of the parameters is invalid or none. :raise DataBaseError: Networkapi failed to access database. :raise XMLError: Networkapi fails generating response XML. """ if not is_valid_int_param(id_user_group): raise InvalidParameterError( u'Invalid or inexistent user group id.') url = 'ugroup/' + str(id_user_group) + '/' ugroup_map = dict() ugroup_map['nome'] = name ugroup_map['leitura'] = read ugroup_map['escrita'] = write ugroup_map['edicao'] = edit ugroup_map['exclusao'] = remove code, xml = self.submit({'user_group': ugroup_map}, 'PUT', url) return self.response(code, xml)
java
@Override public Request<CreateFpgaImageRequest> getDryRunRequest() { Request<CreateFpgaImageRequest> request = new CreateFpgaImageRequestMarshaller().marshall(this); request.addParameter("DryRun", Boolean.toString(true)); return request; }
java
public void fill_1(byte[] v, int byteoff, int nbits, int bitoff) { if (nbits < 0) throw new NegativeArraySizeException("nbits < 0: " + nbits); if (byteoff < 0) throw new IndexOutOfBoundsException("byteoff < 0: "+ byteoff); if (bitoff < 0) throw new IndexOutOfBoundsException("bitoff < 0: " + bitoff); assert v == null || byteoff + ((nbits-1) >> 3) + 1 <= v.length; _val = v; _nbits = nbits; _bitoff = bitoff; _byteoff = byteoff; }
python
def get_reviews(self, publisher_name, extension_name, count=None, filter_options=None, before_date=None, after_date=None): """GetReviews. [Preview API] Returns a list of reviews associated with an extension :param str publisher_name: Name of the publisher who published the extension :param str extension_name: Name of the extension :param int count: Number of reviews to retrieve (defaults to 5) :param str filter_options: FilterOptions to filter out empty reviews etcetera, defaults to none :param datetime before_date: Use if you want to fetch reviews older than the specified date, defaults to null :param datetime after_date: Use if you want to fetch reviews newer than the specified date, defaults to null :rtype: :class:`<ReviewsResult> <azure.devops.v5_1.gallery.models.ReviewsResult>` """ route_values = {} if publisher_name is not None: route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str') if extension_name is not None: route_values['extensionName'] = self._serialize.url('extension_name', extension_name, 'str') query_parameters = {} if count is not None: query_parameters['count'] = self._serialize.query('count', count, 'int') if filter_options is not None: query_parameters['filterOptions'] = self._serialize.query('filter_options', filter_options, 'str') if before_date is not None: query_parameters['beforeDate'] = self._serialize.query('before_date', before_date, 'iso-8601') if after_date is not None: query_parameters['afterDate'] = self._serialize.query('after_date', after_date, 'iso-8601') response = self._send(http_method='GET', location_id='5b3f819f-f247-42ad-8c00-dd9ab9ab246d', version='5.1-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('ReviewsResult', response)
java
public static <P extends Enum<P>> Point optPoint(final JSONObject json, P e, boolean emptyForNull) { Point p = optPoint(json, e); if (p == null && emptyForNull) { p = new Point(); } return p; }
python
def RunShellWithReturnCode(command, print_output=False, universal_newlines=True, env=os.environ): """Executes a command and returns the output from stdout and the return code. Args: command: Command to execute. print_output: If True, the output is printed to stdout. If False, both stdout and stderr are ignored. universal_newlines: Use universal_newlines flag (default: True). Returns: Tuple (output, return code) """ logging.info("Running %s", command) p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=use_shell, universal_newlines=universal_newlines, env=env) if print_output: output_array = [] while True: line = p.stdout.readline() if not line: break print line.strip("\n") output_array.append(line) output = "".join(output_array) else: output = p.stdout.read() p.wait() errout = p.stderr.read() if print_output and errout: print >>sys.stderr, errout p.stdout.close() p.stderr.close() return output, p.returncode
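The row above is Python 2 code (note the print statements and the module-level use_shell it relies on). A rough Python 3 analogue of the non-printing path, as a sketch only:

import subprocess

def run_shell_with_return_code(command):
    # capture_output=True collects stdout and stderr; text=True decodes them to str
    p = subprocess.run(command, capture_output=True, text=True)
    return p.stdout, p.returncode

out, rc = run_shell_with_return_code(['echo', 'hello'])
print(rc, out.strip())  # 0 hello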
java
public static String createTimestampedFilename(String filenamePrefix, Date date) { SimpleDateFormat formatter = new SimpleDateFormat(FORMAT_JOURNAL_FILENAME_TIMESTAMP); formatter.setTimeZone(TimeZone.getTimeZone("GMT")); return filenamePrefix + formatter.format(date) + "Z"; }
python
def mount_point(cls, file_path): """ Return the mount point on which the given path resides :param file_path: target path to search :return: WMountPoint or None (if file path is outside current mount points) """ mount = None for mp in cls.mounts(): mp_path = mp.path() if file_path.startswith(mp_path) is True: if mount is None or len(mount.path()) <= len(mp_path): mount = mp return mount
java
@Override public SQLPPMapping createSQLPreProcessedMapping(ImmutableList<SQLPPTriplesMap> ppMappingAxioms, MappingMetadata mappingMetadata) throws DuplicateMappingException { try { /** * Instantiation */ Constructor constructor = findFirstConstructor(SQLPPMapping.class); return (SQLPPMapping) constructor.newInstance(ppMappingAxioms, mappingMetadata); /** * Exception management */ } catch (InvocationTargetException e) { Throwable targetException = e.getTargetException(); /** * Expected exception: rethrown */ if (targetException instanceof DuplicateMappingException) { throw (DuplicateMappingException) targetException; } /** * Unexpected: throw an unexpected RuntimeException. */ throw new RuntimeException(targetException.getMessage()); } catch (Exception e) { throw new RuntimeException(e); } }
python
def read_config(config_fname=None): """Parse input configuration file and return a config dict.""" if not config_fname: config_fname = DEFAULT_CONFIG_FNAME try: with open(config_fname, 'r') as config_file: config = yaml.load(config_file) except IOError as exc: if exc.errno == errno.ENOENT: print('payu: warning: Configuration file {0} not found!' .format(config_fname)) config = {} else: raise collate_config = config.pop('collate', {}) # Transform legacy collate config options if type(collate_config) is bool: collate_config = {'enable': collate_config} collatestr = 'collate_' foundkeys = [] # Cycle through old collate config and convert to newer dict format for key in list(config.keys()): if key.startswith(collatestr): foundkeys.append(key) collate_config[key[len(collatestr):]] = config.pop(key) if foundkeys: print("Use of these keys is deprecated: {}.".format( ", ".join(foundkeys))) print("Instead use collate dictionary and subkey " "without 'collate_' prefix") config['collate'] = collate_config return config
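The legacy collate-key rewrite in isolation, as a pure-dict sketch (the config keys and values here are made up):

config = {'collate_enable': True, 'collate_queue': 'copyq', 'model': 'demo'}
collate_config = {}
for key in list(config.keys()):  # list() so we can pop while iterating
    if key.startswith('collate_'):
        collate_config[key[len('collate_'):]] = config.pop(key)
config['collate'] = collate_config
print(config)  # {'model': 'demo', 'collate': {'enable': True, 'queue': 'copyq'}}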
python
async def connect(self, *args, **kwargs): """Connect to a juju model. This supports two calling conventions: The model and (optionally) authentication information can be taken from the data files created by the Juju CLI. This convention will be used if a ``model_name`` is specified, or if the ``endpoint`` and ``uuid`` are not. Otherwise, all of the ``endpoint``, ``uuid``, and authentication information (``username`` and ``password``, or ``bakery_client`` and/or ``macaroons``) are required. If a single positional argument is given, it will be assumed to be the ``model_name``. Otherwise, the first positional argument, if any, must be the ``endpoint``. Available parameters are: :param model_name: Format [controller:][user/]model :param str endpoint: The hostname:port of the controller to connect to. :param str uuid: The model UUID to connect to. :param str username: The username for controller-local users (or None to use macaroon-based login.) :param str password: The password for controller-local users. :param str cacert: The CA certificate of the controller (PEM formatted). :param httpbakery.Client bakery_client: The macaroon bakery client to to use when performing macaroon-based login. Macaroon tokens acquired when logging will be saved to bakery_client.cookies. If this is None, a default bakery_client will be used. :param list macaroons: List of macaroons to load into the ``bakery_client``. :param asyncio.BaseEventLoop loop: The event loop to use for async operations. :param int max_frame_size: The maximum websocket frame size to allow. """ await self.disconnect() if 'endpoint' not in kwargs and len(args) < 2: if args and 'model_name' in kwargs: raise TypeError('connect() got multiple values for model_name') elif args: model_name = args[0] else: model_name = kwargs.pop('model_name', None) await self._connector.connect_model(model_name, **kwargs) else: if 'model_name' in kwargs: raise TypeError('connect() got values for both ' 'model_name and endpoint') if args and 'endpoint' in kwargs: raise TypeError('connect() got multiple values for endpoint') if len(args) < 2 and 'uuid' not in kwargs: raise TypeError('connect() missing value for uuid') has_userpass = (len(args) >= 4 or {'username', 'password'}.issubset(kwargs)) has_macaroons = (len(args) >= 6 or not {'bakery_client', 'macaroons'}.isdisjoint(kwargs)) if not (has_userpass or has_macaroons): raise TypeError('connect() missing auth params') arg_names = [ 'endpoint', 'uuid', 'username', 'password', 'cacert', 'bakery_client', 'macaroons', 'loop', 'max_frame_size', ] for i, arg in enumerate(args): kwargs[arg_names[i]] = arg if not {'endpoint', 'uuid'}.issubset(kwargs): raise ValueError('endpoint and uuid are required ' 'if model_name not given') if not ({'username', 'password'}.issubset(kwargs) or {'bakery_client', 'macaroons'}.intersection(kwargs)): raise ValueError('Authentication parameters are required ' 'if model_name not given') await self._connector.connect(**kwargs) await self._after_connect()
python
def unescape_sql(inp): """ :param inp: an input string to be unescaped :return: return the unescaped version of the string. """ if inp.startswith('"') and inp.endswith('"'): inp = inp[1:-1] return inp.replace('""','"').replace('\\\\','\\')
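Two quick checks of the unescaper above (inputs are illustrative; this assumes the function as defined in the previous row):

assert unescape_sql('"say ""hi"""') == 'say "hi"'  # surrounding quotes stripped, doubled quotes collapse
assert unescape_sql('a\\\\b') == 'a\\b'            # doubled backslashes collapse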
python
def cmd(send, msg, args): """Compiles stuff. Syntax: {command} <code> """ if args['type'] == 'privmsg': send('GCC is a group exercise!') return if 'include' in msg: send("We're not a terribly inclusive community around here.") return if 'import' in msg: send("I'll have you know that standards compliance is important.") return tmpfile = tempfile.NamedTemporaryFile() for line in msg.splitlines(): line = line + '\n' tmpfile.write(line.encode()) tmpfile.flush() process = subprocess.run(['gcc', '-o', '/dev/null', '-xc', tmpfile.name], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, timeout=5, universal_newlines=True) tmpfile.close() # Take the last 3 lines to prevent Excess Flood on long error messages output = process.stdout.splitlines()[:3] for line in output: send(line, target=args['nick']) if process.returncode == 0: send(gen_slogan("gcc victory")) else: send(gen_slogan("gcc failed"))
python
def setup(app) -> Dict[str, Any]: """ Sets up Sphinx extension. """ app.setup_extension("sphinx.ext.graphviz") app.add_node( inheritance_diagram, html=(html_visit_inheritance_diagram, None), latex=(latex_visit_inheritance_diagram, None), man=(skip, None), texinfo=(skip, None), text=(skip, None), ) app.add_directive("inheritance-diagram", InheritanceDiagram) return { "version": uqbar.__version__, "parallel_read_safe": True, "parallel_write_safe": True, }
python
def fit(self, y_true_cal=None, y_prob_cal=None): """ If calibration, then train the calibration of probabilities Parameters ---------- y_true_cal : array-like of shape = [n_samples], optional default = None True class to be used for calibrating the probabilities y_prob_cal : array-like of shape = [n_samples, 2], optional default = None Predicted probabilities to be used for calibrating the probabilities Returns ------- self : object Returns self. """ if self.calibration: self.cal = ROCConvexHull() self.cal.fit(y_true_cal, y_prob_cal[:, 1]) return self
python
def energy(self): """Calculate state's energy.""" arr = self.src.copy() arr = self.apply_color(arr, self.state) scores = [histogram_distance(self.ref[i], arr[i]) for i in range(3)] # Important: scale by 100 for readability return sum(scores) * 100
python
def get_items(self, project=None, scope_path=None, recursion_level=None, include_links=None, version_descriptor=None): """GetItems. Get a list of Tfvc items :param str project: Project ID or project name :param str scope_path: Version control path of a folder to return multiple items. :param str recursion_level: None (just the item), or OneLevel (contents of a folder). :param bool include_links: True to include links. :param :class:`<TfvcVersionDescriptor> <azure.devops.v5_0.tfvc.models.TfvcVersionDescriptor>` version_descriptor: :rtype: [TfvcItem] """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') query_parameters = {} if scope_path is not None: query_parameters['scopePath'] = self._serialize.query('scope_path', scope_path, 'str') if recursion_level is not None: query_parameters['recursionLevel'] = self._serialize.query('recursion_level', recursion_level, 'str') if include_links is not None: query_parameters['includeLinks'] = self._serialize.query('include_links', include_links, 'bool') if version_descriptor is not None: if version_descriptor.version_option is not None: query_parameters['versionDescriptor.versionOption'] = version_descriptor.version_option if version_descriptor.version_type is not None: query_parameters['versionDescriptor.versionType'] = version_descriptor.version_type if version_descriptor.version is not None: query_parameters['versionDescriptor.version'] = version_descriptor.version response = self._send(http_method='GET', location_id='ba9fc436-9a38-4578-89d6-e4f3241f5040', version='5.0', route_values=route_values, query_parameters=query_parameters) return self._deserialize('[TfvcItem]', self._unwrap_collection(response))
python
def folderitems(self): """TODO: Refactor to non-classic mode """ items = super(ARTemplateAnalysesView, self).folderitems() self.categories.sort() return items
python
def logs(awsclient, function_name, start_dt, end_dt=None, tail=False): """Get the CloudWatch logs for a lambda function. :param awsclient: :param function_name: :param start_dt: :param end_dt: :param tail: :return: """ log.debug('Getting cloudwatch logs for: %s', function_name) log_group_name = '/aws/lambda/%s' % function_name current_date = None start_ts = datetime_to_timestamp(start_dt) if end_dt: end_ts = datetime_to_timestamp(end_dt) else: end_ts = None # tail mode # we assume that logs can arrive late but not out of order # so we hold the timestamp of the last logentry and start the next iteration # from there while True: logentries = filter_log_events(awsclient, log_group_name, start_ts=start_ts, end_ts=end_ts) if logentries: for e in logentries: actual_date, actual_time = decode_format_timestamp(e['timestamp']) if current_date != actual_date: # print the date only when it changed current_date = actual_date log.info(current_date) log.info('%s %s' % (actual_time, e['message'].strip())) if tail: if logentries: start_ts = logentries[-1]['timestamp'] + 1 time.sleep(2) continue break
python
def servicegroup_server_enable(sg_name, s_name, s_port, **connection_args): ''' Enable a server:port member of a servicegroup CLI Example: .. code-block:: bash salt '*' netscaler.servicegroup_server_enable 'serviceGroupName' 'serverName' 'serverPort' ''' ret = True server = _servicegroup_get_server(sg_name, s_name, s_port, **connection_args) if server is None: return False nitro = _connect(**connection_args) if nitro is None: return False try: NSServiceGroup.enable_server(nitro, server) except NSNitroError as error: log.debug('netscaler module error - NSServiceGroup.enable_server() failed: %s', error) ret = False _disconnect(nitro) return ret
python
def tokenize(qp_pair, tokenizer=None, is_training=False): ''' tokenize function. ''' question_tokens = tokenizer.tokenize(qp_pair['question']) passage_tokens = tokenizer.tokenize(qp_pair['passage']) if is_training: question_tokens = question_tokens[:300] passage_tokens = passage_tokens[:300] passage_tokens.insert( 0, {'word': '<BOS>', 'original_text': '<BOS>', 'char_begin': 0, 'char_end': 0}) passage_tokens.append( {'word': '<EOS>', 'original_text': '<EOS>', 'char_begin': 0, 'char_end': 0}) qp_pair['question_tokens'] = question_tokens qp_pair['passage_tokens'] = passage_tokens
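Exercising the function above with a toy tokenizer; this stub is illustrative, the real tokenizer presumably returns dicts with at least the keys used here:

class ToyTokenizer:
    def tokenize(self, text):
        return [{'word': w, 'original_text': w, 'char_begin': 0, 'char_end': 0}
                for w in text.split()]

qp = {'question': 'who wrote it', 'passage': 'she wrote it'}
tokenize(qp, tokenizer=ToyTokenizer(), is_training=True)
print([t['word'] for t in qp['passage_tokens']])  # ['<BOS>', 'she', 'wrote', 'it', '<EOS>']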
python
def addSiInfo(self, msrunContainer, specfiles=None, attributes=['obsMz', 'rt', 'charge']): """Transfer attributes to :class:`Sii` elements from the corresponding :class:`Si` in :class:`MsrunContainer.sic <MsrunContainer>`. If an attribute is not present in the ``Si`` the attribute value in the ``Sii`` is set to ``None``. Attribute examples: 'obsMz', 'rt', 'charge', 'tic', 'iit', 'ms1Id' :param msrunContainer: an instance of :class:`MsrunContainer` which has imported the corresponding specfiles :param specfiles: the name of an ms-run file or a list of names. If None all specfiles are selected. :param attributes: a list of ``Si`` attributes that should be transferred. """ if specfiles is None: specfiles = [_ for _ in viewkeys(self.info)] else: specfiles = aux.toList(specfiles) for specfile in specfiles: if specfile not in self.info: warntext = 'Error while calling "SiiContainer.addSiInfo()": '\ '"%s" is not present in "SiiContainer.info"!'\ % (specfile, ) warnings.warn(warntext) elif specfile not in msrunContainer.info: warntext = 'Error while calling "SiiContainer.addSiInfo()": '\ '"%s" is not present in "MsrunContainer.info"'\ % (specfile, ) warnings.warn(warntext) else: for identifier in self.container[specfile]: si = msrunContainer.sic[specfile][identifier] for sii in self.container[specfile][identifier]: for attribute in attributes: setattr(sii, attribute, getattr(si, attribute, None))
python
def cleanup(self): """Re-enable paging globally.""" if self.allow_disable_global: # Return paging state output_mode_cmd = "set output {}".format(self._output_mode) enable_paging_commands = ["config system console", output_mode_cmd, "end"] if self.vdoms: enable_paging_commands.insert(0, "config global") # Should test output is valid for command in enable_paging_commands: self.send_command_timing(command)
python
def fill_all_headers(self, req): """Set content-type, content-md5, date to the request.""" url = urlsplit(req.url) content_type, __ = mimetypes.guess_type(url.path) if content_type is None: content_type = self.DEFAULT_TYPE logger.warn("can not determine mime-type for {0}".format(url.path)) if self._expires is None: # sign with url, no content-type for url req.headers.setdefault("content-type", content_type) if ( req.body is not None and req.headers.get("content-md5") is None and self._allow_empty_md5 is False ): logger.debug("calculating content-md5") content, content_md5 = utils.cal_content_md5(req.body) req.body = content req.headers["content-md5"] = content_md5 logger.debug("new content-md5 is: {0}".format(content_md5)) else: logger.debug("skip content-md5 calculation") if self._expires is None: req.headers.setdefault( "date", time.strftime(self.DATE_FMT, time.gmtime()) ) return req
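The content-type guess and fallback above, in isolation (DEFAULT_TYPE is assumed to be the usual catch-all type):

import mimetypes

ctype, _ = mimetypes.guess_type('/bucket/photo.jpg')
print(ctype)  # 'image/jpeg'
ctype, _ = mimetypes.guess_type('/bucket/blob.unknownext')
print(ctype or 'application/octet-stream')  # falls back to the catch-all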
java
@Override protected String doBase(String name, Object value) { if (value instanceof Boolean) { return (Boolean) value ? TRUE : FALSE; } else if (value instanceof String) { return base(name, (String) value); } throw new IndexException("Field '{}' requires a boolean, but found '{}'", name, value); }
java
public void removeCollaborator(String appName, String collaborator) { connection.execute(new SharingRemove(appName, collaborator), apiKey); }
java
protected void writeLink(Header header) throws IOException { addTabs(2); writeStart(HtmlTags.LINK); write(HtmlTags.REL, header.getName()); write(HtmlTags.TYPE, HtmlTags.TEXT_CSS); write(HtmlTags.REFERENCE, header.getContent()); writeEnd(); }
python
def _get_hash_object(hash_algo_name): """Create a hash object based on given algorithm. :param hash_algo_name: name of the hashing algorithm. :raises: InvalidInputError, on unsupported or invalid input. :returns: a hash object based on the given named algorithm. """ algorithms = (hashlib.algorithms_guaranteed if six.PY3 else hashlib.algorithms) if hash_algo_name not in algorithms: msg = ("Unsupported/Invalid hash name '%s' provided." % hash_algo_name) raise exception.InvalidInputError(msg) return getattr(hashlib, hash_algo_name)()
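Calling the factory above on a guaranteed algorithm; this assumes the module-level imports (hashlib, six, the exception module) that the row omits, and 'sha256' is always available so the error path is not hit:

h = _get_hash_object('sha256')
h.update(b'abc')
print(h.hexdigest())  # ba7816bf... (the well-known SHA-256 digest of b'abc')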
java
@Override public void deliver(WriteStream os, OutHttp2 outHttp) throws IOException { int length = _length; int flags; switch (_flags) { case CONT_STREAM: flags = 0; break; case END_STREAM: flags = Http2Constants.END_STREAM; break; default: flags = 0; break; } outHttp.writeData(getStreamId(), _tBuf.buffer(), _offset, length, flags); /* System.out.println(" WDS: " + length); os.write((byte) (length >> 8)); os.write((byte) (length)); os.write(HttpConstants.FRAME_DATA); switch (_flags) { case CONT_STREAM: os.write(0); break; case END_STREAM: os.write(HttpConstants.END_STREAM); break; default: os.write(0); break; } BitsUtil.writeInt(os, getStreamId()); os.write(_tBuf.getBuffer(), _offset, _length); */ _tBuf.free(); }
python
def start(): """ | Start all the registered entrypoints | that have been added to `ENTRYPOINTS`. :rtype: bool """ pool = gevent.threadpool.ThreadPool(len(ENTRYPOINTS)) for entrypoint, callback, args, kwargs in ENTRYPOINTS: cname = callback.__name__ # 1. Retrieve the class which owns the callback for name, klass in inspect.getmembers(sys.modules[callback.__module__], inspect.isclass): if hasattr(klass, cname): service_name = name.lower() break # 2. Start the entrypoint callback = getattr(klass(), cname) kwargs.update({'service': service_name, 'callback': callback, 'callback_name': cname}) LOGGER.info('Start service %s[%s].', service_name.capitalize(), cname) obj = entrypoint(*args, **kwargs) pool.spawn(obj.start, *args, **kwargs) pool.join() return True
python
def parse(readDataInstance): """ Returns a new L{ImageBaseRelocationEntry} object. @type readDataInstance: L{ReadData} @param readDataInstance: A L{ReadData} object with data to parse as a L{ImageBaseRelocationEntry} object. @rtype: L{ImageBaseRelocationEntry} @return: A new L{ImageBaseRelocationEntry} object. """ reloc = ImageBaseRelocationEntry() reloc.virtualAddress.value = readDataInstance.readDword() reloc.sizeOfBlock.value = readDataInstance.readDword() toRead = (reloc.sizeOfBlock.value - 8) / len(datatypes.WORD(0)) reloc.items = datatypes.Array.parse(readDataInstance, datatypes.TYPE_WORD, toRead) return reloc
python
def do_get(self, line): """get <peer> eg. get sw1 """ def f(p, args): print(p.get()) self._request(line, f)
java
private static Matrix readSVDLIBCsingularVector(File sigmaMatrixFile) throws IOException { BufferedReader br = new BufferedReader(new FileReader(sigmaMatrixFile)); int dimension = -1; int valsSeen = 0; Matrix m = null; for (String line = null; (line = br.readLine()) != null; ) { String[] vals = line.split("\\s+"); for (int i = 0; i < vals.length; ++i) { // the first value seen should be the number of singular values if (dimension == -1) { dimension = Integer.parseInt(vals[i]); m = new DiagonalMatrix(dimension); } else { m.set(valsSeen, valsSeen, Double.parseDouble(vals[i])); ++valsSeen; } } } return m; }
python
def BSearch(a, x, lo=0, hi=None): """Returns index of x in a, or -1 if x not in a. Arguments: a -- ordered numeric sequence x -- element to search for within a lo -- lowest index to consider in search* hi -- highest index to consider in search* *bisect.bisect_left capability that we don't need to lose.""" if len(a) == 0: return -1 hi = hi if hi is not None else len(a) pos = bisect_left(a, x, lo, hi) return pos if pos != hi and a[pos] == x else -1
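A quick check of the search above; bisect_left must be imported from bisect (the row omits the import):

from bisect import bisect_left

a = [1, 3, 5, 7]
print(BSearch(a, 5))  # 2
print(BSearch(a, 4))  # -1: absent values are reported instead of an insertion point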
python
def make_grid_transformed(numPix, Mpix2Angle): """ returns grid with linear transformation (deltaPix and rotation) :param numPix: number of Pixels :param Mpix2Angle: 2-by-2 matrix to map a pixel to a coordinate :return: coordinate grid """ x_grid, y_grid = make_grid(numPix, deltapix=1) ra_grid, dec_grid = map_coord2pix(x_grid, y_grid, 0, 0, Mpix2Angle) return ra_grid, dec_grid
python
def ssh(container, cmd='', user='root', password='root'): ''' SSH into a running container, using the host as a jump host. This requires the container to have a running sshd process. Args: * container: Container name or ID * cmd='': Command to run in the container * user='root': SSH username * password='root': SSH password ''' ip = get_ip(container) ssh_cmd = 'sshpass -p \'%s\' ssh -A -t -o StrictHostKeyChecking=no \'%s\'@%s' % (password, user, ip) local('ssh -A -t -o StrictHostKeyChecking=no -i "%s" %s@%s %s %s' % ( env.key_filename, env.user, env.host, ssh_cmd, cmd))
python
def is_reserved(self): """Test if the address is otherwise IETF reserved. Returns: A boolean, True if the address is within one of the reserved IPv6 Network ranges. """ reserved_nets = [IPv6Network(u'::/8'), IPv6Network(u'100::/8'), IPv6Network(u'200::/7'), IPv6Network(u'400::/6'), IPv6Network(u'800::/5'), IPv6Network(u'1000::/4'), IPv6Network(u'4000::/3'), IPv6Network(u'6000::/3'), IPv6Network(u'8000::/3'), IPv6Network(u'A000::/3'), IPv6Network(u'C000::/3'), IPv6Network(u'E000::/4'), IPv6Network(u'F000::/5'), IPv6Network(u'F800::/6'), IPv6Network(u'FE00::/9')] return any(self in x for x in reserved_nets)
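The standard library ships the same predicate; a sketch checking two addresses against it (IPv6Address here is from the stdlib ipaddress module, not the source's own classes):

from ipaddress import IPv6Address

print(IPv6Address('100::1').is_reserved)       # True: falls inside 100::/8
print(IPv6Address('2001:db8::1').is_reserved)  # False: global-unicast space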
java
public static boolean JVMSupportsLargeMappedFiles() { String arch = System.getProperty("os.arch"); if(arch==null || !arch.contains("64")) { return false; } if(isWindows()) return false; //TODO better check for 32bit JVM return true; }
java
public void setEventCreatedOn(String eventCreatedOn) { Long milliseconds = TimeUnit.SECONDS.toMillis(Long.parseLong(eventCreatedOn)); this.eventCreatedOn = new Date(milliseconds); }
java
public final void sortCatalogs(final List<TradingCatalog> pCurrentList) { Collections.sort(pCurrentList, this.cmprCatalogs); for (TradingCatalog tc : pCurrentList) { if (tc.getSubcatalogs().size() > 0) { sortCatalogs(tc.getSubcatalogs()); } } }
java
public static <T> T[] append(final T[] array, final T element) { Class<?> compType = getClassType(array, element); if (compType == null) { return null; } int newArrLength = array != null ? array.length + 1 : 1; @SuppressWarnings("unchecked") T[] newArray = (T[]) Array.newInstance(compType, newArrLength); if (array != null) { System.arraycopy(array, 0, newArray, 0, array.length); } newArray[newArray.length - 1] = element; return newArray; }
python
def main(): """ Create all the argparse-rs and invokes the function from set_defaults(). :return: The result of the function from set_defaults(). """ parser = argparse.ArgumentParser() parser.add_argument("--log-level", default="INFO", choices=logging._nameToLevel, help="Logging verbosity.") subparsers = parser.add_subparsers(help="Commands", dest="command") def add_backend_args(p): p.add_argument("--backend", default=None, help="Backend to use.") p.add_argument("--args", default=None, help="Backend's arguments.") def add_index_args(p): p.add_argument("--username", default="", help="Username for the Git repository with the index.") p.add_argument("--password", default="", help="Password for the Git repository with the index.") p.add_argument("--index-repo", default=None, help="Url of the remote Git repository.") p.add_argument("--cache", default=None, help="Path to the folder where the Git repository will be cached.") p.add_argument("-s", "--signoff", action="store_true", help="Add Signed-off-by line by the committer at the end of the commit log " "message. The meaning of a signoff depends on the project, but it " "typically certifies that committer has the rights to submit this work" " under the same license and agrees to a Developer Certificate of " "Origin (see http://developercertificate.org/ for more information).") def add_templates_args(p): p.add_argument( "--template-model", default=os.path.join(os.path.dirname(__file__), "templates/template_model.md.jinja2"), help="Path to the jinja2 template used in the index for the model.") p.add_argument( "--template-readme", default=os.path.join(os.path.dirname(__file__), "templates/template_readme.md.jinja2"), help="Path to the jinja2 template used in the index for the readme.") # ------------------------------------------------------------------------ init_parser = subparsers.add_parser("init", help="Initialize the registry.") init_parser.set_defaults(handler=initialize_registry) init_parser.add_argument("-f", "--force", action="store_true", help="Destructively initialize the registry.") add_index_args(init_parser) add_backend_args(init_parser) # ------------------------------------------------------------------------ dump_parser = subparsers.add_parser( "dump", help="Print a brief information about the model to stdout.") dump_parser.set_defaults(handler=dump_model) dump_parser.add_argument( "input", help="Path to the model file, URL or UUID.") add_index_args(dump_parser) add_backend_args(dump_parser) # ------------------------------------------------------------------------ install_parser = subparsers.add_parser( "install", help="Install the environment to run the model.") install_parser.set_defaults(handler=install_environment) install_parser.add_argument( "input", help="Path to the model file, URL or UUID.") install_parser.add_argument( "--reproduce", action="store_true", help="Ensure that training the model is possible.") install_parser.add_argument( "--pip", nargs="*", help="Additional arguments to pass to pip.") add_index_args(install_parser) add_backend_args(install_parser) # ------------------------------------------------------------------------ publish_parser = subparsers.add_parser( "publish", help="Upload the model and update the registry.") publish_parser.set_defaults(handler=publish_model) publish_parser.add_argument( "model", help="The path to the model to publish.") publish_parser.add_argument( "--meta", default=os.path.join(os.path.dirname(__file__), "templates/template_meta.json"), help="Path to the JSON file which 
contains the additional metadata of the model.") publish_parser.add_argument("-d", "--update-default", action="store_true", help="Set this model as the default one.") publish_parser.add_argument("-f", "--force", action="store_true", help="Overwrite existing models.") add_index_args(publish_parser) add_backend_args(publish_parser) add_templates_args(publish_parser) # ------------------------------------------------------------------------ list_parser = subparsers.add_parser( "list", help="Lists all the models in the registry.") list_parser.set_defaults(handler=list_models) add_index_args(list_parser) # ------------------------------------------------------------------------ delete_parser = subparsers.add_parser("delete", help="Delete a model.") delete_parser.set_defaults(handler=delete_model) delete_parser.add_argument( "input", help="UUID of the model to be deleted.") add_index_args(delete_parser) add_backend_args(delete_parser) add_templates_args(delete_parser) # ------------------------------------------------------------------------ args = parser.parse_args() args.log_level = logging._nameToLevel[args.log_level] slogging.setup(args.log_level, False) try: handler = args.handler except AttributeError: def print_usage(_): parser.print_usage() handler = print_usage return handler(args)
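A minimal sketch of how this entry point might be wired up, assuming main() is importable as defined above; the wrapper itself is hypothetical, not part of the source:

# Hypothetical entry-point wrapper around the main() defined above.
import sys

if __name__ == "__main__":
    # main() returns whatever the selected handler returns; an int or None
    # is a valid argument to sys.exit().
    sys.exit(main())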
python
def api_range(
    self, serial: str, date: Optional[timelike] = None
) -> SensorRange:
    """Wraps a polygon representing a sensor's range.

    By default, returns the current range. Otherwise, you may enter a
    specific day (as a string, as an epoch or as a datetime)
    """
    if date is None:
        date = round_time(datetime.now(timezone.utc), by=timedelta(days=1))
    else:
        date = to_datetime(date)

    date = int(date.timestamp())

    c = requests.get(
        f"https://opensky-network.org/api/range/days"
        f"?days={date}&serials={serial}"
    )

    if c.status_code != 200:
        raise ValueError(c.content.decode())

    return SensorRange(c.json())
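A hedged usage sketch; opensky stands in for an instance of the client class that defines api_range, and the serial number is made up:

# Hypothetical client instance and sensor serial.
sensor_range = opensky.api_range("-1408232560", date="2019-06-15")
# Without a date, the method rounds the current UTC time to a day
# boundary before querying.
current_range = opensky.api_range("-1408232560")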
java
public static MozuUrl updateItemDutyUrl(Double dutyAmount, String orderId, String orderItemId, String responseFields, String updateMode, String version)
{
    UrlFormatter formatter = new UrlFormatter("/api/commerce/orders/{orderId}/items/{orderItemId}/dutyAmount/{dutyAmount}?updatemode={updateMode}&version={version}&responseFields={responseFields}");
    formatter.formatUrl("dutyAmount", dutyAmount);
    formatter.formatUrl("orderId", orderId);
    formatter.formatUrl("orderItemId", orderItemId);
    formatter.formatUrl("responseFields", responseFields);
    formatter.formatUrl("updateMode", updateMode);
    formatter.formatUrl("version", version);
    return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
}
java
public static String getClassType(Class expectedType) {
    String classType = expectedType.getName();
    if (expectedType.isPrimitive()) {
        if (expectedType.equals(Boolean.TYPE)) {
            classType = Boolean.class.getName();
        } else if (expectedType.equals(Byte.TYPE)) {
            classType = Byte.class.getName();
        } else if (expectedType.equals(Character.TYPE)) {
            classType = Character.class.getName();
        } else if (expectedType.equals(Short.TYPE)) {
            classType = Short.class.getName();
        } else if (expectedType.equals(Integer.TYPE)) {
            classType = Integer.class.getName();
        } else if (expectedType.equals(Long.TYPE)) {
            classType = Long.class.getName();
        } else if (expectedType.equals(Float.TYPE)) {
            classType = Float.class.getName();
        } else if (expectedType.equals(Double.TYPE)) {
            classType = Double.class.getName();
        }
    }
    classType = toJavaSourceType(classType);
    return classType;
}
python
def Ceil(input_vertex: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex:
    """
    Applies the Ceiling operator to a vertex.
    This maps a vertex to the smallest integer greater than or equal to its value

    :param input_vertex: the vertex to be ceil'd
    """
    return Double(context.jvm_view().CeilVertex, label, cast_to_double_vertex(input_vertex))
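The vertex API itself is not reproduced here, but the ceiling semantics the docstring describes can be checked against Python's math.ceil; this sketch only illustrates the mapping, not the CeilVertex call:

# math.ceil maps a value to the smallest integer >= that value,
# matching the behavior described in the docstring above.
import math

assert math.ceil(2.1) == 3
assert math.ceil(-2.1) == -2
assert math.ceil(5.0) == 5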
java
public static double sumOfSquares(NumberVector v) {
    final int dim = v.getDimensionality();
    double sum = 0;
    for (int d = 0; d < dim; d++) {
        double x = v.doubleValue(d);
        sum += x * x;
    }
    return sum;
}
java
public void deploy(DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException {

    final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit();
    final EEModuleDescription eeModuleDescription = deploymentUnit.getAttachment(Attachments.EE_MODULE_DESCRIPTION);

    // must be EE Module
    if (eeModuleDescription == null) {
        return;
    }

    // discover user-provided component
    ComponentDescription paComponent = detectExistingComponent(deploymentUnit);

    if (paComponent != null) {
        log.log(Level.INFO, "Detected user-provided @" + ProcessApplication.class.getSimpleName()
                + " component with name '" + paComponent.getComponentName() + "'.");

        // mark this to be a process application
        ProcessApplicationAttachments.attachProcessApplicationComponent(deploymentUnit, paComponent);
        ProcessApplicationAttachments.mark(deploymentUnit);
        ProcessApplicationAttachments.markPartOfProcessApplication(deploymentUnit);
    }
}
java
public void write(@NonNull Normalizer normalizer, @NonNull File file) throws IOException {
    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(file))) {
        write(normalizer, out);
    }
}
java
protected void login(final Request request, final Response response) {
    // Login detected
    final Form form = new Form(request.getEntity());
    final Parameter identifier = form.getFirst(this.getIdentifierFormName());
    final Parameter secret = form.getFirst(this.getSecretFormName());

    // Set credentials
    final ChallengeResponse cr = new ChallengeResponse(this.getScheme(),
            identifier != null ? identifier.getValue() : null,
            secret != null ? secret.getValue() : null);
    request.setChallengeResponse(cr);

    this.log.info("calling attemptRedirect after login");

    // Attempt to redirect
    this.attemptRedirect(request, response, form);
}
java
public Object invokeOperation(ObjectName name, String operName, String... paramStrings) throws Exception {
    String[] paramTypes = lookupParamTypes(name, operName, paramStrings);
    Object[] paramObjs;
    if (paramStrings.length == 0) {
        paramObjs = null;
    } else {
        paramObjs = new Object[paramStrings.length];
        for (int i = 0; i < paramStrings.length; i++) {
            paramObjs[i] = ClientUtils.stringToParam(paramStrings[i], paramTypes[i]);
        }
    }
    return invokeOperation(name, operName, paramTypes, paramObjs);
}
java
public static Date addMonths(Date date, int iMonths) {
    Calendar dateTime = dateToCalendar(date);
    dateTime.add(Calendar.MONTH, iMonths);
    return dateTime.getTime();
}
python
def context_chain(self) -> List['Context']:
    """Return a list of contexts starting from this one, its parent and so on."""
    contexts = []
    ctx = self  # type: Optional[Context]
    while ctx is not None:
        contexts.append(ctx)
        ctx = ctx.parent

    return contexts
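A minimal stand-in illustrating the ordering guarantee (the receiver comes first, the root last); the _Ctx class is hypothetical and carries only the parent attribute the loop relies on:

# Hypothetical minimal context: just a parent pointer.
class _Ctx:
    def __init__(self, parent=None):
        self.parent = parent

root = _Ctx()
mid = _Ctx(parent=root)
leaf = _Ctx(parent=mid)

# Reproduce the walk from context_chain above.
chain = []
ctx = leaf
while ctx is not None:
    chain.append(ctx)
    ctx = ctx.parent

assert chain == [leaf, mid, root]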
java
public Set<Class<? extends GrammaticalRelationAnnotation>> arcLabelsToNode(TreeGraphNode destNode) { Set<Class<? extends GrammaticalRelationAnnotation>> arcLabels = Generics.newHashSet(); CyclicCoreLabel cl = label(); for (Iterator<Class<?>> it = cl.keySet().iterator(); it.hasNext();) { Class<? extends CoreAnnotation> key = (Class<? extends CoreAnnotation>) it.next();//javac doesn't compile properly if generics are fully specified (but eclipse does...) Object val = cl.get(key); if (val != null && val instanceof Set) { if (((Set) val).contains(destNode)) { if (key != null) { arcLabels.add((Class<? extends GrammaticalRelationAnnotation>) key); } } } } return arcLabels; }
python
def agm(x, y, context=None):
    """
    Return the arithmetic-geometric mean of x and y.

    """
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_agm,
        (
            BigFloat._implicit_convert(x),
            BigFloat._implicit_convert(y),
        ),
        context,
    )
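The iteration behind the AGM is easy to sanity-check without mpfr: repeatedly replace x and y with their arithmetic and geometric means until they agree. A plain-float sketch (the tolerance is an arbitrary choice, not taken from the source):

import math

def agm_float(x, y, tol=1e-15):
    # Replace (x, y) with (arithmetic mean, geometric mean) until convergence.
    while abs(x - y) > tol * max(abs(x), abs(y)):
        x, y = (x + y) / 2.0, math.sqrt(x * y)
    return x

# agm(1, 2) converges to roughly 1.456791.
print(agm_float(1.0, 2.0))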
java
public OvhSmsBuilder url(String... url) {
    for (String u : url) {
        if (u != null) {
            urls.add(u);
        }
    }
    return this;
}
python
def is_comment_deleted(comid):
    """
    Return True if the comment is deleted. Else False.

    :param comid: ID of comment to check
    """
    query = """SELECT status from "cmtRECORDCOMMENT" WHERE id=%s"""
    params = (comid,)
    res = run_sql(query, params)
    if res and res[0][0] != 'ok':
        return True

    return False
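The predicate above treats any stored status other than 'ok' as deleted. A plain illustration of that rule (the status values are hypothetical, since the real ones live in the database schema):

# Stand-in for the status check performed on the query result above.
def _status_means_deleted(status):
    return status != 'ok'

assert _status_means_deleted('dm') is True   # hypothetical deleted marker
assert _status_means_deleted('ok') is False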
python
def make_DID(name_type, address, index):
    """
    Standard way of making a DID.
    name_type is "name" or "subdomain"
    """
    if name_type not in ['name', 'subdomain']:
        raise ValueError("Require 'name' or 'subdomain' for name_type")

    if name_type == 'name':
        address = virtualchain.address_reencode(address)
    else:
        # what's the current version byte?
        vb = keylib.b58check.b58check_version_byte(address)
        if vb == bitcoin_blockchain.version_byte:
            # singlesig
            vb = SUBDOMAIN_ADDRESS_VERSION_BYTE
        else:
            vb = SUBDOMAIN_ADDRESS_MULTISIG_VERSION_BYTE

        address = virtualchain.address_reencode(address, version_byte=vb)

    return 'did:stack:v0:{}-{}'.format(address, index)
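A hedged usage sketch; the address below is illustrative rather than a real key, and the exact re-encoded address depends on virtualchain, so only the shape of the DID is asserted:

# Hypothetical inputs; make_DID is the function above.
did = make_DID('name', '1C56LYirKa3PFXFsvhSESgDy2acEHVAEt6', 3)
# Expected shape: 'did:stack:v0:<re-encoded address>-3'
assert did.startswith('did:stack:v0:')
assert did.endswith('-3')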