Code-function dataset dump. Columns: language (stringclasses, 2 values: python, java) and func_code_string (stringlengths, 63 to 466k characters).
python
def read_namespaced_persistent_volume_claim_status(self, name, namespace, **kwargs):
    """
    read status of the specified PersistentVolumeClaim
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_persistent_volume_claim_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the PersistentVolumeClaim (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V1PersistentVolumeClaim
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.read_namespaced_persistent_volume_claim_status_with_http_info(name, namespace, **kwargs)
    else:
        (data) = self.read_namespaced_persistent_volume_claim_status_with_http_info(name, namespace, **kwargs)
        return data
python
def ensure_image(self):
    """
    Ensure that config.image_url has been downloaded and unpacked.
    """
    image_folder = self.get_image_folder()
    if os.path.exists(image_folder):
        print(
            'OS image directory {} exists...not overwriting'
            .format(image_folder))
        return
    ensure_image(
        self.config.image_name,
        self.config.image_url,
        IMAGES_ROOT,
        getattr(self.config, 'image_md5', None),
        self.get_image_folder()
    )
java
public static CompletionStage<Void> copy(final ServiceBundler services, final Session session,
        final Resource resource, final IRI destination, final String baseUrl) {
    final Metadata.Builder builder = Metadata.builder(destination)
        .interactionModel(resource.getInteractionModel());
    resource.getContainer().ifPresent(builder::container);
    resource.getBinaryMetadata().ifPresent(builder::binary);
    resource.getInsertedContentRelation().ifPresent(builder::insertedContentRelation);
    resource.getMemberOfRelation().ifPresent(builder::memberOfRelation);
    resource.getMemberRelation().ifPresent(builder::memberRelation);
    resource.getMembershipResource().ifPresent(builder::membershipResource);

    try (final Stream<Quad> stream = resource.stream(Trellis.PreferUserManaged)) {
        LOGGER.debug("Copying {} to {}", resource.getIdentifier(), destination);
        final TrellisDataset mutable = new TrellisDataset(stream.collect(toDataset()));
        return services.getResourceService().create(builder.build(), mutable.asDataset())
            .whenComplete((a, b) -> mutable.close())
            .thenCompose(future -> {
                final TrellisDataset immutable = TrellisDataset.createDataset();
                services.getAuditService().creation(resource.getIdentifier(), session).stream()
                    .map(skolemizeQuads(services.getResourceService(), baseUrl))
                    .forEachOrdered(immutable::add);
                return services.getResourceService()
                    .add(resource.getIdentifier(), immutable.asDataset())
                    .whenComplete((a, b) -> immutable.close());
            })
            .thenCompose(future -> services.getMementoService()
                .put(services.getResourceService(), resource.getIdentifier()))
            .thenAccept(future -> services.getEventService().emit(new SimpleEvent(
                externalUrl(destination, baseUrl), session.getAgent(),
                asList(PROV.Activity, AS.Create),
                singletonList(resource.getInteractionModel()))));
    }
}
java
private void forceDecreaseKeyToMinimum(Node<K, V> n) {
    Node<K, V> y = n.parent;
    if (y != null) {
        cut(n, y);
        root.mark = false;
        cascadingRankChange(y);
        root = link(root, n);
    }
}
python
def get_functions_reading_from_variable(self, variable):
    '''
    Return the functions reading the variable
    '''
    return [f for f in self.functions if f.is_reading(variable)]
python
def render_impl(self, template, context, at_paths=None,
                at_encoding=anytemplate.compat.ENCODING, **kwargs):
    """
    Render given template file and return the result.

    :param template: Template file path
    :param context: A dict or dict-like object to instantiate given
        template file
    :param at_paths: Template search paths
    :param at_encoding: Template encoding
    :param kwargs: Keyword arguments passed to the template engine to
        render templates with specific features enabled.

    :return: Rendered string
    """
    renderer = self._make_renderer(at_paths, at_encoding, **kwargs)
    ctxs = [] if context is None else [context]

    if os.path.sep in template:  # `template` is in abs/rel-path.
        return renderer.render_path(template, *ctxs)
    else:
        if template.endswith(renderer.file_extension):
            template = os.path.splitext(template)[0]
        return renderer.render_name(template, *ctxs)
java
public void changeState(Class<? extends State> next) {
    Check.notNull(next);
    final State from = current;
    if (current != null) {
        last = current.getClass();
        current.exit();
    }
    if (!states.containsKey(next)) {
        final State state = create(next);
        states.put(next, state);
    }
    current = states.get(next);
    current.enter();
    listeners.forEach(l -> l.notifyStateTransition(from != null ? from.getClass() : null, next));
}
java
public boolean validate(Signer signerToUse) throws CoseException {
    for (Signer r : signerList) {
        if (r == signerToUse) {
            return r.validate(rgbProtected, rgbContent);
        }
    }
    throw new CoseException("Signer not found");
}
python
def bfgs_method(f, x, line_search=1.0, maxiter=1000, tol=1e-15, num_store=None,
                hessinv_estimate=None, callback=None):
    r"""Quasi-Newton BFGS method to minimize a differentiable function.

    Can use either the regular BFGS method, or the limited memory BFGS method.

    Notes
    -----
    This is a general and optimized implementation of a quasi-Newton
    method with BFGS update for solving a general unconstrained
    optimization problem

    .. math::
        \min f(x)

    for a differentiable function
    :math:`f: \mathcal{X}\to \mathbb{R}` on a Hilbert space
    :math:`\mathcal{X}`. It does so by finding a zero of the gradient

    .. math::
        \nabla f: \mathcal{X} \to \mathcal{X}.

    The QN method is an approximate Newton method, where the Hessian
    is approximated and gradually updated in each step. This
    implementation uses the rank-one BFGS update schema where the
    inverse of the Hessian is recalculated in each iteration.

    The algorithm is described in [GNS2009], Section 12.3 and in the
    `BFGS Wikipedia article
    <https://en.wikipedia.org/wiki/Broyden%E2%80%93Fletcher%E2%80%93\
Goldfarb%E2%80%93Shanno_algorithm>`_

    Parameters
    ----------
    f : `Functional`
        Functional with ``f.gradient``.
    x : ``f.domain`` element
        Starting point of the iteration
    line_search : float or `LineSearch`, optional
        Strategy to choose the step length. If a float is given, uses it as a
        fixed step length.
    maxiter : int, optional
        Maximum number of iterations.
    tol : float, optional
        Tolerance that should be used for terminating the iteration.
    num_store : int, optional
        Maximum number of correction factors to store. For ``None``, the
        method is the regular BFGS method. For an integer, the method
        becomes the Limited Memory BFGS method.
    hessinv_estimate : `Operator`, optional
        Initial estimate of the inverse of the Hessian operator. Needs to be
        an operator from ``f.domain`` to ``f.domain``.
        Default: Identity on ``f.domain``
    callback : callable, optional
        Object executing code per iteration, e.g. plotting each iterate.

    References
    ----------
    [GNS2009] Griva, I, Nash, S G, and Sofer, A. *Linear and nonlinear
    optimization*. Siam, 2009.
    """
    grad = f.gradient
    if x not in grad.domain:
        raise TypeError('`x` {!r} is not in the domain of `grad` {!r}'
                        ''.format(x, grad.domain))

    if not callable(line_search):
        line_search = ConstantLineSearch(line_search)

    ys = []
    ss = []

    grad_x = grad(x)
    for i in range(maxiter):
        # Determine a stepsize using line search
        search_dir = -_bfgs_direction(ss, ys, grad_x, hessinv_estimate)
        dir_deriv = search_dir.inner(grad_x)
        if np.abs(dir_deriv) == 0:
            return  # we found an optimum
        step = line_search(x, direction=search_dir, dir_derivative=dir_deriv)

        # Update x
        x_update = search_dir
        x_update *= step
        x += x_update

        grad_x, grad_diff = grad(x), grad_x
        # grad_diff = grad(x) - grad(x_old)
        grad_diff.lincomb(-1, grad_diff, 1, grad_x)

        y_inner_s = grad_diff.inner(x_update)

        # Test for convergence
        if np.abs(y_inner_s) < tol:
            if grad_x.norm() < tol:
                return
            else:
                # Reset if needed
                ys = []
                ss = []
                continue

        # Update Hessian
        ys.append(grad_diff)
        ss.append(x_update)
        if num_store is not None:
            # Throw away factors if they are too many.
            ss = ss[-num_store:]
            ys = ys[-num_store:]

        if callback is not None:
            callback(x)
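The helper _bfgs_direction is referenced above but not included in this record. As a reading aid, here is a minimal NumPy sketch of the standard L-BFGS two-loop recursion that such a helper typically implements, approximating the inverse Hessian applied to the gradient from the stored (s, y) correction pairs; the function name and the initial-scaling choice are assumptions, not the snippet's actual code.

import numpy as np

def lbfgs_direction(ss, ys, grad, hessinv_estimate=None):
    """Two-loop recursion sketch: approximate inv-Hessian times gradient."""
    q = np.array(grad, dtype=float)
    alphas = []
    rhos = []
    # First loop: newest to oldest correction pair.
    for s, y in zip(reversed(ss), reversed(ys)):
        rho = 1.0 / np.dot(y, s)
        alpha = rho * np.dot(s, q)
        q -= alpha * y
        alphas.append(alpha)
        rhos.append(rho)
    if hessinv_estimate is not None:
        r = hessinv_estimate @ q
    elif ss:
        # Common initial scaling of the identity: gamma = s'y / y'y.
        r = (np.dot(ss[-1], ys[-1]) / np.dot(ys[-1], ys[-1])) * q
    else:
        r = q
    # Second loop: oldest to newest, consuming the alphas in reverse order.
    for (s, y), alpha, rho in zip(zip(ss, ys), reversed(alphas), reversed(rhos)):
        beta = rho * np.dot(y, r)
        r += (alpha - beta) * s
    return r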
python
def calc_prediction(registered_map, preregistration_mesh, native_mesh, model):
    '''
    calc_registration_prediction is a pimms calculator that creates both the
    prediction and the registration_prediction, both of which are pimms
    itables including the fields 'polar_angle', 'eccentricity', and
    'visual_area'. The registration_prediction data describe the vertices for
    the registered_map, not necessarily of the native_mesh, while the
    prediction describes the native mesh.

    Provided efferent values:
      @ registered_mesh Will be a mesh object that is equivalent to the
        preregistration_mesh but with the coordinates and predicted fields
        (from the registration) filled in. Note that this mesh is still in
        the resampled configuration if resampling was performed.
      @ registration_prediction Will be a pimms ITable object with columns
        'polar_angle', 'eccentricity', and 'visual_area'. For values outside
        of the model region, visual_area will be 0 and other values will be
        undefined (but are typically 0). The registration_prediction
        describes the values on the registered_mesh.
      @ prediction Will be a pimms ITable object with columns 'polar_angle',
        'eccentricity', and 'visual_area'. For values outside of the model
        region, visual_area will be 0 and other values will be undefined
        (but are typically 0). The prediction describes the values on the
        native_mesh and the predicted_mesh.
    '''
    # invert the map projection to make the registration map into a mesh
    coords3d = np.array(preregistration_mesh.coordinates)
    idcs = registered_map.labels
    coords3d[:,idcs] = registered_map.meta('projection').inverse(registered_map.coordinates)
    rmesh = preregistration_mesh.copy(coordinates=coords3d)
    # go ahead and get the model predictions...
    d = model.cortex_to_angle(registered_map.coordinates)
    id2n = model.area_id_to_name
    (ang, ecc) = d[0:2]
    lbl = np.asarray(d[2], dtype=np.int)
    rad = np.asarray([predict_pRF_radius(e, id2n[l]) if l > 0 else 0
                      for (e,l) in zip(ecc,lbl)])
    d = {'polar_angle':ang, 'eccentricity':ecc, 'visual_area':lbl, 'radius':rad}
    # okay, put these on the mesh
    rpred = {}
    for (k,v) in six.iteritems(d):
        v.setflags(write=False)
        tmp = np.zeros(rmesh.vertex_count, dtype=v.dtype)
        tmp[registered_map.labels] = v
        tmp.setflags(write=False)
        rpred[k] = tmp
    rpred = pyr.pmap(rpred)
    rmesh = rmesh.with_prop(rpred)
    # next, do all of this for the native mesh..
    if native_mesh is preregistration_mesh:
        pred = rpred
        pmesh = rmesh
    else:
        # we need to address the native coordinates in the prereg coordinates
        # then unaddress them in the registered coordinates; this will let us
        # make a native-registered-map and repeat the exercise above
        addr = preregistration_mesh.address(native_mesh.coordinates)
        natreg_mesh = native_mesh.copy(coordinates=rmesh.unaddress(addr))
        d = model.cortex_to_angle(natreg_mesh)
        (ang,ecc) = d[0:2]
        lbl = np.asarray(d[2], dtype=np.int)
        rad = np.asarray([predict_pRF_radius(e, id2n[l]) if l > 0 else 0
                          for (e,l) in zip(ecc,lbl)])
        pred = pyr.m(polar_angle=ang, eccentricity=ecc, radius=rad, visual_area=lbl)
        pmesh = natreg_mesh.with_prop(pred)
    return {'registered_mesh'        : rmesh,
            'registration_prediction': rpred,
            'prediction'             : pred,
            'predicted_mesh'         : pmesh}
java
@Override
public RemoveRoleFromDBClusterResult removeRoleFromDBCluster(RemoveRoleFromDBClusterRequest request) {
    request = beforeClientExecution(request);
    return executeRemoveRoleFromDBCluster(request);
}
python
def eth_getBlockHeaderByNumber(self, number):
    """Get block header by block number.

    :param number: the block number
    :return: the block header
    """
    block_hash = self.reader._get_block_hash(number)
    block_number = _format_block_number(number)
    return self.reader._get_block_header(block_hash, block_number)
java
public List<ResourceHandle> getChildrenByType(final String type) {
    final ArrayList<ResourceHandle> children = new ArrayList<>();
    if (this.isValid()) {
        for (final Resource child : this.resource.getChildren()) {
            ResourceHandle handle = ResourceHandle.use(child);
            if (handle.isOfType(type)) {
                children.add(handle);
            }
        }
    }
    return children;
}
java
public <A extends Annotation> boolean hasAnnotation(final Field field, final Class<A> annClass) {
    return getAnnotation(field, annClass) != null;
}
java
@Override
protected LocalTime parse(final String string, final DateTimeFormatter formatter) {
    return LocalTime.parse(string, formatter);
}
python
def link(self, link, title, text):
    """Rendering a given link with content and title.

    :param link: href link for ``<a>`` tag.
    :param title: title content for `title` attribute.
    :param text: text content for description.
    """
    if self.anonymous_references:
        underscore = '__'
    else:
        underscore = '_'
    if title:
        return self._raw_html(
            '<a href="{link}" title="{title}">{text}</a>'.format(
                link=link, title=title, text=text
            )
        )
    if not self.parse_relative_links:
        return '\ `{text} <{target}>`{underscore}\ '.format(
            target=link,
            text=text,
            underscore=underscore
        )
    else:
        url_info = urlparse(link)
        if url_info.scheme:
            return '\ `{text} <{target}>`{underscore}\ '.format(
                target=link,
                text=text,
                underscore=underscore
            )
        else:
            link_type = 'doc'
            anchor = url_info.fragment
            if url_info.fragment:
                if url_info.path:
                    # Can't link to anchors via doc directive.
                    anchor = ''
                else:
                    # Example: [text](#anchor)
                    link_type = 'ref'
            doc_link = '{doc_name}{anchor}'.format(
                # splittext approach works whether or not path is set. It
                # will return an empty string if unset, which leads to
                # anchor only ref.
                doc_name=os.path.splitext(url_info.path)[0],
                anchor=anchor
            )
            return '\ :{link_type}:`{text} <{doc_link}>`\ '.format(
                link_type=link_type,
                doc_link=doc_link,
                text=text
            )
python
def add_metaclass(metaclass):
    """
    Class decorator for creating a class with a metaclass.
    Adapted from the six project: https://pythonhosted.org/six/
    """
    vars_to_skip = ('__dict__', '__weakref__')

    def wrapper(cls):
        copied_dict = {
            key: value
            for key, value in cls.__dict__.items()
            if key not in vars_to_skip
        }
        return metaclass(cls.__name__, cls.__bases__, copied_dict)

    return wrapper
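A quick self-test of the decorator above, assuming add_metaclass is in scope; Meta and Widget are made-up names for illustration. The wrapper rebuilds the class through the metaclass, so the result is an instance of Meta and picks up anything the metaclass injects.

class Meta(type):
    def __new__(mcls, name, bases, namespace):
        namespace['tagged'] = True  # the metaclass marks every class it builds
        return super().__new__(mcls, name, bases, namespace)

@add_metaclass(Meta)
class Widget(object):
    pass

assert type(Widget) is Meta
assert Widget.tagged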
python
def std_dev(self, value):
    """The std_dev property.

    Args:
        value (float): The property value.
    """
    if value == self._defaults['stdDev'] and 'stdDev' in self._values:
        del self._values['stdDev']
    else:
        self._values['stdDev'] = value
python
def toLily(self):
    '''
    Method which converts the object instance, its attributes and children
    to a string of lilypond code

    :return: str of lilypond code
    '''
    lilystring = ""
    children = self.GetChildrenIndexes()
    total = self.note_total
    counter = 0
    for child in range(len(children)):
        note = self.GetChild(children[child])
        item = note.GetItem()
        if item is not None:
            item.autoBeam = self.autoBeam
        if hasattr(note, "duration"):
            try:
                counter += int(note.duration)
            except:
                if note.duration == "\\longa":
                    counter += 0.25
                if note.duration == "\\breve":
                    counter += 0.5
        if counter > total / 2:
            if hasattr(self, "mid_barline"):
                lilystring += self.mid_barline.toLily()
                self.__delattr__("mid_barline")
        if hasattr(self, "rest") and hasattr(self, "total"):
            lilystring += "R" + self.total
        else:
            lilystring += note.toLily() + " "
    return lilystring
java
public <T extends CommandOutput<?, ?, ?>> void register(Class<T> commandOutputClass,
        CommandOutputFactory commandOutputFactory) {
    LettuceAssert.notNull(commandOutputClass, "CommandOutput class must not be null");
    LettuceAssert.notNull(commandOutputFactory, "CommandOutputFactory must not be null");
    register(registry, commandOutputClass, commandOutputFactory);
}
java
public void setExecutableUsers(java.util.Collection<String> executableUsers) {
    if (executableUsers == null) {
        this.executableUsers = null;
        return;
    }
    this.executableUsers = new com.amazonaws.internal.SdkInternalList<String>(executableUsers);
}
java
private static float calcLearningRate(float initLearningRate, int epochs,
        long totalProcessed, long vocabWordCount) {
    float rate = initLearningRate
            * (1 - totalProcessed / (float) (epochs * vocabWordCount + 1));
    if (rate < initLearningRate * LEARNING_RATE_MIN_FACTOR)
        rate = initLearningRate * LEARNING_RATE_MIN_FACTOR;
    return rate;
}
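For readers following along in Python, a line-for-line sketch of the decay above: linear decay over the planned number of updates, clamped to a floor fraction of the initial rate. The value of LEARNING_RATE_MIN_FACTOR is not shown in this record, so the 1e-2 used here is only an assumed placeholder.

LEARNING_RATE_MIN_FACTOR = 1e-2  # assumed; the Java constant's value is not in this snippet

def calc_learning_rate(init_lr, epochs, total_processed, vocab_word_count):
    # Linear decay over epochs * vocab_word_count updates, with a floor.
    rate = init_lr * (1 - total_processed / float(epochs * vocab_word_count + 1))
    return max(rate, init_lr * LEARNING_RATE_MIN_FACTOR)

assert calc_learning_rate(0.025, 1, 0, 100) == 0.025  # nothing processed yet: full rate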
python
def gamma(ranks_list1, ranks_list2):
    '''
    Goodman and Kruskal's gamma correlation coefficient

    :param ranks_list1: a list of ranks (integers)
    :param ranks_list2: a second list of ranks (integers) of equal length
        with corresponding entries
    :return: Gamma correlation coefficient (rank correlation ignoring ties)
    '''
    num_concordant_pairs = 0
    num_discordant_pairs = 0
    num_tied_x = 0
    num_tied_y = 0
    num_tied_xy = 0
    num_items = len(ranks_list1)
    for i in range(num_items):
        rank_1 = ranks_list1[i]
        rank_2 = ranks_list2[i]
        for j in range(i + 1, num_items):
            diff1 = ranks_list1[j] - rank_1
            diff2 = ranks_list2[j] - rank_2
            if (diff1 > 0 and diff2 > 0) or (diff1 < 0 and diff2 < 0):
                num_concordant_pairs += 1
            elif (diff1 > 0 and diff2 < 0) or (diff1 < 0 and diff2 > 0):
                num_discordant_pairs += 1
            elif diff1 == 0 and diff2 == 0:
                num_tied_xy += 1
            elif diff1 == 0:
                num_tied_x += 1
            elif diff2 == 0:
                num_tied_y += 1
    try:
        gamma_corr_coeff = (float(num_concordant_pairs - num_discordant_pairs)
                            / float(num_concordant_pairs + num_discordant_pairs))
    except ZeroDivisionError:  # every pair is tied
        gamma_corr_coeff = 'n/a'
    return [num_tied_x, num_tied_y, num_tied_xy, gamma_corr_coeff]
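A tiny sanity check of gamma on hand-verifiable inputs, assuming the function above is in scope: identical rankings are perfectly concordant (gamma = 1.0) and reversed rankings perfectly discordant (gamma = -1.0). The last element of the returned list is the coefficient.

assert gamma([1, 2, 3, 4], [1, 2, 3, 4])[-1] == 1.0   # all 6 pairs concordant
assert gamma([1, 2, 3, 4], [4, 3, 2, 1])[-1] == -1.0  # all 6 pairs discordant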
python
def shutdown(self, container, instances=None, map_name=None, **kwargs):
    """
    Shut down container instances from a container configuration. Typically
    this means stopping and removing containers. Note that not all policy
    classes necessarily implement this method.

    :param container: Container name.
    :type container: unicode | str
    :param instances: Instance names to remove. If not specified, will
      remove all instances as specified in the configuration (or just one
      default instance).
    :type instances: collections.Iterable[unicode | str | NoneType]
    :param map_name: Container map name. Optional - if not provided the
      default map is used.
    :type map_name: unicode | str
    :param kwargs: Additional kwargs. Only options controlling policy
      behavior are considered.
    :return: Return values of removed containers.
    :rtype: list[dockermap.map.runner.ActionOutput]
    """
    return self.run_actions('shutdown', container, instances=instances,
                            map_name=map_name, **kwargs)
java
public static String getDateString(Date self) {
    return DateFormat.getDateInstance(DateFormat.SHORT).format(self);
}
python
def _set_port(self, v, load=False):
    """
    Setter method for port, mapped from YANG variable
    /qos/cpu/slot/port_group/port (container)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_port is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_port()
    directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=port.port, is_container='container', presence=False, yang_name="port", rest_name="port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CPU QoS port parameters', u'alt-name': u'port', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-qos-cpu', defining_module='brocade-qos-cpu', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """port must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=port.port, is_container='container', presence=False, yang_name="port", rest_name="port", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure CPU QoS port parameters', u'alt-name': u'port', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-qos-cpu', defining_module='brocade-qos-cpu', yang_type='container', is_config=True)""",
        })

    self.__port = t
    if hasattr(self, '_set'):
        self._set()
python
def covariance(left, right, where=None, how='sample'):
    """
    Compute covariance of two numeric arrays.

    Parameters
    ----------
    how : {'sample', 'pop'}, default 'sample'

    Returns
    -------
    cov : double scalar
    """
    expr = ops.Covariance(left, right, how, where).to_expr()
    return expr
python
def get_queryset(self, request):
    """Limit the queryset to devices using the VISA protocol."""
    qs = super(VISADeviceAdmin, self).get_queryset(request)
    return qs.filter(protocol_id=PROTOCOL_ID)
java
private static Optional<ImmutableQueryModifiers> convertModifiers(MutableQueryModifiers queryModifiers) {
    if (queryModifiers.hasModifiers()) {
        ImmutableQueryModifiers immutableQueryModifiers = new ImmutableQueryModifiersImpl(queryModifiers);
        return Optional.of(immutableQueryModifiers);
    } else {
        return Optional.empty();
    }
}
java
public void serialize(StringBuilder buf) {
    for (CSSClass clss : store.values()) {
        clss.appendCSSDefinition(buf);
    }
}
python
def start(self, name):
    '''
    End the current behaviour and run a named behaviour.

    :param name: the name of the behaviour to run
    :type name: str
    '''
    d = self.boatd.post({'active': name}, endpoint='/behaviours')
    current = d.get('active')
    if current is not None:
        return 'started {}'.format(current)
    else:
        return 'no behaviour running'
java
public ReportDefinition withAdditionalSchemaElements(SchemaElement... additionalSchemaElements) {
    java.util.ArrayList<String> additionalSchemaElementsCopy =
        new java.util.ArrayList<String>(additionalSchemaElements.length);
    for (SchemaElement value : additionalSchemaElements) {
        additionalSchemaElementsCopy.add(value.toString());
    }
    if (getAdditionalSchemaElements() == null) {
        setAdditionalSchemaElements(additionalSchemaElementsCopy);
    } else {
        getAdditionalSchemaElements().addAll(additionalSchemaElementsCopy);
    }
    return this;
}
python
def flatten(*args):
    '''Generator that recursively flattens embedded lists, tuples, etc.'''
    for arg in args:
        # collections.Iterable was removed in Python 3.10; use collections.abc.
        if isinstance(arg, collections.abc.Iterable) and not isinstance(arg, (str, bytes)):
            yield from flatten(*arg)
        else:
            yield arg
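Example behavior, assuming the flatten generator above is in scope: nesting is unwound recursively, while strings and bytes are passed through whole rather than split into characters.

assert list(flatten([1, [2, (3, 4)], 'ab'])) == [1, 2, 3, 4, 'ab']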
python
def reset(self, data={}):
    """JsonQuery object can be reset to new data according to given data
    or previously given raw Json data.

    :@param data: {}
    :@type data: json/dict

    :@return self
    """
    if data and (isinstance(data, dict) or isinstance(data, list)):
        self._json_data = data
    else:
        self._json_data = copy.deepcopy(self._raw_data)

    self.__reset_queries()
    return self
java
public static String abbreviate(String str, int max) {
    return StringUtils.isBlank(str) ? "" : StringUtils.abbreviate(str, max);
}
java
private boolean tryAddModule(
        IAggregator aggregator,
        List<String> list,
        String bundleRoot,
        IResource bundleRootRes,
        String locale,
        String resource,
        Collection<String> availableLocales) throws IOException {

    if (availableLocales != null && !availableLocales.contains(locale)) {
        return false;
    }
    boolean result = false;
    URI uri = bundleRootRes.getURI();
    URI testUri = uri.resolve(locale + "/" + resource + ".js"); //$NON-NLS-1$ //$NON-NLS-2$
    IResource testResource = aggregator.newResource(testUri);
    if (availableLocales != null || testResource.exists()) {
        String mid = bundleRoot + "/" + locale + "/" + resource; //$NON-NLS-1$ //$NON-NLS-2$
        list.add(mid);
        result = true;
    }
    return result;
}
python
def median(lst):
    """ Calculates the median value in a @lst """
    #: http://stackoverflow.com/a/24101534
    sortedLst = sorted(lst)
    lstLen = len(lst)
    index = (lstLen - 1) // 2
    if (lstLen % 2):
        return sortedLst[index]
    else:
        return (sortedLst[index] + sortedLst[index + 1]) / 2.0
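Worked examples for both branches, assuming the median function above: an odd-length list returns the middle element, an even-length list the mean of the two middle elements.

assert median([3, 1, 2]) == 2
assert median([4, 1, 3, 2]) == 2.5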
java
private void repairBrokenLatticeBefore(ViterbiLattice lattice, int index) {
    ViterbiNode[][] nodeStartIndices = lattice.getStartIndexArr();

    for (int startIndex = index; startIndex > 0; startIndex--) {
        if (nodeStartIndices[startIndex] != null) {
            ViterbiNode glueBase = findGlueNodeCandidate(index, nodeStartIndices[startIndex], startIndex);
            if (glueBase != null) {
                int length = index + 1 - startIndex;
                String surface = glueBase.getSurface().substring(0, length);
                ViterbiNode glueNode = createGlueNode(startIndex, glueBase, surface);
                lattice.addNode(glueNode, startIndex, startIndex + glueNode.getSurface().length());
                return;
            }
        }
    }
}
python
def __parse_namespace(self):
    """ Parse the namespace from various sources """
    if self.manifest.has_option('config', 'namespace'):
        return self.manifest.get('config', 'namespace')
    elif self.manifest.has_option('config', 'source'):
        return NAMESPACE_REGEX.search(self.manifest.get('config', 'source')).groups()[0]
    else:
        logger.warn('Could not parse namespace implicitly')
        return None
java
public void marshall(DescribeDomainControllersRequest describeDomainControllersRequest,
        ProtocolMarshaller protocolMarshaller) {

    if (describeDomainControllersRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(describeDomainControllersRequest.getDirectoryId(), DIRECTORYID_BINDING);
        protocolMarshaller.marshall(describeDomainControllersRequest.getDomainControllerIds(), DOMAINCONTROLLERIDS_BINDING);
        protocolMarshaller.marshall(describeDomainControllersRequest.getNextToken(), NEXTTOKEN_BINDING);
        protocolMarshaller.marshall(describeDomainControllersRequest.getLimit(), LIMIT_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public ParseSetup readSetup(FileVec f, String[] columnNames, byte[] columnTypes) {
    try {
        Reader orcFileReader = getReader(f);
        StructObjectInspector insp = (StructObjectInspector) orcFileReader.getObjectInspector();
        OrcParser.OrcParseSetup stp = OrcParser.deriveParseSetup(orcFileReader, insp);

        // change back the columnNames and columnTypes if they are specified already
        if (!(columnNames == null) && (stp.getAllColNames().length == columnNames.length)) { // copy column name
            stp.setColumnNames(columnNames);
            stp.setAllColNames(columnNames);
        }

        if (columnTypes != null) { // copy enum type only
            byte[] old_columnTypes = stp.getColumnTypes();
            String[] old_columnTypeNames = stp.getColumnTypesString();
            for (int index = 0; index < columnTypes.length; index++) {
                if (columnTypes[index] != old_columnTypes[index]) {
                    if (supported_type_conversions[old_columnTypes[index]][columnTypes[index]] == 1) {
                        old_columnTypes[index] = columnTypes[index];
                    } else {
                        stp.addErrs(new ParseWriter.UnsupportedTypeOverride(f._key.toString(),
                            Vec.TYPE_STR[old_columnTypes[index]], Vec.TYPE_STR[columnTypes[index]],
                            columnNames[index]));
                    }
                }
                if (columnTypes[index] == Vec.T_CAT || columnTypes[index] == Vec.T_BAD
                        || columnTypes[index] == Vec.T_TIME) // only copy the enum types
                    old_columnTypes[index] = columnTypes[index];
            }
            stp.setColumnTypes(old_columnTypes);
            stp.setColumnTypeStrings(old_columnTypeNames);
        }

        List<StripeInformation> stripesInfo = orcFileReader.getStripes();
        if (stripesInfo.size() == 0) { // empty file
            f.setChunkSize(stp._chunk_size = (int) f.length());
            return stp;
        }
        f.setNChunks(stripesInfo.size());
        stp._chunk_size = f._chunkSize;
        // ORC parser needs one-to-one mapping between chunk and stripe (just ids, offsets do not matter)
        assert f.nChunks() == stripesInfo.size();
        return stp;
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
python
def property_cache_once_per_frame(f):
    """ This decorator caches the return value for one game loop, then
    clears it if it is accessed in a different game loop.
    Only works on properties of the bot object because it requires access
    to self.state.game_loop """
    f.frame = -1
    f.cache = None

    @wraps(f)
    def inner(self):
        if f.frame != self.state.game_loop:
            f.frame = self.state.game_loop
            f.cache = None
        if f.cache is None:
            f.cache = f(self)
        return f.cache

    return property(inner)
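A minimal self-contained demo of the decorator above, with stand-in Bot and _State classes (hypothetical names). Note that the cache lives on the function object itself, so it is shared across all instances of the class; `from functools import wraps` must be in scope for the decorator.

from functools import wraps

class _State:
    def __init__(self):
        self.game_loop = 0

class Bot:
    def __init__(self):
        self.state = _State()
        self.calls = 0

    @property_cache_once_per_frame
    def expensive(self):
        self.calls += 1
        return self.calls

bot = Bot()
assert bot.expensive == 1
assert bot.expensive == 1  # same game loop: cached value returned
bot.state.game_loop += 1
assert bot.expensive == 2  # new game loop: cache invalidated, recomputed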
python
def convert_convolution(node, **kwargs):
    """Map MXNet's convolution operator attributes to onnx's Conv operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    kernel_dims = list(parse_helper(attrs, "kernel"))
    stride_dims = list(parse_helper(attrs, "stride", [1, 1]))
    pad_dims = list(parse_helper(attrs, "pad", [0, 0]))
    num_group = int(attrs.get("num_group", 1))
    dilations = list(parse_helper(attrs, "dilate", [1, 1]))

    pad_dims = pad_dims + pad_dims

    conv_node = onnx.helper.make_node(
        "Conv",
        inputs=input_nodes,
        outputs=[name],
        kernel_shape=kernel_dims,
        strides=stride_dims,
        dilations=dilations,
        pads=pad_dims,
        group=num_group,
        name=name
    )

    return [conv_node]
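The `pad_dims + pad_dims` line is the interesting bit: MXNet stores one padding value per spatial axis, while ONNX's Conv expects begin and end pads spelled out separately, so doubling the list encodes symmetric padding. A quick illustration:

pad_dims = [1, 2]                  # MXNet: (pad_h, pad_w)
onnx_pads = pad_dims + pad_dims
assert onnx_pads == [1, 2, 1, 2]   # ONNX: [h_begin, w_begin, h_end, w_end]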
python
def CheckForNonStandardConstructs(filename, clean_lines, linenum,
                                  nesting_state, error):
  r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.

  Complain about several constructs which gcc-2 accepts, but which are
  not standard C++.  Warning about these in lint is one way to ease the
  transition to new compilers.
  - put storage class first (e.g. "static const" instead of "const static").
  - "%lld" instead of %qd" in printf-type functions.
  - "%1$d" is non-standard in printf-type functions.
  - "\%" is an undefined character escape sequence.
  - text after #endif is not allowed.
  - invalid inner-style forward declaration.
  - >? and <? operators, and their >?= and <?= cousins.

  Additionally, check for constructor/destructor style violations and
  reference members, as it is very convenient to do so while checking for
  gcc-2 compliance.

  Args:
    filename: The name of the current file.
    clean_lines: A CleansedLines instance containing the file.
    linenum: The number of the line to check.
    nesting_state: A NestingState instance which maintains information about
                   the current stack of nested blocks being parsed.
    error: A callable to which errors are reported, which takes 4 arguments:
           filename, line number, error level, and message
  """

  # Remove comments from the line, but leave in strings for now.
  line = clean_lines.lines[linenum]

  if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
    error(filename, linenum, 'runtime/printf_format', 3,
          '%q in format strings is deprecated.  Use %ll instead.')

  if Search(r'printf\s*\(.*".*%\d+\$', line):
    error(filename, linenum, 'runtime/printf_format', 2,
          '%N$ formats are unconventional.  Try rewriting to avoid them.')

  # Remove escaped backslashes before looking for undefined escapes.
  line = line.replace('\\\\', '')

  if Search(r'("|\').*\\(%|\[|\(|{)', line):
    error(filename, linenum, 'build/printf_format', 3,
          '%, [, (, and { are undefined character escapes.  Unescape them.')

  # For the rest, work with both comments and strings removed.
  line = clean_lines.elided[linenum]

  if Search(r'\b(const|volatile|void|char|short|int|long'
            r'|float|double|signed|unsigned'
            r'|schar|u?int8|u?int16|u?int32|u?int64)'
            r'\s+(register|static|extern|typedef)\b',
            line):
    error(filename, linenum, 'build/storage_class', 5,
          'Storage class (static, extern, typedef, etc) should be first.')

  if Match(r'\s*#\s*endif\s*[^/\s]+', line):
    error(filename, linenum, 'build/endif_comment', 5,
          'Uncommented text after #endif is non-standard.  Use a comment.')

  if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
    error(filename, linenum, 'build/forward_decl', 5,
          'Inner-style forward declarations are invalid.  Remove this line.')

  if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
            line):
    error(filename, linenum, 'build/deprecated', 3,
          '>? and <? (max and min) operators are non-standard and deprecated.')

  if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
    # TODO(unknown): Could it be expanded safely to arbitrary references,
    # without triggering too many false positives? The first
    # attempt triggered 5 warnings for mostly benign code in the regtest,
    # hence the restriction.
    # Here's the original regexp, for the reference:
    # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
    # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
    error(filename, linenum, 'runtime/member_string_references', 2,
          'const string& members are dangerous. It is much better to use '
          'alternatives, such as pointers or simple constants.')

  # Everything else in this function operates on class declarations.
  # Return early if the top of the nesting stack is not a class, or if
  # the class head is not completed yet.
  classinfo = nesting_state.InnermostClass()
  if not classinfo or not classinfo.seen_open_brace:
    return

  # The class may have been declared with namespace or classname qualifiers.
  # The constructor and destructor will not have those qualifiers.
  base_classname = classinfo.name.split('::')[-1]

  # Look for single-argument constructors that aren't marked explicit.
  # Technically a valid construct, but against style. Also look for
  # non-single-argument constructors which are also technically valid, but
  # strongly suggest something is wrong.
  explicit_constructor_match = Match(
      r'\s+(?:inline\s+)?(explicit\s+)?(?:inline\s+)?%s\s*'
      r'\(((?:[^()]|\([^()]*\))*)\)'
      % re.escape(base_classname),
      line)

  if explicit_constructor_match:
    is_marked_explicit = explicit_constructor_match.group(1)

    if not explicit_constructor_match.group(2):
      constructor_args = []
    else:
      constructor_args = explicit_constructor_match.group(2).split(',')

    # collapse arguments so that commas in template parameter lists and
    # function argument parameter lists don't split arguments in two
    i = 0
    while i < len(constructor_args):
      constructor_arg = constructor_args[i]
      while (constructor_arg.count('<') > constructor_arg.count('>') or
             constructor_arg.count('(') > constructor_arg.count(')')):
        constructor_arg += ',' + constructor_args[i + 1]
        del constructor_args[i + 1]
      constructor_args[i] = constructor_arg
      i += 1

    defaulted_args = [arg for arg in constructor_args if '=' in arg]
    noarg_constructor = (not constructor_args or  # empty arg list
                         # 'void' arg specifier
                         (len(constructor_args) == 1 and
                          constructor_args[0].strip() == 'void'))
    onearg_constructor = ((len(constructor_args) == 1 and  # exactly one arg
                           not noarg_constructor) or
                          # all but at most one arg defaulted
                          (len(constructor_args) >= 1 and
                           not noarg_constructor and
                           len(defaulted_args) >= len(constructor_args) - 1))
    initializer_list_constructor = bool(
        onearg_constructor and
        Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
    copy_constructor = bool(
        onearg_constructor and
        Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
              % re.escape(base_classname),
              constructor_args[0].strip()))

    if (not is_marked_explicit and
        onearg_constructor and
        not initializer_list_constructor and
        not copy_constructor):
      if defaulted_args:
        error(filename, linenum, 'runtime/explicit', 5,
              'Constructors callable with one argument '
              'should be marked explicit.')
      else:
        error(filename, linenum, 'runtime/explicit', 5,
              'Single-parameter constructors should be marked explicit.')
    elif is_marked_explicit and not onearg_constructor:
      if noarg_constructor:
        error(filename, linenum, 'runtime/explicit', 5,
              'Zero-parameter constructors should not be marked explicit.')
      else:
        error(filename, linenum, 'runtime/explicit', 0,
              'Constructors that require multiple arguments '
              'should not be marked explicit.')
java
public SDVariable normal(String name, double mean, double stddev, SDVariable shape) {
    validateInteger("normal (Gaussian) random", shape);
    SDVariable ret = f().randomNormal(mean, stddev, shape);
    return updateVariableNameAndReference(ret, name);
}
java
public static DoubleBinding hypot(final ObservableDoubleValue x, final ObservableDoubleValue y) {
    return createDoubleBinding(() -> Math.hypot(x.get(), y.get()), x, y);
}
python
def communicate_path(self, path):
    """Communicates `path` to this peer if it qualifies.

    Checks if `path` should be shared/communicated with this peer according
    to various conditions: like bgp state, transmit side loop, local and
    remote AS path, community attribute, etc.
    """
    LOG.debug('Peer %s asked to communicate path', self)
    if not path:
        raise ValueError('Invalid path %s given.' % path)

    # We do not send anything to peer who is not in established state.
    if not self.in_established():
        LOG.debug('Skipping sending path as peer is not in '
                  'ESTABLISHED state %s', path)
        return

    # Check if this session is available for given paths afi/safi
    path_rf = path.route_family
    if not (self.is_mpbgp_cap_valid(path_rf) or
            path_rf in [RF_IPv4_UC, RF_IPv6_UC]):
        LOG.debug('Skipping sending path as %s route family is not'
                  ' available for this session', path_rf)
        return

    # If RTC capability is available and path afi/saif is other than RT
    # nlri
    if path_rf != RF_RTC_UC and \
            self.is_mpbgp_cap_valid(RF_RTC_UC):
        rtfilter = self._peer_manager.curr_peer_rtfilter(self)
        # If peer does not have any rtfilter or if rtfilter does not have
        # any RTs common with path RTs we do not share this path with the
        # peer
        if rtfilter and not path.has_rts_in(rtfilter):
            LOG.debug('Skipping sending path as rffilter %s and path '
                      'rts %s have no RT in common',
                      rtfilter, path.get_rts())
            return

    # Transmit side loop detection: We check if leftmost AS matches
    # peers AS, if so we do not send UPDATE message to this peer.
    as_path = path.get_pattr(BGP_ATTR_TYPE_AS_PATH)
    if as_path and as_path.has_matching_leftmost(self.remote_as):
        LOG.debug('Skipping sending path as AS_PATH has peer AS %s',
                  self.remote_as)
        return

    # If this peer is a route server client, we forward the path
    # regardless of AS PATH loop, whether the connection is iBGP or eBGP,
    # or path's communities.
    if self.is_route_server_client:
        outgoing_route = OutgoingRoute(path)
        self.enque_outgoing_msg(outgoing_route)

    if self._neigh_conf.multi_exit_disc:
        med_attr = path.get_pattr(BGP_ATTR_TYPE_MULTI_EXIT_DISC)
        if not med_attr:
            path = bgp_utils.clone_path_and_update_med_for_target_neighbor(
                path, self._neigh_conf.multi_exit_disc
            )

    # For connected/local-prefixes, we send update to all peers.
    if path.source is None:
        # Construct OutgoingRoute specific for this peer and put it in
        # its sink.
        outgoing_route = OutgoingRoute(path)
        self.enque_outgoing_msg(outgoing_route)

    # If path from a bgp-peer is new best path, we share it with
    # all bgp-peers except the source peer and other peers in his AS.
    # This is default Junos setting that in Junos can be disabled with
    # 'advertise-peer-as' setting.
    elif (self != path.source or
          self.remote_as != path.source.remote_as):
        # When BGP speaker receives an UPDATE message from an internal
        # peer, the receiving BGP speaker SHALL NOT re-distribute the
        # routing information contained in that UPDATE message to other
        # internal peers (unless the speaker acts as a BGP Route
        # Reflector) [RFC4271].
        if (self.remote_as == self._core_service.asn and
                self.remote_as == path.source.remote_as and
                isinstance(path.source, Peer) and
                not path.source.is_route_reflector_client and
                not self.is_route_reflector_client):
            LOG.debug(
                'Skipping sending iBGP route to iBGP peer %s AS %s',
                self.ip_address, self.remote_as)
            return

        # If new best path has community attribute, it should be taken into
        # account when sending UPDATE to peers.
        comm_attr = path.get_pattr(BGP_ATTR_TYPE_COMMUNITIES)
        if comm_attr:
            comm_attr_na = comm_attr.has_comm_attr(
                BGPPathAttributeCommunities.NO_ADVERTISE
            )
            # If we have NO_ADVERTISE attribute present, we do not send
            # UPDATE to any peers
            if comm_attr_na:
                LOG.debug('Path has community attr. NO_ADVERTISE = %s'
                          '. Hence not advertising to peer', comm_attr_na)
                return

            comm_attr_ne = comm_attr.has_comm_attr(
                BGPPathAttributeCommunities.NO_EXPORT
            )
            comm_attr_nes = comm_attr.has_comm_attr(
                BGPPathAttributeCommunities.NO_EXPORT_SUBCONFED
            )

            # If NO_EXPORT_SUBCONFED/NO_EXPORT is one of the attribute, we
            # do not advertise to eBGP peers as we do not have any
            # confederation feature at this time.
            if ((comm_attr_nes or comm_attr_ne) and
                    (self.remote_as != self._core_service.asn)):
                LOG.debug('Skipping sending UPDATE to peer: %s as per '
                          'community attribute configuration', self)
                return

        # Construct OutgoingRoute specific for this peer and put it in
        # its sink.
        outgoing_route = OutgoingRoute(path)
        self.enque_outgoing_msg(outgoing_route)
        LOG.debug('Enqueued outgoing route %s for peer %s',
                  outgoing_route.path.nlri, self)
python
def get(self, sid):
    """
    Constructs a TaskContext

    :param sid: The sid

    :returns: twilio.rest.taskrouter.v1.workspace.task.TaskContext
    :rtype: twilio.rest.taskrouter.v1.workspace.task.TaskContext
    """
    return TaskContext(self._version, workspace_sid=self._solution['workspace_sid'], sid=sid, )
java
protected boolean isIncludeStackTrace(ServerRequest request, MediaType produces) {
    ErrorProperties.IncludeStacktrace include = this.errorProperties.getIncludeStacktrace();
    if (include == ErrorProperties.IncludeStacktrace.ALWAYS) {
        return true;
    }
    if (include == ErrorProperties.IncludeStacktrace.ON_TRACE_PARAM) {
        return isTraceEnabled(request);
    }
    return false;
}
java
public Title getSectionHeading() {
    if (Section_Type.featOkTst && ((Section_Type) jcasType).casFeat_sectionHeading == null)
        jcasType.jcas.throwFeatMissing("sectionHeading", "de.julielab.jules.types.Section");
    return (Title) (jcasType.ll_cas.ll_getFSForRef(
        jcasType.ll_cas.ll_getRefValue(addr, ((Section_Type) jcasType).casFeatCode_sectionHeading)));
}
python
def __get_nfiles_to_compress(self):
    """
    Return the number of files to compress

    Note: it should take about 0.1s for counting 100k files on a dual core
    machine
    """
    floyd_logger.info("Get number of files to compress... (this could take a few seconds)")
    paths = [self.source_dir]
    try:
        # Traverse each subdirs of source_dir and count files/dirs
        while paths:
            path = paths.pop()
            for item in scandir(path):
                if item.is_dir():
                    paths.append(item.path)
                    self.__files_to_compress += 1
                elif item.is_file():
                    self.__files_to_compress += 1
    except OSError as e:
        # OSError: [Errno 13] Permission denied
        if e.errno == errno.EACCES:
            self.source_dir = os.getcwd() if self.source_dir == '.' else self.source_dir  # Expand cwd
            sys.exit(("Permission denied. Make sure to have read permission "
                      "for all the files and directories in the path: %s")
                     % (self.source_dir))
    floyd_logger.info("Compressing %d files", self.__files_to_compress)
python
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
    """
    See :meth:`superclass method
    <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
    for spec of input and result values.
    """
    C = self.COEFFS[imt]

    imean = (self._get_magnitude_term(C, rup.mag) +
             self._get_distance_term(C, dists.rhypo, rup.mag))
    # Convert mean from cm/s and cm/s/s
    if imt.name in "SA PGA":
        mean = np.log((10.0 ** (imean - 2.0)) / g)
    else:
        mean = np.log(10.0 ** imean)
    stddevs = self._get_stddevs(C, len(dists.rhypo), stddev_types)
    return mean, stddevs
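The `- 2.0` in the exponent is the cm-to-m conversion done in log10 space (a factor of 100 is 2 decades), after which dividing by g expresses the motion as a fraction of gravity. A standalone check of that identity, assuming g is standard gravity as commonly imported from scipy.constants in such GMPE code:

import numpy as np
from scipy.constants import g  # standard gravity, 9.80665 m/s^2

imean = 2.5  # log10 of a motion value in cm/s^2
mean = np.log((10.0 ** (imean - 2.0)) / g)
assert np.isclose(np.exp(mean), (10.0 ** imean) / 100.0 / g)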
java
public DataBuffer asNd4jBuffer(DataType type, int elementSize) {
    int length = content.length / elementSize;
    DataBuffer ret = Nd4j.createBuffer(ByteBuffer.allocateDirect(content.length), type, length, 0);
    for (int i = 0; i < length; i++) {
        switch (type) {
            case DOUBLE:
                ret.put(i, getDouble(i));
                break;
            case INT:
                ret.put(i, getInt(i));
                break;
            case FLOAT:
                ret.put(i, getFloat(i));
                break;
            case LONG:
                ret.put(i, getLong(i));
                break;
        }
    }
    return ret;
}
python
def makevFunc(self, solution):
    '''
    Construct the value function for each current state.

    Parameters
    ----------
    solution : ConsumerSolution
        The solution to the single period consumption-saving problem. Must
        have a consumption function cFunc (using cubic or linear splines) as
        a list with elements corresponding to the current Markov state. E.g.
        solution.cFunc[0] is the consumption function when in the i=0 Markov
        state this period.

    Returns
    -------
    vFuncNow : [ValueFunc]
        A list of value functions (defined over normalized market resources
        m) for each current period Markov state.
    '''
    vFuncNow = []  # Initialize an empty list of value functions
    # Loop over each current period state and construct the value function
    for i in range(self.StateCount):
        # Make state-conditional grids of market resources and consumption
        mNrmMin = self.mNrmMin_list[i]
        mGrid = mNrmMin + self.aXtraGrid
        cGrid = solution.cFunc[i](mGrid)
        aGrid = mGrid - cGrid

        # Calculate end-of-period value at each gridpoint
        EndOfPrdv_all = np.zeros((self.StateCount, self.aXtraGrid.size))
        for j in range(self.StateCount):
            if self.possible_transitions[i, j]:
                EndOfPrdv_all[j, :] = self.EndOfPrdvFunc_list[j](aGrid)
        EndOfPrdv = np.dot(self.MrkvArray[i, :], EndOfPrdv_all)

        # Calculate (normalized) value and marginal value at each gridpoint
        vNrmNow = self.u(cGrid) + EndOfPrdv
        vPnow = self.uP(cGrid)

        # Make a "decurved" value function with the inverse utility function
        vNvrs = self.uinv(vNrmNow)  # value transformed through inverse utility
        vNvrsP = vPnow * self.uinvP(vNrmNow)
        mNrm_temp = np.insert(mGrid, 0, mNrmMin)  # add the lower bound
        vNvrs = np.insert(vNvrs, 0, 0.0)
        vNvrsP = np.insert(vNvrsP, 0,
                           self.MPCmaxEff[i] ** (-self.CRRA / (1.0 - self.CRRA)))
        MPCminNvrs = self.MPCminNow[i] ** (-self.CRRA / (1.0 - self.CRRA))
        vNvrsFunc_i = CubicInterp(mNrm_temp, vNvrs, vNvrsP,
                                  MPCminNvrs * self.hNrmNow[i], MPCminNvrs)

        # "Recurve" the decurved value function and add it to the list
        vFunc_i = ValueFunc(vNvrsFunc_i, self.CRRA)
        vFuncNow.append(vFunc_i)
    return vFuncNow
python
def update_group_color(self, lights: list) -> None:
    """Update group colors based on light states.

    deCONZ group updates don't contain any information about the current
    state of the lights in the group. This method updates the color
    properties of the group to the current color of the lights in the
    group.

    For groups where the lights have different colors the group color will
    only reflect the color of the latest changed light in the group.
    """
    for group in self.groups.values():
        # Skip group if there are no common light ids.
        if not any({*lights} & {*group.lights}):
            continue

        # More than one light means load_parameters called this method.
        # Then we take first best light to be available.
        light_ids = lights
        if len(light_ids) > 1:
            light_ids = group.lights

        for light_id in light_ids:
            if self.lights[light_id].reachable:
                group.update_color_state(self.lights[light_id])
                break
python
def unblock_signals(self):
    """Let the combos listen for event changes again."""
    self.aggregation_layer_combo.blockSignals(False)
    self.exposure_layer_combo.blockSignals(False)
    self.hazard_layer_combo.blockSignals(False)
java
@Path("version") @GET @Produces(MediaType.APPLICATION_JSON) public Response version() { try { InputStream is = getClass().getResourceAsStream("/application.properties"); Properties props = new Properties(); props.load(is); String version = props.get("version").toString(); String buildNumber = props.get("buildNumber").toString(); return Response.ok().entity( "{\"version\":\"" + version + "\",\"build\":\"" + buildNumber + "\"}").build(); } catch (IOException e) { //should never happen throw new RuntimeException(e); } }
java
@FFDCIgnore({ IOException.class })
private void flushAsyncBuffers() throws IOException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "Flushing Async buffers: " + this);
    }

    if (!this.isc.getResponse().isCommitted()) {
        if ((obs != null) && !this.WCheadersWritten) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "obs ->" + obs);
            }
            obs.alertOSFirstFlush();
        }
        this.isc.getResponse().setCommitted();
    }

    if (this.ignoreFlush) {
        this.ignoreFlush = false;
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Ignoring first flush attempt");
        }
        return;
    }

    final boolean writingBody = (hasBufferedContent());
    // flip the last buffer for the write...
    if (writingBody && null != this.output[this.outputIndex]) {
        this.output[this.outputIndex].flip();
    }

    try {
        WsByteBuffer[] content = (writingBody) ? this.output : null;
        if (isClosed() || this.isClosing) {
            if (!hasFinished) {
                // if we've already called finishResponseMessage - don't call again
                // on a closed stream, use the final write api
                this.isc.finishResponseMessage(content); // check if this also needs to be async
                this.isClosing = false;
                this.hasFinished = true;
            }
        } else {
            // else use the partial body api
            // Add the async option.
            vc = this.isc.sendResponseBody(content, _callback, false);
            if (vc == null) {
                // then we will have to wait for data to be written, async write in progress
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "wait for data to be written, async write in progress, set ready to false");
                }
                synchronized (this._writeReadyLockObj) {
                    this.setWriteReady(false);
                    this.set_internalReady(false);
                }
            } else {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "able to write out, no callback required, vc -->" + vc);
                }
            }
        }
    } catch (MessageSentException mse) {
        FFDCFilter.processException(mse, getClass().getName(), "flushAsyncBuffers",
                                    new Object[] { this, this.isc });
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Invalid state, message-sent-exception received; " + this.isc);
        }
        this.error = new IOException("Invalid state");
        throw this.error;
    } catch (IOException ioe) {
        // no FFDC required
        this.error = ioe;
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Received exception during write: " + ioe);
        }
        throw ioe;
    } finally {
        this.bytesWritten += this.bufferedCount;
        if (this.contentLengthSet) {
            this.bytesRemaining -= this.bufferedCount;
        }
        this.bufferedCount = 0;
        this.outputIndex = 0;
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "finally, this " + this + " ,bytesRemaining -->" + this.bytesRemaining
                         + ", bytesWritten -->" + this.bytesWritten);
        }
        if (writingBody && vc != null) {
            clearBuffersAfterWrite();
        }
    }
}
java
public <T> T evaluateExpressionGet(FacesContext context, String expression,
        Class<? extends T> expectedType) throws ELException {
    Application application = getMyfacesApplicationInstance(context);
    if (application != null) {
        return application.evaluateExpressionGet(context, expression, expectedType);
    }
    throw new UnsupportedOperationException();
}
python
def get_klout_topics(tweet, topic_type='influence'):
    """
    Warning: Klout is deprecated and is being removed from Tweet payloads May 2018. \n
    See https://developer.twitter.com/en/docs/tweets/enrichments/overview/klout \n

    Get the user's chosen Klout topics (a list of dicts), if it exists.
    Regardless of format or topic type, topic dicts will have the same keys:
    "url", "id", "name", "score"

    Args:
        tweet (Tweet): A Tweet object
        topic_type (str): Which type of Klout topic to return. Options are
            limited to 'influence' and 'interest'

    Returns:
        list: A list of dicts representing Klout topics, or if Klout topics \
        do not exist in the Tweet payload, return None. The list is sorted
        by the "score" value.

    Example:
        >>> result = [{
        ...     # the user's score for that topic
        ...     "score": 0.54,
        ...     # the Klout topic ID
        ...     "id": "10000000000000019376",
        ...     # the Klout topic URL
        ...     "url": "http://klout.com/topic/id/10000000000000019376",
        ...     # the Klout topic name
        ...     "name": "Emoji"
        ... },
        ... {
        ...     "score": 0.43,
        ...     "id": "9159",
        ...     "url": "http://klout.com/topic/id/9159",
        ...     "name": "Vegetables"
        ... }]
    """
    try:
        # check that the dict paths exist
        if is_original_format(tweet):
            topics = tweet['user']['derived']['klout']['{}_topics'.format(topic_type)]
        else:
            topics = tweet['gnip']['klout_profile']['topics']
    except KeyError:
        return None
    # since we have topics, collect the right pieces
    topics_list = []
    if is_original_format(tweet):
        for topic in topics:
            # note: this is the same as the current structure of OF
            # payloads, but is written out for consistency w/ AS payloads
            this_topic = dict(url=topic['url'],
                              id=topic['id'],
                              name=topic['name'],
                              score=topic['score'])
            topics_list.append(this_topic)
    else:
        relevant_topics = [x for x in topics if x['topic_type'] == topic_type]
        for topic in relevant_topics:
            this_topic = dict(url=topic['link'],
                              id=topic['id'],
                              name=topic['displayName'],
                              score=topic['score'])
            topics_list.append(this_topic)
    sorted_topics_list = sorted(topics_list, key=lambda x: x['score'])
    return sorted_topics_list
java
public static <T, C extends Collection<T>> Pair<Integer, C> page(long start, long howMany,
        Iterable<T> iterable, C collection) {
    dbc.precondition(iterable != null, "cannot call page with a null iterable");
    return Pagination.page(start, howMany, iterable.iterator(), collection);
}
java
public void marshall(KeyValuePair keyValuePair, ProtocolMarshaller protocolMarshaller) {

    if (keyValuePair == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(keyValuePair.getName(), NAME_BINDING);
        protocolMarshaller.marshall(keyValuePair.getValue(), VALUE_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def wiki(searchterm):
    """return the top wiki search result for the term"""
    searchterm = quote(searchterm)
    url = "https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch={0}&format=json"
    url = url.format(searchterm)
    result = requests.get(url).json()
    pages = result["query"]["search"]
    # try to reject disambiguation pages
    pages = [p for p in pages if 'may refer to' not in p["snippet"]]
    if not pages:
        return ""
    page = quote(pages[0]["title"].encode("utf8"))
    link = "http://en.wikipedia.org/wiki/{0}".format(page)
    r = requests.get(
        "http://en.wikipedia.org/w/api.php?format=json&action=parse&page={0}"
        .format(page)).json()
    soup = BeautifulSoup(r["parse"]["text"]["*"], "html5lib")
    p = soup.find('p').get_text()
    p = p[:8000]
    return u"{0}\n{1}".format(p, link)
java
static String createStatsType(Set<String> statsItems, String sortType,
        MtasFunctionParserFunction functionParser) {
    String statsType = STATS_BASIC;
    for (String statsItem : statsItems) {
        if (STATS_FULL_TYPES.contains(statsItem)) {
            statsType = STATS_FULL;
            break;
        } else if (STATS_ADVANCED_TYPES.contains(statsItem)) {
            statsType = STATS_ADVANCED;
        } else if (statsType != STATS_ADVANCED && STATS_BASIC_TYPES.contains(statsItem)) {
            statsType = STATS_BASIC;
        } else {
            Matcher m = fpStatsFunctionItems.matcher(statsItem.trim());
            if (m.find()) {
                if (STATS_FUNCTIONS.contains(m.group(2).trim())) {
                    statsType = STATS_FULL;
                    break;
                }
            }
        }
    }
    if (sortType != null && STATS_TYPES.contains(sortType)) {
        if (STATS_FULL_TYPES.contains(sortType)) {
            statsType = STATS_FULL;
        } else if (STATS_ADVANCED_TYPES.contains(sortType)) {
            statsType = (statsType == null || statsType != STATS_FULL)
                ? STATS_ADVANCED : statsType;
        }
    }
    return statsType;
}
java
public BundledSignaturesType createBundledSignatures() {
    final BundledSignaturesType s = new BundledSignaturesType();
    s.setProject(getProject());
    bundledSignatures.add(s);
    return s;
}
java
protected void notifyInMainThread(final T value) {
    for (ValueChangedListener<T> listener : listeners) {
        listener.onChanged(value, Value.this);
    }
}
python
def stylize(self):
    """Apply theme style attributes to this instance and its children.

    This also causes a relayout to occur so that any changes in padding or
    other stylistic attributes may be handled.
    """
    # do children first in case parent needs to override their style
    for child in self.children:
        child.stylize()
    style = theme.current.get_dict(self)
    preserve_child = False
    try:
        preserve_child = getattr(theme.current, 'preserve_child')
    except AttributeError:
        preserve_child = False
    for key, val in style.iteritems():
        kvc.set_value_for_keypath(self, key, val, preserve_child)
    self.layout()
java
@Override
int getIndexOf(final V value) {
    Validate.notNull(value, "Value required");
    Validate.validState(getSize() > 0, "No data");

    // search for value in current page
    final List<V> data = getData();
    final int currentPageSize = data.size();
    final K valueKey = getKeyOf(value);
    for (int localIndex = 0; localIndex < currentPageSize; localIndex++) {
        if (getKeyOf(data.get(localIndex)).equals(valueKey)) {
            return first + localIndex;
        }
    }
    return -1;
}
java
@Override
protected ILaunchConfiguration findLaunchConfiguration(IType type,
        ILaunchConfigurationType configType) {

    // Find an existing or create a launch configuration (Standard way)
    ILaunchConfiguration iConf = super.findLaunchConfiguration(type, configType);

    ILaunchConfigurationWorkingCopy iConfWC;
    try {
        /*
         * Tune the default launch configuration: setup run-time classpath
         * manually
         */
        iConfWC = iConf.getWorkingCopy();

        iConfWC.setAttribute(
            IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);

        List<String> classPath = new ArrayList<String>();
        IResource resource = type.getResource();
        IJavaProject project =
            (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
        IRuntimeClasspathEntry cpEntry =
            JavaRuntime.newDefaultProjectClasspathEntry(project);
        classPath.add(0, cpEntry.getMemento());

        iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH, classPath);

    } catch (CoreException e) {
        e.printStackTrace();
        // FIXME Error dialog
        return null;
    }

    /*
     * Update the selected configuration with a specific Hadoop location
     * target
     */
    IResource resource = type.getResource();
    if (!(resource instanceof IFile))
        return null;
    RunOnHadoopWizard wizard = new RunOnHadoopWizard((IFile) resource, iConfWC);
    WizardDialog dialog =
        new WizardDialog(Display.getDefault().getActiveShell(), wizard);

    dialog.create();
    dialog.setBlockOnOpen(true);
    if (dialog.open() != WizardDialog.OK)
        return null;

    try {
        iConfWC.doSave();
    } catch (CoreException e) {
        e.printStackTrace();
        // FIXME Error dialog
        return null;
    }

    return iConfWC;
}
java
public com.amazonaws.services.simpledb.SimpleDBResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) { ResponseMetadata metadata = client.getResponseMetadataForRequest(request); if (metadata != null) return new com.amazonaws.services.simpledb.SimpleDBResponseMetadata(metadata); else return null; }
python
def _concat_same_dtype(self, to_concat, name): """ Concatenate to_concat which has the same class. """ attribs = self._get_attributes_dict() attribs['name'] = name # do not pass tz to set because tzlocal cannot be hashed if len({str(x.dtype) for x in to_concat}) != 1: raise ValueError('to_concat must have the same tz') new_data = type(self._values)._concat_same_type(to_concat).asi8 # GH 3232: If the concat result is evenly spaced, we can retain the # original frequency is_diff_evenly_spaced = len(unique_deltas(new_data)) == 1 if not is_period_dtype(self) and not is_diff_evenly_spaced: # reset freq attribs['freq'] = None return self._simple_new(new_data, **attribs)
java
void debugHull() { StringBuilder buf = new StringBuilder(hull.size() * 20); for(IntIntPair p : hull) { buf.append(p.first).append(" (").append(p.second).append(") "); } LOG.debugFinest(buf); }
java
private boolean fillLast() throws IOException { int count; if (lastFilled == pageSize) { throw new IllegalStateException(); } count = src.read(pages[lastNo], lastFilled, pageSize - lastFilled); if (count <= 0) { if (count == 0) { throw new IllegalStateException(); } return false; } lastFilled += count; return true; }
python
def compile_file(env, src_path, dst_path, encoding='utf-8', base_dir=''):
    """Compiles a Jinja2 template to python code.

    :param env: a Jinja2 Environment instance.
    :param src_path: path to the source file.
    :param dst_path: path to the destination file.
    :param encoding: template encoding.
    :param base_dir: the base path to be removed from the compiled template
        filename.
    """
    # read raw bytes and decode explicitly so the template encoding is honored
    with open(src_path, 'rb') as src_file:
        source = src_file.read().decode(encoding)
    name = src_path.replace(base_dir, '')
    raw = env.compile(source, name=name, filename=name, raw=True)
    with open(dst_path, 'w') as dst_file:
        dst_file.write(raw)
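A minimal usage sketch for the compiler above, assuming a stock jinja2 Environment; the template paths here are made up for illustration:

from jinja2 import Environment

env = Environment()
# Compile templates/index.html into python code at build/index.py;
# 'templates/' is stripped from the name stored in the compiled source.
compile_file(env, 'templates/index.html', 'build/index.py',
             base_dir='templates/')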
java
public BeanDeployer addClass(String className, AnnotatedTypeLoader loader) { addIfNotNull(loader.loadAnnotatedType(className, getManager().getId())); return this; }
java
public static ns_conf_upgrade_history[] get(nitro_service client) throws Exception { ns_conf_upgrade_history resource = new ns_conf_upgrade_history(); resource.validate("get"); return (ns_conf_upgrade_history[]) resource.get_resources(client); }
java
public HttpServerBuilder contentFrom(String contextRoot, String contentResource){ URL resource = resolver.resolve(contentResource,CallStack.getCallerClass()); resources.put(contextRoot, resource); return this; }
java
protected void addSummaryType(ProgramElementDoc member, Content tdSummaryType) { MethodDoc meth = (MethodDoc)member; addModifierAndType(meth, meth.returnType(), tdSummaryType); }
java
private void initialize() { this.setLayout(new CardLayout()); this.setName(Constant.messages.getString("conn.options.title")); this.add(getPanelProxyChain(), getPanelProxyChain().getName()); }
java
public boolean checkHeartBeat() {
        String dataPath = executorMeta.getLocalDir();
        File localstate = new File(dataPath + "/data/" + startType + "/" + startType + ".heartbeat/");
        long modifyTime = localstate.lastModified();
        if (System.currentTimeMillis() - modifyTime > JOYConstants.EXECUTOR_HEARTBEAT_TIMEOUT) {
            LOG.info("----------------------");
            LOG.info(Long.toString(modifyTime));
            LOG.info(Long.toString(System.currentTimeMillis()));
            LOG.info(dataPath + "/data/" + startType + "/" + startType + ".heartbeat/");
            LOG.info("can't get heartbeat for over " + JOYConstants.EXECUTOR_HEARTBEAT_TIMEOUT + " ms");
            return false;
        }
        return true;
    }
python
def value_for_keypath(data, keypath):
    """
    Returns the value of a keypath in a dictionary if the keypath exists,
    or None if the keypath does not exist.
    """
    if len(keypath) == 0:
        return data

    value = data
    for key in keypath.split('.'):
        if key in value:
            value = value[key]
        else:
            return None

    return value
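A quick usage sketch of the lookup above, on a made-up nested dict:

config = {'window': {'size': {'width': 800}}}
value_for_keypath(config, 'window.size.width')  # -> 800
value_for_keypath(config, 'window.color')       # -> None
value_for_keypath(config, '')                   # -> the whole dict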
python
def get_serializer_info(self, serializer): """ Given an instance of a serializer, return a dictionary of metadata about its fields. """ if hasattr(serializer, 'child'): # If this is a `ListSerializer` then we want to examine the # underlying child serializer instance instead. serializer = serializer.child # Remove the URL field if present serializer.fields.pop(api_settings.URL_FIELD_NAME, None) return OrderedDict([ (field_name, self.get_field_info(field)) for field_name, field in serializer.fields.items() ])
python
def get(self, oid): """Use PySNMP to perform an SNMP GET operation on a single object. :param oid: The OID of the object to get. :raises: SNMPFailure if an SNMP request fails. :returns: The value of the requested object. """ try: results = self.cmd_gen.getCmd(self._get_auth(), self._get_transport(), oid) except snmp_error.PySnmpError as e: raise SNMPFailure(SNMP_FAILURE_MSG % ("GET", e)) error_indication, error_status, error_index, var_binds = results if error_indication: # SNMP engine-level error. raise SNMPFailure(SNMP_FAILURE_MSG % ("GET", error_indication)) if error_status: # SNMP PDU error. raise SNMPFailure( "SNMP operation '%(operation)s' failed: %(error)s at" " %(index)s" % {'operation': "GET", 'error': error_status.prettyPrint(), 'index': error_index and var_binds[int(error_index) - 1] or '?'}) # We only expect a single value back name, val = var_binds[0] return val
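A hedged usage sketch for the GET helper above; only the method body is shown in the source, so the SNMPClient class name and its constructor arguments below are assumptions:

client = SNMPClient(address='192.0.2.1', port=161, community='public')
# sysName.0 from SNMPv2-MIB; raises SNMPFailure on engine or PDU errors.
sys_name = client.get('1.3.6.1.2.1.1.5.0')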
python
def get_tier_from_participants(participantsIdentities, minimum_tier=Tier.bronze, queue=Queue.RANKED_SOLO_5x5):
    """
    Returns the tier of the lowest-tier player in the match, together with the
    participant IDs grouped by tier.

    :param participantsIdentities: the match participants
    :param minimum_tier: the minimum tier a participant must have in order to be included
    :param queue: the queue over which the tier of the player is considered
    :return: the tier of the lowest-tier player in the match, and a dict mapping every league
        at or above minimum_tier to the participant IDs in it
    """
    leagues = leagues_by_summoner_ids([p.player.summonerId for p in participantsIdentities], queue)
    match_tier = max(leagues.keys(), key=operator.attrgetter('value'))
    return match_tier, {league: ids for league, ids in leagues.items()
                        if league.is_better_or_equal(minimum_tier)}
java
public static void Forward(double[][] data) {
        double[][] result = new double[data.length][data[0].length];

        for (int m = 0; m < data.length; m++) {
            for (int n = 0; n < data[0].length; n++) {
                double sum = 0;
                for (int i = 0; i < result.length; i++) {
                    for (int k = 0; k < data.length; k++) {
                        sum += data[i][k] * cas(((2.0 * Math.PI) / data.length) * (i * m + k * n));
                    }
                }
                // assign once per (m, n), after the full double sum
                result[m][n] = (1.0 / data.length) * sum;
            }
        }

        for (int i = 0; i < data.length; i++) {
            for (int j = 0; j < data[0].length; j++) {
                data[i][j] = result[i][j];
            }
        }
    }
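For a cross-check, here is a NumPy sketch of the same 2-D discrete Hartley transform, with cas(x) = cos(x) + sin(x) and the same 1/N scaling. It is an independent reference for small square inputs (the Java loops above use data.length for both dimensions of the angle, which effectively assumes a square matrix), not the library's own API:

import numpy as np

def dht2_reference(x):
    # Direct O(N^4) evaluation of
    # H[m, n] = (1/N) * sum_{i,k} x[i, k] * cas(2*pi*(i*m + k*n)/N),
    # mirroring the Java loops above for an N x N input.
    n = x.shape[0]
    idx = np.arange(n)
    ang = (2.0 * np.pi / n) * (
        idx[:, None, None, None] * idx[None, None, :, None]
        + idx[None, :, None, None] * idx[None, None, None, :])
    cas = np.cos(ang) + np.sin(ang)
    return np.einsum('ik,ikmn->mn', x, cas) / n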
python
def start(self, interval_s):
    """Starts executing the method at the specified interval.

    Args:
      interval_s: The amount of time between executions of the method.

    Returns:
      True if the interval was started, False if it was already running.
    """
    if self.running:
      return False
    self.stopped.clear()

    def _execute():
      # Always execute immediately once
      if not self.method() and self.stop_if_false:
        return
      while not self.stopped.wait(interval_s):
        if not self.method() and self.stop_if_false:
          return

    self.thread = threading.Thread(target=_execute)
    self.thread.daemon = True
    self.thread.start()
    return True
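A usage sketch; the Poller host class below is hypothetical and only supplies the attributes (method, stop_if_false, stopped, thread, running) that start() expects:

import threading
import time

class Poller:
    """Hypothetical minimal host class for the start() method above."""
    def __init__(self, method, stop_if_false=False):
        self.method = method                # callable run on each interval
        self.stop_if_false = stop_if_false  # stop when method() returns falsy
        self.stopped = threading.Event()    # set() this to stop the loop
        self.thread = None

    @property
    def running(self):
        return self.thread is not None and self.thread.is_alive()

Poller.start = start  # attach the method defined above, for this sketch only

p = Poller(lambda: print('tick') or True)
p.start(0.5)       # True; calling again while running would return False
time.sleep(2)
p.stopped.set()    # wakes the wait() in _execute and ends the loop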
python
def profile_args(_args): """Return args for v1, v2, or v3 structure. Args: _args (dict): The args section from the profile. Returns: dict: A collapsed version of the args dict. """ # TODO: clean this up in a way that works for both py2/3 if ( _args.get('app', {}).get('optional') is not None or _args.get('app', {}).get('required') is not None ): # detect v3 schema app_args_optional = _args.get('app', {}).get('optional', {}) app_args_required = _args.get('app', {}).get('required', {}) default_args = _args.get('default', {}) _args = {} _args.update(app_args_optional) _args.update(app_args_required) _args.update(default_args) elif _args.get('app') is not None and _args.get('default') is not None: # detect v2 schema app_args = _args.get('app', {}) default_args = _args.get('default', {}) _args = {} _args.update(app_args) _args.update(default_args) return _args
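A quick illustration of the collapse on a made-up v3-style profile (the key names here are invented):

v3_args = {
    'app': {
        'optional': {'tc_proxy_host': None},
        'required': {'api_access_id': '123'},
    },
    'default': {'tc_log_level': 'debug'},
}
profile_args(v3_args)
# -> {'tc_proxy_host': None, 'api_access_id': '123', 'tc_log_level': 'debug'}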
python
def main(): """ Start the DQL client. """ parse = argparse.ArgumentParser(description=main.__doc__) parse.add_argument("-c", "--command", help="Run this command and exit") region = os.environ.get("AWS_REGION", "us-west-1") parse.add_argument( "-r", "--region", default=region, help="AWS region to connect to (default %(default)s)", ) parse.add_argument( "-H", "--host", default=None, help="Host to connect to if using a local instance " "(default %(default)s)", ) parse.add_argument( "-p", "--port", default=8000, type=int, help="Port to connect to " "(default %(default)d)", ) parse.add_argument( "--version", action="store_true", help="Print the version and exit" ) args = parse.parse_args() if args.version: print(__version__) return logging.config.dictConfig(LOG_CONFIG) cli = DQLClient() cli.initialize(region=args.region, host=args.host, port=args.port) if args.command: command = args.command.strip() try: cli.run_command(command) if cli.engine.partial: cli.run_command(";") except KeyboardInterrupt: pass else: cli.start()
java
public History withEvents(HistoryEvent... events) { if (this.events == null) { setEvents(new java.util.ArrayList<HistoryEvent>(events.length)); } for (HistoryEvent ele : events) { this.events.add(ele); } return this; }
java
private Object[] convert(FieldConversion[] fcs, Object[] values) {
        Object[] convertedValues = new Object[values.length];
        for (int i = 0; i < values.length; i++) {
            convertedValues[i] = fcs[i].sqlToJava(values[i]);
        }
        return convertedValues;
    }
java
public static void cursorDoubleToContentValues(Cursor cursor, String field, ContentValues values, String key) { int colIndex = cursor.getColumnIndex(field); if (!cursor.isNull(colIndex)) { values.put(key, cursor.getDouble(colIndex)); } else { values.put(key, (Double) null); } }
python
def stdrepr_object(self, title, elements, *, cls=None, short=False,
                   quote_string_keys=False, delimiter=None):
    """
    Helper function to represent objects.

    Arguments:
        title: A title string displayed above the box containing the
            elements, or a pair of two strings that will be displayed
            left and right (e.g. a pair of brackets).
        elements: A list of (key, value) pairs, which will be displayed
            in a table in the order given.
        cls: A class to give to the result.
        short: Whether to use short or long form. Short form displays
            the elements as ``k=v``, appended horizontally. The
            alternative is a table, with associations stacked vertically.
        quote_string_keys: If True, string keys will be displayed with
            quotes around them. Default is False.
        delimiter: The character to use to separate key and value. By
            default '↦' if quote_string_keys is True.
    """
    H = self.H

    if delimiter is None and quote_string_keys:
        delimiter = ' ↦ '

    def wrap(x):
        if not quote_string_keys and isinstance(x, str):
            return x
        else:
            return self(x)

    if short:
        contents = []
        for k, v in elements:
            kv = H.div['hrepr-object-kvpair'](
                wrap(k),
                delimiter or '',
                self(v)
            )
            contents.append(kv)
    else:
        t = H.table()['hrepr-object-table']
        for k, v in elements:
            tr = H.tr(H.td(wrap(k)))
            if delimiter is not None:
                tr = tr(H.td['hrepr-delimiter'](delimiter))
            tr = tr(H.td(self(v)))
            t = t(tr)
        contents = [t]

    title_brackets = isinstance(title, tuple) and len(title) == 2

    rval = self.titled_box(title, contents,
                           'h' if title_brackets else 'v',
                           'h' if short else 'v')
    if cls:
        rval = rval[cls]
    return rval
java
public boolean isBlank() { boolean rslt = true; // default if (value != null) { for (String v : value) { if (StringUtils.isNotBlank(v)) { rslt = false; break; } } } return rslt; }
python
def train_epoch(self, epoch_info: EpochInfo, interactive=True): """ Train model on an epoch of a fixed number of batch updates """ epoch_info.on_epoch_begin() if interactive: iterator = tqdm.trange(epoch_info.batches_per_epoch, file=sys.stdout, desc="Training", unit="batch") else: iterator = range(epoch_info.batches_per_epoch) for batch_idx in iterator: batch_info = BatchInfo(epoch_info, batch_idx) batch_info.on_batch_begin() self.train_batch(batch_info) batch_info.on_batch_end() epoch_info.result_accumulator.freeze_results() epoch_info.on_epoch_end()
python
def get_lowest_numeric_score(self): """Gets the lowest number in a numeric grading system. return: (decimal) - the lowest number raise: IllegalState - ``is_based_on_grades()`` is ``true`` *compliance: mandatory -- This method must be implemented.* """ if self.is_based_on_grades(): raise errors.IllegalState('This GradeSystem is based on grades') if self._my_map['lowestNumericScore'] is None: return None else: return Decimal(str(self._my_map['lowestNumericScore']))
python
def _read_mode_pocsp(self, size, kind):
        """Read Partial Order Connection Service Profile option.

        Positional arguments:
            * size - int, length of option
            * kind - int, 10 (POC-Serv Profile)

        Returns:
            * dict -- extracted Partial Order Connection Service Profile (POC-SP) option

        Structure of TCP POC-SP Option [RFC 1693][RFC 6247]:

                                      1 bit        1 bit    6 bits
            +----------+----------+------------+----------+--------+
            |  Kind=10 | Length=3 | Start_flag | End_flag | Filler |
            +----------+----------+------------+----------+--------+

            Octets      Bits        Name                Description
              0           0     tcp.pocsp.kind      Kind (10)
              1           8     tcp.pocsp.length    Length (3)
              2          16     tcp.pocsp.start     Start Flag
              2          17     tcp.pocsp.end       End Flag
              2          18     tcp.pocsp.filler    Filler

        """
        temp = self._read_binary(size)

        data = dict(
            kind=kind,
            length=size,
            start=bool(int(temp[0])),
            end=bool(int(temp[1])),
            filler=bytes(chr(int(temp[2:], base=2)), encoding='utf-8'),
        )

        return data
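A worked example of the bit layout above, assuming _read_binary returned the remaining octet as the bit string '10000000' (Start_flag set, End_flag clear):

temp = '10000000'
start = bool(int(temp[0]))   # True  -- bit 16, Start Flag
end = bool(int(temp[1]))     # False -- bit 17, End Flag
filler = bytes(chr(int(temp[2:], base=2)), encoding='utf-8')  # b'\x00'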
python
def nextMode(self):
        """
        Put the search-replace macro into the next stage.

        The first stage queries the user for the string to replace, the
        second stage queries the string to replace it with, and the third
        allows the user to replace or skip individual matches, or replace
        them all automatically.
        """
        # Terminate the replacement procedure if no match was found.
        if len(self.matchList) == 0:
            self.qteAbort(QtmacsMessage())
            self.qteMain.qteKillMiniApplet()
            return

        self.queryMode += 1
        if self.queryMode == 1:
            # Disconnect the text-changed slot because no real time
            # highlighting is necessary when entering the replacement
            # string, unlike when entering the string to replace.
            self.qteText.textChanged.disconnect(self.qteTextChanged)

            # Store the string to replace and clear out the query field.
            self.toReplace = self.qteText.toPlainText()
            self.qteText.clear()
            self.qteTextPrefix.setText('mode 1')
        elif self.queryMode == 2:
            # Mode two is to replace or skip individual matches. For
            # this purpose rebind the "n", "!", and <space> keys
            # with the respective macros to facilitate it.
            register = self.qteMain.qteRegisterMacro
            bind = self.qteMain.qteBindKeyWidget

            # Unbind all keys from the input widget.
            self.qteMain.qteUnbindAllFromWidgetObject(self.qteText)

            macroName = register(self.ReplaceAll, replaceMacro=True)
            bind('!', macroName, self.qteText)

            macroName = register(self.ReplaceNext, replaceMacro=True)
            bind('<space>', macroName, self.qteText)

            macroName = register(self.SkipNext, replaceMacro=True)
            bind('n', macroName, self.qteText)

            macroName = register(self.Abort, replaceMacro=True)
            bind('q', macroName, self.qteText)
            bind('<enter>', macroName, self.qteText)

            self.toReplaceWith = self.qteText.toPlainText()
            self.qteTextPrefix.setText('mode 2')
            self.qteText.setText('<space> to replace, <n> to skip, '
                                 '<!> to replace all.')
        else:
            self.qteAbort(QtmacsMessage())
            self.qteMain.qteKillMiniApplet()
java
public void cleanupStoredBeanManagerOnShutdown(@Observes BeforeShutdown beforeShutdown) { if (bmpSingleton == null) { // this happens if there has been a failure at startup return; } ClassLoader classLoader = getClassLoader(null); bmpSingleton.bmInfos.remove(classLoader); //X TODO this might not be enough as there might be //X ClassLoaders used during Weld startup which are not the TCCL... }
python
def list_management_certificates(kwargs=None, conn=None, call=None): ''' .. versionadded:: 2015.8.0 List management certificates associated with the subscription CLI Example: .. code-block:: bash salt-cloud -f list_management_certificates my-azure name=my_management ''' if call != 'function': raise SaltCloudSystemExit( 'The list_management_certificates function must be called with -f or --function.' ) if not conn: conn = get_conn() data = conn.list_management_certificates() ret = {} for item in data.subscription_certificates: ret[item.subscription_certificate_thumbprint] = object_to_dict(item) return ret
java
@Check(CheckType.NORMAL) public void checkUnusedCapacities(SarlCapacityUses uses) { if (!isIgnored(UNUSED_AGENT_CAPACITY)) { final XtendTypeDeclaration container = uses.getDeclaringType(); final JvmDeclaredType jvmContainer = (JvmDeclaredType) this.associations.getPrimaryJvmElement(container); final Map<String, JvmOperation> importedFeatures = CollectionLiterals.newHashMap(); for (final JvmOperation operation : jvmContainer.getDeclaredOperations()) { if (Utils.isNameForHiddenCapacityImplementationCallingMethod(operation.getSimpleName())) { importedFeatures.put(operation.getSimpleName(), operation); } } final boolean isSkill = container instanceof SarlSkill; int index = 0; for (final JvmTypeReference capacity : uses.getCapacities()) { final LightweightTypeReference lreference = toLightweightTypeReference(capacity); if (isSkill && lreference.isAssignableFrom(jvmContainer)) { addIssue(MessageFormat.format( Messages.SARLValidator_22, capacity.getSimpleName()), uses, SARL_CAPACITY_USES__CAPACITIES, index, UNUSED_AGENT_CAPACITY, capacity.getSimpleName()); } else { final String fieldName = Utils.createNameForHiddenCapacityImplementationAttribute(capacity.getIdentifier()); final String operationName = Utils.createNameForHiddenCapacityImplementationCallingMethodFromFieldName(fieldName); final JvmOperation operation = importedFeatures.get(operationName); if (operation != null && !isLocallyUsed(operation, container)) { addIssue(MessageFormat.format( Messages.SARLValidator_78, capacity.getSimpleName()), uses, SARL_CAPACITY_USES__CAPACITIES, index, UNUSED_AGENT_CAPACITY, capacity.getSimpleName()); } } ++index; } } }