language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def _get_args(args): """Argparse logic lives here. returns: parsed arguments. """ parser = argparse.ArgumentParser( description='A tool to extract features into a simple format.', formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument('--no-cache', action='store_true') parser.add_argument('--deploy', action='store_true') parser.add_argument('--cache-path', type=str, default='fex-cache.pckl', help='Path for cache file') parser.add_argument('--path', type=str, default='features.csv', help='Path to write the dataset to') args = parser.parse_args(args) if args.no_cache: args.cache_path = None return args
java
/**
 * REST handler: deletes the application with the given name.
 * Responds 200 OK on success, 404 when no managed application matches
 * the name, and 403 when deletion is unauthorized or an I/O error occurs.
 */
@Override
public Response deleteApplication( String applicationName ) {
	this.logger.fine( "Request: delete application " + applicationName + "." );
	Response result = Response.ok().build();
	try {
		ManagedApplication ma = this.manager.applicationMngr().findManagedApplicationByName( applicationName );
		if( ma == null )
			result = handleError( Status.NOT_FOUND, new RestError( REST_INEXISTING, application( applicationName )), lang( this.manager )).build();
		else
			this.manager.applicationMngr().deleteApplication( ma );

	} catch( UnauthorizedActionException | IOException e ) {
		result = handleError( Status.FORBIDDEN, new RestError( REST_DELETION_ERROR, e, application( applicationName )), lang( this.manager )).build();
	}
	return result;
}
python
def save(self, *args, **kwargs):
    """
    **uid**: :code:`cycle:{year}`
    """
    # Derive slug from the cycle's name (presumably the year — TODO confirm
    # against the model definition), then build the unique id from it.
    self.slug = slugify(self.name)
    self.uid = 'cycle:{}'.format(self.slug)
    super(ElectionCycle, self).save(*args, **kwargs)
python
def notify(self, msg, color='green', notify='true', message_format='text'):
    """Send notification to specified HipChat room"""
    payload = {
        'message': msg,
        'color': color,
        'notify': notify,
        'message_format': message_format,
    }
    self.message_dict = payload
    if self.debug:
        # Debug mode: skip the network call and just echo the message.
        print('HipChat message: <{}>'.format(msg))
        return []
    return requests.post(
        self.notification_url,
        json.dumps(self.message_dict),
        headers=self.headers
    )
java
/**
 * Reports whether the property identified by {@code propName} is set.
 * Unknown property names are reported as not set.
 */
public boolean isSet(String propName) {
    switch (propName) {
        case "uniqueId":
            return isSetUniqueId();
        case "uniqueName":
            return isSetUniqueName();
        case "externalId":
            return isSetExternalId();
        case "externalName":
            return isSetExternalName();
        case "repositoryId":
            return isSetRepositoryId();
        default:
            return false;
    }
}
python
def try_fields(cls, *names) -> t.Optional[t.Any]:
    """Return first existing of given class field names."""
    _missing = object()  # sentinel: distinguishes "absent" from any real value
    for candidate in names:
        value = getattr(cls, candidate, _missing)
        if value is not _missing:
            return value
    raise AttributeError((cls, names))
java
/**
 * Fluent shorthand: registers the supplier as the result for the current
 * matching context by delegating to {@code get(fluentMatchingR, supplier)}.
 */
public FluentMatchingR<T, R> get(Supplier<R> supplier) {
    return get(fluentMatchingR, supplier);
}
python
def val_to_fp(self, sort, signed=True, rm=None):
    """
    Interpret this bitvector as an integer, and return the floating-point
    representation of that integer.

    :param sort: The sort of floating point value to return
    :param signed: Optional: whether this value is a signed integer
    :param rm: Optional: the rounding mode to use
    :return: An FP AST whose value is the same as this BV
    """
    rounding = rm if rm is not None else fp.fp.RM.default()
    # When no sort is given, infer one matching this BV's bit width.
    target_sort = sort if sort is not None else fp.fp.FSort.from_size(self.length)
    if signed:
        return fp.fpToFP(rounding, self, target_sort)
    return fp.fpToFPUnsigned(rounding, self, target_sort)
python
def format_checksum(checksum_pyxb):
    """Create string representation of a PyXB Checksum object.

    Args:
        checksum_pyxb: PyXB Checksum object.

    Returns:
        str: Combined hexadecimal value and algorithm name, as
        ``ALGORITHM/hexdigest`` with dashes stripped from the algorithm.
    """
    algorithm_name = checksum_pyxb.algorithm.upper().replace('-', '')
    hex_digest = checksum_pyxb.value().lower()
    return '{}/{}'.format(algorithm_name, hex_digest)
python
def getScriptNames(self):
    """Return the list of script names for the current database.

    :raises FMError: if no database has been selected yet.
    :return: list of lower-cased script names.
    """
    # Python-3-compatible raise: the original used the Python-2-only
    # ``raise FMError, 'msg'`` form, which is a SyntaxError under Python 3.
    if self._db == '':
        raise FMError('No database was selected')

    request = []
    request.append(uu({'-db': self._db}))
    request.append(uu({'-scriptnames': ''}))

    result = self._doRequest(request)
    result = FMResultset.FMResultset(result)

    # str.lower() replaces the long-deprecated string.lower() helper.
    return [scriptName['SCRIPT_NAME'].lower() for scriptName in result.resultset]
python
def mark_topic_read(self, topic, user):
    """ Marks a topic as read. """
    # Anonymous users carry no read-tracking state.
    if not user.is_authenticated:
        return

    forum = topic.forum
    try:
        forum_track = ForumReadTrack.objects.get(forum=forum, user=user)
    except ForumReadTrack.DoesNotExist:
        forum_track = None

    # A per-topic track is only needed when the forum-level track is
    # missing or older than the topic's last post.
    if (
        forum_track is None
        or (topic.last_post_on and forum_track.mark_time < topic.last_post_on)
    ):
        topic_track, created = TopicReadTrack.objects.get_or_create(topic=topic, user=user)
        if not created:
            topic_track.save()  # mark_time filled

        # If no other topic is unread inside the considered forum, the latter should also be
        # marked as read.
        unread_topics = (
            forum.topics
            .filter(
                Q(tracks__user=user, tracks__mark_time__lt=F('last_post_on'))
                | Q(
                    forum__tracks__user=user,
                    forum__tracks__mark_time__lt=F('last_post_on'),
                    tracks__isnull=True,
                )
            )
            .exclude(id=topic.id)
        )

        forum_topic_tracks = TopicReadTrack.objects.filter(topic__forum=forum, user=user)
        if (
            not unread_topics.exists()
            and (
                forum_track is not None
                or forum_topic_tracks.count() == forum.topics.filter(approved=True).count()
            )
        ):
            # The topics that are marked as read inside the forum for the given user will be
            # deleted while the forum track associated with the user must be created or updated.
            # This is done only if there are as many topic tracks as approved topics in case
            # the related forum has not been previously marked as read.
            TopicReadTrack.objects.filter(topic__forum=forum, user=user).delete()
            forum_track, _ = ForumReadTrack.objects.get_or_create(forum=forum, user=user)
            forum_track.save()

    # Update parent forum tracks
    self._update_parent_forum_tracks(forum, user)
python
def create(self, name, description=None, units=None,
           agg_method="priority_fill", overwrite=False):
    """
    Create, or get if exists, a Symbol.

    Parameters
    ----------
    name : str
        A symbol's name is a primary key, used across the Trump ORM.
    description : str, optional
        An arbitrary string, used to store user information related
        to the symbol.
    units : str, optional
        This is a string used to denote the units of the final data
        Series.
    agg_method : str, optional
        The aggregation method, used to calculate the final feed.
        Defaults to priority_fill.
    overwrite : bool, optional
        Set to True, to force deletion of an existing symbol.
        Defaults to False.

    Returns
    -------
    Symbol
    """
    sym = self.try_to_get(name)
    if sym is not None:
        if overwrite:
            # print() call form: the original Python-2-only ``print expr``
            # statement is a SyntaxError under Python 3; the call form
            # behaves identically for a single argument.
            print("Deleting {}".format(sym.name))
            self.ses.delete(sym)
            self.ses.commit()
        else:
            msg = ('Symbol {} already exists.\n'
                   'Consider setting overwrite to True.').format(name)
            raise Exception(msg)

    sym = Symbol(name, description, units, agg_method)
    self.ses.add(sym)
    print("Creating {}".format(sym.name))
    sym.add_alias(name)
    # Every symbol gets a handle object alongside it.
    sym.handle = SymbolHandle(sym=sym)
    self.ses.commit()
    return sym
python
def create_filters(predicate_params, predicate_factory):
    """Create filter functions from a list of string parameters.

    :param predicate_params: A list of predicate_param arguments as in
        `create_filter`.
    :param predicate_factory: As in `create_filter`.
    """
    # One filter per parameter, in input order.
    return [
        create_filter(predicate_param, predicate_factory)
        for predicate_param in predicate_params
    ]
python
def append(self, value):
    """Add an item to the end of the list, validating it first."""
    checked = self._ensure_value_is_valid(value)
    return super(Collection, self).append(checked)
python
def serialise(self, default_endianness=None): """ Serialise a message, without including any framing. :param default_endianness: The default endianness, unless overridden by the fields or class metadata. Should usually be left at ``None``. Otherwise, use ``'<'`` for little endian and ``'>'`` for big endian. :type default_endianness: str :return: The serialised message. :rtype: bytes """ # Figure out an endianness. endianness = (default_endianness or DEFAULT_ENDIANNESS) if hasattr(self, '_Meta'): endianness = self._Meta.get('endianness', endianness) inferred_fields = set() for k, v in iteritems(self._type_mapping): inferred_fields |= {x._name for x in v.dependent_fields()} for field in inferred_fields: setattr(self, field, None) # Some fields want to manipulate other fields that appear before them (e.g. Unions) for k, v in iteritems(self._type_mapping): v.prepare(self, getattr(self, k)) message = b'' for k, v in iteritems(self._type_mapping): message += v.value_to_bytes(self, getattr(self, k), default_endianness=endianness) return message
python
def ceil(self):
    """Round `x` and `y` up to integers."""
    rounded = (int(math.ceil(coord)) for coord in (self.x, self.y))
    return Point(*rounded)
python
def prepare_create_transaction(*, signers, recipients=None, asset=None, metadata=None):
    """Prepares a ``"CREATE"`` transaction payload, ready to be fulfilled.

    Args:
        signers (:obj:`list` | :obj:`tuple` | :obj:`str`): One or more
            public keys representing the issuer(s) of the asset being
            created.
        recipients (:obj:`list` | :obj:`tuple` | :obj:`str`, optional):
            One or more public keys representing the new recipients(s) of
            the asset being created. Defaults to ``None`` (in which case
            the signers become the recipients).
        asset (:obj:`dict`, optional): The asset to be created, in the
            form ``{'data': {...}}``. Defaults to ``None``.
        metadata (:obj:`dict`, optional): Metadata associated with the
            transaction. Defaults to ``None``.

    Returns:
        dict: The prepared ``"CREATE"`` transaction.
    """
    # Normalize signers to a plain list.
    # NOTE: tuple -> list conversion needed for the time being. See
    # https://github.com/bigchaindb/bigchaindb/issues/797
    if isinstance(signers, tuple):
        signers = list(signers)
    elif not isinstance(signers, list):
        signers = [signers]

    # Normalize recipients to [(public_keys, amount)] form; when falsy,
    # the signers receive the asset themselves.
    if not recipients:
        recipients = [(signers, 1)]
    elif isinstance(recipients, tuple):
        # See https://github.com/bigchaindb/bigchaindb/issues/797
        recipients = [(list(recipients), 1)]
    elif not isinstance(recipients, list):
        recipients = [([recipients], 1)]

    transaction = Transaction.create(
        signers,
        recipients,
        metadata=metadata,
        asset=asset['data'] if asset else None,
    )
    return transaction.to_dict()
python
def secret_absent(name, namespace='default', **kwargs):
    '''
    Ensures that the named secret is absent from the given namespace.

    name
        The name of the secret

    namespace
        The name of the namespace
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}

    secret = __salt__['kubernetes.show_secret'](name, namespace, **kwargs)

    if secret is None:
        # Already absent: success, or unknown (None) in test mode.
        ret['result'] = None if __opts__['test'] else True
        ret['comment'] = 'The secret does not exist'
        return ret

    if __opts__['test']:
        ret['comment'] = 'The secret is going to be deleted'
        ret['result'] = None
        return ret

    __salt__['kubernetes.delete_secret'](name, namespace, **kwargs)

    # As of kubernetes 1.6.4 the API doesn't set a code when deleting a
    # secret; the kubernetes module raises if the server returns an error.
    ret['result'] = True
    ret['changes'] = {
        'kubernetes.secret': {
            'new': 'absent', 'old': 'present'}}
    ret['comment'] = 'Secret deleted'
    return ret
java
/**
 * Loads the project's link rows (predecessor relations) and completed
 * task sections, then forwards both to the reader for processing.
 *
 * @throws SQLException on database access errors
 */
private void processPredecessors() throws SQLException {
    List<Row> rows = getRows("select * from link where projid=? order by linkid", m_projectID);
    List<Row> completedSections = getRows("select * from task_completed_section where projid=?", m_projectID);
    m_reader.processPredecessors(rows, completedSections);
}
python
def open(self):
    """Open a comm to the frontend if one isn't already open."""
    if self.comm is None:
        # Split binary buffers out of the state so they travel alongside
        # the JSON payload rather than inside it.
        state, buffer_paths, buffers = _remove_buffers(self.get_state())

        args = dict(target_name='jupyter.widget',
                    data={'state': state, 'buffer_paths': buffer_paths},
                    buffers=buffers,
                    metadata={'version': __protocol_version__}
                    )
        # Reuse an existing model id, if any, so the frontend reconnects
        # to the same model.
        if self._model_id is not None:
            args['comm_id'] = self._model_id

        self.comm = Comm(**args)
java
/**
 * Calculates the 33 kPa soil moisture (normal density) from sand
 * percentage, clay percentage and organic-matter percentage.
 * Returns null when any input fails percentage validation.
 */
public static String calcMoisture33Kpa(String slsnd, String slcly, String omPct) {
    String ret;
    // Validate/normalize all three percentage inputs; abort on any failure.
    if ((slsnd = checkPctVal(slsnd)) == null
            || (slcly = checkPctVal(slcly)) == null
            || (omPct = checkPctVal(omPct)) == null) {
        LOG.error("Invalid input parameters for calculating 33 kPa moisture, normal density, %v");
        return null;
    }
    String mt33Fst = calcMoisture33KpaFst(slsnd, slcly, omPct);
    // Second-order correction: 0.01283 * fst^2 + 0.626 * fst - 1.5
    ret = sum(product(pow(mt33Fst, "2"), "0.01283"), product(mt33Fst, "0.626"), "-1.5");
    LOG.debug("Calculate result for 33 kPa moisture, normal density, %v is {}", ret);
    return ret;
}
python
def get_templates_per_page(self, per_page=1000, page=1, params=None):
    """
    Get templates per page

    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :param params: Search parameters. Default: {}
    :return: list
    """
    # Thin wrapper around the generic paged-resource fetcher.
    return self._get_resource_per_page(resource=TEMPLATES, per_page=per_page, page=page, params=params)
java
/**
 * Builds the MessageDialog from the configured title, text and buttons,
 * applying any extra window hints before returning it.
 */
public MessageDialog build() {
    MessageDialog messageDialog = new MessageDialog(
            title,
            text,
            buttons.toArray(new MessageDialogButton[buttons.size()]));
    messageDialog.setHints(extraWindowHints);
    return messageDialog;
}
python
def replace(self, main_type=None, sub_type=None, params=None):
    """
    Return a new MimeType with new values for the specified fields.

    :param str main_type: The new main type.
    :param str sub_type: The new sub type.
    :param dict params: The new parameters.
    :return: A new instance of MimeType
    """
    # Any field left as None keeps its current value.
    new_main = self.main_type if main_type is None else main_type
    new_sub = self.sub_type if sub_type is None else sub_type
    new_params = self.params if params is None else params
    return MimeType(new_main, new_sub, new_params)
python
def padded_variance_explained(predictions,
                              labels,
                              weights_fn=common_layers.weights_all):
    """Explained variance, also known as R^2."""
    # Zero-pad predictions/labels so their shapes match before comparing.
    predictions, labels = common_layers.pad_with_zeros(predictions, labels)
    targets = labels
    weights = weights_fn(targets)

    # R^2 = 1 - SS_res / SS_tot, computed with per-element weights.
    y_bar = tf.reduce_mean(weights * targets)
    tot_ss = tf.reduce_sum(weights * tf.pow(targets - y_bar, 2))
    res_ss = tf.reduce_sum(weights * tf.pow(targets - predictions, 2))
    r2 = 1. - res_ss / tot_ss
    # Also return the total weight, as metrics conventionally do.
    return r2, tf.reduce_sum(weights)
java
/**
 * Translates a checked reflection exception into an unchecked one:
 * NoSuchMethod/IllegalAccess become IllegalStateException,
 * InvocationTargetException is delegated for unwrapping, RuntimeExceptions
 * are rethrown as-is, and anything else is wrapped in an
 * UndeclaredThrowableException.
 */
public static void handleReflectionException(Exception ex) {
    if (ex instanceof NoSuchMethodException) {
        throw new IllegalStateException("Method not found: " + ex.getMessage());
    }
    if (ex instanceof IllegalAccessException) {
        throw new IllegalStateException("Could not access method: " + ex.getMessage());
    }
    if (ex instanceof InvocationTargetException) {
        // May itself throw; if it returns, control falls through below.
        handleInvocationTargetException((InvocationTargetException) ex);
    }
    if (ex instanceof RuntimeException) {
        throw (RuntimeException) ex;
    }
    throw new UndeclaredThrowableException(ex);
}
python
def const(f):
    '''
    const(f) yields f.c if f is a constant potential function; if f is a
    constant, it yields f or the equivalent numpy array object; if f is a
    potential function that is not const, or is not a valid potential
    function constant, yields None.
    '''
    if is_const_potential(f):
        return f.c
    if not pimms.is_array(f, 'number'):
        return None
    # Sparse matrices pass through unchanged; dense data becomes an ndarray.
    return f if sps.issparse(f) else np.asarray(f)
java
/**
 * Action: creates, persists and returns a new demo to-do item with the
 * given description (validated against the regex pattern) and
 * documentation page.
 */
@MemberOrder(sequence = "40")
public StringInterpolatorDemoToDoItem newToDo(
        @ParameterLayout(named = "Description")
        @Parameter(regexPattern = "\\w[@&:\\-\\,\\.\\+ \\w]*")
        final String description,
        @ParameterLayout(named = "Documentation page")
        final String documentationPage) {
    final StringInterpolatorDemoToDoItem toDoItem = container.newTransientInstance(StringInterpolatorDemoToDoItem.class);
    toDoItem.setDescription(description);
    toDoItem.setDocumentationPage(documentationPage);
    container.persist(toDoItem);
    // Flush so the new item is immediately visible to subsequent queries.
    container.flush();
    return toDoItem;
}
python
def insert(self, index, value):
    """ Inserts @value before @index in the list.

        @index: list index to insert @value before
        @value: item to insert

        -> #int new length of the list on success or -1 on failure
    """
    # Redis has no positional insert, only LINSERT relative to a value.
    # Strategy: overwrite the item at @index with a unique placeholder,
    # LINSERT the new value and the displaced item before the placeholder,
    # then remove the placeholder again.
    _uuid = gen_rand_str(24, 32)
    item_at_index = self[index]
    self[index] = _uuid
    uuid = _uuid
    _uuid = self._dumps(uuid)
    pipe = self._client.pipeline(transaction=True)  # Needs to be atomic
    pipe.linsert(
        self.key_prefix, "BEFORE", _uuid, self._dumps(value))
    pipe.linsert(
        self.key_prefix, "BEFORE", _uuid, item_at_index)
    results = pipe.execute()
    self.remove(uuid)
    # results[0] is the first LINSERT's reply: new length, or -1 if the
    # placeholder pivot was not found.
    return results[0]
java
/**
 * Builds the dummy-coding metadata for this DDF: assigns a numeric code to
 * every level of each FACTOR column, records the per-column level maps,
 * and derives the resulting feature count (ignoring the Y column, adding a
 * bias term and the generated dummy columns).
 */
public void generateDummyCoding() throws NumberFormatException, DDFException {
    DummyCoding dc = new DummyCoding();
    // initialize array xCols which is just 0, 1, 2 ..
    dc.xCols = new int[this.getColumns().size()];
    int i = 0;
    while (i < dc.xCols.length) {
        dc.xCols[i] = i;
        i += 1;
    }
    List<Column> columns = this.getColumns();
    Iterator<Column> it = columns.iterator();
    int count = 0;
    while (it.hasNext()) {
        Column currentColumn = it.next();
        int currentColumnIndex = this.getColumnIndex(currentColumn.getName());
        HashMap<String, java.lang.Double> temp = new HashMap<String, java.lang.Double>();
        // loop
        if (currentColumn.getColumnClass() == ColumnClass.FACTOR) {
            //set as factor
            //recompute level
            List<String> levels = new ArrayList(currentColumn.getOptionalFactor().getLevels());
            currentColumn.getOptionalFactor().setLevels(levels, true);
            Map<String, Integer> currentColumnFactor = currentColumn.getOptionalFactor().getLevelMap();
            Iterator<String> iterator = currentColumnFactor.keySet().iterator();
            //TODO update this code
            // Assign each level a sequential double code starting at 0.
            i = 0;
            temp = new HashMap<String, java.lang.Double>();
            while (iterator.hasNext()) {
                String columnValue = iterator.next();
                temp.put(columnValue, Double.parseDouble(i + ""));
                i += 1;
            }
            dc.getMapping().put(currentColumnIndex, temp);
            // A factor with k levels contributes k-1 dummy columns.
            count += temp.size() - 1;
        }
    }
    dc.setNumDummyCoding(count);
    // TODO hardcode remove this
    // HashMap<String, Double> temp2 = new HashMap<String, Double>();
    // temp2.put("IAD", 1.0);
    // temp2.put("IND", 2.0);
    // temp2.put("ISP", 3.0);
    // dc.getMapping().put(1, temp2);
    // dc.setNumDummyCoding(2);

    // ignore Y column
    Integer _features = this.getNumColumns() - 1;
    // plus bias term for linear model
    _features += 1;
    // plus the new dummy coding columns
    _features += dc.getNumDummyCoding();
    //dc.getMapping().size() means number of factor column
    _features -= (!dc.getMapping().isEmpty()) ? dc.getMapping().size() : 0;
    dc.setNumberFeatures(_features);
    // set number of features in schema
    this.setDummyCoding(dc);
}
java
/**
 * Converts an integer to its Roman-numeral string representation.
 * Negative values are prefixed with '-'; values above 3000 render the
 * thousands recursively between '|' bars followed by the remainder.
 */
public static final String getString(int index) {
    StringBuffer buf = new StringBuffer();
    // lower than 0 ? Add minus
    if (index < 0) {
        buf.append('-');
        index = -index;
    }
    // greater than 3000
    if (index > 3000) {
        buf.append('|');
        buf.append(getString(index / 1000));
        buf.append('|');
        // remainder
        index = index - (index / 1000) * 1000;
    }
    // number between 1 and 3000
    int pos = 0;
    while (true) {
        // loop over the array with values for m-d-c-l-x-v-i
        RomanDigit dig = roman[pos];
        // adding as many digits as we can
        while (index >= dig.value) {
            buf.append(dig.digit);
            index -= dig.value;
        }
        // we have the complete number
        if (index <= 0) {
            break;
        }
        // look for the next digit that can be used in a special way
        int j = pos;
        while (!roman[++j].pre);
        // does the special notation apply? (e.g. IV instead of IIII)
        if (index + roman[j].value >= dig.value) {
            buf.append(roman[j].digit).append(dig.digit);
            index -= dig.value - roman[j].value;
        }
        pos++;
    }
    return buf.toString();
}
python
def get_activity(self, id_num):
    """Return the activity with the given id.

    Note that this contains more detailed information than returned
    by `get_activities`.
    """
    activity_url = self._build_url('my', 'activities', id_num)
    return self._json(activity_url)
java
/**
 * JMH benchmark: performs one gRPC blocking unary call over the first
 * channel, sending an empty buffer as the request payload.
 */
@Benchmark
public Object blockingUnary() throws Exception {
    return ClientCalls.blockingUnaryCall(
            channels[0].newCall(unaryMethod, CallOptions.DEFAULT),
            Unpooled.EMPTY_BUFFER);
}
python
def tai_timestamp(timestamp=None):
    """Return the TAI timestamp for a POSIX/UTC timestamp.

    TAI = UTC + 10 seconds from 1972-01-01, plus one second for every leap
    second introduced since. Dates before 1972 are returned unchanged.

    :param timestamp: Optional POSIX (UTC) timestamp to convert. Defaults
        to the current ``time.time()``, keeping the original no-argument
        call backward compatible.
    :return: The TAI timestamp (same numeric type as the input offset sum).
    """
    if timestamp is None:
        timestamp = time.time()
    date = datetime.utcfromtimestamp(timestamp)
    if date.year < 1972:
        return timestamp
    offset = 10 + timestamp
    leap_seconds = [
        (1972, 1, 1), (1972, 7, 1), (1973, 1, 1), (1974, 1, 1),
        (1975, 1, 1), (1976, 1, 1), (1977, 1, 1), (1978, 1, 1),
        (1979, 1, 1), (1980, 1, 1), (1981, 7, 1), (1982, 7, 1),
        (1983, 7, 1), (1985, 7, 1), (1988, 1, 1), (1990, 1, 1),
        (1991, 1, 1), (1992, 7, 1), (1993, 7, 1), (1994, 7, 1),
        (1996, 1, 1), (1997, 7, 1), (1999, 1, 1), (2006, 1, 1),
        (2009, 1, 1), (2012, 7, 1), (2015, 7, 1), (2017, 1, 1),
    ]
    for idx, leap_date in enumerate(leap_seconds):
        # Strict '>' so that on a leap date itself the new leap second is
        # already counted. The original '>=' under-counted by one exactly
        # on each boundary date (e.g. yielding +9 instead of +10 on
        # 1972-01-01).
        if leap_date > (date.year, date.month, date.day):
            return idx - 1 + offset
    return len(leap_seconds) - 1 + offset
python
def APFSContainerPathSpecGetVolumeIndex(path_spec):
    """Retrieves the volume index from the path specification.

    Args:
        path_spec (PathSpec): path specification.

    Returns:
        int: volume index or None if the index cannot be determined.
    """
    # An explicit volume_index attribute wins over parsing the location.
    explicit_index = getattr(path_spec, 'volume_index', None)
    if explicit_index is not None:
        return explicit_index

    location = getattr(path_spec, 'location', None)
    if not location or not location.startswith('/apfs'):
        return None

    try:
        parsed_index = int(location[5:], 10) - 1
    except (TypeError, ValueError):
        return None

    # Only volume numbers 1..100 (indices 0..99) are considered valid.
    if 0 <= parsed_index <= 99:
        return parsed_index
    return None
python
def _python3_record_factory(*args, **kwargs):
    """Python 3 approach to custom logging, using `logging.getLogRecord(...)`

    Inspired by:
    https://docs.python.org/3/howto/logging-cookbook.html#customizing-logrecord

    :return: A log record augmented with the values required by LOG_FORMAT,
        as per `_update_record(...)`
    """
    # Build the stock record first, then splice in the extra fields.
    record = _python_record_factory(*args, **kwargs)
    _update_record(record)
    return record
python
def as_svg_data_uri(matrix, version, scale=1, border=None, color='#000',
                    background=None, xmldecl=False, svgns=True, title=None,
                    desc=None, svgid=None, svgclass='segno',
                    lineclass='qrline', omitsize=False, unit='',
                    encoding='utf-8', svgversion=None, nl=False,
                    encode_minimal=False, omit_charset=False):
    """\
    Converts the matrix to a SVG data URI.

    The XML declaration is omitted by default (set ``xmldecl`` to ``True``
    to enable it), further the newline is omitted by default (set ``nl`` to
    ``True`` to enable it).

    Aside from the missing ``out`` parameter and the different ``xmldecl``
    and ``nl`` default values and the additional parameter
    ``encode_minimal`` and ``omit_charset`` this function uses the same
    parameters as the usual SVG serializer.

    :param bool encode_minimal: Indicates if the resulting data URI should
            use minimal percent encoding (disabled by default).
    :param bool omit_charset: Indicates if the ``;charset=...`` should be
            omitted (disabled by default)
    :rtype: str
    """
    # Minimal encoding leaves a few URI-safe characters unescaped.
    encode = partial(quote, safe=b"") if not encode_minimal else partial(quote, safe=b" :/='")
    buff = io.BytesIO()
    # Serialize the SVG into an in-memory buffer first.
    write_svg(matrix, version, buff, scale=scale, color=color,
              background=background, border=border, xmldecl=xmldecl,
              svgns=svgns, title=title, desc=desc, svgclass=svgclass,
              lineclass=lineclass, omitsize=omitsize, encoding=encoding,
              svgid=svgid, unit=unit, svgversion=svgversion, nl=nl)
    return 'data:image/svg+xml{0},{1}' \
            .format(';charset=' + encoding if not omit_charset else '',
                    # Replace " quotes with ' and URL encode the result
                    # See also https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
                    encode(_replace_quotes(buff.getvalue())))
python
def _are_nearby_parallel_boxes(self, b1, b2):
    "Are two boxes nearby, parallel, and similar in width?"
    if not self._are_aligned_angles(b1.angle, b2.angle):
        return False
    # Otherwise pick the smaller angle and see whether the two boxes are close according to the "up" direction wrt that angle
    angle = min(b1.angle, b2.angle)
    # Conditions: vertical distance within lineskip tolerance, both boxes
    # have positive width, and widths are within a factor of 2 of each
    # other.
    return abs(np.dot(b1.center - b2.center, [-np.sin(angle), np.cos(angle)])) < self.lineskip_tol * (
        b1.height + b2.height) and (b1.width > 0) and (b2.width > 0) and (0.5 < b1.width / b2.width < 2.0)
java
/**
 * Registers the security-constraint mapping described by the model on the
 * servlet context's security handler. Unknown user-data-constraint values
 * fall back to CONFIDENTIAL (fail safe).
 */
@Override
public void addSecurityConstraintMappings(
        final SecurityConstraintMappingModel model) {
    final ServletContextHandler context = server.getOrCreateContext(model);
    final SecurityHandler securityHandler = context.getSecurityHandler();
    if (securityHandler == null) {
        throw new IllegalStateException(
                "Internal error: Cannot find the security handler. Please report.");
    }
    String mappingMethod = model.getMapping();
    String constraintName = model.getConstraintName();
    String url = model.getUrl();
    String dataConstraint = model.getDataConstraint();
    List<String> roles = model.getRoles();
    boolean authentication = model.isAuthentication();

    ConstraintMapping newConstraintMapping = new ConstraintMapping();
    newConstraintMapping.setMethod(mappingMethod);
    newConstraintMapping.setPathSpec(url);

    Constraint constraint = new Constraint();
    constraint.setAuthenticate(authentication);
    constraint.setName(constraintName);
    constraint.setRoles(roles.toArray(new String[roles.size()]));

    // Map the user-data-constraint string to the Jetty constant.
    if (dataConstraint == null || "NONE".equals(dataConstraint)) {
        constraint.setDataConstraint(Constraint.DC_NONE);
    } else if ("INTEGRAL".equals(dataConstraint)) {
        constraint.setDataConstraint(Constraint.DC_INTEGRAL);
    } else if ("CONFIDENTIAL".equals(dataConstraint)) {
        constraint.setDataConstraint(Constraint.DC_CONFIDENTIAL);
    } else {
        LOG.warn("Unknown user-data-constraint:" + dataConstraint);
        constraint.setDataConstraint(Constraint.DC_CONFIDENTIAL);
    }
    newConstraintMapping.setConstraint(constraint);

    ((ConstraintSecurityHandler) securityHandler)
            .addConstraintMapping(newConstraintMapping);
}
java
/**
 * Splits this tuple after element 13, returning a pair of the first 13
 * elements and the remaining two (T14, T15).
 */
public final Tuple2<Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>, Tuple2<T14, T15>> split13() {
    return new Tuple2<>(limit13(), skip13());
}
java
/**
 * Creates a scatter-gather iteration operating on a graph whose edges
 * carry values.
 *
 * @param edgesWithValue the data set of edges (with values) to iterate over
 * @param sf the scatter (message-producing) function
 * @param gf the gather (vertex-update) function
 * @param maximumNumberOfIterations the iteration cap
 * @return the configured ScatterGatherIteration
 */
public static <K, VV, Message, EV> ScatterGatherIteration<K, VV, Message, EV> withEdges(
        DataSet<Edge<K, EV>> edgesWithValue,
        ScatterFunction<K, VV, Message, EV> sf,
        GatherFunction<K, VV, Message> gf,
        int maximumNumberOfIterations) {
    return new ScatterGatherIteration<>(sf, gf, edgesWithValue, maximumNumberOfIterations);
}
java
/**
 * Builds a "direct address" target URI string for the given socket
 * address, using DIRECT_ADDRESS_SCHEME with the address as the URI path.
 */
@VisibleForTesting
static String makeTargetStringForDirectAddress(SocketAddress address) {
    try {
        return new URI(DIRECT_ADDRESS_SCHEME, "", "/" + address, null).toString();
    } catch (URISyntaxException e) {
        // It should not happen.
        throw new RuntimeException(e);
    }
}
python
def datetime_handler(x):
    """JSON ``default`` hook that serializes datetime/date objects.

    :param x: the object json could not serialize natively.
    :return: the ISO-8601 string for ``datetime.datetime`` /
        ``datetime.date`` instances.
    :raises TypeError: for any other type, as the json protocol requires.
    """
    # Single isinstance with a type tuple replaces the original chained
    # checks (datetime is a date subclass, but listing both documents the
    # intent).
    if isinstance(x, (datetime.datetime, datetime.date)):
        return x.isoformat()
    raise TypeError("Unknown type")
python
def defaultannotator(self, annotationtype, set=None):
    """Obtain the default annotator for the specified annotation type and set.

    Arguments:
        annotationtype: The type of annotation, this is conveyed by passing
            the corresponding annotation class (such as
            :class:`PosAnnotation` for example), or a member of
            :class:`AnnotationType`, such as ``AnnotationType.POS``.
        set (str): the set, should formally be a URL pointing to the set
            definition

    Returns:
        the annotator (str)

    Raises:
        :class:`NoDefaultError` if the annotation type does not exist or
        if there is ambiguity (multiple sets for the same type)
    """
    # Accept either an annotation class/element or an AnnotationType member.
    if inspect.isclass(annotationtype) or isinstance(annotationtype, AbstractElement):
        annotationtype = annotationtype.ANNOTATIONTYPE
    # No explicit set: resolve the (single) default set for this type.
    if not set:
        set = self.defaultset(annotationtype)
    try:
        return self.annotationdefaults[annotationtype][set]['annotator']
    except KeyError:
        raise NoDefaultError
java
public Schema loadSchema( Connection pConnection, Diagram pRootDiagram ) throws SQLException { Schema lSchema = new Schema(); String lSql = "select table_name from user_tables"; CallableStatement lTableStatement = pConnection.prepareCall( lSql ); ResultSet lTableResultSet = lTableStatement.executeQuery(); while( lTableResultSet.next() ) { if( pRootDiagram.isTableIncluded( lTableResultSet.getString( 1 ) ) ) { Main.log( "Loading Table: " + lTableResultSet.getString( 1 ) ); Table lTable = new Table( lTableResultSet.getString( 1 ) ); lSchema.addTable( lTable ); CallableStatement lColumnStatement = pConnection.prepareCall( "select column_name from user_tab_columns where table_name = :1 order by column_id" ); lColumnStatement.setString( 1, lTable.getName() ); ResultSet lColumnResultSet = lColumnStatement.executeQuery(); while( lColumnResultSet.next() ) { lTable.addColumn( new Column( lColumnResultSet.getString( 1 ) ) ); } lColumnResultSet.close(); lColumnStatement.close(); } } lTableResultSet.close(); lTableStatement.close(); String lFkSql = " select /*+ full(o)*/ " + " o.constraint_name fk_name, " + " o.table_name tab_from, " + " o_fk_cols.column_name col_from, " + " i.table_name tab_to, " + " i_fk_cols.column_name col_to, " + " nvl " + " ( " + " ( " + " select distinct " + " 'N' " + " from user_cons_columns ox_fk_cols, " + " user_tab_columns " + " where ox_fk_cols.constraint_name = o.constraint_name " + " and ox_fk_cols.table_name = o.table_name " + " and user_tab_columns.column_name = ox_fk_cols.column_name " + " and ox_fk_cols.table_name = user_tab_columns.table_name " + " and nullable = 'N' " + " ), " + " 'Y' " + " ) as nullable, " + " nvl " + " ( " + " ( " + " select distinct " + " 'Y' " + " from user_constraints o_uk " + " where o_uk.table_name = o.table_name " + " and o_uk.constraint_type = 'U' " + " and not exists " + " ( " + " select 1 " + " from user_cons_columns o_uk_cols " + " where o_uk_cols.constraint_name = o_uk.constraint_name " + " and 
o_uk_cols.table_name = o_uk.table_name " + " and not exists " + " ( " + " select 1 " + " from user_cons_columns ox_fk_cols " + " where ox_fk_cols.constraint_name = o.constraint_name " + " and ox_fk_cols.table_name = o.table_name " + " and ox_fk_cols.column_name = o_uk_cols.column_name " + " ) " + " ) " + " ), " + " 'N' " + " ) as uk_on_fk " + " from user_constraints o, " + " user_constraints i, " + " user_cons_columns o_fk_cols, " + " user_tab_columns, " + " user_cons_columns i_fk_cols " + " where o.constraint_type = 'R' " + " and i.constraint_name = o.r_constraint_name " + " and o_fk_cols.constraint_name = o.constraint_name " + " and o_fk_cols.column_name = user_tab_columns.column_name " + " and o.table_name = user_tab_columns.table_name " + " and i_fk_cols.constraint_name = i.constraint_name " + " and i_fk_cols.table_name = i.table_name " + " order by 1,2 "; CallableStatement lFkStatement = pConnection.prepareCall( lFkSql ); ResultSet lFkResultSet = lFkStatement.executeQuery(); Association lAssociation = null; while( lFkResultSet.next() ) { Table lTableFrom = lSchema.findTable( lFkResultSet.getString( "tab_from" ) ); Table lTableTo = lSchema.findTable( lFkResultSet.getString( "tab_to" ) ); if( lTableFrom != null && lTableTo != null ) { String lConstraintName = lFkResultSet.getString( "fk_name" ); if( lAssociation == null || !lAssociation.getAssociationName().equals( lConstraintName ) ) { lAssociation = new Association( lConstraintName, lTableFrom, lTableTo, true, 0, lFkResultSet.getString( "uk_on_fk" ).equals( "N" ) ? Association.MULTIPLICITY_N : 1, lFkResultSet.getString( "nullable" ).equals( "N" ) ? 1 : 0, 1 ); lSchema.addAssociation( lAssociation ); } lAssociation.addColumnFrom( lFkResultSet.getString( "col_from" ) ); lAssociation.addColumnTo( lFkResultSet.getString( "col_to" ) ); } } lFkResultSet.close(); lFkStatement.close(); return lSchema; }
python
def taf(wxdata: TafData, units: Units) -> TafTrans:
    """
    Translate the results of taf.parse

    Keys: Forecast, Min-Temp, Max-Temp

    Forecast keys: Wind, Visibility, Clouds, Altimeter, Wind-Shear,
    Turbulance, Icing, Other
    """
    translations = {'forecast': []}  # type: ignore
    for line in wxdata.forecast:
        # Start from the translations shared with METAR, then add the
        # TAF-specific elements.
        trans = shared(line, units)  # type: ignore
        trans['wind'] = wind(line.wind_direction, line.wind_speed,
                             line.wind_gust, unit=units.wind_speed)
        trans['wind_shear'] = wind_shear(line.wind_shear, units.altitude, units.wind_speed)
        trans['turbulance'] = turb_ice(line.turbulance, units.altitude)
        trans['icing'] = turb_ice(line.icing, units.altitude)
        # Remove false 'Sky Clear' if line type is 'BECMG'
        if line.type == 'BECMG' and trans['clouds'] == 'Sky clear':
            trans['clouds'] = None  # type: ignore
        translations['forecast'].append(TafLineTrans(**trans))  # type: ignore
    translations['min_temp'] = min_max_temp(wxdata.min_temp, units.temperature)  # type: ignore
    translations['max_temp'] = min_max_temp(wxdata.max_temp, units.temperature)  # type: ignore
    translations['remarks'] = remarks.translate(wxdata.remarks)
    return TafTrans(**translations)
python
def setTransform(self, T):
    """
    Transform actor position and orientation wrt to its polygonal mesh,
    which remains unmodified.

    :param T: a ``vtk.vtkMatrix4x4`` or any transform accepted by
        ``SetUserTransform`` (presumably a vtkTransform — TODO confirm).
    :return: self, to allow call chaining.
    """
    if isinstance(T, vtk.vtkMatrix4x4):
        self.SetUserMatrix(T)
    else:
        try:
            self.SetUserTransform(T)
        except TypeError:
            # SetUserTransform rejected the argument; point the user at
            # transformPolydata(), which modifies the mesh itself instead.
            colors.printc('~time Error in setTransform(): consider transformPolydata() instead.', c=1)
    return self
java
public static Date parseDate(String dateValue, Collection<String> dateFormats, Date startDate) throws ParseException { if (dateValue == null) { throw new IllegalArgumentException("dateValue is null"); } if (dateFormats == null) { dateFormats = DEFAULT_HTTP_CLIENT_PATTERNS; } if (startDate == null) { startDate = DEFAULT_TWO_DIGIT_YEAR_START; } // trim single quotes around date if present // see issue #5279 if ((dateValue.length() > 1) && dateValue.startsWith("'") && dateValue.endsWith("'")) { dateValue = dateValue.substring(1, dateValue.length() - 1); } SimpleDateFormat dateParser = null; Iterator formatIter = dateFormats.iterator(); while (formatIter.hasNext()) { String format = (String)formatIter.next(); if (dateParser == null) { dateParser = new SimpleDateFormat(format, Locale.ENGLISH); dateParser.setTimeZone(TIMEZONE_GMT); dateParser.set2DigitYearStart(startDate); } else { dateParser.applyPattern(format); } try { return dateParser.parse(dateValue); } catch (ParseException pe) { // ignore this exception, we will try the next format } } // we were unable to parse the date throw new ParseException("Unable to parse the date " + dateValue, 0); }
python
def customize_compiler_for_nvcc(self):
    '''This is a verbatim copy of the NVCC compiler extension from
    https://github.com/rmcgibbo/npcuda-example
    '''
    # Let .cu sources be accepted by the compiler at all.
    self.src_extensions.append('.cu')
    # Remember the default C/C++ compiler so it can be restored per file.
    default_compiler_so = self.compiler_so
    super = self._compile

    def _compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
        # Dispatch: CUDA sources go through nvcc with nvcc-specific postargs,
        # everything else uses the plain C/C++ compiler and 'cc' postargs.
        if os.path.splitext(src)[1] == '.cu':
            self.set_executable('compiler_so', CUDA['nvcc'])
            postargs = extra_postargs['nvcc']
        else:
            postargs = extra_postargs['cc']

        super(obj, src, ext, cc_args, postargs, pp_opts)
        # Restore the default compiler for the next source file.
        self.compiler_so = default_compiler_so

    # Monkey-patch this instance's _compile with the dispatching version.
    self._compile = _compile
java
/**
 * Handles writes that may have failed because their target Ledger was closed.
 * For each affected write, checks the ledger's LastAddConfirmed to determine
 * whether the write actually succeeded; successful writes are completed, and
 * the rest are redirected to the current (open) ledger.
 *
 * @param writes ordered list of writes to inspect; only the first write's
 *        ledger state needs checking up front (see comment below).
 * @return true if any write was completed or redirected, false otherwise.
 */
private boolean handleClosedLedgers(List<Write> writes) {
    if (writes.size() == 0 || !writes.get(0).getWriteLedger().ledger.isClosed()) {
        // Nothing to do. We only need to check the first write since, if a Write failed with LedgerClosed, then the
        // first write must have failed for that reason (a Ledger is closed implies all ledgers before it are closed too).
        return false;
    }

    long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "handleClosedLedgers", writes.size());
    WriteLedger currentLedger = getWriteLedger();
    // Cache of ledgerId -> LastAddConfirmed so each ledger is queried at most once.
    Map<Long, Long> lastAddsConfirmed = new HashMap<>();
    boolean anythingChanged = false;
    for (Write w : writes) {
        if (w.isDone() || !w.getWriteLedger().ledger.isClosed()) {
            continue;
        }

        // Write likely failed because of LedgerClosedException. Need to check the LastAddConfirmed for each
        // involved Ledger and see if the write actually made it through or not.
        long lac = fetchLastAddConfirmed(w.getWriteLedger(), lastAddsConfirmed);
        if (w.getEntryId() >= 0 && w.getEntryId() <= lac) {
            // Write was actually successful. Complete it and move on.
            completeWrite(w);
            anythingChanged = true;
        } else if (currentLedger.ledger.getId() != w.getWriteLedger().ledger.getId()) {
            // Current ledger has changed; attempt to write to the new one.
            w.setWriteLedger(currentLedger);
            anythingChanged = true;
        }
    }

    LoggerHelpers.traceLeave(log, this.traceObjectId, "handleClosedLedgers", traceId, writes.size(), anythingChanged);
    return anythingChanged;
}
python
def _get_abstract_layer_name(self):
    """
    Looks for the name of abstracted layer.
    Usually these layers appears when model is stacked.
    :return: List of abstracted layers
    """
    # A layer whose config exposes a nested 'layers' entry wraps sub-layers,
    # i.e. it is an abstracted (stacked) layer.
    return [
        layer.name
        for layer in self.model.layers
        if 'layers' in layer.get_config()
    ]
java
/**
 * Fluent variant of {@code setResponseParameters}.
 *
 * @param responseParameters map of response parameter names to required flags.
 * @return this result object, to allow method chaining.
 */
public UpdateMethodResponseResult withResponseParameters(java.util.Map<String, Boolean> responseParameters) {
    setResponseParameters(responseParameters);
    return this;
}
java
/**
 * Marshalls the given request's fields (text and language code) into the
 * provided protocol marshaller.
 *
 * @param detectKeyPhrasesRequest the request to marshall; must not be null.
 * @param protocolMarshaller the marshaller that receives the field values.
 * @throws SdkClientException if the request is null or marshalling fails.
 */
public void marshall(DetectKeyPhrasesRequest detectKeyPhrasesRequest, ProtocolMarshaller protocolMarshaller) {

    if (detectKeyPhrasesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(detectKeyPhrasesRequest.getText(), TEXT_BINDING);
        protocolMarshaller.marshall(detectKeyPhrasesRequest.getLanguageCode(), LANGUAGECODE_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def NameImport(package, as_name=None, prefix=None):
    """
    Accepts a package (Name node), name to import it as (string), and
    optional prefix and returns a node:
    import <package> [as <as_name>]
    """
    if prefix is None:
        prefix = u""
    # 'import <package>' — the prefix (leading whitespace/comments) attaches
    # to the 'import' keyword itself.
    children = [Name(u"import", prefix=prefix), package]
    if as_name is not None:
        # Append 'as <as_name>' with single-space separators.
        children.extend([Name(u"as", prefix=u" "),
                         Name(as_name, prefix=u" ")])
    return Node(syms.import_name, children)
python
def check_new_target_system(self):
    '''handle a new target_system'''
    sysid = self.get_sysid()
    if sysid in self.pstate:
        # Already tracking this system; nothing to do.
        return
    self.add_new_target_system(sysid)
java
/**
 * Registers a circuit breaker in the given registry for every configured
 * backend, using the per-backend properties to build each breaker's config.
 *
 * @param circuitBreakerRegistry the registry to populate.
 */
public void initCircuitBreakerRegistry(CircuitBreakerRegistry circuitBreakerRegistry) {
    circuitBreakerProperties.getBackends().forEach(
            (name, properties) -> circuitBreakerRegistry.circuitBreaker(name, circuitBreakerProperties.createCircuitBreakerConfig(properties))
    );
}
java
/**
 * Writes this object's name — or the literal "unnamed" when the name is
 * empty — followed by a whitespace separator, to the given output.
 *
 * @param output the writer to append to.
 * @return the same writer, to allow call chaining.
 */
private <IPW extends IndentingPrintWriter> IPW writeNameTo(IPW output) {
    output.append(name.isEmpty() ? "unnamed" : name).whitespace();
    return output;
}
java
/**
 * Returns all known regions except the given ones. Null entries and
 * duplicates in {@code excludedRegions} are tolerated and ignored.
 *
 * @param excludedRegions regions to leave out; may be null or empty, in
 *        which case all regions are returned.
 * @return the remaining regions (possibly empty, never null).
 */
public static Region[] getExcluding(Region... excludedRegions) {
    Region[] regions = Regions.getRegions();
    if (excludedRegions == null || excludedRegions.length == 0) {
        return regions;
    }
    excludedRegions = removeDuplicates(excludedRegions);
    // Count only non-null exclusions so null entries do not shrink the output.
    int excludedLength = 0;
    for (Region r : excludedRegions) {
        if (r != null) {
            excludedLength++;
        }
    }
    int outputLength = regions.length - excludedLength;
    if (outputLength < 1) {
        return new Region[0];
    }
    Region[] outputRegions = new Region[outputLength];
    int i = 0;
    for (Region r : regions) {
        // Null regions are treated as excluded as well.
        boolean excluded = r == null || contains(excludedRegions, r);
        if (!excluded) {
            outputRegions[i] = r;
            i++;
        }
    }
    return outputRegions;
}
python
def wnreld(a, op, b):
    """
    Compare two double precision windows.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/wnreld_c.html

    :param a: First window.
    :type a: spiceypy.utils.support_types.SpiceCell
    :param op: Comparison operator.
    :type op: str
    :param b: Second window.
    :type b: spiceypy.utils.support_types.SpiceCell
    :return: The result of comparison: a (op) b.
    :rtype: bool
    """
    # Both cells must be double-precision (dtype 1) SpiceCells.
    assert isinstance(a, stypes.SpiceCell)
    assert b.dtype == 1
    assert isinstance(b, stypes.SpiceCell)
    assert a.dtype == 1
    assert isinstance(op, str)
    op = stypes.stringToCharP(op.encode(encoding='UTF-8'))
    return bool(libspice.wnreld_c(ctypes.byref(a), op, ctypes.byref(b)))
python
def template_exception_handler(fn, error_context, filename=None):
    """Calls the given function, attempting to catch any template-related
    errors, and converts the error to a Statik TemplateError instance. Returns
    the result returned by the function itself."""
    error_message = None
    if filename:
        error_context.update(filename=filename)
    try:
        return fn()
    except jinja2.TemplateSyntaxError as exc:
        # Syntax errors carry their own location info; prefer it over ours.
        error_context.update(filename=exc.filename, line_no=exc.lineno)
        error_message = exc.message
    except jinja2.TemplateError as exc:
        error_message = exc.message
    except Exception as exc:
        # Non-jinja2 failures are wrapped too, using their string form.
        error_message = "%s" % exc
    raise TemplateError(message=error_message, context=error_context)
java
/**
 * Computes the LU factorization of {@code A} in place via LAPACK's dgetrf.
 *
 * @param A the matrix to factor; its backing data is overwritten with the
 *        L and U factors.
 * @return this decomposition, now holding the factorization.
 * @throws IllegalArgumentException if dgetrf reports an illegal argument
 *         (negative info).
 */
public DenseLU factor(DenseMatrix A) {
    singular = false;

    intW info = new intW(0);
    LAPACK.getInstance().dgetrf(A.numRows(), A.numColumns(), A.getData(), Matrices.ld(A.numRows()), piv, info);

    // info > 0: an exactly-zero pivot -> factorization exists but is singular.
    if (info.val > 0)
        singular = true;
    else if (info.val < 0)
        throw new IllegalArgumentException();

    LU.set(A);

    return this;
}
python
def present(name, zone, ttl, data, rdtype='A', **kwargs):
    '''
    Ensures that the named DNS record is present with the given ttl.

    name
        The host portion of the DNS record, e.g., 'webserver'. Name and zone
        are concatenated when the entry is created unless name includes a
        trailing dot, so make sure that information is not duplicated in
        these two arguments.

    zone
        The zone to check/update

    ttl
        TTL for the record

    data
        Data for the DNS record. E.g., the IP address for an A record.

    rdtype
        DNS resource type. Default 'A'.

    ``**kwargs``
        Additional arguments the ddns.update function may need (e.g.
        nameserver, keyfile, keyname).  Note that the nsupdate key file can’t
        be reused by this function, the keyfile and other arguments must
        follow the `dnspython <http://www.dnspython.org/>`_ spec.
    '''
    ret = {'name': name,
           'changes': {},
           'result': False,
           'comment': ''}

    # In test mode, report the would-be change without touching DNS.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = '{0} record "{1}" will be updated'.format(rdtype, name)
        return ret

    # ddns.update returns None (record already correct), a truthy value
    # (record created/updated) or a falsy value (failure).
    status = __salt__['ddns.update'](zone, name, ttl, rdtype, data, **kwargs)

    if status is None:
        ret['result'] = True
        ret['comment'] = '{0} record "{1}" already present with ttl of {2}'.format(
            rdtype, name, ttl)
    elif status:
        ret['result'] = True
        ret['comment'] = 'Updated {0} record for "{1}"'.format(rdtype, name)
        ret['changes'] = {'name': name,
                          'zone': zone,
                          'ttl': ttl,
                          'rdtype': rdtype,
                          'data': data
                          }
    else:
        ret['result'] = False
        ret['comment'] = 'Failed to create or update {0} record for "{1}"'.format(rdtype, name)
    return ret
python
def make_error_response(self, validation_error, expose_errors):
  """ Return an appropriate ``HttpResponse`` on authentication failure.

  In case of an error, the specification only details the inclusion of the
  ``WWW-Authenticate`` header. Additionally, when allowed by the
  specification, we respond with error details formatted in JSON in the body
  of the response. For more information, read the specification:
  http://tools.ietf.org/html/rfc6750#section-3.1 .

  :param validation_error: A
    :py:class:`djoauth2.access_token.AuthenticationError` raised by the
    :py:meth:`validate` method.
  :param expose_errors: A boolean describing whether or not to expose error
    information in the error response, as described by the section of the
    specification linked to above.

  :rtype: a Django ``HttpResponse``.
  """
  authenticate_header = ['Bearer realm="{}"'.format(settings.DJOAUTH2_REALM)]

  # When errors must not be exposed, return a bare 400 with only the
  # minimal WWW-Authenticate header required by the spec.
  if not expose_errors:
    response = HttpResponse(status=400)
    response['WWW-Authenticate'] = ', '.join(authenticate_header)
    return response

  status_code = 401
  error_details = get_error_details(validation_error)

  # Map the error type to the status code mandated by RFC 6750 section 3.1.
  if isinstance(validation_error, InvalidRequest):
    status_code = 400
  elif isinstance(validation_error, InvalidToken):
    status_code = 401
  elif isinstance(validation_error, InsufficientScope):
    error_details['scope'] = ' '.join(self.required_scope_names)
    status_code = 403

  # TODO(peter): should we return response details as JSON?  This is not
  # touched upon by the spec and may limit use of this library.  Many
  # programmers use other transport languaes such as YAML or XML.  All of the
  # error information is already included in the headers.
  response = HttpResponse(content=json.dumps(error_details),
                          content_type='application/json',
                          status=status_code)
  # Echo each error detail as a parameter of the WWW-Authenticate header.
  # NOTE: `iteritems()` was Python-2-only; `items()` behaves the same here
  # and also works on Python 3.
  for key, value in error_details.items():
    authenticate_header.append('{}="{}"'.format(key, value))

  response['WWW-Authenticate'] = ', '.join(authenticate_header)
  return response
java
/**
 * Lazily loads the word segmentation model from the given base path.
 * Subsequent calls are no-ops once the segmenter has been created.
 *
 * @param path base directory/prefix for the model file; the segmentation
 *        model filename ({@code segModel}) is appended to it.
 * @throws LoadModelException if the model cannot be loaded.
 */
public static void loadSeg(String path) throws LoadModelException {
    if(seg==null) {
        String file = path+segModel;
        seg = new CWSTagger(file);
        seg.setEnFilter(isEnFilter);
    }
}
python
def cache_file(source):
    '''
    Wrapper for cp.cache_file which raises an error if the file was unable
    to be cached.

    CLI Example:

    .. code-block:: bash

        salt myminion container_resource.cache_file salt://foo/bar/baz.txt
    '''
    try:
        # Don't just use cp.cache_file for this. Docker has its own code to
        # pull down images from the web.
        if source.startswith('salt://'):
            cached_source = __salt__['cp.cache_file'](source)
            if not cached_source:
                raise CommandExecutionError(
                    'Unable to cache {0}'.format(source)
                )
            return cached_source
    except AttributeError:
        # source was not a string (startswith is missing).
        raise SaltInvocationError('Invalid source file {0}'.format(source))
    # Non-salt:// sources are returned unchanged.
    return source
java
/**
 * Runs the transformation and returns its result as a string, with any
 * failure routed through the shared exception-handling wrapper.
 *
 * @return the transformation result as a string.
 * @throws TransformerException if the transformation fails.
 */
public String getResultString() throws TransformerException {
    return withExceptionHandling(new Trans<String>() {
        public String transform() {
            return transformation.transformToString();
        }
    });
}
java
/**
 * Writes {@code content} (when non-null) to the given writer, then flushes
 * and closes it. Flush/close errors are suppressed when a write error has
 * already occurred, so the original exception always wins.
 *
 * @param content the text to write; a null content writes nothing.
 * @param writer the destination writer; must not be null. Always closed.
 * @throws IOException if writing, flushing or closing fails.
 */
public static void write( String content,
                          Writer writer ) throws IOException {
    CheckArg.isNotNull(writer, "destination writer");
    boolean error = false;
    try {
        if (content != null) {
            writer.write(content);
        }
    } catch (IOException e) {
        error = true; // this error should be thrown, even if there is an error flushing/closing writer
        throw e;
    } catch (RuntimeException e) {
        error = true; // this error should be thrown, even if there is an error flushing/closing writer
        throw e;
    } finally {
        try {
            writer.flush();
        } catch (IOException e) {
            if (!error) throw e;
        } finally {
            try {
                writer.close();
            } catch (IOException e) {
                if (!error) throw e;
            }
        }
    }
}
java
/**
 * Resumes a suspended fax job by asking the HylaFAX server to retry it.
 *
 * @param faxJob the wrapped fax job to resume.
 * @param client the connected HylaFAX client used to issue the retry.
 * @throws Exception if the retry request fails.
 */
protected void resumeFaxJob(HylaFaxJob faxJob,HylaFAXClient client) throws Exception {
    //get job
    Job job=faxJob.getHylaFaxJob();

    //get job ID
    long faxJobID=job.getId();

    //resume job (HylaFAX exposes resume as a 'retry' operation)
    client.retry(faxJobID);
}
java
/**
 * Returns the largest value in the given array, computed with a parallel
 * stream reduction.
 *
 * @param array the array to scan.
 * @return the maximum element, or {@code Long.MIN_VALUE} (the reduction's
 *         identity) when the array is empty.
 */
public static long max(LongArrayND array) {
    return array.stream().parallel().reduce(
        Long.MIN_VALUE, Math::max);
}
java
/**
 * Finds a field of the given class by name.
 *
 * @param cls the class whose fields are searched.
 * @param fieldName the field name to look for; must be non-empty.
 * @param declared whether to search declared fields (including non-public).
 * @return the first matching field, or null when no field has that name.
 */
private static Field getField(final Class cls, final String fieldName, boolean declared) {
    $.checkNotEmpty(fieldName);
    for (Field candidate : getFieldsList(cls, declared)) {
        if (fieldName.equals(candidate.getName())) {
            return candidate;
        }
    }
    return null;
}
java
/**
 * Converts a tag map into tag-value filters, appending them to the given
 * list. Delegates to {@code mapToFilters} with group_by enabled (true).
 *
 * @param tags map of tag name to tag value expression.
 * @param filters list to receive the resulting filters.
 */
public static void tagsToFilters(final Map<String, String> tags,
                                 final List<TagVFilter> filters) {
    mapToFilters(tags, filters, true);
}
java
/**
 * Writes {@code value} into {@code field} on {@code target}, without forcing
 * accessibility (delegates with forceAccess = false).
 *
 * @param field the field to write.
 * @param target the instance to modify.
 * @param value the value to assign.
 * @throws IllegalAccessException if the field is not accessible.
 */
public static void writeField(final Field field, final Object target, final Object value) throws IllegalAccessException {
    writeField(field, target, value, false);
}
python
def monkhorst_automatic(cls, structure, ngkpt,
                        use_symmetries=True, use_time_reversal=True, chksymbreak=None, comment=None):
    """
    Convenient static constructor for an automatic Monkhorst-Pack mesh.

    Args:
        structure: :class:`Structure` object.
        ngkpt: Subdivisions N_1, N_2 and N_3 along reciprocal lattice vectors.
        use_symmetries: Use spatial symmetries to reduce the number of k-points.
        use_time_reversal: Use time-reversal symmetry to reduce the number of k-points.

    Returns:
        :class:`KSampling` object.
    """
    # NOTE(review): the analyzer is constructed but its result is not yet
    # used; lattice-dependent shift selection below is still a TODO.
    sg = SpacegroupAnalyzer(structure)
    #sg.get_crystal_system()
    #sg.get_point_group_symbol()
    # TODO
    nshiftk = 1
    #shiftk = 3*(0.5,) # this is the default
    shiftk = 3*(0.5,)

    #if lattice.ishexagonal:
    #elif lattice.isbcc
    #elif lattice.isfcc

    return cls.monkhorst(
        ngkpt, shiftk=shiftk, use_symmetries=use_symmetries, use_time_reversal=use_time_reversal,
        chksymbreak=chksymbreak, comment=comment if comment else "Automatic Monkhorst-Pack scheme")
java
/**
 * Requires that the client has the given permission, throwing an
 * InvocationException with the access-check's error message otherwise.
 *
 * @param clobj the client whose access is checked.
 * @param perm the permission required.
 * @param context optional permission-specific context passed to the check.
 * @throws InvocationException if access is denied.
 */
public static void requireAccess (ClientObject clobj, Permission perm, Object context)
    throws InvocationException
{
    String errmsg = clobj.checkAccess(perm, context);
    if (errmsg != null) {
        throw new InvocationException(errmsg);
    }
}
java
/**
 * Sets the document key/value filters, defensively copying the given
 * collection into the SDK's internal list type. A null argument clears
 * the filters.
 *
 * @param filters the filters to set; may be null.
 */
public void setFilters(java.util.Collection<DocumentKeyValuesFilter> filters) {
    if (filters == null) {
        this.filters = null;
        return;
    }

    this.filters = new com.amazonaws.internal.SdkInternalList<DocumentKeyValuesFilter>(filters);
}
java
/**
 * Creates a pageable over the given elements. A null varargs array is
 * treated as empty (via nullSafeArray).
 *
 * @param array the elements to page over; may be null.
 * @param <T> the element type.
 * @return a SimplePageable wrapping the elements.
 */
@SafeVarargs
public static <T> SimplePageable<T> of(T... array) {
    return new SimplePageable<>(Arrays.asList(nullSafeArray(array)));
}
python
def InsertArg(self, string="", **_):
    """Insert an arg to the current expression."""
    if self.state == "LIST_ARG":
        # Inside a list literal: accumulate into the pending list instead.
        self.list_args.append(string)
    elif self.current_expression.AddArg(string):
        # This expression is complete
        self.stack.append(self.current_expression)
        self.current_expression = self.expression_cls()
        # We go to the BINARY state, to find if there's an AND or OR operator
        return "BINARY"
python
def delete(self, block_type, block_num):
    """
    Deletes a block

    :param block_type: Type of block
    :param block_num: Block number
    :return: result code from the underlying Cli_Delete call
    """
    logger.info("deleting block")
    # Map the symbolic block type onto the numeric snap7 constant.
    blocktype = snap7.snap7types.block_types[block_type]
    result = self.library.Cli_Delete(self.pointer, blocktype, block_num)
    return result
java
/**
 * Returns the list of job flow details, lazily initializing it to an empty
 * internal list on first access so callers never receive null.
 *
 * @return the (possibly empty) mutable list of job flows.
 */
public java.util.List<JobFlowDetail> getJobFlows() {
    if (jobFlows == null) {
        jobFlows = new com.amazonaws.internal.SdkInternalList<JobFlowDetail>();
    }
    return jobFlows;
}
java
/**
 * Asserts that the expression is true, otherwise throws an
 * IllegalArgumentException whose message is formatted with the given value.
 *
 * @param expression the condition that must hold.
 * @param message the message template for the failure case.
 * @param value the long value substituted into the message.
 * @throws IllegalArgumentException if the expression is false.
 */
public static void isTrue(final boolean expression, final String message, final long value) {
    if (!expression) {
        throw new IllegalArgumentException(StringUtils.simpleFormat(message, Long.valueOf(value)));
    }
}
java
/**
 * Coerces an exception into an SSLException: returns it unchanged when it
 * already is one, otherwise wraps it with the original as the cause.
 *
 * @param e the exception to coerce.
 * @return an SSLException equivalent to {@code e}.
 */
private static SSLException asSSLException(Exception e) throws SSLException {
    if (e instanceof SSLException)
        return (SSLException) e;
    return new SSLException(e.getMessage(), e);
}
python
def get_flexports_output_flexport_list_port_id(self, **kwargs):
    """Auto Generated Code

    Builds a <get_flexports>/<output>/<flexport-list>/<port-id> request
    element and hands it to the callback (``kwargs['callback']`` or the
    instance's default ``self._callback``).

    :param port_id: required; becomes the text of the <port-id> element.
    :return: whatever the callback returns for the built element.
    """
    # NOTE: the original generated code created a throwaway
    # ET.Element("config") that was immediately overwritten; it has been
    # removed. The request root is the <get_flexports> element itself.
    get_flexports = ET.Element("get_flexports")
    config = get_flexports
    output = ET.SubElement(get_flexports, "output")
    flexport_list = ET.SubElement(output, "flexport-list")
    port_id = ET.SubElement(flexport_list, "port-id")
    port_id.text = kwargs.pop('port_id')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
python
def send(self, cmd):
    """ Send a command to the bridge.

    :param cmd: List of command bytes.
    """
    # Forward with this object's configured wait interval and repeat count.
    self._bridge.send(cmd, wait=self.wait, reps=self.reps)
python
def get_member(self, username, api=None):
    """
    Return specified automation member
    :param username: Member username
    :param api: sevenbridges Api instance.
    :return: AutomationMember object
    """
    # Normalize the username into a member identifier.
    member = Transform.to_automation_member(username)
    # Fall back to the class-level API instance when none is provided.
    api = api or self._API
    return AutomationMember.get(
        id=member, automation=self.id, api=api
    )
java
/**
 * Deserializes session data from the given stream. The wire format is:
 * id (UTF), created, accessed, lastAccessed, expiry, maxIdle (longs),
 * then an attribute count (int) followed by the attributes.
 *
 * @param is the stream to read from.
 * @param expectedId the session id this file is expected to contain; used
 *        for error reporting only.
 * @return the reconstructed session data.
 * @throws UnreadableSessionDataException if any read/parse step fails.
 */
private SessionData load(InputStream is, String expectedId)
        throws Exception {
    try {
        DataInputStream di = new DataInputStream(is);

        String id = di.readUTF(); // the actual id from inside the file
        long created = di.readLong();
        long accessed = di.readLong();
        long lastAccessed = di.readLong();
        long expiry = di.readLong();
        long maxIdle = di.readLong();

        SessionData data = newSessionData(id, created, accessed, lastAccessed, maxIdle);
        data.setExpiryTime(expiry);
        data.setMaxInactiveInterval(maxIdle);

        // Attributes
        restoreAttributes(di, di.readInt(), data);

        return data;
    } catch (Exception e) {
        // Wrap all failures so the caller knows which session id was unreadable.
        throw new UnreadableSessionDataException(expectedId, e);
    }
}
java
/**
 * Adapts a bidirectional-streaming method implementation into a
 * ServerCallHandler, delegating to the generic streaming-request adapter.
 *
 * @param method the bidi streaming method implementation.
 * @return a handler dispatching calls to {@code method}.
 */
public static <ReqT, RespT> ServerCallHandler<ReqT, RespT> asyncBidiStreamingCall(
    BidiStreamingMethod<ReqT, RespT> method) {
  return asyncStreamingRequestCall(method);
}
java
/**
 * Convenience overload: checks for the location feature using the
 * context's PackageManager.
 *
 * @param context the Android context to query.
 * @return true if the device reports the location system feature.
 */
@TargetApi(Build.VERSION_CODES.FROYO)
public static boolean hasLocationFeature(Context context) {
    return hasLocationFeature(context.getPackageManager());
}
python
def to_string(self, verbose=0):
    """String representation."""
    parts = []
    # One header-plus-body section per relaxation cycle, 1-based.
    for step, cycle in enumerate(self, start=1):
        parts.append("")
        parts.append("RELAXATION STEP: %d" % step)
        parts.append(cycle.to_string(verbose=verbose))

    return "\n".join(parts)
python
def _detect(self):
    """ Detect uninitialized state variables

    Recursively visit the calls
    Returns:
        dict: [contract name] = set(state variable uninitialized)
    """
    results = []
    for c in self.slither.contracts_derived:
        ret = self.detect_uninitialized(c)
        for variable, functions in ret:
            info = "{}.{} ({}) is never initialized. It is used in:\n"
            info = info.format(variable.contract.name,
                               variable.name,
                               variable.source_mapping_str)
            for f in functions:
                info += "\t- {} ({})\n".format(f.name, f.source_mapping_str)

            # NOTE: a dead local that collected source mappings was removed;
            # the JSON helpers below record the same mapping information.
            # Renamed from 'json' to avoid shadowing the json module.
            result_json = self.generate_json_result(info)
            self.add_variable_to_json(variable, result_json)
            self.add_functions_to_json(functions, result_json)
            results.append(result_json)

    return results
python
def update(self):
    """
    Updates the todo list according to the todos in the view associated
    with this list.
    """
    old_focus_position = self.todolist.focus

    id_length = max_id_length(self.view.todolist.count())
    del self.todolist[:]

    for group, todos in self.view.groups.items():
        # Only render group headers when there is more than one group.
        if len(self.view.groups) > 1:
            grouplabel = ", ".join(group)
            self.todolist.append(urwid.Text(grouplabel))
            self.todolist.append(urwid.Divider('-'))
        for todo in todos:
            todowidget = TodoWidget.create(todo, id_length)
            todowidget.number = self.view.todolist.number(todo)
            self.todolist.append(todowidget)
            self.todolist.append(urwid.Divider('-'))

    # BUGFIX: compare against None explicitly — a truthiness check would
    # silently discard a saved focus position of 0.
    if old_focus_position is not None:
        try:
            self.todolist.set_focus(old_focus_position)
        except IndexError:
            # scroll to the bottom if the last item disappeared from column
            # -2 for the same reason as in self._scroll_to_bottom()
            self.todolist.set_focus(len(self.todolist) - 2)
java
/**
 * Configures the JVM-wide HTTP proxy by setting the host and port
 * system properties.
 *
 * @param host the proxy host name or address.
 * @param port the proxy port, as a string.
 */
public static void setJVMProxy(String host, String port) {
    System.setProperty(JVM_PROXY_HOST_PROPERTY, host);
    System.setProperty(JVM_PROXY_PORT_PROPERTY, port);
}
java
/**
 * JMH benchmark: measures the average time (in nanoseconds) of setting a
 * status on a span. Returns the span so the JIT cannot eliminate the call.
 *
 * @param data benchmark state holding the span under test.
 * @return the span, to defeat dead-code elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span setStatus(Data data) {
  data.spanToSet.setStatus(STATUS_OK);
  return data.spanToSet;
}
java
/**
 * Binds the given flow context to the current thread and mirrors it into
 * the logging MDC under the "flowCtxt" key. Passing null clears any
 * existing context instead.
 *
 * @param flowContext the context to bind; null clears the current context.
 */
public static void addFlowContext(final FlowContext flowContext) {
    if (null == flowContext) {
        clearFlowcontext();
        return;
    }
    FLOW_CONTEXT_THREAD_LOCAL.set(flowContext);
    MDC.put("flowCtxt", flowContext.toString());
}
java
/**
 * Returns a future that completes when a snapshot with the given nonce
 * finishes successfully. Registers a one-shot completion interest that
 * deregisters itself once the matching, successful event is observed;
 * failed or non-matching events are ignored.
 *
 * @param nonce the snapshot nonce to watch for.
 * @return a future completed with the matching snapshot completion event.
 */
public static ListenableFuture<SnapshotCompletionInterest.SnapshotCompletionEvent> watchSnapshot(final String nonce)
{
    final SettableFuture<SnapshotCompletionInterest.SnapshotCompletionEvent> result = SettableFuture.create();
    SnapshotCompletionInterest interest = new SnapshotCompletionInterest() {
        @Override
        public CountDownLatch snapshotCompleted(SnapshotCompletionEvent event) {
            if (event.nonce.equals(nonce) && event.didSucceed) {
                // One-shot: remove ourselves before completing the future.
                VoltDB.instance().getSnapshotCompletionMonitor().removeInterest(this);
                result.set(event);
            }
            return null;
        }
    };
    VoltDB.instance().getSnapshotCompletionMonitor().addInterest(interest);

    return result;
}
python
def get_series(self, series):
    """
    Returns a census series API handler.
    """
    # Map the series key onto the attribute name of the handler on the
    # census client; lazy getattr keeps attribute access on demand.
    handler_attrs = {
        "acs1": "acs1dp",
        "acs5": "acs5",
        "sf1": "sf1",
        "sf3": "sf3",
    }
    attr = handler_attrs.get(series)
    if attr is None:
        return None
    return getattr(self.census, attr)
java
/**
 * Wraps the given statement so that all @AfterClass methods of the test
 * class run after it. When there are none, the statement is returned
 * unchanged.
 *
 * @param statement the statement to wrap.
 * @return the (possibly wrapped) statement.
 */
protected Statement withAfterClasses(Statement statement) {
    List<FrameworkMethod> afters = testClass
            .getAnnotatedMethods(AfterClass.class);
    return afters.isEmpty() ? statement : new RunAfters(statement, afters, null);
}
python
def alignVec_quat(vec):
    """Returns a unit quaternion that will align vec with the z-axis"""
    # Spherical angles of the input direction.
    alpha = np.arctan2(vec[1], vec[0])
    beta = np.arccos(vec[2])
    gamma = -alpha * vec[2]

    # Half-angle terms of the ZYZ Euler-angle composition.
    half_beta = 0.5 * beta
    half_sum = 0.5 * (alpha + gamma)
    half_diff = 0.5 * (gamma - alpha)

    return np.array([
        np.cos(half_beta) * np.cos(half_sum),
        np.sin(half_beta) * np.sin(half_diff),
        np.sin(half_beta) * np.cos(half_diff),
        np.cos(half_beta) * np.sin(half_sum),
    ])
python
def add_colorbar(self, *args, **kwargs):
    """DEPRECATED, use `Plot.colorbar` instead
    """
    # Deprecated shim: warn, then delegate to the renamed method.
    warnings.warn(
        "{0}.add_colorbar was renamed {0}.colorbar, this warnings will "
        "result in an error in the future".format(type(self).__name__),
        DeprecationWarning)
    return self.colorbar(*args, **kwargs)
java
/**
 * Creates or updates the managed instance's vulnerability assessment,
 * asynchronously, unwrapping the service response to its body.
 *
 * @param resourceGroupName the resource group containing the instance.
 * @param managedInstanceName the managed instance name.
 * @param parameters the assessment definition to apply.
 * @return an observable emitting the resulting assessment resource.
 */
public Observable<ManagedInstanceVulnerabilityAssessmentInner> createOrUpdateAsync(String resourceGroupName, String managedInstanceName, ManagedInstanceVulnerabilityAssessmentInner parameters) {
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, managedInstanceName, parameters).map(new Func1<ServiceResponse<ManagedInstanceVulnerabilityAssessmentInner>, ManagedInstanceVulnerabilityAssessmentInner>() {
        @Override
        public ManagedInstanceVulnerabilityAssessmentInner call(ServiceResponse<ManagedInstanceVulnerabilityAssessmentInner> response) {
            return response.body();
        }
    });
}
python
def install(self, release_id):
    """Install the local artifact into the remote release directory,
    optionally with a different name than the artifact had locally.

    If the directory for the given release ID does not exist on the
    remote system, it will be created. The directory will be created
    according to the standard Tunic directory structure (see
    :doc:`design`).

    :param str release_id: Timestamp-based identifier for this deployment.
    :return: The results of the ``put`` command using Fabric. This return
        value is an iterable of the paths of all files uploaded on the
        remote server.

    .. note:: Earlier docs mentioned ``retries``/``retry_delay`` parameters;
        this method does not accept them.
    """
    release_path = os.path.join(self._releases, release_id)

    if not self._runner.exists(release_path):
        self._runner.run("mkdir -p '{0}'".format(release_path))

    # The artifact can optionally be renamed when being uploaded to
    # remote server. Useful for when we need a consistent name for
    # each deploy on the remote server but the local artifact includes
    # version numbers or something.
    if self._remote_name is not None:
        destination = os.path.join(release_path, self._remote_name)
    else:
        destination = release_path

    return self._runner.put(self._local_file, destination, mirror_local_mode=True)