Dataset schema:

  language          string (2 classes: "python", "java")
  func_code_string  string (lengths 63 to 466k)
python
def map_df(self, df):
    """Map df"""
    if len(df) == 0:
        return

    aesthetics = set(self.aesthetics) & set(df.columns)
    for ae in aesthetics:
        df[ae] = self.map(df[ae])

    return df
python
def _fileobj_lookup(self, fileobj):
    """Return a file descriptor from a file object.

    This wraps _fileobj_to_fd() to do an exhaustive search in case
    the object is invalid but we still have it in our map.  This
    is used by unregister() so we can unregister an object that
    was previously registered even if it is closed.  It is also
    used by _SelectorMapping.
    """
    try:
        return _fileobj_to_fd(fileobj)
    except ValueError:
        # Do an exhaustive search.
        for key in self._fd_to_key.values():
            if key.fileobj is fileobj:
                return key.fd
        # Raise ValueError after all.
        raise
python
def render_template(self, template, **kwargs):
    """Use this method on your own endpoints; it will pass the extra_args
    to the templates.

    :param template: The template relative path
    :param kwargs: arguments to be passed to the template
    """
    kwargs["base_template"] = self.appbuilder.base_template
    kwargs["appbuilder"] = self.appbuilder
    return render_template(
        template, **dict(list(kwargs.items()) + list(self.extra_args.items()))
    )
java
private short getCellType(String value) {
    short ret = STRING_TYPE;
    if (value.equals("number"))
        ret = NUMBER_TYPE;
    else if (value.equals("datetime"))
        ret = DATETIME_TYPE;
    else if (value.equals("boolean"))
        ret = BOOLEAN_TYPE;
    return ret;
}
java
public static Ifc4Factory init() {
    try {
        Ifc4Factory theIfc4Factory = (Ifc4Factory) EPackage.Registry.INSTANCE
                .getEFactory(Ifc4Package.eNS_URI);
        if (theIfc4Factory != null) {
            return theIfc4Factory;
        }
    } catch (Exception exception) {
        EcorePlugin.INSTANCE.log(exception);
    }
    return new Ifc4FactoryImpl();
}
java
public CertificateWithNonceDescriptionInner generateVerificationCode(String resourceGroupName,
        String resourceName, String certificateName, String ifMatch) {
    return generateVerificationCodeWithServiceResponseAsync(resourceGroupName, resourceName,
            certificateName, ifMatch).toBlocking().single().body();
}
java
public TableColumn copyBox() {
    TableColumn ret = new TableColumn(el, g, ctx);
    ret.copyValues(this);
    return ret;
}
java
@Override
public DeleteConferenceProviderResult deleteConferenceProvider(DeleteConferenceProviderRequest request) {
    request = beforeClientExecution(request);
    return executeDeleteConferenceProvider(request);
}
java
public ClientTransaction sendRequest(Request request)
        throws SipException, TransactionUnavailableException {
    ensureCorrectDialogLocalTag(request);
    final ClientTransactionWrapper ctw =
            ra.getProviderWrapper().getNewDialogActivityClientTransaction(this, request);
    if (request.getMethod().equals(Request.INVITE))
        lastCancelableTransactionId = ctw.getActivityHandle();
    if (tracer.isInfoEnabled()) {
        tracer.info(String.valueOf(ctw) + " sending request:\n" + request);
    }
    wrappedDialog.sendRequest(ctw.getWrappedClientTransaction());
    return ctw;
}
python
def initArgosApplicationSettings(app):  # TODO: this is Argos specific. Move somewhere else.
    """ Sets Argos specific attributes, such as the OrganizationName, so that the
        application persistent settings are read/written to the correct settings
        file/winreg. It is therefore important to call this function at startup.
        The ArgosApplication constructor does this.
    """
    assert app, \
        "app undefined. Call QtWidgets.QApplication.instance() or QtCore.QApplication.instance() first."

    logger.debug("Setting Argos QApplication settings.")
    app.setApplicationName(info.REPO_NAME)
    app.setApplicationVersion(info.VERSION)
    app.setOrganizationName(info.ORGANIZATION_NAME)
    app.setOrganizationDomain(info.ORGANIZATION_DOMAIN)
java
public static int getContainerPageTypeIdSafely() {
    try {
        return getContainerPageTypeId();
    } catch (CmsLoaderException e) {
        if (LOG.isDebugEnabled()) {
            LOG.debug(e.getLocalizedMessage(), e);
        }
        return -1;
    }
}
java
@Override
public long getDuration() {
    final long end = running.get() ? System.nanoTime() : endTime.get();
    return end - startTime.get();
}
python
def position_target_local_ned_encode(self, time_boot_ms, coordinate_frame, type_mask,
                                     x, y, z, vx, vy, vz, afx, afy, afz, yaw, yaw_rate):
    '''
    Reports the current commanded vehicle position, velocity, and
    acceleration as specified by the autopilot. This should match the
    commands sent in SET_POSITION_TARGET_LOCAL_NED if the vehicle is being
    controlled this way.

    time_boot_ms     : Timestamp in milliseconds since system boot (uint32_t)
    coordinate_frame : Valid options are: MAV_FRAME_LOCAL_NED = 1,
                       MAV_FRAME_LOCAL_OFFSET_NED = 7, MAV_FRAME_BODY_NED = 8,
                       MAV_FRAME_BODY_OFFSET_NED = 9 (uint8_t)
    type_mask        : Bitmask to indicate which dimensions should be ignored
                       by the vehicle: a value of 0b0000000000000000 or
                       0b0000001000000000 indicates that none of the setpoint
                       dimensions should be ignored. If bit 10 is set the
                       floats afx afy afz should be interpreted as force
                       instead of acceleration. Mapping: bit 1: x, bit 2: y,
                       bit 3: z, bit 4: vx, bit 5: vy, bit 6: vz, bit 7: ax,
                       bit 8: ay, bit 9: az, bit 10: is force setpoint,
                       bit 11: yaw, bit 12: yaw rate (uint16_t)
    x                : X Position in NED frame in meters (float)
    y                : Y Position in NED frame in meters (float)
    z                : Z Position in NED frame in meters (note, altitude is
                       negative in NED) (float)
    vx               : X velocity in NED frame in meter / s (float)
    vy               : Y velocity in NED frame in meter / s (float)
    vz               : Z velocity in NED frame in meter / s (float)
    afx              : X acceleration or force (if bit 10 of type_mask is set)
                       in NED frame in meter / s^2 or N (float)
    afy              : Y acceleration or force (if bit 10 of type_mask is set)
                       in NED frame in meter / s^2 or N (float)
    afz              : Z acceleration or force (if bit 10 of type_mask is set)
                       in NED frame in meter / s^2 or N (float)
    yaw              : yaw setpoint in rad (float)
    yaw_rate         : yaw rate setpoint in rad/s (float)
    '''
    return MAVLink_position_target_local_ned_message(
        time_boot_ms, coordinate_frame, type_mask,
        x, y, z, vx, vy, vz, afx, afy, afz, yaw, yaw_rate)
java
public Map<String, Binding<?>> linkAll() {
    assertLockHeld();
    if (linkedBindings != null) {
        return linkedBindings;
    }
    for (Binding<?> binding : bindings.values()) {
        if (!binding.isLinked()) {
            toLink.add(binding);
        }
    }
    linkRequested(); // This method throws if bindings are not resolvable/linkable.
    linkedBindings = Collections.unmodifiableMap(bindings);
    return linkedBindings;
}
python
def subquery(self, name=None):
    """The recipe's query as a subquery suitable for use in joins or other
    queries.
    """
    query = self.query()
    return query.subquery(name=name)
java
static String stripRules(String rules) {
    StringBuilder strippedRules = new StringBuilder();
    int rulesLength = rules.length();
    for (int idx = 0; idx < rulesLength;) {
        char ch = rules.charAt(idx++);
        if (ch == '#') {
            while (idx < rulesLength && ch != '\r' && ch != '\n' && ch != chNEL) {
                ch = rules.charAt(idx++);
            }
        }
        if (!UCharacter.isISOControl(ch)) {
            strippedRules.append(ch);
        }
    }
    return strippedRules.toString();
}
java
protected Collection invokePage(int pPageIndex, int pPageSize) {
    if (mDao instanceof BaseSelectPageMapper) {
        return ((BaseSelectPageMapper) mDao).selectPage(createModel(), pPageIndex, pPageSize);
    } else if (mDao instanceof MysqlSelectPageMapper) {
        return ((MysqlSelectPageMapper) mDao).selectPage(createModel(), pPageIndex, pPageSize);
    } else if (mDao instanceof Mapper) {
        Mapper mapper = (Mapper) mDao;
        return mapper.selectByRowBounds(createModel(), new RowBounds(pPageIndex - 1, pPageSize));
    }
    return new ArrayList();
}
python
def _build(self, inputs, prev_state, **kwargs):
    """Connects the DeepRNN module into the graph.

    If this is not the first time the module has been connected to the graph,
    the Tensors provided as input_ and state must have the same final
    dimension, in order for the existing variables to be the correct size for
    their corresponding multiplications. The batch size may differ for each
    connection.

    Args:
      inputs: a nested tuple of Tensors of arbitrary dimensionality, with at
        least an initial batch dimension.
      prev_state: a tuple of `prev_state`s that corresponds to the state
        of each one of the cores of the `DeepCore`.
      **kwargs: optional kwargs to be passed to the `_build` of all
        sub-modules. E.g. is_training=True. Note all sub-modules must accept
        the given kwarg.

    Returns:
      output: a nested tuple of Tensors of arbitrary dimensionality, with at
        least an initial batch dimension.
      next_state: a tuple of `next_state`s that corresponds to the updated
        state of each one of the cores of the `DeepCore`.

    Raises:
      ValueError: if connecting the module into the graph any time after the
        first time, and the inferred size of the inputs does not match
        previous invocations. This may happen if one connects a module any
        time after the first time that does not have the configuration of
        skip connections as the first time.
    """
    current_input = inputs
    next_states = []
    outputs = []
    recurrent_idx = 0
    concatenate = lambda *args: tf.concat(args, axis=-1)
    for i, core in enumerate(self._cores):
        if self._skip_connections and i > 0:
            current_input = nest.map_structure(concatenate, inputs, current_input)

        # Determine if this core in the stack is recurrent or not and call
        # accordingly.
        if self._is_recurrent_list[i]:
            current_input, next_state = core(current_input,
                                             prev_state[recurrent_idx],
                                             **kwargs)
            next_states.append(next_state)
            recurrent_idx += 1
        else:
            current_input = core(current_input, **kwargs)

        if self._skip_connections:
            outputs.append(current_input)

    if self._skip_connections and self._concat_final_output_if_skip:
        output = nest.map_structure(concatenate, *outputs)
    else:
        output = current_input

    self._last_output_size = _get_shape_without_batch_dimension(output)
    return output, tuple(next_states)
python
def plot_d_delta_m(fignum, Bdm, DdeltaM, s):
    """
    Function to plot d(Delta M)/dB curves.

    Parameters
    __________
    fignum : matplotlib figure number
    Bdm : change in field
    DdeltaM : change in delta M
    s : specimen name
    """
    plt.figure(num=fignum)
    plt.clf()
    if not isServer:
        plt.figtext(.02, .01, version_num)
    start = len(Bdm) - len(DdeltaM)
    plt.plot(Bdm[start:], DdeltaM, 'b')
    plt.xlabel('B (T)')
    plt.ylabel('d (Delta M)/dB')
    plt.title(s)
java
public static final void event(Object o, TraceComponent tc, String msg) {
    Tr.event(tc, getFullClassName(tc) + " " + getMEName(o) + " " + msg);
}
java
@Restricted(DoNotUse.class) // WebOnly
public HttpResponse doPlatformPluginList() throws IOException {
    SetupWizard setupWizard = Jenkins.get().getSetupWizard();
    if (setupWizard != null) {
        if (InstallState.UPGRADE.equals(Jenkins.get().getInstallState())) {
            JSONArray initialPluginData = getPlatformPluginUpdates();
            if (initialPluginData != null) {
                return HttpResponses.okJSON(initialPluginData);
            }
        } else {
            JSONArray initialPluginData = getPlatformPluginList();
            if (initialPluginData != null) {
                return HttpResponses.okJSON(initialPluginData);
            }
        }
    }
    return HttpResponses.okJSON();
}
python
def update(self, obj, size):
    '''Update this profile.
    '''
    self.number += 1
    self.total += size
    if self.high < size:  # largest
        self.high = size
    try:  # prefer using weak ref
        self.objref, self.weak = Weakref.ref(obj), True
    except TypeError:
        self.objref, self.weak = obj, False
python
def flanks(args):
    """
    %prog flanks gaps.bed fastafile

    Create sequences flanking the gaps.
    """
    p = OptionParser(flanks.__doc__)
    p.add_option("--extend", default=2000, type="int",
                 help="Extend seq flanking the gaps [default: %default]")
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    gapsbed, fastafile = args
    Ext = opts.extend
    sizes = Sizes(fastafile).mapping
    bed = Bed(gapsbed)

    pf = gapsbed.rsplit(".", 1)[0]
    extbed = pf + ".ext.bed"
    fw = open(extbed, "w")
    for i, b in enumerate(bed):
        seqid = b.seqid
        gapname = b.accn
        size = sizes[seqid]

        prev_b = bed[i - 1] if i > 0 else None
        next_b = bed[i + 1] if i + 1 < len(bed) else None
        if prev_b and prev_b.seqid != seqid:
            prev_b = None
        if next_b and next_b.seqid != seqid:
            next_b = None

        start = prev_b.end + 1 if prev_b else 1
        start, end = max(start, b.start - Ext), b.start - 1
        print("\t".join(str(x) for x in
              (b.seqid, start - 1, end, gapname + "L")), file=fw)

        end = next_b.start - 1 if next_b else size
        start, end = b.end + 1, min(end, b.end + Ext)
        print("\t".join(str(x) for x in
              (b.seqid, start - 1, end, gapname + "R")), file=fw)
    fw.close()

    extfasta = fastaFromBed(extbed, fastafile, name=True)
    return extbed, extfasta
python
def sphbear(lat1, lon1, lat2, lon2, tol=1e-15):
    """Calculate the bearing between two locations on a sphere.

    lat1  The latitude of the first location.
    lon1  The longitude of the first location.
    lat2  The latitude of the second location.
    lon2  The longitude of the second location.
    tol   Tolerance for checking proximity to poles and rounding to zero.

    The bearing (AKA the position angle, PA) is the orientation of point 2
    with regards to point 1 relative to the longitudinal axis. Returns the
    bearing in radians. All arguments are in radians as well. The arguments
    may be vectors.

    Note that the ordering of the arguments maps to the nonstandard ordering
    ``(Dec, RA)`` in equatorial coordinates. In a spherical projection it
    maps to ``(Y, X)`` which may also be unexpected.

    The sign convention is astronomical: bearings range from -π to π, with
    negative values if point 2 is in the western hemisphere with regards to
    point 1, positive if it is in the eastern. (That is, "east from north".)
    If point 1 is very near the pole, the bearing is undefined and the result
    is NaN.

    The *tol* argument is used for checking proximity to the poles and for
    rounding the bearing to precisely zero if it's extremely small.

    Derived from ``bear()`` in `angles.py from Prasanth Nair
    <https://github.com/phn/angles>`_. His version is BSD licensed. This one
    is sufficiently different that I think it counts as a separate
    implementation.
    """
    # cross product on outer axis:
    ocross = lambda a, b: np.cross(a, b, axisa=0, axisb=0, axisc=0)

    # if args have shape S, this has shape (3, S)
    v1 = np.asarray([np.cos(lat1) * np.cos(lon1),
                     np.cos(lat1) * np.sin(lon1),
                     np.sin(lat1)])
    v2 = np.asarray([np.cos(lat2) * np.cos(lon2),
                     np.cos(lat2) * np.sin(lon2),
                     np.sin(lat2)])
    is_bad = (v1[0]**2 + v1[1]**2) < tol

    p12 = ocross(v1, v2)  # ~"perpendicular to great circle containing points"
    p1z = np.asarray([v1[1], -v1[0], np.zeros_like(lat1)])  # ~"perp to base and Z axis"
    cm = np.sqrt((ocross(p12, p1z)**2).sum(axis=0))  # ~"angle between the vectors"
    bearing = np.arctan2(cm, np.sum(p12 * p1z, axis=0))
    bearing = np.where(p12[2] < 0, -bearing, bearing)  # convert to [-pi/2, pi/2]
    bearing = np.where(np.abs(bearing) < tol, 0, bearing)  # clamp
    bearing[np.where(is_bad)] = np.nan
    return bearing
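A quick sanity check of the sign convention described in the docstring (a hypothetical driver I added, assuming numpy is imported as np and sphbear is in scope): a point due east of another on the equator should give a bearing of about +π/2.

import numpy as np

# Point 2 due east of point 1 on the equator -> bearing ~ +pi/2.
lat1 = np.array([0.0])
lon1 = np.array([0.0])
lat2 = np.array([0.0])
lon2 = np.array([0.1])
print(sphbear(lat1, lon1, lat2, lon2))  # expected: ~1.5708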
java
public List<Object> stepsInstances(List<CandidateSteps> candidateSteps) {
    List<Object> instances = new ArrayList<>();
    for (CandidateSteps steps : candidateSteps) {
        if (steps instanceof Steps) {
            instances.add(((Steps) steps).instance());
        }
    }
    return instances;
}
java
private int convertFocusDirectionToLayoutDirection(int focusDirection) {
    switch (focusDirection) {
        case View.FOCUS_BACKWARD:
            return RenderState.LAYOUT_START;
        case View.FOCUS_FORWARD:
            return RenderState.LAYOUT_END;
        case View.FOCUS_UP:
            return mOrientation == VERTICAL ? RenderState.LAYOUT_START
                    : RenderState.INVALID_LAYOUT;
        case View.FOCUS_DOWN:
            return mOrientation == VERTICAL ? RenderState.LAYOUT_END
                    : RenderState.INVALID_LAYOUT;
        case View.FOCUS_LEFT:
            return mOrientation == HORIZONTAL ? RenderState.LAYOUT_START
                    : RenderState.INVALID_LAYOUT;
        case View.FOCUS_RIGHT:
            return mOrientation == HORIZONTAL ? RenderState.LAYOUT_END
                    : RenderState.INVALID_LAYOUT;
        default:
            if (DEBUG) {
                Log.d(TAG, "Unknown focus request:" + focusDirection);
            }
            return RenderState.INVALID_LAYOUT;
    }
}
python
def _dispatch_container(self, textgroup, directory):
    """Run the dispatcher over a textgroup within a try/except block.

    .. note:: This extraction allows to change the dispatch routine \
        without having to care for the error dispatching

    :param textgroup: Textgroup object that needs to be dispatched
    :param directory: Directory in which the textgroup was found
    """
    try:
        self._dispatch(textgroup, directory)
    except UndispatchedTextError as E:
        self.logger.error("Error dispatching %s ", directory)
        if self.RAISE_ON_UNDISPATCHED is True:
            raise E
python
def get_manifest_list_only_expectation(self):
    """
    Get expectation for manifest list only

    :return: bool, expect manifest list only?
    """
    if not self.workflow.postbuild_results.get(PLUGIN_GROUP_MANIFESTS_KEY):
        self.log.debug('Cannot check if only manifest list digest should be returned '
                       'because group manifests plugin did not run')
        return False

    platforms = get_platforms(self.workflow)
    if not platforms:
        self.log.debug('Cannot check if only manifest list digest should be returned '
                       'because we have no platforms list')
        return False

    try:
        platform_to_goarch = get_platform_to_goarch_mapping(self.workflow)
    except KeyError:
        self.log.debug('Cannot check if only manifest list digest should be returned '
                       'because there are no platform descriptors')
        return False

    for plat in platforms:
        if platform_to_goarch[plat] == 'amd64':
            self.log.debug('amd64 was built, all media types available')
            return False

    self.log.debug('amd64 was not built, only manifest list digest is available')
    return True
python
def _send_locked(self, cmd):
    """Sends the specified command to the lutron controller.

    Assumes self._lock is held.
    """
    _LOGGER.debug("Sending: %s" % cmd)
    try:
        self._telnet.write(cmd.encode('ascii') + b'\r\n')
    except BrokenPipeError:
        self._disconnect_locked()
python
def reset(self):
    """Ensure all shards, configs, and routers are running and available."""
    # Ensure all shards by calling "reset" on each.
    for shard_id in self._shards:
        if self._shards[shard_id].get('isReplicaSet'):
            singleton = ReplicaSets()
        elif self._shards[shard_id].get('isServer'):
            singleton = Servers()
        singleton.command(self._shards[shard_id]['_id'], 'reset')
    # Ensure all config servers by calling "reset" on each.
    for config_id in self._configsvrs:
        self.configdb_singleton.command(config_id, 'reset')
    # Ensure all routers by calling "reset" on each.
    for router_id in self._routers:
        Servers().command(router_id, 'reset')
    return self.info()
python
def make_img_widget(cls, img, layout=Layout(), format='jpg'):
    "Returns an image widget for specified file name `img`."
    return widgets.Image(value=img, format=format, layout=layout)
python
def do_related(parser, token):
    """
    Get N related models into a context variable optionally specifying a
    named related finder.

    **Usage**::

        {% related <limit>[ query_type] [app.model, ...] for <object> as <result> %}

    **Parameters**::

        ==================  ====================================================
        Option              Description
        ==================  ====================================================
        ``limit``           Number of objects to retrieve.
        ``query_type``      Named finder to resolve the related objects,
                            falls back to ``settings.DEFAULT_RELATED_FINDER``
                            when not specified.
        ``app.model``, ...  List of allowed models, all if omitted.
        ``object``          Object to get the related for.
        ``result``          Store the resulting list in context under given
                            name.
        ==================  ====================================================

    **Examples**::

        {% related 10 for object as related_list %}
        {% related 10 directly articles.article, galleries.gallery for object as related_list %}
    """
    bits = token.split_contents()
    obj_var, count, var_name, mods, finder = parse_related_tag(bits)
    return RelatedNode(obj_var, count, var_name, mods, finder)
java
public GenericDataType[] getSchemaRow() {
    GenericDataType[] result = new GenericDataType[this.schemaRow.size()];
    this.schemaRow.toArray(result);
    return result;
}
python
def _private_packages_allowed():
    """Checks if the current user is allowed to create private packages.

    In the public cloud, the user needs to be on a paid plan.
    There are no restrictions in other deployments.
    """
    if not HAVE_PAYMENTS or TEAM_ID:
        return True

    customer = _get_or_create_customer()
    plan = _get_customer_plan(customer)
    return plan != PaymentPlan.FREE
python
def _collect_data(self):
    """Returns a list of all the data gathered from the engine iterable."""
    all_data = []
    for line in self.engine.run_engine():
        logging.debug("Adding {} to all_data".format(line))
        all_data.append(line.copy())
    logging.debug("all_data is now {}".format(all_data))
    return all_data
java
private void writeOpCode(@NonNull final BluetoothGattCharacteristic characteristic,
                         @NonNull final byte[] value)
        throws DeviceDisconnectedException, DfuException, UploadAbortedException {
    writeOpCode(characteristic, value, false);
}
java
protected void __invokeTag(int line, String name) {
    __engine.invokeTemplate(line, name, this, null, null, null);
}
java
public void updateMorphTime(float delta) {
    for (int i = 0; i < figures.size(); i++) {
        Figure figure = (Figure) figures.get(i);
        MorphShape shape = (MorphShape) figure.getShape();
        shape.updateMorphTime(delta);
    }
}
python
def num_rows(self):
    """
    Returns the number of rows.

    Returns
    -------
    out : int
        Number of rows in the SFrame.
    """
    if self._is_vertex_frame():
        return self.__graph__.summary()['num_vertices']
    elif self._is_edge_frame():
        return self.__graph__.summary()['num_edges']
java
private void localChangeEnd(IManagedContext<?> managedContext, boolean silent,
        boolean deferCommit, ISurveyCallback callback) throws ContextException {
    if (pendingStack.isEmpty() || pendingStack.peek() != managedContext) {
        throw new ContextException("Illegal context change nesting.");
    }

    if (!managedContext.isPending()) {
        pendingStack.pop();
        return;
    }

    commitStack.push(managedContext);
    managedContext.surveySubscribers(silent, response -> {
        boolean accept = !response.rejected();

        if (!accept && log.isDebugEnabled()) {
            log.debug("Survey of managed context " + managedContext.getContextName()
                    + " returned '" + response + "'.");
        }

        pendingStack.remove(managedContext);

        if (!deferCommit && (!accept || pendingStack.isEmpty())) {
            commitContexts(accept, accept);
        }

        execCallback(callback, response);
    });
}
python
def master_for(self, service_name, redis_class=StrictRedis,
               connection_pool_class=SentinelConnectionPool, **kwargs):
    """
    Returns a redis client instance for the ``service_name`` master.

    A SentinelConnectionPool class is used to retrieve the master's
    address before establishing a new connection.

    NOTE: If the master's address has changed, any cached connections to
    the old master are closed.

    By default clients will be a redis.StrictRedis instance. Specify a
    different class to the ``redis_class`` argument if you desire
    something different.

    The ``connection_pool_class`` specifies the connection pool to use.
    The SentinelConnectionPool will be used by default.

    All other keyword arguments are merged with any connection_kwargs
    passed to this class and passed to the connection pool as keyword
    arguments to be used to initialize Redis connections.
    """
    kwargs['is_master'] = True
    connection_kwargs = dict(self.connection_kwargs)
    connection_kwargs.update(kwargs)
    return redis_class(connection_pool=connection_pool_class(
        service_name, self, **connection_kwargs))
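For context, a hedged usage sketch of this API as exposed by redis-py's Sentinel class (the sentinel address and service name below are placeholders, not values from this codebase):

from redis.sentinel import Sentinel

# Placeholder sentinel address and service name; adjust for your deployment.
sentinel = Sentinel([('localhost', 26379)], socket_timeout=0.1)
master = sentinel.master_for('mymaster', socket_timeout=0.1)
master.set('key', 'value')  # writes always go to the current master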
java
public IType getByRelativeName(String relativeName, ITypeUsesMap typeUses)
        throws ClassNotFoundException {
    String relativeName1 = relativeName;
    IType type = FrequentUsedJavaTypeCache.instance(getExecutionEnv())
            .getHighUsageType(relativeName1);
    if (type != null) {
        return type;
    }

    //## todo: consider handling requests to find a parameterized type...
    //   the following is a giant hack
    int i = relativeName1 == null ? -1 : relativeName1.indexOf('<');
    if (i >= 0) {
        assert relativeName1 != null;
        relativeName1 = relativeName1.substring(0, i);
    }
    //##

    type = getTypeByRelativeNameIfValid_NoGenerics(relativeName1, typeUses);
    if (type == null) {
        throw new ClassNotFoundException(relativeName1);
    } else {
        return type;
    }
}
python
def validate_lang(ctx, param, lang):
    """Validation callback for the <lang> option.

    Ensures <lang> is a supported language unless the <nocheck> flag is set.
    """
    if ctx.params['nocheck']:
        return lang

    try:
        if lang not in tts_langs():
            raise click.UsageError(
                "'%s' not in list of supported languages.\n"
                "Use --all to list languages or "
                "add --nocheck to disable language check." % lang)
        else:
            # The language is valid.
            # No need to let gTTS re-validate.
            ctx.params['nocheck'] = True
    except RuntimeError as e:
        # Only case where the <nocheck> flag can be False
        # Non-fatal. gTTS will try to re-validate.
        log.debug(str(e), exc_info=True)

    return lang
java
public Object invoke(MethodInvocation mi) throws Throwable {
    if (!Modifier.isPublic(mi.getMethod().getModifiers())) {
        return mi.proceed();
    }
    Method searchMethod = mi.getMethod();
    Object[] args = mi.getArguments();
    String appid = AOPUtils.getFirstArgOfString(args);
    Method superMethod = null;
    Measured measuredAnno = null;
    try {
        superMethod = Search.class.getMethod(searchMethod.getName(),
                searchMethod.getParameterTypes());
        measuredAnno = superMethod.getAnnotation(Measured.class);
    } catch (Exception e) {
        logger.error("Error in search AOP layer!", e);
    }
    Set<IOListener> ioListeners = Para.getSearchQueryListeners();

    for (IOListener ioListener : ioListeners) {
        ioListener.onPreInvoke(superMethod, args);
        logger.debug("Executed {}.onPreInvoke().", ioListener.getClass().getName());
    }

    Object result = null;
    if (measuredAnno != null) {
        result = invokeTimedSearch(appid, searchMethod, mi);
    } else {
        result = mi.proceed();
    }

    for (IOListener ioListener : ioListeners) {
        ioListener.onPostInvoke(superMethod, result);
        logger.debug("Executed {}.onPostInvoke().", ioListener.getClass().getName());
    }
    return result;
}
python
def from_diff(diff, options=None, cwd=None):
    """Create a Radius object from a diff rather than a repository."""
    return RadiusFromDiff(diff=diff, options=options, cwd=cwd)
python
def _geoid_radius(latitude: float) -> float:
    """Calculates the GEOID radius at a given latitude

    Parameters
    ----------
    latitude : float
        Latitude (degrees)

    Returns
    -------
    R : float
        GEOID Radius (meters)
    """
    lat = deg2rad(latitude)
    return sqrt(1 / (cos(lat) ** 2 / Rmax_WGS84 ** 2
                     + sin(lat) ** 2 / Rmin_WGS84 ** 2))
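As a sanity check, here is a minimal standalone sketch of the same formula with the WGS84 semi-axes hard-coded as assumptions (the module above presumably defines Rmax_WGS84 and Rmin_WGS84 itself):

from math import radians, sin, cos, sqrt

RMAX_WGS84 = 6378137.0        # equatorial radius (m), assumed value
RMIN_WGS84 = 6356752.314245   # polar radius (m), assumed value

def geoid_radius(latitude_deg):
    # Radius of an ellipse of revolution as a function of latitude.
    lat = radians(latitude_deg)
    return sqrt(1 / (cos(lat) ** 2 / RMAX_WGS84 ** 2
                     + sin(lat) ** 2 / RMIN_WGS84 ** 2))

print(geoid_radius(0.0))   # ~6378137.0 at the equator
print(geoid_radius(90.0))  # ~6356752.3 at the pole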
python
def _send_and_receive(self, target, lun, netfn, cmdid, payload):
    """Send and receive data using RMCP interface.

    target:
    lun:
    netfn:
    cmdid:
    payload: IPMI message payload as bytestring

    Returns the received data as array.
    """
    self._inc_sequence_number()

    header = IpmbHeaderReq()
    header.netfn = netfn
    header.rs_lun = lun
    header.rs_sa = target.ipmb_address
    header.rq_seq = self.next_sequence_number
    header.rq_lun = 0
    header.rq_sa = self.slave_address
    header.cmd_id = cmdid

    # Bridge message
    if target.routing:
        tx_data = encode_bridged_message(target.routing, header, payload,
                                         self.next_sequence_number)
    else:
        tx_data = encode_ipmb_msg(header, payload)
    self._send_ipmi_msg(tx_data)

    received = False
    while received is False:
        if not self._q.empty():
            rx_data = self._q.get()
        else:
            rx_data = self._receive_ipmi_msg()

        if array('B', rx_data)[5] == constants.CMDID_SEND_MESSAGE:
            rx_data = decode_bridged_message(rx_data)

        received = rx_filter(header, rx_data)

        if not received:
            self._q.put(rx_data)

    return rx_data[6:-1]
python
def _add(self, uri, methods, handler, host=None, name=None):
    """Add a handler to the route list

    :param uri: path to match
    :param methods: sequence of accepted method names. If none are
        provided, any method is allowed
    :param handler: request handler function.
        When executed, it should provide a response object.
    :param name: user defined route name for url_for
    :return: Nothing
    """
    if host is not None:
        if isinstance(host, str):
            uri = host + uri
            self.hosts.add(host)
        else:
            if not isinstance(host, Iterable):
                raise ValueError(
                    "Expected either string or Iterable of "
                    "host strings, not {!r}".format(host)
                )

            for host_ in host:
                self.add(uri, methods, handler, host_, name)
            return

    # Dict for faster lookups of if method allowed
    if methods:
        methods = frozenset(methods)

    parameters = []
    parameter_names = set()
    properties = {"unhashable": None}

    def add_parameter(match):
        name = match.group(1)
        name, _type, pattern = self.parse_parameter_string(name)

        if name in parameter_names:
            raise ParameterNameConflicts(
                "Multiple parameter named <{name}> "
                "in route uri {uri}".format(name=name, uri=uri)
            )
        parameter_names.add(name)

        parameter = Parameter(name=name, cast=_type)
        parameters.append(parameter)

        # Mark the whole route as unhashable if it has the hash key in it
        if re.search(r"(^|[^^]){1}/", pattern):
            properties["unhashable"] = True
        # Mark the route as unhashable if it matches the hash key
        elif re.search(r"/", pattern):
            properties["unhashable"] = True

        return "({})".format(pattern)

    pattern_string = re.sub(self.parameter_pattern, add_parameter, uri)
    pattern = re.compile(r"^{}$".format(pattern_string))

    def merge_route(route, methods, handler):
        # merge to the existing route when possible.
        if not route.methods or not methods:
            # method-unspecified routes are not mergeable.
            raise RouteExists("Route already registered: {}".format(uri))
        elif route.methods.intersection(methods):
            # already existing method is not overloadable.
            duplicated = methods.intersection(route.methods)
            raise RouteExists(
                "Route already registered: {} [{}]".format(
                    uri, ",".join(list(duplicated))
                )
            )
        if isinstance(route.handler, CompositionView):
            view = route.handler
        else:
            view = CompositionView()
            view.add(route.methods, route.handler)
        view.add(methods, handler)
        route = route._replace(
            handler=view, methods=methods.union(route.methods)
        )
        return route

    if parameters:
        # TODO: This is too complex, we need to reduce the complexity
        if properties["unhashable"]:
            routes_to_check = self.routes_always_check
            ndx, route = self.check_dynamic_route_exists(
                pattern, routes_to_check, parameters
            )
        else:
            routes_to_check = self.routes_dynamic[url_hash(uri)]
            ndx, route = self.check_dynamic_route_exists(
                pattern, routes_to_check, parameters
            )
        if ndx != -1:
            # Pop the ndx of the route, no dups of the same route
            routes_to_check.pop(ndx)
    else:
        route = self.routes_all.get(uri)

    # prefix the handler name with the blueprint name if available
    # special prefix for static files
    is_static = False
    if name and name.startswith("_static_"):
        is_static = True
        name = name.split("_static_", 1)[-1]

    if hasattr(handler, "__blueprintname__"):
        handler_name = "{}.{}".format(
            handler.__blueprintname__, name or handler.__name__
        )
    else:
        handler_name = name or getattr(handler, "__name__", None)

    if route:
        route = merge_route(route, methods, handler)
    else:
        route = Route(
            handler=handler,
            methods=methods,
            pattern=pattern,
            parameters=parameters,
            name=handler_name,
            uri=uri,
        )

    self.routes_all[uri] = route
    if is_static:
        pair = self.routes_static_files.get(handler_name)
        if not (pair and (pair[0] + "/" == uri or uri + "/" == pair[0])):
            self.routes_static_files[handler_name] = (uri, route)
    else:
        pair = self.routes_names.get(handler_name)
        if not (pair and (pair[0] + "/" == uri or uri + "/" == pair[0])):
            self.routes_names[handler_name] = (uri, route)

    if properties["unhashable"]:
        self.routes_always_check.append(route)
    elif parameters:
        self.routes_dynamic[url_hash(uri)].append(route)
    else:
        self.routes_static[uri] = route
java
public FacebookPage addInstagramUser(String username) {
    ResourceParams parameterSet = newResourceParams();
    parameterSet.set("id", username);
    parameterSet.set("type", "instagram_user");
    return this;
}
java
@Override
protected boolean initiateClient() {
    String message = null;
    for (String host : hosts) {
        vaildateHostPort(host, port);
        Configuration hadoopConf = new Configuration();
        hadoopConf.set("hbase.master", host + ":" + port);
        conn = HBasePropertyReader.hsmd.getDataStore() != null
                ? HBasePropertyReader.hsmd.getDataStore().getConnection()
                : null;
        if (conn != null && conn.getProperties() != null) {
            String zookeeperHost = conn.getProperties().getProperty("hbase.zookeeper.quorum").trim();
            String zookeeperPort = conn.getProperties().getProperty("hbase.zookeeper.property.clientPort").trim();
            vaildateHostPort(zookeeperHost, zookeeperPort);
            hadoopConf.set("hbase.zookeeper.quorum", zookeeperHost != null ? zookeeperHost : host);
            hadoopConf.set("hbase.zookeeper.property.clientPort",
                    zookeeperPort != null ? zookeeperPort : DEFAULT_ZOOKEEPER_PORT);
        } else {
            hadoopConf.set("hbase.zookeeper.quorum", host);
            hadoopConf.set("hbase.zookeeper.property.clientPort", DEFAULT_ZOOKEEPER_PORT);
        }
        Configuration conf = HBaseConfiguration.create(hadoopConf);
        try {
            Connection connection = ConnectionFactory.createConnection(conf);
            admin = (HBaseAdmin) connection.getAdmin();
            return true;
        } catch (MasterNotRunningException mnre) {
            message = mnre.getMessage();
            logger.error("Master not running exception, Caused by:", mnre);
        } catch (ZooKeeperConnectionException zkce) {
            message = zkce.getMessage();
            logger.error("Unable to connect to zookeeper, Caused by:", zkce);
        } catch (IOException ioe) {
            message = ioe.getMessage();
            logger.error("I/O exception, Caused by:", ioe);
        }
    }
    throw new SchemaGenerationException("Master not running exception, Caused by:" + message);
}
java
public void marshall(FacetAttributeDefinition facetAttributeDefinition,
        ProtocolMarshaller protocolMarshaller) {
    if (facetAttributeDefinition == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(facetAttributeDefinition.getType(), TYPE_BINDING);
        protocolMarshaller.marshall(facetAttributeDefinition.getDefaultValue(), DEFAULTVALUE_BINDING);
        protocolMarshaller.marshall(facetAttributeDefinition.getIsImmutable(), ISIMMUTABLE_BINDING);
        protocolMarshaller.marshall(facetAttributeDefinition.getRules(), RULES_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def niggli_reduce(lattice, eps=1e-5):
    """Run Niggli reduction

    Args:
        lattice: Lattice parameters in the form of
            [[a_x, a_y, a_z],
             [b_x, b_y, b_z],
             [c_x, c_y, c_z]]
        eps: float: Tolerance to check if difference of norms of two basis
            vectors is close to zero or not and if two basis vectors are
            orthogonal by the value of dot product being close to zero or
            not. The detail is shown at https://atztogo.github.io/niggli/.

    Returns:
        if the Niggli reduction succeeded:
            Reduced lattice parameters are given as a numpy 'double' array:
            [[a_x, a_y, a_z],
             [b_x, b_y, b_z],
             [c_x, c_y, c_z]]
        otherwise None is returned.
    """
    _set_no_error()

    niggli_lattice = np.array(np.transpose(lattice), dtype='double', order='C')
    result = spg.niggli_reduce(niggli_lattice, float(eps))
    _set_error_message()
    if result == 0:
        return None
    else:
        return np.array(np.transpose(niggli_lattice), dtype='double', order='C')
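A hedged usage sketch, assuming this is the spglib wrapper and that the spglib package is installed (the lattice values below are arbitrary, chosen only to be non-reduced):

import numpy as np
import spglib

# An arbitrary non-reduced lattice; rows are the basis vectors a, b, c.
lattice = np.array([[4.0, 0.0, 0.0],
                    [2.0, 4.0, 0.0],
                    [0.0, 0.0, 3.0]])
reduced = spglib.niggli_reduce(lattice, eps=1e-5)
print(reduced)  # None on failure, otherwise the reduced 3x3 lattice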
python
def _filter_attrs(self, feature, request):
    """Remove some attributes from the feature and set the geometry to None
    in the feature based on the ``attrs`` and ``no_geom`` parameters.
    """
    if 'attrs' in request.params:
        attrs = request.params['attrs'].split(',')
        props = feature.properties
        new_props = {}
        for name in attrs:
            if name in props:
                new_props[name] = props[name]
        feature.properties = new_props
    if asbool(request.params.get('no_geom', False)):
        feature.geometry = None
    return feature
python
def get_unset_cache(self):
    """Return a tuple ``(num_unset_caches, [list of unset cache endpoints])``."""
    caches = []
    if self._cached_api_global_response is None:
        caches.append('global')
    if self._cached_api_ticker_response is None:
        caches.append('ticker')
    return (len(caches), caches)
python
def get_active_modifiers(self):
    """
    Get a list of active keys. Uses XQueryKeymap.

    :return: list of charcodemap_t instances
    """
    keys = ctypes.pointer(charcodemap_t())
    nkeys = ctypes.c_int(0)
    _libxdo.xdo_get_active_modifiers(
        self._xdo, ctypes.byref(keys), ctypes.byref(nkeys))
    return [keys[i] for i in range(nkeys.value)]
java
public T basic(String user, String password) {
    connection.setRequestProperty("Authorization",
            "Basic " + toBase64((user + ":" + password).getBytes()));
    return (T) this;
}
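For reference, the header this builder sets follows the standard HTTP Basic auth format (base64 of "user:password"); a minimal Python sketch with illustrative names:

import base64

def basic_auth_header(user, password):
    # HTTP Basic auth: base64-encode "user:password" and prefix with "Basic ".
    token = base64.b64encode("{}:{}".format(user, password).encode("utf-8"))
    return {"Authorization": "Basic " + token.decode("ascii")}

print(basic_auth_header("alice", "secret"))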
python
def connect_ensime_server(self):
    """Start initial connection with the server."""
    self.log.debug('connect_ensime_server: in')
    server_v2 = isinstance(self, EnsimeClientV2)

    def disable_completely(e):
        if e:
            self.log.error('connection error: %s', e, exc_info=True)
        self.shutdown_server()
        self._display_ws_warning()

    if self.running and self.number_try_connection:
        self.number_try_connection -= 1
        if not self.ensime_server:
            port = self.ensime.http_port()
            uri = "websocket" if server_v2 else "jerky"
            self.ensime_server = gconfig["ensime_server"].format(port, uri)
        with catch(websocket.WebSocketException, disable_completely):
            # Use the default timeout (no timeout).
            options = {"subprotocols": ["jerky"]} if server_v2 else {}
            options['enable_multithread'] = True
            self.log.debug("About to connect to %s with options %s",
                           self.ensime_server, options)
            self.ws = websocket.create_connection(self.ensime_server, **options)
        if self.ws:
            self.send_request({"typehint": "ConnectionInfoReq"})
    else:
        # If it hits this, number_try_connection is 0
        disable_completely(None)
java
public <T> T[] toArray(T[] a) {
    final ReentrantLock lock = this.lock;
    lock.lock();
    try {
        return q.toArray(a);
    } finally {
        lock.unlock();
    }
}
java
public Collection<WsGetGroupsResult> getGroupsForSubjectId(final String subjectId) {
    try {
        val groupsClient = new GcGetGroups().addSubjectId(subjectId);
        val results = groupsClient.execute().getResults();
        if (results == null || results.length == 0) {
            LOGGER.warn("Subject id [{}] could not be located.", subjectId);
            return new ArrayList<>(0);
        }
        LOGGER.debug("Found [{}] groups for [{}]", results.length, subjectId);
        return CollectionUtils.wrapList(results);
    } catch (final Exception e) {
        LOGGER.warn("Grouper WS did not respond successfully. Ensure your credentials are correct "
                + ", the url endpoint for Grouper WS is correctly configured and the subject [{}] "
                + "exists in Grouper.", subjectId, e);
    }
    return new ArrayList<>(0);
}
java
public Set<Class<?>> findReferencedTypes(String typeName) {
    Set<Class<?>> referencedTypes = new HashSet<>();

    // use the cached version if possible
    if (referencedTypesCache.containsKey(typeName)) {
        return referencedTypesCache.get(typeName);
    }

    try {
        CtClass cc = classPool.get(typeName);
        for (Object referencedType : cc.getRefClasses()) {
            String referencedTypeName = (String) referencedType;
            if (!isExcluded(referencedTypeName)) {
                try {
                    referencedTypes.add(loadClass(referencedTypeName));
                } catch (Throwable t) {
                    log.debug("Could not find " + referencedTypeName + " ... ignoring.");
                }
            }
        }
        // remove the type itself
        referencedTypes.remove(loadClass(typeName));
    } catch (Exception e) {
        log.debug("Error finding referenced types for " + typeName + " ... ignoring.");
        // since there was an error, we can't find the set of referenced types from it, so...
        referencedTypesCache.put(typeName, new HashSet<>());
    }

    // cache for the next time
    referencedTypesCache.put(typeName, referencedTypes);
    return referencedTypes;
}
python
def channel(self):
    """If no channel exists, a new one is requested."""
    if not self._channel:
        self._channel_ref = weakref.ref(self.connection.get_channel())
    return self._channel
java
public void setTarget(File target) {
    try {
        OutputStream targetStream = target == null ? null : new FileOutputStream(target);
        setTarget(targetStream);
    } catch (Exception e) {
        throw new RuntimeException("Can't create target stream", e);
    }
}
python
def write_gif(dataset, filename, fps=10):
    """Write a NumPy array to GIF 89a format.

    Or write a list of NumPy arrays to an animation (GIF 89a format).

    - Positional arguments::

        :param dataset: A NumPy array or list of arrays with shape
                        rgb x rows x cols and integer values in [0, 255].
        :param filename: The output file that will contain the GIF image.
        :param fps: The (integer) frames/second of the animation (default 10).
        :type dataset: a NumPy array or list of NumPy arrays.
        :return: None

    - Example: a minimal array, with one red pixel, would look like this::

        import numpy as np
        one_red_pixel = np.array([[[255]], [[0]], [[0]]])
        write_gif(one_red_pixel, 'red_pixel.gif')

    ..raises:: ValueError
    """
    try:
        check_dataset(dataset)
    except ValueError:
        dataset = try_fix_dataset(dataset)
        check_dataset(dataset)

    delay_time = 100 // int(fps)

    def encode(d):
        four_d = isinstance(dataset, numpy.ndarray) and len(dataset.shape) == 4
        if four_d or not isinstance(dataset, numpy.ndarray):
            return _make_animated_gif(d, delay_time=delay_time)
        else:
            return _make_gif(d)

    with open(filename, 'wb') as outfile:
        outfile.write(HEADER)
        for block in encode(dataset):
            outfile.write(block)
        outfile.write(TRAILER)
java
@Mode(TestMode.EXPERIMENTAL)
@Test
@AllowedFFDC // The tested exceptions cause FFDC so we have to allow for this.
public void runFreshMasterBranchTck() throws Exception {
    File repoParent = new File(GIT_REPO_PARENT_DIR);
    File repo = new File(repoParent, GIT_REPO_NAME);

    MvnUtils.mvnCleanInstall(repo);

    HashMap<String, String> addedProps = new HashMap<String, String>();

    String apiVersion = MvnUtils.getApiSpecVersionAfterClone(repo);
    System.out.println("Queried api.version is : " + apiVersion);
    addedProps.put(MvnUtils.API_VERSION, apiVersion);

    String tckVersion = MvnUtils.getTckVersionAfterClone(repo);
    System.out.println("Queried tck.version is : " + tckVersion);
    addedProps.put(MvnUtils.TCK_VERSION, tckVersion);

    // A command line -Dprop=value actually gets to here as a environment variable...
    String implVersion = System.getenv("impl.version");
    System.out.println("Passed in impl.version is : " + implVersion);
    addedProps.put(MvnUtils.IMPL_VERSION, implVersion);

    // We store a set of keys that we want the system to add "1.1" or "1.2" etc to
    // depending on the pom.xml contents.
    HashSet<String> versionedLibraries = new HashSet<>(
            Arrays.asList("com.ibm.websphere.org.eclipse.microprofile.config"));
    // Used if there is no impl matching the spec/pom.xml <version> AND impl.version is not set
    String backStopImpl = "1.4";
    addedProps.put(MvnUtils.BACKSTOP_VERSION, backStopImpl);

    MvnUtils.runTCKMvnCmd(server, "com.ibm.ws.microprofile.config_fat_tck",
            this.getClass() + ":launchConfigTCK", MvnUtils.DEFAULT_SUITE_FILENAME,
            addedProps, versionedLibraries);
}
python
def is_valid_ipv6(ip):
    """
    Return True if given ip is a valid IPv6 address.
    """
    # XXX this is not complete: check ipv6 and ipv4 semantics too here
    if not (_ipv6_re.match(ip) or
            _ipv6_ipv4_re.match(ip) or
            _ipv6_abbr_re.match(ip) or
            _ipv6_ipv4_abbr_re.match(ip)):
        return False
    return True
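Where the standard library is available, the same check can be done without the hand-rolled regexes; a sketch of that alternative (my suggestion, not what this module does):

import ipaddress

def is_valid_ipv6_stdlib(ip):
    """Return True if `ip` parses as an IPv6 address (stdlib version)."""
    try:
        ipaddress.IPv6Address(ip)
    except ValueError:
        return False
    return True

print(is_valid_ipv6_stdlib("::1"))        # True
print(is_valid_ipv6_stdlib("not-an-ip"))  # False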
python
def effective_wavelength(self, binned=True, wavelengths=None, mode='efflerg'):
    """Calculate :ref:`effective wavelength <synphot-formula-effwave>`.

    Parameters
    ----------
    binned : bool
        Sample data in native wavelengths if `False`.
        Else, sample binned data (default).

    wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
        Wavelength values for sampling.
        If not a Quantity, assumed to be in Angstrom.
        If `None`, ``self.waveset`` or `binset` is used, depending
        on ``binned``.

    mode : {'efflerg', 'efflphot'}
        Flux is first converted to the unit below before calculation:

            * 'efflerg' - FLAM
            * 'efflphot' - PHOTLAM (deprecated)

    Returns
    -------
    eff_lam : `~astropy.units.quantity.Quantity`
        Observation effective wavelength.

    Raises
    ------
    synphot.exceptions.SynphotError
        Invalid mode.
    """
    mode = mode.lower()

    if mode == 'efflerg':
        flux_unit = units.FLAM
    elif mode == 'efflphot':
        warnings.warn(
            'Usage of EFFLPHOT is deprecated.', AstropyDeprecationWarning)
        flux_unit = units.PHOTLAM
    else:
        raise exceptions.SynphotError('mode must be "efflerg" or "efflphot"')

    if binned:
        x = self._validate_binned_wavelengths(wavelengths).value
        y = self.sample_binned(wavelengths=x, flux_unit=flux_unit).value
    else:
        x = self._validate_wavelengths(wavelengths).value
        y = units.convert_flux(x, self(x), flux_unit).value

    num = np.trapz(y * x ** 2, x=x)
    den = np.trapz(y * x, x=x)

    if den == 0.0:  # pragma: no cover
        eff_lam = 0.0
    else:
        eff_lam = abs(num / den)

    return eff_lam * self._internal_wave_unit
java
void invokeAction(UrlInfo urlInfo, ResponseHelper responseHelper) throws ServletException {
    try {
        // Maybe there isn't any controller, so the page will be rendered
        // without calling any action
        if (urlInfo.getController() != null) {
            Strategy strategy = calculateStrategy(urlInfo.getController());
            LOGGER.debug("Strategy: {}", strategy);

            Object result = null;
            try {
                switch (strategy) {
                    case INHERIT:
                        result = inheritedStrategy(urlInfo, responseHelper);
                        break;
                    default:
                        result = signatureStrategy(urlInfo, responseHelper);
                        break;
                }
                // If there is a result, serialize it here so the developer
                // doesn't have to do it in the controller method
                if (result != null) {
                    responseHelper.serialize(result);
                }
            } catch (InvocationTargetException e) {
                responseHelper.serialize(e.getTargetException());
            }
        } else {
            LOGGER.warn("There is no controller defined for url [{}]", urlInfo.getUrl());
        }
    } catch (ClassNotFoundException e) {
        LOGGER.warn(e.getLocalizedMessage(), e);
    } catch (NoSuchMethodException e) {
        LOGGER.warn("NoSuchMethodException {}", e.getLocalizedMessage());
    } catch (Exception e) {
        LOGGER.warn(e.getLocalizedMessage(), e);
        throw new ServletException(e);
    }
}
java
public final <R extends Request<T>> R setShouldCache(boolean shouldCache) {
    checkIfActive();
    this.shouldCache = shouldCache;
    return (R) this;
}
python
def cli(env):
    """List options for creating Reserved Capacity"""
    manager = CapacityManager(env.client)
    items = manager.get_create_options()
    items.sort(key=lambda term: int(term['capacity']))
    table = formatting.Table(
        ["KeyName", "Description", "Term", "Default Hourly Price Per Instance"],
        title="Reserved Capacity Options")
    table.align["Hourly Price"] = "l"
    table.align["Description"] = "l"
    table.align["KeyName"] = "l"
    for item in items:
        table.add_row([
            item['keyName'], item['description'], item['capacity'], get_price(item)
        ])
    env.fout(table)

    regions = manager.get_available_routers()
    location_table = formatting.Table(['Location', 'POD', 'BackendRouterId'],
                                      'Orderable Locations')
    for region in regions:
        for location in region['locations']:
            for pod in location['location']['pods']:
                location_table.add_row(
                    [region['keyname'], pod['backendRouterName'], pod['backendRouterId']])
    env.fout(location_table)
python
def _quickLevels(self, data):
    """
    Estimate the min/max values of *data* by subsampling.
    """
    while data.size > 1e6:
        ax = np.argmax(data.shape)
        sl = [slice(None)] * data.ndim
        sl[ax] = slice(None, None, 2)
        data = data[sl]
    return self._levelsFromMedianAndStd(data)
python
def get_zname(self, var, coords=None):
    """Get the name of the z-dimension

    This method gives the name of the z-dimension (which is not necessarily
    the name of the coordinate if the variable has a coordinate attribute)

    Parameters
    ----------
    var: xarray.Variables
        The variable to get the dimension for
    coords: dict
        The coordinates to use for checking the axis attribute. If None,
        they are not used

    Returns
    -------
    str or None
        The coordinate name or None if no vertical coordinate could be found

    See Also
    --------
    get_z"""
    if coords is not None:
        coord = self.get_variable_by_axis(var, 'z', coords)
        if coord is not None and coord.name in var.dims:
            return coord.name

    dimlist = list(self.z.intersection(var.dims))
    if dimlist:
        if len(dimlist) > 1:
            warn("Found multiple matches for z coordinate in the variable: "
                 "%s. I use %s" % (', '.join(dimlist), dimlist[0]),
                 PsyPlotRuntimeWarning)
        return dimlist[0]

    # otherwise we return the coordinate in the third last position
    is_unstructured = self.is_unstructured(var)
    icheck = -2 if is_unstructured else -3
    min_dim = abs(icheck) if 'variable' not in var.dims else abs(icheck - 1)
    if var.ndim >= min_dim and var.dims[icheck] != self.get_tname(var, coords):
        return var.dims[icheck]
    return None
python
def Elasticsearch(*args, **kwargs):
    """Elasticsearch wrapper function

    Wrapper function around the official Elasticsearch class that adds
    a simple version check upon initialization.
    In particular it checks if the major version of the library in use
    matches the one of the cluster that we are trying to interact with.
    The check can be skipped by setting the check_version parameter to false.

    #note: Boyska didn't like subclassing :)
    """
    check_version = kwargs.pop('check_version', True)
    es = Elasticsearch_official(*args, **kwargs)
    if check_version:
        es_version = es.info()['version']['number'].split('.')
        if int(es_version[0]) != int(es_pylib_version[0]):
            raise RuntimeError(
                "The Elasticsearch python library version does not match the "
                "one of the running cluster: {} != {}. Please install the "
                "correct elasticsearch-py version".format(
                    es_pylib_version[0], es_version[0]))
    return es
java
public String resolve(String key) {
    if (key.equals("HTTP_USER_AGENT")) {
        return request.getHeader("user-agent");
    } else if (key.equals("HTTP_REFERER")) {
        return request.getHeader("referer");
    } else if (key.equals("HTTP_COOKIE")) {
        return request.getHeader("cookie");
    } else if (key.equals("HTTP_FORWARDED")) {
        return request.getHeader("forwarded");
    } else if (key.equals("HTTP_HOST")) {
        String host = request.getHeader("host");
        int index = (host != null) ? host.indexOf(':') : -1;
        if (index != -1)
            host = host.substring(0, index);
        return host;
    } else if (key.equals("HTTP_PROXY_CONNECTION")) {
        return request.getHeader("proxy-connection");
    } else if (key.equals("HTTP_ACCEPT")) {
        return request.getHeader("accept");
    } else if (key.equals("REMOTE_ADDR")) {
        return request.getRemoteAddr();
    } else if (key.equals("REMOTE_HOST")) {
        return request.getRemoteHost();
    } else if (key.equals("REMOTE_PORT")) {
        return String.valueOf(request.getRemotePort());
    } else if (key.equals("REMOTE_USER")) {
        return request.getRemoteUser();
    } else if (key.equals("REMOTE_IDENT")) {
        return request.getRemoteUser();
    } else if (key.equals("REQUEST_METHOD")) {
        return request.getMethod();
    } else if (key.equals("SCRIPT_FILENAME")) {
        return request.getRealPath(request.getServletPath());
    } else if (key.equals("REQUEST_PATH")) {
        return servletRequestContext.getExchange().getRelativePath();
    } else if (key.equals("CONTEXT_PATH")) {
        return request.getContextPath();
    } else if (key.equals("SERVLET_PATH")) {
        return emptyStringIfNull(request.getServletPath());
    } else if (key.equals("PATH_INFO")) {
        return emptyStringIfNull(request.getPathInfo());
    } else if (key.equals("QUERY_STRING")) {
        return emptyStringIfNull(request.getQueryString());
    } else if (key.equals("AUTH_TYPE")) {
        return request.getAuthType();
    } else if (key.equals("DOCUMENT_ROOT")) {
        return request.getRealPath("/");
    } else if (key.equals("SERVER_NAME")) {
        return request.getLocalName();
    } else if (key.equals("SERVER_ADDR")) {
        return request.getLocalAddr();
    } else if (key.equals("SERVER_PORT")) {
        return String.valueOf(request.getLocalPort());
    } else if (key.equals("SERVER_PROTOCOL")) {
        return request.getProtocol();
    } else if (key.equals("SERVER_SOFTWARE")) {
        return "tomcat";
    } else if (key.equals("THE_REQUEST")) {
        return request.getMethod() + " " + request.getRequestURI() + " " + request.getProtocol();
    } else if (key.equals("REQUEST_URI")) {
        return request.getRequestURI();
    } else if (key.equals("REQUEST_FILENAME")) {
        return request.getPathTranslated();
    } else if (key.equals("HTTPS")) {
        return request.isSecure() ? "on" : "off";
    } else if (key.equals("TIME_YEAR")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.YEAR));
    } else if (key.equals("TIME_MON")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.MONTH));
    } else if (key.equals("TIME_DAY")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.DAY_OF_MONTH));
    } else if (key.equals("TIME_HOUR")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.HOUR_OF_DAY));
    } else if (key.equals("TIME_MIN")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.MINUTE));
    } else if (key.equals("TIME_SEC")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.SECOND));
    } else if (key.equals("TIME_WDAY")) {
        return String.valueOf(Calendar.getInstance().get(Calendar.DAY_OF_WEEK));
    } else if (key.equals("TIME")) {
        return DateUtils.getCurrentDateTime(servletRequestContext.getExchange());
    }
    return null;
}
python
def remove_event_source(event_source, lambda_arn, target_function, boto_session, dry=False):
    """
    Given an event_source dictionary, create the object and remove the event source.
    """
    event_source_obj, ctx, funk = get_event_source(
        event_source, lambda_arn, target_function, boto_session, dry=False)

    # This is slightly dirty, but necessary for using Kappa this way.
    funk.arn = lambda_arn
    if not dry:
        rule_response = event_source_obj.remove(funk)
        return rule_response
    else:
        return event_source_obj
python
def delimit_words(self):
    """This method takes the existing encoded binary string and returns a
    binary string that will pad it such that the encoded string contains
    only full bytes.
    """
    bits_short = 8 - (len(self.buffer.getvalue()) % 8)
    # The string already falls on a byte boundary: do nothing
    if bits_short == 0 or bits_short == 8:
        return None
    else:
        return self.binary_string(0, bits_short)
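To make the padding arithmetic concrete, a tiny standalone illustration (assumed semantics: the buffer holds one character per bit, so its length is the bit count):

# Byte-boundary padding math used above: 8 - (n % 8), with 0 and 8
# both meaning "already aligned, no padding needed".
for n_bits in (8, 13, 16, 21):
    bits_short = 8 - (n_bits % 8)
    pad = 0 if bits_short in (0, 8) else bits_short
    print(n_bits, "bits ->", pad, "padding bits")
# 8 -> 0, 13 -> 3, 16 -> 0, 21 -> 3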
java
public List<PersonGroup> list(ListPersonGroupsOptionalParameter listOptionalParameter) {
    return listWithServiceResponseAsync(listOptionalParameter).toBlocking().single().body();
}
java
public void handleNotification(Notification notification, Object obj) {
    // handle JMX connection status notification
    if (notification instanceof JMXConnectionNotification) {
        JMXConnectionNotification jmxcNotification = (JMXConnectionNotification) notification;
        if (jmxcNotification.getType().equals(JMXConnectionNotification.FAILED)) {
            logger.error("JMX connection to " + componentName + " ("
                    + jmxcNotification.getConnectionId() + ") has been lost !\n"
                    + jmxcNotification.getMessage());
            propertiesHistory.signalPossibleNotificationLoss();
        } else if (jmxcNotification.getType().equals(JMXConnectionNotification.NOTIFS_LOST)) {
            logger.error("JMX connection to " + componentName + " ("
                    + jmxcNotification.getConnectionId()
                    + ") could have lost some notifications:\n"
                    + jmxcNotification.getMessage());
            /*
            if (jmxcNotification.getUserData() instanceof Long) {
                lastNotificationSequenceNumber += (Long) jmxcNotification.getUserData();
            }
            */
            propertiesHistory.signalPossibleNotificationLoss();
        }
        return;
    }

    // handle JMX attribute change notification
    if (!(notification instanceof AttributeChangeNotification)) {
        logger.error("Received a JMX notification from " + componentName
                + " which is not of type AttributeChangeNotification ("
                + notification.getClass().getName() + ")");
        return;
    }

    AttributeChangeNotification attributeChangeNotification =
            (AttributeChangeNotification) notification;
    Object source = attributeChangeNotification.getSource();
    if (!(source instanceof String)) {
        // ignore AttributeChangeDetected sent by
        // RequiredModelMBean#sendAttributeChangeNotification because sequence
        // number is always set to 1
        if (source instanceof ObjectName
                && "AttributeChangeDetected".equals(attributeChangeNotification.getMessage())) {
            logger.warn("Ignoring JMX AttributeChangeDetected notification from " + componentName
                    + " for attribute " + attributeChangeNotification.getAttributeName());
        } else {
            logger.error("JMX notification source from " + componentName
                    + " is not of type String (" + source.getClass().getName() + ")");
        }
        return;
    }

    // check that we didn't miss a notification
    long notificationSequenceNumber = notification.getSequenceNumber();
    if (lastNotificationSequenceNumber != -1) {
        if (notificationSequenceNumber != (lastNotificationSequenceNumber + 1)) {
            logger.error("Missed a JMX notification from " + componentName
                    + "! (Received sequence number " + notificationSequenceNumber
                    + " while last was " + lastNotificationSequenceNumber + ")");
            propertiesHistory.signalPossibleNotificationLoss();
        }
    }
    lastNotificationSequenceNumber = notificationSequenceNumber;

    String message = attributeChangeNotification.getMessage();
    int separatorIndex = message.indexOf(':');
    String subComponent = ((separatorIndex != -1) ? message.substring(0, separatorIndex) : "");
    String subComponentName = subComponentsMap.get(subComponent);
    if (subComponentName == null) {
        logger.error("Received JMX notification from " + componentName
                + " from unknown sub-component " + subComponent + " - message: "
                + attributeChangeNotification.getMessage());
        return;
    }

    String attributeName = attributeChangeNotification.getAttributeName();

    // remove "prefix." or "prefix:" if any
    int dotIndex = attributeName.lastIndexOf('.');
    if (dotIndex < 0) {
        dotIndex = attributeName.lastIndexOf(':');
    }
    if (dotIndex > 0) {
        if (dotIndex == attributeName.length() - 1) {
            logger.error("Received JMX notification from " + componentName
                    + " for empty attribute name: " + attributeName);
            return;
        }
        attributeName = attributeName.substring(dotIndex + 1);
    }

    // rename "StateProperty" into "state"
    if (attributeName.equals("StateProperty")) {
        attributeName = "state";
    }

    String propertyName = (subComponentName.isEmpty() ? ""
            : subComponentName.toLowerCase() + ".") + attributeName.toLowerCase();
    String oldValue = String.valueOf(attributeChangeNotification.getOldValue()).toLowerCase();
    String newValue = String.valueOf(attributeChangeNotification.getNewValue()).toLowerCase();

    // hack to get "null" string if the String is empty ("")
    if (oldValue.isEmpty()) {
        oldValue = "null";
    }
    if (newValue.isEmpty()) {
        newValue = "null";
    }

    boolean checkOldValue = (attributeChangeNotification.getOldValue() != null);
    propertiesHistory.addChange(propertyName, oldValue, newValue,
            notificationSequenceNumber, checkOldValue);
}
java
private static <E extends Element> List<E> findTopLevelElementsRecurse(Class<E> elementType, Node node, List<E> matches) {
    for (Element elem : node.getChildElements()) {
        if (elementType.isInstance(elem)) {
            // Found match
            if (matches == null) matches = new ArrayList<>();
            matches.add(elementType.cast(elem));
        } else {
            // Look further down the tree
            matches = findTopLevelElementsRecurse(elementType, elem, matches);
        }
    }
    return matches;
}
java
@Override
public List<Diagnostic> getDiagnostics(final int severity) {
    InputModel model = getComponentModel();
    switch (severity) {
        case Diagnostic.ERROR:
            return model.errorDiagnostics;
        case Diagnostic.WARNING:
            return model.warningDiagnostics;
        case Diagnostic.INFO:
            return model.infoDiagnostics;
        case Diagnostic.SUCCESS:
            return model.successDiagnostics;
        default:
            return null;
    }
}
python
def gaps(args): """ %prog gaps agpfile Print out the distribution of gapsizes. Option --merge allows merging of adjacent gaps which is used by tidy(). """ from jcvi.graphics.histogram import loghistogram p = OptionParser(gaps.__doc__) p.add_option("--merge", dest="merge", default=False, action="store_true", help="Merge adjacent gaps (to conform to AGP specification)") p.add_option("--header", default=False, action="store_true", help="Produce an AGP header [default: %default]") opts, args = p.parse_args(args) if len(args) != 1: sys.exit(not p.print_help()) merge = opts.merge agpfile, = args if merge: merged_agpfile = agpfile.replace(".agp", ".merged.agp") fw = open(merged_agpfile, "w") agp = AGP(agpfile) sizes = [] data = [] # store merged AGPLine's priorities = ("centromere", "telomere", "scaffold", "contig", \ "clone", "fragment") for is_gap, alines in groupby(agp, key=lambda x: (x.object, x.is_gap)): alines = list(alines) is_gap = is_gap[1] if is_gap: gap_size = sum(x.gap_length for x in alines) gap_types = set(x.gap_type for x in alines) for gtype in ("centromere", "telomere"): if gtype in gap_types: gap_size = gtype sizes.append(gap_size) b = deepcopy(alines[0]) b.object_beg = min(x.object_beg for x in alines) b.object_end = max(x.object_end for x in alines) b.gap_length = sum(x.gap_length for x in alines) assert b.gap_length == b.object_end - b.object_beg + 1 b.component_type = 'U' if b.gap_length == 100 else 'N' gtypes = [x.gap_type for x in alines] for gtype in priorities: if gtype in gtypes: b.gap_type = gtype break linkages = [x.linkage for x in alines] for linkage in ("no", "yes"): if linkage in linkages: b.linkage = linkage break alines = [b] data.extend(alines) loghistogram(sizes) if opts.header: AGP.print_header(fw, organism="Medicago truncatula", taxid=3880, source="J. Craig Venter Institute") if merge: for ob, bb in groupby(data, lambda x: x.object): for i, b in enumerate(bb): b.part_number = i + 1 print(b, file=fw) return merged_agpfile
java
private synchronized void initializeAllFatClients() {
    updateCoordinatorMetadataWithLatestState();
    // get all stores defined in the config file
    Map<String, Properties> storeClientConfigsMap = storeClientConfigs.getAllConfigsMap();
    for (StoreDefinition storeDef : this.coordinatorMetadata.getStoresDefs()) {
        String storeName = storeDef.getName();
        // initialize only those stores defined in the client configs file
        if (storeClientConfigsMap.get(storeName) != null) {
            initializeFatClient(storeName, storeClientConfigsMap.get(storeName));
        }
    }
}
python
def download(*packages, **kwargs): ''' Download packages to the local disk. refresh force a refresh if set to True. If set to False (default) it depends on zypper if a refresh is executed. root operate on a different root directory. CLI example: .. code-block:: bash salt '*' pkg.download httpd salt '*' pkg.download httpd postfix ''' if not packages: raise SaltInvocationError('No packages specified') root = kwargs.get('root', None) refresh = kwargs.get('refresh', False) if refresh: refresh_db(root) pkg_ret = {} for dld_result in __zypper__(root=root).xml.call('download', *packages).getElementsByTagName("download-result"): repo = dld_result.getElementsByTagName("repository")[0] path = dld_result.getElementsByTagName("localfile")[0].getAttribute("path") pkg_info = { 'repository-name': repo.getAttribute('name'), 'repository-alias': repo.getAttribute('alias'), 'path': path, } key = _get_first_aggregate_text( dld_result.getElementsByTagName('name') ) if __salt__['lowpkg.checksum'](pkg_info['path'], root=root): pkg_ret[key] = pkg_info if pkg_ret: failed = [pkg for pkg in packages if pkg not in pkg_ret] if failed: pkg_ret['_error'] = ('The following package(s) failed to download: {0}'.format(', '.join(failed))) return pkg_ret raise CommandExecutionError( 'Unable to download packages: {0}'.format(', '.join(packages)) )
java
public ServiceCall<ValueCollection> listValues(ListValuesOptions listValuesOptions) {
    Validator.notNull(listValuesOptions, "listValuesOptions cannot be null");
    String[] pathSegments = { "v1/workspaces", "entities", "values" };
    String[] pathParameters = { listValuesOptions.workspaceId(), listValuesOptions.entity() };
    RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments, pathParameters));
    builder.query("version", versionDate);
    Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("conversation", "v1", "listValues");
    for (Entry<String, String> header : sdkHeaders.entrySet()) {
        builder.header(header.getKey(), header.getValue());
    }
    builder.header("Accept", "application/json");
    if (listValuesOptions.export() != null) {
        builder.query("export", String.valueOf(listValuesOptions.export()));
    }
    if (listValuesOptions.pageLimit() != null) {
        builder.query("page_limit", String.valueOf(listValuesOptions.pageLimit()));
    }
    if (listValuesOptions.includeCount() != null) {
        builder.query("include_count", String.valueOf(listValuesOptions.includeCount()));
    }
    if (listValuesOptions.sort() != null) {
        builder.query("sort", listValuesOptions.sort());
    }
    if (listValuesOptions.cursor() != null) {
        builder.query("cursor", listValuesOptions.cursor());
    }
    if (listValuesOptions.includeAudit() != null) {
        builder.query("include_audit", String.valueOf(listValuesOptions.includeAudit()));
    }
    return createServiceCall(builder.build(), ResponseConverterUtils.getObject(ValueCollection.class));
}
python
def tags(self): """ :return: Returns tags from config and `JAEGER_TAGS` environment variable to use as process-wide tracer tags """ tags = self.config.get('tags', {}) env_tags = os.environ.get('JAEGER_TAGS', '') if env_tags: for kv in env_tags.split(','): key, value = kv.split('=') tags[key.strip()] = value.strip() return tags
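A minimal standalone sketch of the `JAEGER_TAGS` parsing logic above; the helper name `parse_env_tags` is hypothetical and not part of the source.

# Hypothetical helper illustrating how the tags() method splits "k1=v1,k2=v2" pairs.
def parse_env_tags(env_value):
    tags = {}
    for kv in env_value.split(','):
        key, value = kv.split('=')
        tags[key.strip()] = value.strip()
    return tags

# Whitespace around keys and values is stripped, matching the method above.
assert parse_env_tags('region=us-east, env = prod') == {'region': 'us-east', 'env': 'prod'}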
java
public static SingleItemSketch create(final byte[] data) {
    if ((data == null) || (data.length == 0)) {
        return null;
    }
    return new SingleItemSketch(hash(data, DEFAULT_UPDATE_SEED)[0] >>> 1);
}
java
public static StringExpression groupConcat(Expression<String> expr) {
    return Expressions.stringOperation(SQLOps.GROUP_CONCAT, expr);
}
java
public void destroy(GSSCredential credential, DestroyParams params) throws MyProxyException {
    if (credential == null) {
        throw new IllegalArgumentException("credential == null");
    }
    if (params == null) {
        throw new IllegalArgumentException("params == null");
    }
    String msg = params.makeRequest();
    Socket gsiSocket = null;
    OutputStream out = null;
    InputStream in = null;
    try {
        gsiSocket = getSocket(credential);
        out = gsiSocket.getOutputStream();
        in = gsiSocket.getInputStream();
        if (!((GssSocket) gsiSocket).getContext().getConfState()) {
            throw new Exception("Confidentiality requested but not available");
        }
        // send message
        out.write(msg.getBytes());
        out.flush();
        if (logger.isDebugEnabled()) {
            logger.debug("Req sent:" + params);
        }
        handleReply(in);
    } catch (Exception e) {
        throw new MyProxyException("MyProxy destroy failed.", e);
    } finally {
        // close socket
        close(out, in, gsiSocket);
    }
}
python
def snmp_server_agtconfig_location(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") snmp_server = ET.SubElement(config, "snmp-server", xmlns="urn:brocade.com:mgmt:brocade-snmp") agtconfig = ET.SubElement(snmp_server, "agtconfig") location = ET.SubElement(agtconfig, "location") location.text = kwargs.pop('location') callback = kwargs.pop('callback', self._callback) return callback(config)
python
def btaddrtochars(addr): """ Takes a bluetooth address and returns a tuple with the corresponding char values. This can then be used to construct a IOBluetoothDevice object, providing the signature of the withAddress: selector has been set (as in _setpyobjcsignatures() in this module). For example: >>> chars = btaddrtochars("00:0e:0a:00:a2:00") >>> chars (0, 14, 10, 0, 162, 0) >>> device = _IOBluetooth.IOBluetoothDevice.withAddress_(chars) >>> type(device) <objective-c class IOBluetoothDevice at 0xa4024988> >>> device.getAddressString() u'00-0e-0a-00-a2-00' """ if not _lightbluecommon._isbtaddr(addr): raise TypeError("address %s not valid bluetooth address" % str(addr)) if addr.find(":") == -1: addr = addr.replace("-", ":") # consider alternative addr separator # unhexlify gives binary value like '\x0e', then ord to get the char value. # unhexlify throws TypeError if value is not a hex pair. import binascii chars = [ord(binascii.unhexlify(part)) for part in addr.split(":")] return tuple(chars)
python
def parse(cls, src, dist=None): """Parse a single entry point from string `src` Entry point syntax follows the form:: name = some.module:some.attr [extra1, extra2] The entry name and module name are required, but the ``:attrs`` and ``[extras]`` parts are optional """ m = cls.pattern.match(src) if not m: msg = "EntryPoint must be in 'name=module:attrs [extras]' format" raise ValueError(msg, src) res = m.groupdict() extras = cls._parse_extras(res['extras']) attrs = res['attr'].split('.') if res['attr'] else () return cls(res['name'], res['module'], attrs, extras, dist)
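For illustration, a short usage sketch against the real pkg_resources.EntryPoint.parse API; the attribute names shown assume the standard setuptools implementation.

from pkg_resources import EntryPoint

ep = EntryPoint.parse("console = mypkg.cli:main [color, tui]")
print(ep.name)         # 'console'
print(ep.module_name)  # 'mypkg.cli'
print(ep.attrs)        # ('main',)
print(ep.extras)       # ('color', 'tui')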
java
@Override
public com.liferay.commerce.product.type.virtual.model.CPDefinitionVirtualSetting
        createCPDefinitionVirtualSetting(long CPDefinitionVirtualSettingId) {
    return _cpDefinitionVirtualSettingLocalService.createCPDefinitionVirtualSetting(
        CPDefinitionVirtualSettingId);
}
python
def ancovan(dv=None, covar=None, between=None, data=None,
            export_filename=None):
    """ANCOVA with n covariates.

    This is an internal function. The main call to this function should be
    done by the :py:func:`pingouin.ancova` function.

    Parameters
    ----------
    dv : string
        Name of column containing the dependent variable.
    covar : string
        Name(s) of columns containing the covariates.
    between : string
        Name of column containing the between factor.
    data : pandas DataFrame
        DataFrame
    export_filename : string
        Filename (without extension) for the output file.
        If None, do not export the table.
        By default, the file will be created in the current python console
        directory. To change that, specify the filename with full path.

    Returns
    -------
    aov : DataFrame
        ANCOVA summary ::

        'Source' : Names of the factors considered
        'SS' : Sums of squares
        'DF' : Degrees of freedom
        'F' : F-values
        'p-unc' : Uncorrected p-values
    """
    # Check that statsmodels is installed
    from pingouin.utils import _is_statsmodels_installed
    _is_statsmodels_installed(raise_error=True)
    from statsmodels.api import stats
    from statsmodels.formula.api import ols

    # Check that covariates are numeric ('float', 'int')
    assert all([data[covar[i]].dtype.kind in 'fi' for i in range(len(covar))])

    # Fit ANCOVA model
    formula = dv + ' ~ C(' + between + ')'
    for c in covar:
        formula += ' + ' + c
    model = ols(formula, data=data).fit()
    aov = stats.anova_lm(model, typ=2).reset_index()

    aov.rename(columns={'index': 'Source', 'sum_sq': 'SS',
                        'df': 'DF', 'PR(>F)': 'p-unc'}, inplace=True)

    aov.loc[0, 'Source'] = between

    aov['DF'] = aov['DF'].astype(int)
    aov[['SS', 'F']] = aov[['SS', 'F']].round(3)

    # Export to .csv
    if export_filename is not None:
        _export_table(aov, export_filename)
    return aov
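A hedged usage sketch with toy data; it assumes pandas, numpy, statsmodels, and the pingouin helpers imported inside the function are available, and the column names are made up for illustration.

import numpy as np
import pandas as pd

# Toy data: one between factor with three levels and two numeric covariates.
rng = np.random.default_rng(42)
df = pd.DataFrame({
    'Scores': rng.normal(5, 1, 30),
    'Income': rng.normal(50, 10, 30),
    'BMI': rng.normal(25, 3, 30),
    'Method': np.repeat(['A', 'B', 'C'], 10),
})
aov = ancovan(dv='Scores', covar=['Income', 'BMI'], between='Method', data=df)
print(aov)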
python
from contextlib import contextmanager

@contextmanager  # required for the "with" protocol; without it this is a bare generator
def logged_timer(message):
    "Context manager for timing snippets of code. Echoes to logging module."
    tick = time.time()
    yield
    tock = time.time()
    logging.info("%s: %.3f seconds", message, (tock - tick))
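A minimal usage sketch, assuming the @contextmanager decorator noted above and the standard logging/time modules.

import logging
import time

logging.basicConfig(level=logging.INFO)
with logged_timer("sleep demo"):
    time.sleep(0.1)
# emits something like: INFO:root:sleep demo: 0.100 seconds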
python
def _log_diff_memory_data(self, prefix, new_memory_data, old_memory_data): """ Computes and logs the difference in memory utilization between the given old and new memory data. """ def _vmem_used(memory_data): return memory_data['machine_data'].used def _process_mem_percent(memory_data): return memory_data['process_data']['memory_percent'] def _process_rss(memory_data): return memory_data['process_data']['memory_info'].rss def _process_vms(memory_data): return memory_data['process_data']['memory_info'].vms if new_memory_data and old_memory_data: log.info( u"%s Diff Vmem used: %s, Diff percent memory: %s, Diff rss: %s, Diff vms: %s", prefix, _vmem_used(new_memory_data) - _vmem_used(old_memory_data), _process_mem_percent(new_memory_data) - _process_mem_percent(old_memory_data), _process_rss(new_memory_data) - _process_rss(old_memory_data), _process_vms(new_memory_data) - _process_vms(old_memory_data), )
python
def calc_evpo_v1(self):
    """Calculate land use and month specific values of potential
    evapotranspiration.

    Required control parameters:
      |NHRU|
      |Lnk|
      |FLn|

    Required derived parameter:
      |MOY|

    Required flux sequence:
      |ET0|

    Calculated flux sequence:
      |EvPo|

    Additional requirements:
      |Model.idx_sim|

    Basic equation:
      :math:`EvPo = FLn \\cdot ET0`

    Example:

        For clarity, this is more of an integration example.  Parameter
        |FLn| depends both on time (the actual month) and space (the
        actual land use).  Firstly, let us define an initialization time
        period spanning the transition from June to July:

        >>> from hydpy import pub
        >>> pub.timegrids = '30.06.2000', '02.07.2000', '1d'

        Secondly, assume that the considered subbasin is differentiated
        into two HRUs, one primarily consisting of arable land and the
        other one of deciduous forests:

        >>> from hydpy.models.lland import *
        >>> parameterstep('1d')
        >>> nhru(2)
        >>> lnk(ACKER, LAUBW)

        Thirdly, set the |FLn| values for the relevant months and land
        use classes:

        >>> fln.acker_jun = 1.299
        >>> fln.acker_jul = 1.304
        >>> fln.laubw_jun = 1.350
        >>> fln.laubw_jul = 1.365

        Fourthly, the index array connecting the simulation time steps
        defined above and the month indexes (0...11) can be retrieved
        from the |pub| module.  This could be done manually, but it is
        more convenient to use its update method:

        >>> derived.moy.update()
        >>> derived.moy
        moy(5, 6)

        Finally, the actual method (with its simple equation) is applied
        as usual:

        >>> fluxes.et0 = 2.0
        >>> model.idx_sim = 0
        >>> model.calc_evpo_v1()
        >>> fluxes.evpo
        evpo(2.598, 2.7)
        >>> model.idx_sim = 1
        >>> model.calc_evpo_v1()
        >>> fluxes.evpo
        evpo(2.608, 2.73)

        Reset module |pub| so it does not interfere with the following
        examples:

        >>> del pub.timegrids
    """
    con = self.parameters.control.fastaccess
    der = self.parameters.derived.fastaccess
    flu = self.sequences.fluxes.fastaccess
    for k in range(con.nhru):
        flu.evpo[k] = con.fln[con.lnk[k]-1, der.moy[self.idx_sim]] * flu.et0[k]
python
def visitTypeDirective(self, ctx: jsgParser.TypeDirectiveContext): """ directive: '.TYPE' name typeExceptions? SEMI """ self._context.directives.append('_CONTEXT.TYPE = "{}"'.format(as_token(ctx.name()))) self._context.has_typeid = True self.visitChildren(ctx)
java
public static NavigationAnimation create(MapPresenter mapPresenter, Trajectory trajectory, int millis) {
    return new NavigationAnimationImpl(mapPresenter.getViewPort(), mapPresenter.getEventBus(), trajectory, millis);
}
java
private void replayLogs(Collection<TransactionLog> logs) {
    for (TransactionLog log : logs) {
        LOG.info("Replaying edits from transaction log " + log.getName());
        int editCnt = 0;
        try {
            TransactionLogReader reader = log.getReader();
            // reader may be null in the case of an empty file
            if (reader == null) {
                continue;
            }
            TransactionEdit edit = null;
            while ((edit = reader.next()) != null) {
                editCnt++;
                switch (edit.getState()) {
                    case INPROGRESS:
                        long expiration = edit.getExpiration();
                        TransactionType type = edit.getType();
                        // Check if transaction needs to be migrated to have expiration and type. Previous version of
                        // long running transactions were represented with expiration time as -1.
                        // This can be removed when we stop supporting TransactionEditCodecV2.
                        if (expiration < 0) {
                            expiration = getTxExpirationFromWritePointer(edit.getWritePointer(), defaultLongTimeout);
                            type = TransactionType.LONG;
                        } else if (type == null) {
                            type = TransactionType.SHORT;
                        }
                        addInProgressAndAdvance(edit.getWritePointer(), edit.getVisibilityUpperBound(), expiration, type);
                        break;
                    case COMMITTING:
                        addCommittingChangeSet(edit.getWritePointer(), edit.getChanges());
                        break;
                    case COMMITTED:
                        // TODO: need to reconcile usage of transaction id v/s write pointer TEPHRA-140
                        long transactionId = edit.getWritePointer();
                        long[] checkpointPointers = edit.getCheckpointPointers();
                        long writePointer = checkpointPointers == null || checkpointPointers.length == 0
                                ? transactionId : checkpointPointers[checkpointPointers.length - 1];
                        doCommit(transactionId, writePointer, edit.getChanges(), edit.getCommitPointer(), edit.getCanCommit());
                        break;
                    case INVALID:
                        doInvalidate(edit.getWritePointer());
                        break;
                    case ABORTED:
                        type = edit.getType();
                        // Check if transaction edit needs to be migrated to have type. Previous versions of
                        // ABORTED edits did not contain type.
                        // This can be removed when we stop supporting TransactionEditCodecV2.
                        if (type == null) {
                            InProgressTx inProgressTx = inProgress.get(edit.getWritePointer());
                            if (inProgressTx != null) {
                                type = inProgressTx.getType();
                            } else {
                                // If transaction is not in-progress, then it has either been already aborted or invalidated.
                                // We cannot determine the transaction's state based on current information, to be safe invalidate it.
                                LOG.warn("Invalidating transaction {} as its type cannot be determined during replay",
                                        edit.getWritePointer());
                                doInvalidate(edit.getWritePointer());
                                break;
                            }
                        }
                        doAbort(edit.getWritePointer(), edit.getCheckpointPointers(), type);
                        break;
                    case TRUNCATE_INVALID_TX:
                        if (edit.getTruncateInvalidTxTime() != 0) {
                            doTruncateInvalidTxBefore(edit.getTruncateInvalidTxTime());
                        } else {
                            doTruncateInvalidTx(edit.getTruncateInvalidTx());
                        }
                        break;
                    case CHECKPOINT:
                        doCheckpoint(edit.getWritePointer(), edit.getParentWritePointer());
                        break;
                    default:
                        // unknown type!
                        throw new IllegalArgumentException("Invalid state for WAL entry: " + edit.getState());
                }
            }
        } catch (IOException ioe) {
            throw Throwables.propagate(ioe);
        } catch (InvalidTruncateTimeException e) {
            throw Throwables.propagate(e);
        }
        LOG.info("Read " + editCnt + " edits from log " + log.getName());
    }
}
python
def get_load(jid): ''' Return the load associated with a given job id ''' conn, mdb = _get_conn(ret=None) return mdb.jobs.find_one({'jid': jid}, {'_id': 0})
java
public void marshall(Category category, ProtocolMarshaller protocolMarshaller) {
    if (category == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(category.getCategoryId(), CATEGORYID_BINDING);
        protocolMarshaller.marshall(category.getCategoryName(), CATEGORYNAME_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}