language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def validate(self, class_, tag, contents):
    """
    Ensures that the class and tag specified exist as an alternative

    :param class_:
        The integer class_ from the encoded value header

    :param tag:
        The integer tag from the encoded value header

    :param contents:
        A byte string of the contents of the value - used when the object
        is explicitly tagged

    :raises:
        ValueError - when value is not a valid alternative
    """
    id_ = (class_, tag)
    if self.explicit is not None:
        # For an explicitly tagged Choice the outer header must match the
        # declared explicit tag; the real alternative is identified by the
        # inner (wrapped) header parsed out of the contents.
        if self.explicit[-1] != id_:
            raise ValueError(unwrap(''' %s was explicitly tagged, but the value provided does not match the class and tag ''', type_name(self)))
        ((class_, _, tag, _, _, _), _) = _parse(contents, len(contents))
        id_ = (class_, tag)
    # Fast path: the (class, tag) pair directly names one alternative.
    if id_ in self._id_map:
        self._choice = self._id_map[id_]
        return
    # This means the Choice was implicitly tagged
    if self.class_ is not None and self.tag is not None:
        # Implicit tagging replaces the alternative's own tag, so it is only
        # unambiguous when there is a single alternative to choose from.
        if len(self._alternatives) > 1:
            raise ValueError(unwrap(''' %s was implicitly tagged, but more than one alternative exists ''', type_name(self)))
        if id_ == (self.class_, self.tag):
            self._choice = 0
            return
    # No alternative matched - build a readable error listing what was seen
    # versus every alternative this Choice accepts.
    asn1 = self._format_class_tag(class_, tag)
    asn1s = [self._format_class_tag(pair[0], pair[1]) for pair in self._id_map]
    raise ValueError(unwrap(''' Value %s did not match the class and tag of any of the alternatives in %s: %s ''', asn1, type_name(self), ', '.join(asn1s)))
python
def local_run():
    """Whether we should hit GCS dev appserver stub."""
    software = os.environ.get('SERVER_SOFTWARE')
    # No SERVER_SOFTWARE at all means we are running outside any App Engine
    # runtime, so use the local stub.
    if software is None:
        return True
    # remote_api shells talk to the real service, never the stub.
    if 'remote_api' in software:
        return False
    # Dev appserver and test harnesses identify themselves with these prefixes.
    return software.startswith(('Development', 'testutil'))
python
def add_data_point(self, x, y):
    """Adds a data point to the series, keeping the series ordered by x.

    :param x: The numerical x value to be added.
    :param y: The numerical y value to be added."""
    if not is_numeric(x):
        raise TypeError("x value must be numeric, not '%s'" % str(x))
    if not is_numeric(y):
        raise TypeError("y value must be numeric, not '%s'" % str(y))
    previous_last_x = self._data[-1][0]
    self._data.append((x, y))
    # Only pay for a sort when the new point broke the existing x ordering.
    if x < previous_last_x:
        self._data = sorted(self._data, key=lambda point: point[0])
python
def GetMemReservationMB(self):
    '''Retrieves the minimum amount of memory that is reserved for the virtual machine. For information about setting a memory reservation, see "Limits and Reservations" on page 14.'''
    # Out-parameter filled in by the native VMGuestLib call.
    counter = c_uint()
    ret = vmGuestLib.VMGuestLib_GetMemReservationMB(self.handle.value, byref(counter))
    # Any status other than success is surfaced as an exception.
    if ret != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(ret)
    return counter.value
java
/**
 * Records a failed checkpoint in the statistics, replacing the pending
 * entry for the same checkpoint id in the history.
 *
 * @param failed stats of the checkpoint that failed
 */
private void reportFailedCheckpoint(FailedCheckpointStats failed) {
    // counts, history and the dirty flag are shared with readers; guard them.
    statsReadWriteLock.lock();
    try {
        counts.incrementFailedCheckpoints();
        history.replacePendingCheckpointById(failed);
        // Mark the cached snapshot stale so it is rebuilt on next access.
        dirty = true;
    } finally {
        statsReadWriteLock.unlock();
    }
}
java
/**
 * Marshalls the given {@link EndpointConfiguration} fields into the
 * protocol representation via the supplied marshaller.
 *
 * @param endpointConfiguration the object to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each bound field
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(EndpointConfiguration endpointConfiguration, ProtocolMarshaller protocolMarshaller) {
    if (endpointConfiguration == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(endpointConfiguration.getEndpointId(), ENDPOINTID_BINDING);
        protocolMarshaller.marshall(endpointConfiguration.getWeight(), WEIGHT_BINDING);
    } catch (Exception e) {
        // Wrap anything thrown by the marshaller in the SDK's client exception.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def for_executor(cls, executor: Optional[Executor]) -> 'Subsystem':
    """Return a subsystem based on the given executor.

    If ``executor`` is None, use :mod:`asyncio`. If ``executor`` is a
    :class:`concurrent.futures.ThreadPoolExecutor`, use :mod:`threading`.

    Args:
        executor: The executor in use, if any.
    """
    # Guard-clause ordering: handle the "no executor" case first, then the
    # one supported executor type; anything else is rejected.
    if executor is None:
        return _AsyncioSubsystem()
    if isinstance(executor, ThreadPoolExecutor):
        return _ThreadingSubsystem(executor)
    raise TypeError(executor)
java
/**
 * Searches for namespaces matching the given query string, returning the
 * results one page at a time.
 *
 * @param query the search string (required)
 * @param itemsPerPage number of Namespace instances per page
 * @return a Pager over the matching namespaces
 * @throws GitLabApiException if the GitLab API call fails
 */
public Pager<Namespace> findNamespaces(String query, int itemsPerPage) throws GitLabApiException {
    GitLabApiForm formData = new GitLabApiForm().withParam("search", query, true);
    return (new Pager<Namespace>(this, Namespace.class, itemsPerPage, formData.asMap(), "namespaces"));
}
java
/**
 * Initializes the search index: validates the configured path, resolves
 * namespace mappings, creates the MultiIndex, optionally (re)builds or
 * checks the index in READ_WRITE mode, and wires up the spell checker,
 * error log and dynamic load-batching thresholds.
 *
 * @throws IOException if the 'path' parameter is missing
 * @throws RepositoryException if the index directory cannot be created
 */
@Override
public void doInit() throws IOException, RepositoryException {
    QueryHandlerContext context = getContext();
    setPath(context.getIndexDirectory());
    if (path == null) {
        throw new IOException("SearchIndex requires 'path' parameter in configuration!");
    }
    final File indexDirectory;
    // NOTE(review): path was already null-checked above, so the else branch
    // below is unreachable; kept as-is to preserve behavior.
    if (path != null) {
        indexDirectory = new File(path);
        if (!indexDirectory.exists()) {
            if (!indexDirectory.mkdirs()) {
                throw new RepositoryException("fail to create index dir " + path);
            }
        }
    } else {
        throw new IOException("SearchIndex requires 'path' parameter in configuration!");
    }
    log.info("Index created: " + path);
    extractor = context.getExtractor();
    synProvider = createSynonymProvider();
    directoryManager = createDirectoryManager();
    if (context.getParentHandler() instanceof SearchIndex) {
        // use system namespace mappings
        SearchIndex sysIndex = (SearchIndex)context.getParentHandler();
        nsMappings = sysIndex.getNamespaceMappings();
    } else {
        // read local namespace mappings
        File mapFile = new File(indexDirectory, NS_MAPPING_FILE);
        if (mapFile.exists()) {
            // be backward compatible and use ns_mappings.properties from
            // index folder
            nsMappings = new FileBasedNamespaceMappings(mapFile);
        } else {
            // otherwise use repository wide stable index prefix from
            // namespace registry
            nsMappings = new NSRegistryBasedNamespaceMappings(context.getNamespaceRegistry());
        }
    }
    scs = new SharedFieldComparatorSource(FieldNames.PROPERTIES, context.getItemStateManager(), nsMappings);
    sics = new SharedFieldInsensitiveComparatorSource(FieldNames.PROPERTIES, context.getItemStateManager(), nsMappings);
    npResolver = new LocationFactory(nsMappings);
    indexingConfig = createIndexingConfiguration(nsMappings);
    analyzer.setIndexingConfig(indexingConfig);
    MultiIndex index = new MultiIndex(this, context.getIndexingTree(), modeHandler, getIndexInfos(), getIndexUpdateMonitor());
    //register the default index
    this.indexRegister = new IndexRegister(index);
    // if RW mode, create initial index and start check
    if (modeHandler.getMode() == IndexerIoMode.READ_WRITE) {
        // set true if indexRecoveryFilters are required and some filter gives positive flag
        final boolean doReindexing = (index.numDocs() == 0 && context.isCreateInitialIndex());
        // if existing index should be removed
        final boolean doForceReindexing = (context.isRecoveryFilterUsed() && isIndexRecoveryRequired());
        final boolean doCheck = (consistencyCheckEnabled && (index.getRedoLogApplied() || forceConsistencyCheck));
        final ItemDataConsumer itemStateManager = context.getItemStateManager();
        if (isAsyncReindexing() && doReindexing) {
            // Reindex on a named background thread so startup is not blocked.
            log.info("Launching reindexing in asynchronous mode.");
            new Thread(new Runnable() {
                public void run() {
                    try {
                        reindex(doReindexing, doForceReindexing, doCheck, itemStateManager);
                    } catch (IOException e) {
                        log.error("Error while reindexing the workspace. Please fix the problem, delete index and restart server.", e);
                    }
                }
            }, "Reindexing-" + context.getRepositoryName() + "-" + context.getContainer().getWorkspaceName()).start();
        } else {
            reindex(doReindexing, doForceReindexing, doCheck, itemStateManager);
        }
    }
    // initialize spell checker
    spellChecker = createSpellChecker();
    log.info("Index initialized: {} Version: {}", new Object[]{path, index.getIndexFormatVersion()});
    if (!index.getIndexFormatVersion().equals(getIndexFormatVersion())) {
        log.warn("Using Version {} for reading. Please re-index version " + "storage for optimal performance.", new Integer(getIndexFormatVersion().getVersion()));
    }
    this.errorLog = doInitErrorLog(path);
    // reprocess any notfinished notifies;
    if (modeHandler.getMode() == IndexerIoMode.READ_WRITE) {
        recoverErrorLog(errorLog);
    }
    modeHandler.addIndexerIoModeListener(this);
    if (indexingLoadBatchingThresholdDynamic && (indexingLoadBatchingThresholdProperty > -1 || indexingLoadBatchingThresholdNode > -1)) {
        // The load batching has been enabled and configured in dynamic mode
        // We first check if the statistics have been enabled
        if (JDBCWorkspaceDataContainer.STATISTICS_ENABLED) {
            // The statistics are enabled so we can initialize the variables
            if (indexingLoadBatchingThresholdTTL <= 0)
                indexingLoadBatchingThresholdTTL = DEFAULT_INDEXING_LOAD_BATCHING_THRESHOLD_TTL;
            log.debug("The statistics have been enabled so the load batching " + "thresholds will be modified dynamically");
            // Lazily create the shared Timer with double-checked locking on
            // the class monitor.
            if (TIMER == null) {
                synchronized (SearchIndex.class) {
                    if (TIMER == null) {
                        TIMER = new Timer("SearchIndex Update Load Batching Thresholds Timer", true);
                    }
                }
            }
            scheduleUpdateLoadBatchingThresholdTask();
        } else {
            log.debug("The statistics have not been enabled so the load batching " + "thresholds won't be modified dynamically");
        }
    }
    indexingLoadPropertyByName = indexingLoadBatchingThresholdProperty > -1;
}
java
/**
 * Returns the first value stored for the header with the given name.
 *
 * @param name the header name to look up
 * @return the first value, resolved through the canonical {@link HeaderName}
 */
public String getFirst(String name) {
    // Normalize the raw name and delegate to the typed overload.
    return getFirst(getHeaderName(name));
}
python
def convert_convolution1d(builder, layer, input_names, output_names, keras_layer): """ Convert convolution layer from keras to coreml. Parameters ---------- keras_layer: layer A keras layer object. builder: NeuralNetworkBuilder A neural network builder object. """ # Get input and output names input_name, output_name = (input_names[0], output_names[0]) has_bias = keras_layer.use_bias # Get the weights from _keras. # Keras stores convolution weights as list of numpy arrays weightList = keras_layer.get_weights() output_shape = list(filter(None, keras_layer.output_shape))[:-1] # Parameter filter_length, input_dim, n_filters = weightList[0].shape stride_width = keras_layer.strides if type(keras_layer.strides) is int \ else keras_layer.strides[0] # Weights and bias terms W = _np.expand_dims(weightList[0],axis=0) b = weightList[1] if has_bias else None dilations = [1,1] if (type(keras_layer.dilation_rate) is list) or \ (type(keras_layer.dilation_rate) is tuple): dilations = [1, keras_layer.dilation_rate[0]] else: dilations = [1, keras_layer.dilation_rate] keras_padding = keras_layer.padding if keras_padding == 'causal': builder.add_padding(name = layer + '__causal_pad__', left = filter_length-1, right=0, top=0, bottom=0, value= 0, input_name = input_name, output_name= input_name + '__causal_pad__') input_name = input_name + '__causal_pad__' keras_padding = 'valid' builder.add_convolution(name = layer, kernel_channels = input_dim, output_channels = n_filters, height = 1, width = filter_length, stride_height = 1, stride_width = stride_width, border_mode = keras_padding, groups = 1, W = W, b = b, has_bias = has_bias, is_deconv = False, output_shape = output_shape, input_name = input_name, output_name = output_name, dilation_factors = dilations)
java
/**
 * Tells whether the given variable name is one of the engine's reserved
 * variables (compared case-insensitively).
 *
 * @param variableName the variable name to test
 * @return true when the name matches a reserved variable
 */
public static boolean isReservedVariable( String variableName ){
    if ( ReservedVariables.NOW.equalsIgnoreCase( variableName ) ) {
        return true;
    }
    return ReservedVariables.WORKFLOW_INSTANCE_ID.equalsIgnoreCase( variableName );
}
python
def _get_interpreter_info(interpreter=None): """Return the interpreter's full path using pythonX.Y format.""" if interpreter is None: # If interpreter is None by default returns the current interpreter data. major, minor = sys.version_info[:2] executable = sys.executable else: args = [interpreter, '-c', SHOW_VERSION_CMD] try: requested_interpreter_info = logged_exec(args) except Exception as error: logger.error("Error getting requested interpreter version: %s", error) raise FadesError("Could not get interpreter version") requested_interpreter_info = json.loads(requested_interpreter_info[0]) executable = requested_interpreter_info['path'] major = requested_interpreter_info['major'] minor = requested_interpreter_info['minor'] if executable[-1].isdigit(): executable = executable.split(".")[0][:-1] interpreter = "{}{}.{}".format(executable, major, minor) return interpreter
java
/**
 * EMF reflective setter: updates the feature identified by featureID with
 * the given value, delegating unknown features to the superclass.
 *
 * @param featureID the generated feature id being set
 * @param newValue the new value for the feature
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
    switch (featureID) {
        case AfplibPackage.MCFRG__RG_LENGTH:
            setRGLength((Integer)newValue);
            return;
        case AfplibPackage.MCFRG__TRIPLETS:
            // Replace the whole triplet list with the supplied collection.
            getTriplets().clear();
            getTriplets().addAll((Collection<? extends Triplet>)newValue);
            return;
    }
    super.eSet(featureID, newValue);
}
java
/**
 * Appends the member's modifier string (plus a trailing space) to the
 * given content tree, suppressing "public" on interface members per JLS
 * and substituting "default" for default interface methods.
 *
 * @param member the member whose modifiers are rendered
 * @param htmltree the content tree receiving the modifier text
 */
protected void addModifiers(MemberDoc member, Content htmltree) {
    String mod = modifierString(member);
    // According to JLS, we should not be showing public modifier for
    // interface methods.
    if ((member.isField() || member.isMethod()) && writer instanceof ClassWriterImpl && ((ClassWriterImpl) writer).getClassDoc().isInterface()) {
        // This check for isDefault() and the default modifier needs to be
        // added for it to appear on the method details section. Once the
        // default modifier is added to the Modifier list on DocEnv and once
        // it is updated to use the javax.lang.model.element.Modifier, we
        // will need to remove this.
        mod = (member.isMethod() && ((MethodDoc)member).isDefault()) ? Util.replaceText(mod, "public", "default").trim() : Util.replaceText(mod, "public", "").trim();
    }
    if(mod.length() > 0) {
        htmltree.addContent(mod);
        htmltree.addContent(writer.getSpace());
    }
}
python
def load_MACHO(macho_id):
    """lightcurve of 2 bands (R, B) from the MACHO survey.

    Notes
    -----
    The files are gathered from the original FATS project tutorial:
    https://github.com/isadoranun/tsfeat
    """
    # Each .tar.bz2 archive bundles one plain-text lightcurve file per band.
    tarfname = "{}.tar.bz2".format(macho_id)
    tarpath = os.path.join(DATA_PATH, tarfname)
    rpath = "{}.R.mjd".format(macho_id)
    bpath = "{}.B.mjd".format(macho_id)
    with tarfile.open(tarpath, mode="r:bz2") as tf:
        rlc = np.loadtxt(tf.extractfile(rpath))
        blc = np.loadtxt(tf.extractfile(bpath))
    bands = ("R", "B")
    # Columns in each .mjd file: time (MJD), magnitude, magnitude error.
    data = {
        "R": {"time": rlc[:, 0], "magnitude": rlc[:, 1], "error": rlc[:, 2]},
        "B": {"time": blc[:, 0], "magnitude": blc[:, 1], "error": blc[:, 2]}
    }
    descr = ("The files are gathered from the original FATS project "
             "tutorial: https://github.com/isadoranun/tsfeat")
    return Data(id=macho_id, metadata=None, ds_name="MACHO", description=descr, bands=bands, data=data)
java
/**
 * Returns the value of the given property interpreted as a Boolean.
 *
 * @param key property name to look up
 * @param defaultValue value used when the property is absent; must not be null
 * @return {@code Boolean.TRUE} when the effective value equals "true"
 *         (case-insensitively), {@code Boolean.FALSE} otherwise
 */
public Boolean getBooleanProperty(String key, Boolean defaultValue) {
    final String raw = getProperty(key, defaultValue.toString());
    // Boolean.valueOf delegates to parseBoolean, matching the original logic.
    return Boolean.valueOf(raw);
}
java
/**
 * Sets the network interface ids, making a defensive copy of the caller's
 * collection; {@code null} clears the field.
 *
 * @param networkInterfaceIds the ids to store, or null to clear
 */
public void setNetworkInterfaceIds(java.util.Collection<String> networkInterfaceIds) {
    this.networkInterfaceIds = (networkInterfaceIds == null)
            ? null
            : new java.util.ArrayList<String>(networkInterfaceIds);
}
python
def recv_msg(self):
    '''message receive routine

    Returns the next preloaded message, or None when the log is exhausted.
    Updates progress, the per-type message cache, the current timestamp and
    the current flightmode as a side effect.
    '''
    # Exhausted the preloaded message list.
    if self._index >= self._count:
        return None
    m = self._msgs[self._index]
    type = m.get_type()
    self._index += 1
    # Progress through the log as a percentage.
    self.percent = (100.0 * self._index) / self._count
    # Keep the most recent message of each type for quick lookup.
    self.messages[type] = m
    self._timestamp = m._timestamp
    # Advance the flightmode once this message's timestamp crosses the next
    # recorded mode-change boundary.
    if self._flightmode_index < len(self._flightmodes):
        (mode, tstamp, t2) = self._flightmodes[self._flightmode_index]
        if m._timestamp >= tstamp:
            self.flightmode = mode
            self._flightmode_index += 1
    self.check_param(m)
    return m
python
def _connected(self, link_uri):
    """ This callback is called from the Crazyflie API when a Crazyflie
    has been connected and the TOCs have been downloaded."""
    print('Connected to %s' % link_uri)
    # Start receiving raw packets from the Crazyflie.
    self._cf.packet_received.add_callback(self._got_packet)
python
def transformer_moe_base():
    """Set of hyperparameters.

    Base configuration for the mixture-of-experts transformer, built on top
    of ``common_hparams.basic_params1`` with standard attention hparams.
    """
    hparams = common_hparams.basic_params1()
    hparams.norm_type = "layer"
    hparams.hidden_size = 512
    hparams.batch_size = 4096
    hparams.max_length = 2001
    hparams.max_input_seq_length = 2000
    hparams.max_target_seq_length = 2000
    hparams.dropout = 0.0
    hparams.clip_grad_norm = 0.  # i.e. no gradient clipping
    hparams.optimizer_adam_epsilon = 1e-9
    hparams.learning_rate_decay_scheme = "noam"
    hparams.learning_rate = 0.1
    hparams.learning_rate_warmup_steps = 2000
    hparams.initializer_gain = 1.0
    hparams.num_hidden_layers = 5
    hparams.initializer = "uniform_unit_scaling"
    hparams.weight_decay = 0.0
    hparams.optimizer_adam_beta1 = 0.9
    hparams.optimizer_adam_beta2 = 0.98
    hparams.num_sampled_classes = 0
    hparams.label_smoothing = 0.0
    hparams.shared_embedding_and_softmax_weights = True
    # According to noam, ("n", "da") seems better for harder-to-learn models
    hparams.layer_preprocess_sequence = "n"
    hparams.layer_postprocess_sequence = "da"
    # Hparams used by transformer_prepare_decoder() function
    hparams.add_hparam("pos", "timing")  # timing, none
    hparams.add_hparam("proximity_bias", False)
    hparams.add_hparam("causal_decoder_self_attention", True)
    hparams = common_attention.add_standard_attention_hparams(hparams)
    # Decoder layers type. If set, num_decoder_layers parameter will be ignored
    # and the number of decoder layer will be deduced from the string
    # See top file comment for example of usage
    hparams.add_hparam("layer_types", "")
    # Default attention type (ex: a, loc, red,...) and feed-forward type (ex: fc,
    # sep, moe,...)
    hparams.add_hparam("default_att", "a")
    hparams.add_hparam("default_ff", "fc")
    return hparams
python
def Nu_cylinder_Perkins_Leppert_1962(Re, Pr, mu=None, muw=None):
    r'''Calculates Nusselt number for crossflow across a single tube as
    shown in [1]_ at a specified `Re` and `Pr`, both evaluated at the free
    stream temperature. Recommends a viscosity exponent correction of 0.25,
    which is applied only if both viscosities are provided. Also shown
    in [2]_.

    .. math::
        Nu = \left[0.30Re^{0.5} + 0.10Re^{0.67}\right]Pr^{0.4}
        \left(\frac{\mu}{\mu_w}\right)^{0.25}

    Parameters
    ----------
    Re : float
        Reynolds number with respect to cylinder diameter, [-]
    Pr : float
        Prandtl number at free stream temperature, [-]
    mu : float, optional
        Viscosity of fluid at the free stream temperature [Pa*s]
    muw : float, optional
        Viscosity of fluid at the wall temperature [Pa*s]

    Returns
    -------
    Nu : float
        Nusselt number with respect to cylinder diameter, [-]

    Notes
    -----
    Considered results with Re from 40 to 1E5, Pr from 1 to 300; and
    viscosity ratios of 0.25 to 4.

    Examples
    --------
    >>> Nu_cylinder_Perkins_Leppert_1962(6071, 0.7)
    49.97164291175499

    References
    ----------
    .. [1] Perkins, Jr., H. C., and G. Leppert. "Forced Convection Heat
       Transfer From a Uniformly Heated Cylinder." Journal of Heat Transfer
       84, no. 3 (August 1, 1962): 257-261. doi:10.1115/1.3684359.
    .. [2] Sanitjai, S., and R. J. Goldstein. "Forced Convection Heat
       Transfer from a Circular Cylinder in Crossflow to Air and Liquids."
       International Journal of Heat and Mass Transfer 47, no. 22 (October
       2004): 4795-4805. doi:10.1016/j.ijheatmasstransfer.2004.05.012.
    '''
    # Two-term Reynolds blend, then the Prandtl factor.
    reynolds_term = 0.30*Re**0.5 + 0.10*Re**0.67
    Nu = reynolds_term*Pr**0.4
    # Optional wall-viscosity correction, applied only when both are given.
    if mu and muw:
        Nu *= (mu/muw)**0.25
    return Nu
java
/**
 * Returns a list in which every primitive-wrapper class in the arguments is
 * replaced by its primitive TYPE token; all other classes pass through.
 *
 * @param arguments the classes to convert; may be null or empty
 * @return a new list (empty for null/empty input)
 */
public static List<Class<?>> convertArgumentClassesToPrimitives( Class<?>... arguments ) {
    if (arguments == null || arguments.length == 0) return Collections.emptyList();
    List<Class<?>> converted = new ArrayList<Class<?>>(arguments.length);
    for (Class<?> argument : arguments) {
        converted.add(unwrapIfWrapperClass(argument));
    }
    return converted;
}

/** Maps a wrapper class to its primitive counterpart; other classes pass through unchanged. */
private static Class<?> unwrapIfWrapperClass(Class<?> clazz) {
    if (clazz == Boolean.class) return Boolean.TYPE;
    if (clazz == Character.class) return Character.TYPE;
    if (clazz == Byte.class) return Byte.TYPE;
    if (clazz == Short.class) return Short.TYPE;
    if (clazz == Integer.class) return Integer.TYPE;
    if (clazz == Long.class) return Long.TYPE;
    if (clazz == Float.class) return Float.TYPE;
    if (clazz == Double.class) return Double.TYPE;
    if (clazz == Void.class) return Void.TYPE;
    return clazz;
}
java
/**
 * Maps a [start, end) character range of the raw header string to the
 * corresponding positions in the view's document, compensating for CRLF
 * line terminators that occupy two characters in the header but only one
 * in the view.
 *
 * @param view the text area showing the header
 * @param header the raw header string (CRLF separated)
 * @param start the start offset within the header
 * @param end the end offset within the header
 * @return a two-element {start, end} array in view coordinates, or
 *         INVALID_POSITION when the range cannot be mapped
 */
public static int[] getHeaderToViewPosition(JTextArea view, String header, int start, int end) {
    validateView(view);
    validateHeader(header);
    validateStartEnd(start, end);
    if (!isValidStartEndForLength(start, end, header.length())) {
        return INVALID_POSITION;
    }
    // Count CRLFs before 'start': each contributes one excess character
    // relative to the single line break the view renders.
    int excessChars = 0;
    int pos = 0;
    while ((pos = header.indexOf(HttpHeader.CRLF, pos)) != -1 && pos < start) {
        pos += 2;
        ++excessChars;
    }
    int len = view.getDocument().getLength();
    int finalStartPos = start - excessChars;
    if (finalStartPos > len) {
        return INVALID_POSITION;
    }
    // Continue counting CRLFs between 'start' and 'end' (skip if none remain).
    if (pos != -1) {
        while ((pos = header.indexOf(HttpHeader.CRLF, pos)) != -1 && pos < end) {
            pos += 2;
            ++excessChars;
        }
    }
    int finalEndPos = end - excessChars;
    if (finalEndPos > len) {
        return INVALID_POSITION;
    }
    return new int[] { finalStartPos, finalEndPos };
}
java
/**
 * Prints a short diagnostic dump: the method banner, the argument count,
 * each argument on its own line, and this instance's toString().
 *
 * @param args command-line arguments to echo
 */
public void run(String[] args) {
    System.out.println("SampleStarter.run(String[])");
    System.out.println("- args.length: " + args.length);
    for (int i = 0; i < args.length; i++) {
        System.out.println(" - " + args[i]);
    }
    System.out.println(this);
}
python
def threeD_seismplot(stations, nodes, size=(10.5, 7.5), **kwargs):
    """
    Plot seismicity and stations in a 3D, movable, zoomable space.

    Uses matplotlibs Axes3D package.

    :type stations: list
    :param stations: list of one tuple per station of (lat, long, elevation), \
        with up positive.
    :type nodes: list
    :param nodes: list of one tuple per event of (lat, long, depth) with down \
        positive.
    :type size: tuple
    :param size: Size of figure in inches.

    :returns: :class:`matplotlib.figure.Figure`

    .. Note::
        See :func:`eqcorrscan.utils.plotting.obspy_3d_plot` for example output.
    """
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    stalats, stalongs, staelevs = zip(*stations)
    evlats, evlongs, evdepths = zip(*nodes)
    # Cope with +/-180 latitudes...
    # Shift negative longitudes into the 0-360 range so points straddling the
    # dateline plot contiguously.
    _evlongs = []
    for evlong in evlongs:
        if evlong < 0:
            evlong = float(evlong)
            evlong += 360
        _evlongs.append(evlong)
    evlongs = _evlongs
    _stalongs = []
    for stalong in stalongs:
        if stalong < 0:
            stalong = float(stalong)
            stalong += 360
        _stalongs.append(stalong)
    stalongs = _stalongs
    # Depths are down-positive; negate so the z axis reads as elevation.
    evdepths = [-1 * depth for depth in evdepths]
    fig = plt.figure(figsize=size)
    ax = Axes3D(fig)
    ax.scatter(evlats, evlongs, evdepths, marker="x", c="k", label='Hypocenters')
    ax.scatter(stalats, stalongs, staelevs, marker="v", c="r", label='Stations')
    ax.set_ylabel("Longitude (deg)")
    ax.set_xlabel("Latitude (deg)")
    ax.set_zlabel("Elevation (km)")
    # Plain (non-scientific) tick labels for geographic coordinates.
    ax.get_xaxis().get_major_formatter().set_scientific(False)
    ax.get_yaxis().get_major_formatter().set_scientific(False)
    plt.legend()
    fig = _finalise_figure(fig=fig, **kwargs)  # pragma: no cover
    return fig
java
/**
 * Adds a delete directive for the given element to the user's PLF delete
 * set, then recurses over child elements whose IDs mark them as
 * incorporated fragment content so they are deleted as well.
 *
 * @param compViewNode the composite-view element being deleted
 * @param elementID the ID recorded on the delete directive
 * @param person the user whose layout is modified
 * @param plf the user's personal layout fragment document
 * @param delSet the delete-set element receiving the directives
 * @throws PortalException if a new directive id cannot be generated
 */
private static void addDeleteDirective( Element compViewNode, String elementID, IPerson person, Document plf, Element delSet) throws PortalException {
    String ID = null;
    try {
        ID = getDLS().getNextStructDirectiveId(person);
    } catch (Exception e) {
        throw new PortalException("Exception encountered while " + "generating new delete node " + "Id for userId=" + person.getID(), e);
    }
    Element delete = plf.createElement(Constants.ELM_DELETE);
    delete.setAttribute(Constants.ATT_TYPE, Constants.ELM_DELETE);
    delete.setAttribute(Constants.ATT_ID, ID);
    delete.setAttributeNS(Constants.NS_URI, Constants.ATT_NAME, elementID);
    delSet.appendChild(delete);
    // now pass through children and add delete directives for those with
    // IDs indicating that they were incorporated
    // NOTE(review): the casts assume every child node is an Element (no text
    // or comment nodes) - confirm against how the PLF document is built.
    Element child = (Element) compViewNode.getFirstChild();
    while (child != null) {
        String childID = child.getAttribute("ID");
        if (childID.startsWith(Constants.FRAGMENT_ID_USER_PREFIX))
            addDeleteDirective(child, childID, person, plf, delSet);
        child = (Element) child.getNextSibling();
    }
}
python
def fix_line_range(source_code, start, end, options): """Apply autopep8 (and docformatter) between the lines start and end of source.""" # TODO confirm behaviour outside range (indexing starts at 1) start = max(start, 1) options.line_range = [start, end] from autopep8 import fix_code fixed = fix_code(source_code, options) try: if options.docformatter: from docformatter import format_code fixed = format_code( fixed, summary_wrap_length=options.max_line_length - 1, description_wrap_length=(options.max_line_length - 2 * options.indent_size), pre_summary_newline=options.pre_summary_newline, post_description_blank=options.post_description_blank, force_wrap=options.force_wrap, line_range=[start, end]) except AttributeError: # e.g. using autopep8.parse_args, pragma: no cover pass return fixed
python
def cmd(self, cmd_name):
    """Return the fully-qualified tarantool queue command name for this tube.

    :param cmd_name: bare command name, e.g. ``put``
    :return: string of the form ``<lua_queue_name>.tube.<tube_name>:<cmd_name>``
    """
    queue_name = self.queue.lua_queue_name
    return "{0}.tube.{1}:{2}".format(queue_name, self.name, cmd_name)
java
/**
 * Lazily creates the "parse robots.txt" check box with its localized label.
 *
 * @return the shared check box instance
 */
private JCheckBox getChkParseRobotsTxt() {
    if (parseRobotsTxt == null) {
        parseRobotsTxt = new JCheckBox();
        parseRobotsTxt.setText(Constant.messages.getString("spider.options.label.robotstxt"));
    }
    return parseRobotsTxt;
}
python
def kosaraju(graph):
    """Strongly connected components by Kosaraju

    :param graph: directed graph in listlist format, cannot be listdict
    :returns: list of lists for each component
    :complexity: linear
    """
    n = len(graph)
    order = []
    sccp = []
    # First DFS pass records the finishing order of every vertex.
    kosaraju_dfs(graph, range(n), order, [])
    # Second pass explores the reversed graph in decreasing finish time;
    # each DFS tree of this forest is one strongly connected component.
    kosaraju_dfs(reverse(graph), order[::-1], [], sccp)
    # Reverse so components come out in topological order.
    return sccp[::-1]
java
/**
 * Sets the insets on the current constraints and re-applies them to the
 * managed component.
 *
 * @param insets the external padding to use
 * @return this packer, for call chaining
 */
public Packer inset(final Insets insets) {
    gc.insets = insets;
    setConstraints(comp, gc);
    return this;
}
python
def setAttributeType(self, namespaceURI, localName):
    '''set xsi:type

    Keyword arguments:
    namespaceURI -- namespace of attribute value
    localName -- name of new attribute value
    '''
    self.logger.debug('setAttributeType: (%s,%s)', namespaceURI, localName)
    value = localName
    # Qualify the value with the prefix declared for its namespace, if any.
    if namespaceURI:
        value = '%s:%s' % (self.getPrefix(namespaceURI), localName)
    xsi_prefix = self.getPrefix(self._xsi_nsuri)
    self._setAttributeNS(self._xsi_nsuri, '%s:type' % xsi_prefix, value)
python
def _send_update_port_statuses(self, port_ids, status):
    """Sends update notifications to set the operational status of the
    list of router ports provided. To make sure each notification doesn't
    exceed the RPC length, each message contains a maximum of
    MAX_PORTS_IN_BATCH port ids.

    :param port_ids: List of ports to update the status
    :param status: operational status to update
                   (ex: bc.constants.PORT_STATUS_ACTIVE)
    """
    if not port_ids:
        return
    MAX_PORTS_IN_BATCH = 50
    # Slice the ids so no single RPC message carries more than the batch cap.
    list_chunks_ports = [port_ids[i:i + MAX_PORTS_IN_BATCH]
                         for i in six.moves.range(0, len(port_ids), MAX_PORTS_IN_BATCH)]
    for chunk_ports in list_chunks_ports:
        self.plugin_rpc.send_update_port_statuses(self.context, chunk_ports, status)
java
/**
 * Marshalls every bound field of the given {@link GetProtectionStatusRequest}
 * into the protocol representation via the supplied marshaller.
 *
 * @param getProtectionStatusRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each bound field
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(GetProtectionStatusRequest getProtectionStatusRequest, ProtocolMarshaller protocolMarshaller) {
    if (getProtectionStatusRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(getProtectionStatusRequest.getPolicyId(), POLICYID_BINDING);
        protocolMarshaller.marshall(getProtectionStatusRequest.getMemberAccountId(), MEMBERACCOUNTID_BINDING);
        protocolMarshaller.marshall(getProtectionStatusRequest.getStartTime(), STARTTIME_BINDING);
        protocolMarshaller.marshall(getProtectionStatusRequest.getEndTime(), ENDTIME_BINDING);
        protocolMarshaller.marshall(getProtectionStatusRequest.getNextToken(), NEXTTOKEN_BINDING);
        protocolMarshaller.marshall(getProtectionStatusRequest.getMaxResults(), MAXRESULTS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Describes host reservations, applying the client's standard
 * before-execution hooks (request handlers, endpoint resolution) first.
 *
 * @param request the describe request
 * @return the service response
 */
@Override
public DescribeHostReservationsResult describeHostReservations(DescribeHostReservationsRequest request) {
    request = beforeClientExecution(request);
    return executeDescribeHostReservations(request);
}
python
def parse_0134_013b(v):
    """
    Parses the O2 Sensor Value (0134 - 013B) and returns two values parsed
    from it:
    1. Fuel-Air Equivalence [Ratio] as a float from 0 - 2
    2. Current in [mA] as a float from -128 - 128

    :param str v:
    :return tuple of float, float: ``(None, None)`` when the value cannot
        be parsed.
    """
    try:
        trim_val = trim_obd_value(v)
        # Per SAE J1979 these PIDs carry two 16-bit words: bytes A,B form
        # the equivalence-ratio word, bytes C,D the current word.  The
        # previous code read single bytes ([0:2] and [2:4], i.e. A and B),
        # which could never reach the documented 0-2 / +-128 mA ranges.
        val_ab = int(trim_val[0:4], 16)
        val_cd = int(trim_val[4:8], 16)
        # ratio = (2/65536) * (256*A+B);  current = (256*C+D)/256 - 128
        return (2 / 65536) * val_ab, val_cd / 256 - 128
    except ValueError:
        # Short or non-hex payloads fail the int() conversions above.
        return None, None
java
/**
 * Returns the currently active Brave {@link Tracing} instance, building a
 * new one for the given service when none is active yet.
 *
 * @param serviceName service name reported when a new instance is built
 * @param context the trace-context implementation to use for the new instance
 * @return the active or newly built Tracing instance
 */
public static Tracing getTracing(String serviceName, CurrentTraceContext context) {
    Tracing tracing = Tracing.current();
    if (tracing == null) {
        // TODO reporter based on prop/config
        tracing = getBuilder(serviceName, context).build();
    }
    return tracing;
}
python
def check(self, url_data):
    """Report HTTP response headers whose names match the configured
    prefixes as an info entry on the given url_data.
    """
    headers = []
    for name, value in url_data.headers.items():
        # self.prefixes is expected to be a string or tuple of prefixes
        # suitable for str.startswith - TODO confirm where it is configured.
        if name.startswith(self.prefixes):
            headers.append(name)
    if headers:
        # Render the matched headers as "Name=value" pairs in one info line.
        items = [u"%s=%s" % (name.capitalize(), url_data.headers[name]) for name in headers]
        info = u"HTTP headers %s" % u", ".join(items)
        url_data.add_info(info)
java
/**
 * Builds a {@link LoginCallback} wiring together the given view, session
 * and login error messages.
 *
 * @param pview the editor view used for error handling
 * @param psession the session the login applies to
 * @param ploginErrorMessage messages shown when login fails
 * @return a new LoginCallback for the given collaborators
 */
public static <T extends User, V extends EditorWithErrorHandling<?, ?>, //
        M extends LoginMessages, H extends HttpMessages> LoginCallback<T, V, M, H> buildLoginCallback(
        final V pview, final Session psession, final M ploginErrorMessage) {
    return new LoginCallback<>(pview, psession, ploginErrorMessage);
}
java
/**
 * Returns an HMap with every key present in {@code hMap} removed.
 *
 * @param hMap map whose keys are removed
 * @return the resulting HMap
 */
public HMap removeAll(HMap hMap) {
    // NOTE(review): presumably alter() applies the mutation to a copy of the
    // backing table so this instance stays unchanged - confirm in alter().
    return alter(t -> t.keySet().removeAll(hMap.table.keySet()));
}
python
def cross_v3(vec_a, vec_b):
    """Return the crossproduct between vec_a and vec_b."""
    # Standard right-handed cross product, component by component.
    cx = vec_a.y * vec_b.z - vec_a.z * vec_b.y
    cy = vec_a.z * vec_b.x - vec_a.x * vec_b.z
    cz = vec_a.x * vec_b.y - vec_a.y * vec_b.x
    return Vec3(cx, cy, cz)
python
def python_file_with_version(self):
    """Return Python filename with ``__version__`` marker, if configured.

    Enable this by adding a ``python-file-with-version`` option::

        [zest.releaser]
        python-file-with-version = reinout/maurits.py

    Return None when nothing has been configured.
    """
    # No config file at all: nothing can be configured.
    if self.config is None:
        return None
    try:
        return self.config.get('zest.releaser', 'python-file-with-version')
    except (NoSectionError, NoOptionError, ValueError):
        # Section/option missing or malformed: treat as "not configured".
        return None
java
/**
 * Records the tag's attributes (minus the name attribute) as a named
 * modification entry on the current class definition.
 *
 * @param attributes the tag attributes; ATTRIBUTE_NAME selects the entry
 * @return an empty string (the tag produces no template output)
 * @throws XDocletException declared by the template-tag contract
 */
public String processModification(Properties attributes) throws XDocletException {
    String name = attributes.getProperty(ATTRIBUTE_NAME);
    Properties mods = _curClassDef.getModification(name);
    String key;
    String value;
    if (mods == null) {
        // First modification under this name: create and register it.
        mods = new Properties();
        _curClassDef.addModification(name, mods);
    }
    // The name attribute is a selector, not part of the modification data.
    attributes.remove(ATTRIBUTE_NAME);
    for (Enumeration en = attributes.keys(); en.hasMoreElements();) {
        key = (String)en.nextElement();
        value = attributes.getProperty(key);
        mods.setProperty(key, value);
    }
    return "";
}
java
public long skip(long numToSkip) throws IOException { // REVIEW // This is horribly inefficient, but it ensures that we // properly skip over bytes via the TarBuffer... // byte[] skipBuf = new byte[8 * 1024]; long skip = numToSkip; while (skip > 0) { int realSkip = (int) (skip > skipBuf.length ? skipBuf.length : skip); int numRead = this.read(skipBuf, 0, realSkip); if (numRead == -1) { break; } skip -= numRead; } return (numToSkip - skip); }
python
def authenticate(username, password):
    """
    Authenticate against the API using the OAuth2 password grant.

    Returns:
        a dict with:
        pk: the pk of the user
        token: dict containing all the data from the api
            (access_token, refresh_token, expires_at etc.)
        user_data: dict containing user data such as first_name,
            last_name etc.
        if the authentication succeeds
    Raises Unauthorized if the authentication fails
    """
    session = MoJOAuth2Session(client=LegacyApplicationClient(client_id=settings.API_CLIENT_ID))
    # Exchange the user's credentials for an access token using the OAuth2
    # legacy-application (password) grant; the client authenticates with
    # HTTP Basic auth.
    token = session.fetch_token(token_url=get_request_token_url(), username=username, password=password, auth=HTTPBasicAuth(settings.API_CLIENT_ID, settings.API_CLIENT_SECRET), timeout=15, encoding='utf-8')
    # The session now carries the token, so this request is authenticated.
    user_data = session.get('/users/{username}/'.format(username=username)).json()
    return {'pk': user_data.get('pk'), 'token': token, 'user_data': user_data}
java
/**
 * Reads the source described by the options and returns a printable
 * summary of the detected column types.
 *
 * @param options fixed-width read options describing the source
 * @return the column-type summary string
 * @throws IOException if reading the source fails
 */
public String printColumnTypes(FixedWidthReadOptions options) throws IOException {
    // NOTE(review): the boolean argument to read() presumably limits the
    // read to structure/type detection - confirm against read()'s signature.
    Table structure = read(options, true).structure();
    return getTypeString(structure);
}
java
/**
 * Opens a BZip2-compressed archive and returns a reader that decompresses
 * it on the fly using the Wikipedia dump encoding.
 *
 * @param archivePath path to the .bz2 archive
 * @return a reader over the decompressed content (never null)
 * @throws ConfigurationException if the decompression stream cannot be opened
 */
private static InputStreamReader decompressWithBZip2( final String archivePath) throws ConfigurationException {
    Bzip2Archiver archiver = new Bzip2Archiver();
    try {
        return archiver.getDecompressionStream(archivePath, WIKIPEDIA_ENCODING);
    } catch (IOException e) {
        // Previously the exception was only printed and null returned,
        // deferring the failure to a NullPointerException at the call site.
        // Surface it through the already-declared ConfigurationException.
        throw new ConfigurationException("Cannot open decompression stream for " + archivePath, e);
    }
}
java
// Returns false only when ObjectInputStream.class already declares both the factory and validator fields
// (detected by scanning the class bytes with an ASM ClassVisitor); a missing resource or read failure defaults to true (inject).
public static boolean injectionNeeded() { // first check whether we need to modify ObjectInputStream boolean debugEnabled = PreMainUtil.isDebugEnabled(); InputStream is = String.class.getResourceAsStream("/java/io/ObjectInputStream.class"); if (is == null) { if (debugEnabled) { System.out.println("Could not locate /java/io/ObjectInputStream.class as a resource"); } } else { try { ClassReader cr = new ClassReader(is); final Set<String> fieldsToLookFor = new HashSet<String>(asList(FACTORY_FIELD, VALIDATOR_FIELD)); if (debugEnabled) { System.out.println("Searching ObjectInputStream.class bytes for fields: " + fieldsToLookFor); } cr.accept(new ClassVisitor(Opcodes.ASM7) { @Override public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) { if (PreMainUtil.isDebugEnabled()) { System.out.println("Found field '" + name + "' with description '" + desc + "'"); } fieldsToLookFor.remove(name); return null; } }, 0); if (fieldsToLookFor.isEmpty()) { if (debugEnabled) { System.out.println("Found all fields already in ObjectInputStream.class"); } return false; } } catch (IOException e) { if (debugEnabled) { System.out.println("Could not read /java/io/ObjectInputStream.class as a resource"); } } } return true; }
java
// Cross-reference metadata is not supported by this driver: verifies the connection is open, then always throws.
@Override public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { checkClosed(); throw SQLError.noSupport(); }
java
// Convenience wrapper: returns the first rounding matching the query, or null when none match.
public MonetaryRounding getRounding(RoundingQuery query) { Collection<MonetaryRounding> roundings = getRoundings(query); if (roundings.isEmpty()) { return null; } return roundings.iterator().next(); }
python
def validate_multiindex(self, obj):
    """Validate that the multi-index can be stored as a table.

    Resets the index and returns the flattened object together with the
    resolved level names (unnamed levels become ``level_<i>``).
    Raises ``ValueError`` when level names collide with column names.
    """
    levels = ["level_{0}".format(pos) if name is None else name
              for pos, name in enumerate(obj.index.names)]
    try:
        return obj.reset_index(), levels
    except ValueError:
        raise ValueError("duplicate names/columns in the multi-index when "
                         "storing as a table")
java
// When the details hold exactly one project, renames it to projectName (with the configured version) and copies its
// VIA components into `projects`; otherwise every project is only logged. Null arguments are logged and ignored.
private void addSingleProjectToProjects(ProjectsDetails projectsDetails, String projectName, ProjectsDetails projects) { if (projectsDetails == null || projects == null || projectName == null) { logger.debug("projectsDetails {} , projects {} , projectName {}", projectsDetails, projectName, projects); return; } if (projectsDetails.getProjects().size() == 1) { String projectVersion = config.getRequest().getProjectVersion(); AgentProjectInfo projectInfo = projectsDetails.getProjects().stream().findFirst().get(); projectInfo.setCoordinates(new Coordinates(null, projectName, projectVersion)); LinkedList<ViaComponents> viaComponents = projectsDetails.getProjectToViaComponents().get(projectInfo); projects.getProjectToViaComponents().put(projectInfo, viaComponents); } else { for (AgentProjectInfo projectInfo : projectsDetails.getProjects()) { logger.debug("Project not added - {}", projectInfo); } } }
java
// Sends the job-ending message to the client exactly once (guarded by driverTerminationHasBeenCommunicatedToClient),
// preferring the stored shutdown cause over the exception passed from clock.close(); logs warnings about unclean states.
@Deprecated public synchronized void sendJobEndingMessageToClient(final Optional<Throwable> exception) { if (!this.isClosing()) { LOG.log(Level.SEVERE, "Sending message in a state different that SHUTTING_DOWN or FAILING. " + "This is likely a illegal call to clock.close() at play. Current state: {0}", this.driverStatus); } if (this.driverTerminationHasBeenCommunicatedToClient) { LOG.log(Level.SEVERE, ".sendJobEndingMessageToClient() called twice. Ignoring the second call"); return; } // Log the shutdown situation if (this.shutdownCause.isPresent()) { LOG.log(Level.WARNING, "Sending message about an unclean driver shutdown.", this.shutdownCause.get()); } if (exception.isPresent()) { LOG.log(Level.WARNING, "There was an exception during clock.close().", exception.get()); } if (this.shutdownCause.isPresent() && exception.isPresent()) { LOG.log(Level.WARNING, "The driver is shutdown because of an exception (see above) and there was " + "an exception during clock.close(). Only the first exception will be sent to the client"); } // Send the earlier exception, if there was one. Otherwise, send the exception passed. this.clientConnection.send(getJobEndingMessage( this.shutdownCause.isPresent() ? this.shutdownCause : exception)); this.driverTerminationHasBeenCommunicatedToClient = true; }
java
// Rewrites `return` statements in the block: a trailing return becomes an assignment to resultName, any remaining
// returns become labeled breaks (wrapping the block in a new labeled root); optionally appends a dummy assignment
// when a result must always be set. Returns the (possibly new) root node.
private static Node replaceReturns( Node block, String resultName, String labelName, boolean resultMustBeSet) { checkNotNull(block); checkNotNull(labelName); Node root = block; boolean hasReturnAtExit = false; int returnCount = NodeUtil.getNodeTypeReferenceCount( block, Token.RETURN, new NodeUtil.MatchShallowStatement()); if (returnCount > 0) { hasReturnAtExit = hasReturnAtExit(block); // TODO(johnlenz): Simpler not to special case this, // and let it be optimized later. if (hasReturnAtExit) { convertLastReturnToStatement(block, resultName); returnCount--; } if (returnCount > 0) { // A label and breaks are needed. // Add the breaks replaceReturnWithBreak(block, null, resultName, labelName); // Add label Node name = IR.labelName(labelName).srcref(block); Node label = IR.label(name, block).srcref(block); Node newRoot = IR.block().srcref(block); newRoot.addChildToBack(label); // The label is now the root. root = newRoot; } } // If there wasn't an return at the end of the function block, and we need // a result, add one to the block. if (resultMustBeSet && !hasReturnAtExit && resultName != null) { addDummyAssignment(block, resultName); } return root; }
java
// Null-safe delete of a regular file: returns false for null, for non-regular files, or when the delete fails.
public static boolean delete(File file) { if (file == null) { return false; } else if (!file.isFile()) { return false; } return file.delete(); }
python
# Forward-only prediction over a DataIter on one device: binds a predictor for the iterator's shapes/dtypes,
# runs forward per batch, strips per-batch padding (batch.pad), and concatenates per-output arrays.
# Optionally also returns the concatenated input data and labels; single-element lists are unwrapped.
def predict(self, X, num_batch=None, return_data=False, reset=True): """Run the prediction, always only use one device. Parameters ---------- X : mxnet.DataIter num_batch : int or None The number of batch to run. Go though all batches if ``None``. Returns ------- y : numpy.ndarray or a list of numpy.ndarray if the network has multiple outputs. The predicted value of the output. """ X = self._init_iter(X, None, is_train=False) if reset: X.reset() data_shapes = X.provide_data data_names = [x[0] for x in data_shapes] type_dict = dict((key, value.dtype) for (key, value) in self.arg_params.items()) for x in X.provide_data: if isinstance(x, DataDesc): type_dict[x.name] = x.dtype else: type_dict[x[0]] = mx_real_t self._init_predictor(data_shapes, type_dict) batch_size = X.batch_size data_arrays = [self._pred_exec.arg_dict[name] for name in data_names] output_list = [[] for _ in range(len(self._pred_exec.outputs))] if return_data: data_list = [[] for _ in X.provide_data] label_list = [[] for _ in X.provide_label] i = 0 for batch in X: _load_data(batch, data_arrays) self._pred_exec.forward(is_train=False) padded = batch.pad real_size = batch_size - padded for o_list, o_nd in zip(output_list, self._pred_exec.outputs): o_list.append(o_nd[0:real_size].asnumpy()) if return_data: for j, x in enumerate(batch.data): data_list[j].append(x[0:real_size].asnumpy()) for j, x in enumerate(batch.label): label_list[j].append(x[0:real_size].asnumpy()) i += 1 if num_batch is not None and i == num_batch: break outputs = [np.concatenate(x) for x in output_list] if len(outputs) == 1: outputs = outputs[0] if return_data: data = [np.concatenate(x) for x in data_list] label = [np.concatenate(x) for x in label_list] if len(data) == 1: data = data[0] if len(label) == 1: label = label[0] return outputs, data, label else: return outputs
python
def image_url(self, pixel_size=None):
    """Return the URL of the user's avatar image.

    When *pixel_size* is given and the profile carries an image of that
    size, its URL is returned; otherwise the full-size image URL is used.
    Returns ``None`` when the raw payload has no profile at all.
    """
    if "profile" not in self._raw:
        return
    profile = self._raw["profile"]
    if pixel_size:
        sized_key = "image_%s" % pixel_size
        if sized_key in profile:
            return profile[sized_key]
    return profile[self._DEFAULT_IMAGE_KEY]
java
// Delegates string equality to LangUtils.isEqual — presumably a null-safe comparison; confirm against LangUtils.
@Override public boolean isEqual(String data1, String data2) { return LangUtils.isEqual(data1, data2); }
python
# Builds an Instance wrapper for the UUID and populates it in place from backend storage;
# the instance is returned whether or not stored data existed.
def find(self, instance_id): """ find an instance Create a new instance and populate it with data stored if it exists. Args: instance_id (str): UUID of the instance Returns: AtlasServiceInstance.Instance: An instance """ instance = AtlasServiceInstance.Instance(instance_id, self.backend) self.backend.storage.populate(instance) return instance
python
# Sums the shapelet basis functions chi_lr over all (l, r) coefficient pairs at polar coords (r, phi)
# and returns the real part. Scalar r starts the accumulator at float 0., array r at a complex zero vector.
# NOTE(review): the double loop indexes shapelets[nl][nr] with both bounds = len(shapelets), i.e. it
# assumes a square coefficient matrix, while the docstring says size (n,3) — confirm the expected shape.
def _shapeletOutput(self, r, phi, beta, shapelets): """ returns the the numerical values of a set of shapelets at polar coordinates :param shapelets: set of shapelets [l=,r=,a_lr=] :type shapelets: array of size (n,3) :param coordPolar: set of coordinates in polar units :type coordPolar: array of size (n,2) :returns: array of same size with coords [r,phi] :raises: AttributeError, KeyError """ if type(r) == float or type(r) == int or type(r) == type(np.float64(1)) or len(r) <= 1: values = 0. else: values = np.zeros(len(r), 'complex') for nl in range(0,len(shapelets)): #sum over different shapelets for nr in range(0,len(shapelets)): value = shapelets[nl][nr]*self._chi_lr(r, phi, nl, nr, beta) values += value return values.real
java
// Convenience overload delegating to the four-argument variant with the default transport.
protected void requestAttributeChange (String name, Object value, Object oldValue) { requestAttributeChange(name, value, oldValue, Transport.DEFAULT); }
python
def format_permission_object(self, permissions):
    """Formats a list of permission key names into something the SLAPI
    will respect.

    :param list permissions: A list of SLAPI permissions keyNames.
                             keyName of ALL will return all permissions.
    :returns: list of dictionaries that can be sent to the api to add or remove permissions
    :throws SoftLayerError: If any permission is invalid this exception will be thrown.
    """
    available_permissions = self.get_all_permissions()
    formatted = []
    for entry in permissions:
        # Accept dicts as returned straight from the API as well as bare keyNames.
        key_name = entry['keyName'] if isinstance(entry, dict) else entry
        key_name = key_name.upper()
        if key_name == 'ALL':
            return available_permissions
        # Only pass through keyNames that exist in the available set.
        if _keyname_search(available_permissions, key_name):
            formatted.append({'keyName': key_name})
        else:
            raise exceptions.SoftLayerError("'%s' is not a valid permission" % key_name)
    return formatted
python
def get_module_can_publish(cursor, id):
    """Return userids allowed to publish this book."""
    cursor.execute("""
SELECT DISTINCT user_id
FROM document_acl
WHERE uuid = %s AND permission = 'publish'""", (id,))
    return [row[0] for row in cursor.fetchall()]
python
# Symbolic Z80 register/flag tracker: applies one instruction's effect to the tracked register and
# flag state (None = unknown), falling back to reset() for any unrecognized opcode. Statement order
# is significant throughout (e.g. sp updates around push/pop), so the body is left untouched.
# NOTE(review): in the add/sub guard, `not is_number(self.get(o[1])) is None` parses as
# `not (... is None)`, which is always True for a boolean result — the `is None` looks like a bug;
# the intent was probably just `not is_number(self.get(o[1]))`. Confirm before changing.
def op(self, i, o): """ Tries to update the registers values with the given instruction. """ for ii in range(len(o)): if is_register(o[ii]): o[ii] = o[ii].lower() if i == 'ld': self.set(o[0], o[1]) return if i == 'push': if valnum(self.regs['sp']): self.set('sp', (self.getv(self.regs['sp']) - 2) % 0xFFFF) else: self.set('sp', None) self.stack.append(self.regs[o[0]]) return if i == 'pop': self.set(o[0], self.stack and self.stack.pop() or None) if valnum(self.regs['sp']): self.set('sp', (self.getv(self.regs['sp']) + 2) % 0xFFFF) else: self.set('sp', None) return if i == 'inc': self.inc(o[0]) return if i == 'dec': self.dec(o[0]) return if i == 'rra': self.rr('a') return if i == 'rla': self.rl('a') return if i == 'rlca': self.rlc('a') return if i == 'rrca': self.rrc('a') return if i == 'rr': self.rr(o[0]) return if i == 'rl': self.rl(o[0]) return if i == 'exx': tmp = self.regs['bc'] self.set('bc', "bc'") self.set("bc'", tmp) tmp = self.regs['de'] self.set('de', "de'") self.set("de'", tmp) tmp = self.regs['hl'] self.set('hl', "hl'") self.set("hl'", tmp) return if i == 'ex': tmp = self.get(o[1]) self.set(o[1], o[0]) self.set(o[0], tmp) return if i == 'xor': self.C = 0 if o[0] == 'a': self.set('a', 0) self.Z = 1 return if self.getv('a') is None or self.getv(o[0]) is None: self.Z = None self.set('a', None) return self.set('a', self.getv('a') ^ self.getv(o[0])) self.Z = int(self.get('a') == 0) return if i in ('or', 'and'): self.C = 0 if self.getv('a') is None or self.getv(o[0]) is None: self.Z = None self.set('a', None) return if i == 'or': self.set('a', self.getv('a') | self.getv(o[0])) else: self.set('a', self.getv('a') & self.getv(o[0])) self.Z = int(self.get('a') == 0) return if i in ('adc', 'sbc'): if len(o) == 1: o = ['a', o[0]] if self.C is None: self.set(o[0], 'None') self.Z = None self.set(o[0], None) return if i == 'sbc' and o[0] == o[1]: self.Z = int(not self.C) self.set(o[0], -self.C) return if self.getv(o[0]) is None or self.getv(o[1]) is None: 
self.set_flag(None) self.set(o[0], None) return if i == 'adc': val = self.getv(o[0]) + self.getv(o[1]) + self.C if is_8bit_register(o[0]): self.C = int(val > 0xFF) else: self.C = int(val > 0xFFFF) self.set(o[0], val) return val = self.getv(o[0]) - self.getv(o[1]) - self.C self.C = int(val < 0) self.Z = int(val == 0) self.set(o[0], val) return if i in ('add', 'sub'): if len(o) == 1: o = ['a', o[0]] if i == 'sub' and o[0] == o[1]: self.Z = 1 self.C = 0 self.set(o[0], 0) return if not is_number(self.get(o[0])) or not is_number(self.get(o[1])) is None: self.set_flag(None) self.set(o[0], None) return if i == 'add': val = self.getv(o[0]) + self.getv(o[1]) if is_8bit_register(o[0]): self.C = int(val > 0xFF) val &= 0xFF self.Z = int(val == 0) self.S = val >> 7 else: self.C = int(val > 0xFFFF) val &= 0xFFFF self.set(o[0], val) return val = self.getv(o[0]) - self.getv(o[1]) if is_8bit_register(o[0]): self.C = int(val < 0) val &= 0xFF self.Z = int(val == 0) self.S = val >> 7 else: self.C = int(val < 0) val &= 0xFFFF self.set(o[0], val) return if i == 'neg': if self.getv('a') is None: self.set_flag(None) return val = -self.getv('a') self.set('a', val) self.Z = int(not val) val &= 0xFF self.S = val >> 7 return if i == 'scf': self.C = 1 return if i == 'ccf': if self.C is not None: self.C = int(not self.C) return if i == 'cpl': if self.getv('a') is None: return self.set('a', 0xFF ^ self.getv('a')) return # Unknown. Resets ALL self.reset()
java
// Registers the editor for its property on every mapped subclass of `domain`, special-casing participant
// properties on Conversion/Control (both L2 and L3 spellings), which are skipped as restricted;
// object-property editors additionally get their inverse editors registered.
protected void registerEditorsWithSubClasses(PropertyEditor editor, Class<? extends BioPAXElement> domain) { for (Class<? extends BioPAXElement> c : classToEditorMap.keySet()) { if (domain.isAssignableFrom(c)) { //workaround for participants - can be replaced w/ a general // annotation based system. For the time being, I am just handling it //as a special case if ((editor.getProperty().equals("PARTICIPANTS") && (conversion.class.isAssignableFrom(c) || control.class.isAssignableFrom(c))) || (editor.getProperty().equals("participant") && (Conversion.class.isAssignableFrom(c) || Control.class.isAssignableFrom(c)))) { if (log.isDebugEnabled()) { log.debug("skipping restricted participant property"); } } else { classToEditorMap.get(c).put(editor.getProperty(), editor); } } } if (editor instanceof ObjectPropertyEditor) { registerInverseEditors((ObjectPropertyEditor) editor); } }
java
// Builds an HBase Delete keyed on the URI string, but only when the named table resolves; returns null otherwise.
public Delete createDelete(URI id, String tableName) throws IOException { Delete del = null; Object tTable = getTable(tableName); if (tTable != null) { del = new Delete(Bytes.toBytes(id.toString())); } return del; }
java
// Scans the element's enclosed members for an @Id annotation and derives the access type from the
// annotated member's placement; returns null when no member carries the annotation.
private static AccessType getAccessTypeInCaseElementIsRoot(TypeElement searchedElement, Context context) { List<? extends Element> myMembers = searchedElement.getEnclosedElements(); for ( Element subElement : myMembers ) { List<? extends AnnotationMirror> entityAnnotations = context.getElementUtils().getAllAnnotationMirrors( subElement ); for ( Object entityAnnotation : entityAnnotations ) { AnnotationMirror annotationMirror = (AnnotationMirror) entityAnnotation; if ( isIdAnnotation( annotationMirror ) ) { return getAccessTypeOfIdAnnotation( subElement ); } } } return null; }
python
# Copies a file object into a named temp file under TEMPDIR (delete=False so the path stays valid
# after the with-block closes it), yields the path, and removes the file afterwards.
# NOTE(review): the docstring calls this a context manager — it relies on @contextlib.contextmanager
# being applied at the (not visible here) decoration site; confirm.
def fobj_to_tempfile(f, suffix=''): """Context manager which copies a file object to disk and return its name. When done the file is deleted. """ with tempfile.NamedTemporaryFile( dir=TEMPDIR, suffix=suffix, delete=False) as t: shutil.copyfileobj(f, t) try: yield t.name finally: os.remove(t.name)
python
# Presence/absence bar chart: orders OTUs by how many groups contain them (optionally dropping OTUs
# common to all groups via filter_common), draws one half-height marker per (group, OTU) occurrence
# as a PolyCollection, labels axes with OTU ids / group names, and saves to out_fp or shows the figure.
# NOTE(review): `fontsize` and the helpers `merge_dicts`/`translate` are defined elsewhere in the module;
# `shared_table` is built but never written out (see the TODO).
def plot_overlaps(otus, group_otus, group_colors, out_fp, fig_size=None, title="", filter_common=False): """ Given a list of OTUs and a number of groups containing subsets of the OTU set, plot a presence/absence bar chart showing which species belong to which groups. :type otus: list :param otus: A list of OTU identifiers (names or otherwise) ordered by greatest presence across the groups, i.e. those that come first appear in all the groups, next come the OTUs that appear in n-1 groups, etc... :type group_otus: OrderedDict :param group_otus: A dictionary of OTU identifiers (subset of otus) keyed on group name (for display purposes) in display order (bottom to top). :type group_colors: dict :param group_colors: Color assignment for each group. """ def sort_order_group(sp): """ Assign a score (for use with sorting) to each OTU based on the number of groups they occur in and the order within those groups (order priority as set by group_otus). """ count = 0 rank = 0 in_prev = True max_penalty = len(group_otus) for i, grp in enumerate(group_otus): if sp in group_otus[grp]: count += 1 if in_prev: rank += 1 else: rank -= max_penalty - i in_prev = False return count, rank if filter_common: otus = [otu for otu in otus if sort_order_group(otu)[0] < len(group_otus)] otus = sorted(otus, key=sort_order_group, reverse=True) #TODO: fill shared_table during the double loop below and add arg to enable output to file shared_table = [merge_dicts({grp: None for grp in group_otus},{"OTU": otu_id}) for otu_id in otus] fig, ax = plt.subplots(figsize=fig_size) ax.xaxis.set_major_locator(MaxNLocator(nbins=len(otus), integer=True)) # rectangle prototype modified for each plot marker base = [(0,0),(0,0.5),(0,0.5),(0,0)] y_step = 1 x_step = 2 bars = [] bar_colors = [] for i, grp in enumerate(group_otus): for j, otu in enumerate(otus): if otu in group_otus[grp]: bars.append(translate(base, j*x_step+0.5, i*y_step)) bar_colors.append(group_colors[grp]) black = (0,0,0,1) collection = 
PolyCollection( verts=bars, facecolors = bar_colors, edgecolors = (black,), linewidths = (1,), transOffset = ax.transData, zorder=3 ) ax.add_collection(collection) # ax.legend([plt.Rectangle((0, 0), 1, 1, fc=color) for color in group_colors.values()], # group_colors.keys(), loc="best") # Title axttl = ax.title axttl.set_position([.5, 1.05]) ax.set_title(title, {"fontsize": fontsize*1.5, "fontweight": "bold"}) plt.xticks(range(1, len(otus)*x_step, x_step), otus, rotation="vertical") plt.yticks([i-0.75 for i in range(1, len(group_otus)*y_step+1, y_step)], group_otus.keys(), rotation="horizontal") ax.margins(0.05) ax.yaxis.set_visible(True) ax.set_xlim((0, len(otus)*x_step)) # save or display result if out_fp: plt.savefig(out_fp, facecolors="0.9", edgecolor="none", bbox_inches="tight", pad_inches=0.1) else: plt.show()
java
// Loads the default configuration for the class loader, cached per-loader under the "load" key.
public static Config load(final ClassLoader loader) { return ConfigImpl.computeCachedConfig(loader, "load", new Callable<Config>() { @Override public Config call() { return loadDefaultConfig(loader); } }); }
java
// Adds the partition key — the cell name serialized from its ByteBuffer — as a stored string field on the Lucene document.
public final void addFields(Document document, CellName cellName) { String serializedKey = ByteBufferUtils.toString(cellName.toByteBuffer()); Field field = new StringField(FIELD_NAME, serializedKey, Field.Store.YES); document.add(field); }
python
def ASHRAE_k(ID):
    r'''Return the thermal conductivity of a building or insulating
    material from the ASHRAE table in [1]_.

    Conductivity here is independent of temperature. Materials listed at
    several densities have separate IDs; pick the appropriate one.

    Parameters
    ----------
    ID : str
        ID corresponding to a material in the dictionary `ASHRAE`

    Returns
    -------
    k : float
        Thermal conductivity of the material, [W/m/K]

    Examples
    --------
    >>> ASHRAE_k(ID='Mineral fiber')
    0.036

    References
    ----------
    .. [1] ASHRAE Handbook: Fundamentals. American Society of Heating,
       Refrigerating and Air-Conditioning Engineers, Incorporated, 2013.
    '''
    entry = ASHRAE[ID]
    conductivity = entry[2]
    if conductivity:
        return conductivity
    # No direct conductivity listed: derive it from the tabulated thermal
    # resistance and thickness (stored in mm, converted here to m).
    resistance = entry[3]
    thickness = entry[4] / 1000.
    return R_to_k(resistance, thickness)
python
# Finds the parameter value where the profile log-likelihood has dropped by `dlnl` from its value at
# the MLE, by evaluating the interpolator on a grid (log-spaced above the MLE, linear below) and
# inverting with np.interp rather than a root finder (see the inline notes on why).
# Guards against a non-positive MLE by falling back to the interpolator's domain edge.
def getDeltaLogLike(self, dlnl, upper=True): """Find the point at which the log-likelihood changes by a given value with respect to its value at the MLE.""" mle_val = self.mle() # A little bit of paranoia to avoid zeros if mle_val <= 0.: mle_val = self._interp.xmin if mle_val <= 0.: mle_val = self._interp.x[1] log_mle = np.log10(mle_val) lnl_max = self.fn_mle() # This ultra-safe code to find an absolute maximum # fmax = self.fn_mle() # m = (fmax-self.interp.y > 0.1+dlnl) & (self.interp.x>self._mle) # if sum(m) == 0: # xmax = self.interp.x[-1]*10 # else: # xmax = self.interp.x[m][0] # Matt has found that it is faster to use an interpolator # than an actual root-finder to find the root, # probably b/c of python overhead. # That would be something like this: # rf = lambda x: self._interp(x)+dlnl-lnl_max # return opt.brentq(rf,self._mle,self._interp.xmax, # xtol=1e-10*np.abs(self._mle)) if upper: x = np.logspace(log_mle, np.log10(self._interp.xmax), 100) retVal = np.interp(dlnl, self.interp(x) - lnl_max, x) else: x = np.linspace(self._interp.xmin, self._mle, 100) retVal = np.interp(dlnl, self.interp(x)[::-1] - lnl_max, x[::-1]) return retVal
java
// Validates that `index` is a legal position within a collection of `size` elements; throws
// IllegalPositionIndexException otherwise and returns the index unchanged for fluent use.
@Throws(IllegalPositionIndexException.class) public static int positionIndex(final int index, final int size) { final boolean isIndexValid = (size >= 0) && (index >= 0) && (index < size); if (!isIndexValid) { throw new IllegalPositionIndexException(index, size); } return index; }
java
// Creates a WebDAV COPY method with the source and destination paths resolved against the repository URL.
public HttpCopy createCopyMethod(final String sourcePath, final String destinationPath) { return new HttpCopy(repositoryURL + sourcePath, repositoryURL + destinationPath); }
python
# Types an attribute-access node: when the resolved object is a literal, use the concrete ctype of its
# signature (NamedType); otherwise fall back to a decltype of the dotted path (DeclType).
def visit_Attribute(self, node): """ Compute typing for an attribute node. """ obj, path = attr_to_path(node) # If no type is given, use a decltype if obj.isliteral(): typename = pytype_to_ctype(obj.signature) self.result[node] = self.builder.NamedType(typename) else: self.result[node] = self.builder.DeclType('::'.join(path) + '{}')
java
// Reports locally buffered bytes first; otherwise defers to the underlying stream, or -1 when no source is attached.
@Override public int available() throws IOException { if (_readOffset < _readLength) { return _readLength - _readOffset; } StreamImpl source = _source; if (source != null) { return source.getAvailable(); } else { return -1; } }
python
def namedb_read_version(path):
    """Read the schema version stored in the database at *path*.

    Opens the SQLite db, queries the ``db_version`` table and returns the
    version string. Any failure (missing table, empty table, unreadable
    db) is treated as a legacy database and yields ``'0.0.0.0'``.
    The connection is always closed.
    """
    con = sqlite3.connect(path, isolation_level=None, timeout=2**30)
    con.row_factory = namedb_row_factory

    sql = 'SELECT version FROM db_version;'
    args = ()

    try:
        rowdata = namedb_query_execute(con, sql, args, abort=False)
        row = rowdata.fetchone()
        return row['version']
    except Exception:
        # Best-effort fallback: no version table/row means a pre-versioning db.
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        return '0.0.0.0'
    finally:
        con.close()
python
def get_map(self, create_html=True):
    """Build a folium map of WWLN strike data.

    Strike data should be a pd.DF from the WWLN data files read by
    read_WWLN(). Only the first 1,000 rows are mapped (preview feature).

    :param create_html: when True, also saves the map to ``map_<date>.html``
                        named after the first strike's date.
    :returns: the ``folium.Map`` with one clustered marker per strike.
    """
    strike_data = self.df
    num_rows = len(self.df)
    if num_rows > 1000:
        # BUG FIX: the original joined these string literals with the `/`
        # operator, which raises TypeError for str operands; implicit
        # adjacent-literal concatenation is the intended behaviour.
        print("Warning, you have requested lots of data be mapped."
              " Limiting your request to the first 1,000 rows"
              " as this is currently only a preview feature.")
        strike_data = self.df[0:1000]
    m = folium.Map(location=[0.0, 0.01], zoom_start=2)
    marker_cluster = folium.MarkerCluster().add_to(m)
    for event in strike_data.index:
        self.add_to_map(map_obj=m, date_time=strike_data.dt[event],
                        cluster_obj=marker_cluster,
                        lat=strike_data.geometry[event].y,
                        lon=strike_data.geometry[event].x,
                        key=event)
    if create_html:
        data_date = strike_data.dt[0].split()[0]
        m.save('map_{0}.html'.format(data_date))
    return m
python
def _reprJSON(self): """Returns a JSON serializable represenation of a ``Ci`` class instance. Use :func:`maspy.core.Ci._fromJSON()` to generate a new ``Ci`` instance from the return value. :returns: a JSON serializable python object """ return {'__Ci__': (self.id, self.specfile, self.dataProcessingRef, self.precursor, self.product, self.params, self.attrib, self.arrayInfo ) }
python
# Builds a Firefox profile with auto-update disabled and, optionally, the Firebug extension
# (net panel on by default, redirect responses persisted) and the NetExport extension
# (auto-exporting HAR files, with response bodies, into ./har which is created if missing).
def setup_profile(self, firebug=True, netexport=True): """ Setup the profile for firefox :param firebug: whether add firebug extension :param netexport: whether add netexport extension :return: a firefox profile object """ profile = webdriver.FirefoxProfile() profile.set_preference("app.update.enabled", False) if firebug: profile.add_extension(os.path.join(self.cur_path, 'extensions/firebug-2.0.8.xpi')) profile.set_preference("extensions.firebug.currentVersion", "2.0.8") profile.set_preference("extensions.firebug.allPagesActivation", "on") profile.set_preference("extensions.firebug.defaultPanelName", "net") profile.set_preference("extensions.firebug.net.enableSites", True) profile.set_preference("extensions.firebug.delayLoad", False) profile.set_preference("extensions.firebug.onByDefault", True) profile.set_preference("extensions.firebug.showFirstRunPage", False) profile.set_preference("extensions.firebug.net.defaultPersist", True) # persist all redirection responses if netexport: har_path = os.path.join(self.cur_path, "har") if not os.path.exists(har_path): os.mkdir(har_path) profile.add_extension(os.path.join(self.cur_path, 'extensions/netExport-0.9b7.xpi')) profile.set_preference("extensions.firebug.DBG_NETEXPORT", True) profile.set_preference("extensions.firebug.netexport.alwaysEnableAutoExport", True) profile.set_preference("extensions.firebug.netexport.defaultLogDir", har_path) profile.set_preference("extensions.firebug.netexport.includeResponseBodies", True) return profile
java
// Cancels the persistent EJB timer: verifies the calling bean may use timer-service methods, removes the
// scheduled task, wraps any removal failure in a TimerServiceException, and throws
// NoSuchObjectLocalException when the task was already gone.
@Override @Trivial public void cancel() throws IllegalStateException, NoSuchObjectLocalException, EJBException { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "cancel: " + this); boolean removed; // Determine if the calling bean is in a state that allows timer service // method access - throws IllegalStateException if not allowed. checkTimerAccess(); try { removed = persistentTimerRuntime.remove(taskId); } catch (Throwable ex) { throw newTimerServiceException(ex); } if (!removed) { throw newNoSuchObjectLocalException(); } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "cancel: successful"); }
python
def get_segmentid_range(self, orchestrator_id):
    """Fetch the segment-id range for an orchestrator from DCNM.

    Returns the decoded JSON payload on a successful response,
    ``None`` otherwise.
    """
    url = "{0}/{1}".format(self._segmentid_ranges_url, orchestrator_id)
    response = self._send_request('GET', url, None, 'segment-id range')
    if response and response.status_code in self._resp_ok:
        return response.json()
python
# Downloads the results of an async analytics job: splits the job URL into domain and path,
# then streams the raw response body through the account's client.
def async_stats_job_data(klass, account, url, **kwargs): """ Returns the results of the specified async job IDs """ resource = urlparse(url) domain = '{0}://{1}'.format(resource.scheme, resource.netloc) response = Request(account.client, 'get', resource.path, domain=domain, raw_body=True, stream=True).perform() return response.body
python
# Revokes a publisher user's access by access_id and raises ValueError (path + reason) on any non-200 reply.
# NOTE(review): this sends a request body with GET via requests' `data=` — the API presumably expects
# that, but a POST would be conventional for a state-changing call; confirm against the API docs.
def revoke_user_access( self, access_id ): ''' Takes an access_id, probably obtained from the get_access_list structure, and revokes that access. No return value, but may raise ValueError. ''' path = "/api/v3/publisher/user/access/revoke" data = { 'api_token': self.api_token, 'access_id': access_id, } r = requests.get( self.base_url + path, data=data ) if r.status_code != 200: raise ValueError( path + ":" + r.reason )
python
# Capability-gated session factory: raises errors.Unimplemented unless the service reports
# supports_gradebook_column_gradebook(); otherwise constructs the session with this runtime.
def get_gradebook_column_gradebook_session(self): """Gets the session for retrieving gradebook column to gradebook mappings. return: (osid.grading.GradebookColumnGradebookSession) - a ``GradebookColumnGradebookSession`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_gradebook_column_gradebook()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_gradebook_column_gradebook()`` is ``true``.* """ if not self.supports_gradebook_column_gradebook(): raise errors.Unimplemented() # pylint: disable=no-member return sessions.GradebookColumnGradebookSession(runtime=self._runtime)
java
// Trace-level logging delegate: forwards message and throwable to the underlying logger.
@Override public void trace(String o, Throwable t) { log.trace(o, t); }
java
// Builder-pattern factory: applies the configuration consumer to a fresh builder and returns the built function.
@Nonnull public static LBoolToByteFunction boolToByteFunctionFrom(Consumer<LBoolToByteFunctionBuilder> buildingFunction) { LBoolToByteFunctionBuilder builder = new LBoolToByteFunctionBuilder(); buildingFunction.accept(builder); return builder.build(); }
java
// Factory for a HullWhiteModel. When calibration products are supplied, the volatility model must
// implement ShortRateVolatilityModelCalibrateable; calibration parameters are taken from the
// properties map and a clone of the model with the calibrated volatility model is returned.
public static HullWhiteModel of( AbstractRandomVariableFactory randomVariableFactory, TimeDiscretization liborPeriodDiscretization, AnalyticModel analyticModel, ForwardCurve forwardRateCurve, DiscountCurve discountCurve, ShortRateVolatilityModel volatilityModel, CalibrationProduct[] calibrationProducts, Map<String, Object> properties ) throws CalculationException { HullWhiteModel model = new HullWhiteModel(randomVariableFactory, liborPeriodDiscretization, analyticModel, forwardRateCurve, discountCurve, volatilityModel, properties); // Perform calibration, if data is given if(calibrationProducts != null && calibrationProducts.length > 0) { ShortRateVolatilityModelCalibrateable volatilityModelParametric = null; try { volatilityModelParametric = (ShortRateVolatilityModelCalibrateable)volatilityModel; } catch(Exception e) { throw new ClassCastException("Calibration restricted to covariance models implementing HullWhiteModelCalibrateable."); } Map<String,Object> calibrationParameters = null; if(properties != null && properties.containsKey("calibrationParameters")) { calibrationParameters = (Map<String,Object>)properties.get("calibrationParameters"); } ShortRateVolatilityModelCalibrateable volatilityModelCalibrated = volatilityModelParametric.getCloneCalibrated(model, calibrationProducts, calibrationParameters); HullWhiteModel modelCalibrated = model.getCloneWithModifiedVolatilityModel(volatilityModelCalibrated); return modelCalibrated; } else { return model; } }
python
def send_video(self, user_id, media_id, title=None, description=None,
               account=None):
    """Send a video message to a user (custom/customer-service message).

    See http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

    :param user_id: recipient's openid (the ``source`` of a received `Message`)
    :param media_id: media id of the video, e.g. from :func:`upload_media`
    :param title: optional video title
    :param description: optional video description
    :param account: optional customer-service account
    :return: the decoded JSON response

    Usage::

        from wechatpy import WeChatClient

        client = WeChatClient('appid', 'secret')
        res = client.message.send_video('openid', 'media_id',
                                        'title', 'description')
    """
    video = {'media_id': media_id}
    if title:
        video['title'] = title
    if description:
        video['description'] = description
    payload = {
        'touser': user_id,
        'msgtype': 'video',
        'video': video,
    }
    return self._send_custom_message(payload, account=account)
python
# Builds the query parameters for a CloudFront signed URL: a canned policy ("Expires") when only
# expire_time is given, otherwise a custom policy ("Policy", base64 of the policy JSON; policy_url
# defaults to the full url). In both cases the policy is signed and the URL-safe base64 signature
# and key pair id are added as "Signature" and "Key-Pair-Id".
def _create_signing_params(self, url, keypair_id, expire_time=None, valid_after_time=None, ip_address=None, policy_url=None, private_key_file=None, private_key_string=None): """ Creates the required URL parameters for a signed URL. """ params = {} # Check if we can use a canned policy if expire_time and not valid_after_time and not ip_address and not policy_url: # we manually construct this policy string to ensure formatting # matches signature policy = self._canned_policy(url, expire_time) params["Expires"] = str(expire_time) else: # If no policy_url is specified, default to the full url. if policy_url is None: policy_url = url # Can't use canned policy policy = self._custom_policy(policy_url, expires=expire_time, valid_after=valid_after_time, ip_address=ip_address) encoded_policy = self._url_base64_encode(policy) params["Policy"] = encoded_policy #sign the policy signature = self._sign_string(policy, private_key_file, private_key_string) #now base64 encode the signature (URL safe as well) encoded_signature = self._url_base64_encode(signature) params["Signature"] = encoded_signature params["Key-Pair-Id"] = keypair_id return params
python
def recruit_participants(self, n=1):
    """Recruit n additional participants by extending the MTurk HIT.

    Recruitment is gated by the ``auto_recruit`` environment variable:
    when it is not the string ``'true'``, only a notice is printed.

    :param n: number of extra assignments to add to the HIT (default 1).
    """
    # Deployment-level switch; any value other than 'true' suppresses recruitment.
    auto_recruit = os.environ['auto_recruit'] == 'true'

    if auto_recruit:
        print "Starting Wallace's recruit_participants."

        # Look up the HIT id from the first participant row.
        # NOTE(review): assumes at least one participant exists -- first()
        # returning None would raise AttributeError here; confirm callers.
        hit_id = str(
            Participant.query.
            with_entities(Participant.hitid).first().hitid)
        print "hit_id is {}.".format(hit_id)

        # Choose the MTurk endpoint based on the sandbox config flag.
        is_sandbox = self.config.getboolean(
            'Shell Parameters', 'launch_in_sandbox_mode')

        if is_sandbox:
            host = 'mechanicalturk.sandbox.amazonaws.com'
        else:
            host = 'mechanicalturk.amazonaws.com'

        mturkparams = dict(
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
            host=host)
        self.mtc = MTurkConnection(**mturkparams)

        # Add n assignments to the existing HIT.
        self.mtc.extend_hit(
            hit_id,
            assignments_increment=int(n or 0))

        # Also push the HIT's expiration out by the configured duration.
        # The config value is multiplied by 3600, i.e. treated as hours
        # and converted to seconds for extend_hit.
        expiration_increment = self.config.get('HIT Configuration',
                                               'duration')

        self.mtc.extend_hit(
            hit_id,
            expiration_increment=int(
                float(expiration_increment or 0) * 3600))
    else:
        print(">>>> auto_recruit set to {}: recruitment suppressed"
              .format(auto_recruit))
python
def user_assigned_policies(user):
    """Return sequence of policies assigned to a user (or the anonymous
    user if ``user`` is ``None``).

    Each element of the returned list is either a role object, a policy
    object, or a ``(policy, variables_dict)`` pair when the policy
    instance carries non-empty variables.  Results are memoised in the
    cache under a per-user key.

    (Also installed as ``assigned_policies`` method on ``User`` model.
    """
    key = user_cache_key(user)
    cached = cache.get(key)
    if cached is not None:
        return cached
    # Anonymous users have a dedicated permission set row.
    if user is None:
        pset = PermissionSet.objects.filter(anonymous_user=True).first()
    else:
        pset = user.permissionset.first()
    if pset is None:
        return []
    res = []
    # After a role is appended, consecutive policy instances that carry the
    # same role and the same variables are skipped, so the role is emitted
    # once rather than once per policy instance it covers.
    skip_role_policies = False
    skip_role = None
    skip_role_variables = None
    for pi in pset.policyinstance_set.select_related('policy', 'role'):
        if skip_role_policies:
            if pi.role == skip_role and pi.variables == skip_role_variables:
                # Belongs to the role already emitted; suppress it.
                continue
            else:
                # Different role/variables: stop skipping and process normally.
                skip_role_policies = False
        if pi.role:
            res.append(pi.role)
            # Remember which role we just emitted so its associated policy
            # instances (which follow in iteration order) can be skipped.
            skip_role = pi.role
            skip_role_variables = pi.variables
            skip_role_policies = True
        else:
            if pi.variables != '{}':
                # Variables are stored as a JSON string; decode for callers.
                res.append((pi.policy, json.loads(pi.variables)))
            else:
                res.append(pi.policy)
    cache.set(key, res)
    return res
python
def accumulate_from_superclasses(cls, propname):
    ''' Traverse the class hierarchy and accumulate the special sets of names
    ``MetaHasProps`` stores on classes:

    Args:
        name (str) : name of the special attribute to collect.

        Typically meaningful values are: ``__container_props__``,
        ``__properties__``, ``__properties_with_refs__``

    '''
    cachename = "__cached_all" + propname
    # The cache must belong to cls itself, never be inherited from a base
    # class, so consult cls.__dict__ directly rather than hasattr()/getattr().
    if cachename not in cls.__dict__:
        collected = set().union(*(
            getattr(ancestor, propname)
            for ancestor in inspect.getmro(cls)
            if issubclass(ancestor, HasProps) and hasattr(ancestor, propname)
        ))
        setattr(cls, cachename, collected)
    return cls.__dict__[cachename]
python
def resolvables_from_iterable(iterable, builder, interpreter=None):
    """Convert an iterable of resolvable-like objects into Resolvable objects.

    :param iterable: An iterable of :class:`Resolvable`, :class:`Requirement`,
      :class:`Package`, or `str` to map into an iterable of
      :class:`Resolvable` objects.
    :returns: A list of :class:`Resolvable` objects.
    """
    def as_resolvable(item):
        # Already a Resolvable: pass it through untouched.
        if isinstance(item, Resolvable):
            return item
        # A Requirement: wrap it, building options keyed by its project key.
        if isinstance(item, Requirement):
            return ResolvableRequirement(item, builder.build(item.key))
        # A Package: wrap it, building options keyed by its name.
        if isinstance(item, Package):
            return ResolvablePackage(item, builder.build(item.name))
        # A raw string: let Resolvable's own parser decide what it denotes.
        if isinstance(item, compatibility_string):
            return Resolvable.get(item, builder, interpreter=interpreter)
        raise ValueError('Do not know how to resolve %s' % type(item))

    return [as_resolvable(item) for item in iterable]
python
def command_umount(self, system_id, *system_ids):
    """Unmounts the specified sftp system.

    Usage: sftpman umount {id}..
    """
    # Attempt every id; remember failures instead of aborting early.
    failed = False
    for current_id in (system_id,) + system_ids:
        try:
            system = SystemModel.create_by_id(current_id, self.environment)
            SystemControllerModel(system, self.environment).unmount()
        except SftpConfigException as e:
            sys.stderr.write('Cannot unmount %s: %s\n\n' % (current_id, str(e)))
            failed = True
    # Non-zero exit status if any system could not be unmounted.
    if failed:
        sys.exit(1)
java
/**
 * Gets all extensions of a Virtual Machine, without an OData expand expression,
 * emitting the wrapped {@code ServiceResponse}.
 *
 * @param resourceGroupName The name of the resource group.
 * @param vmName The name of the virtual machine.
 * @return an {@link Observable} emitting the {@code ServiceResponse} with the
 *         extensions list result.
 */
public Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>> getExtensionsWithServiceResponseAsync(String resourceGroupName, String vmName) {
    // Fail fast on required parameters before issuing the request.
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (vmName == null) {
        throw new IllegalArgumentException("Parameter vmName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    // This overload does not apply an $expand expression.
    final String expand = null;
    return service.getExtensions(resourceGroupName, vmName, this.client.subscriptionId(), expand, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>>>() {
            @Override
            public Observable<ServiceResponse<VirtualMachineExtensionsListResultInner>> call(Response<ResponseBody> response) {
                try {
                    // Deserialize the raw HTTP response into the typed service response.
                    ServiceResponse<VirtualMachineExtensionsListResultInner> clientResponse = getExtensionsDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface deserialization/validation failures through the Observable.
                    return Observable.error(t);
                }
            }
        });
}
java
/**
 * Builds a query matching nodes whose given field is equal to the supplied value.
 *
 * @param constraintValue the value the field must be equal to
 * @param fieldName the name of the field to compare
 * @param caseOperation case transformation applied to the value
 * @return the resulting query
 */
public static Query createQueryForNodesWithFieldEqualTo(String constraintValue,
                                                        String fieldName,
                                                        Function<String, String> caseOperation) {
    // Equality is simply the EQ comparison operator applied to the field.
    final FieldComparison equality = FieldComparison.EQ;
    return equality.createQueryForNodesWithField(constraintValue, fieldName, caseOperation);
}
java
/**
 * Adds {@code delta} to the component at {@code index} and returns the
 * updated component value, keeping the cached {@code magnitude}
 * accumulator in sync.
 *
 * NOTE(review): {@code magnitude} appears to accumulate the sum of squared
 * components (squared Euclidean norm) -- confirm against the field's other uses.
 *
 * @param index the index of the component to modify
 * @param delta the amount to add to that component
 * @return the new value of the component at {@code index}
 */
public double add(int index, double delta) {
    final double previous = vector[index];
    // Remove the old component's contribution, then add the new one, in the
    // same operation order as before so floating-point results are unchanged.
    magnitude -= previous * previous;
    final double updated = previous + delta;
    vector[index] = updated;
    magnitude += updated * updated;
    return updated;
}