language: stringclasses (2 values)
func_code_string: stringlengths (63 – 466k)
python
def rate_limited(max_per_hour: int, *args: Any) -> Callable[..., Any]:
    """Decorator to limit function to N calls/hour."""
    min_interval = 3600.0 / float(max_per_hour)

    def _decorate(func: Callable[..., Any]) -> Callable[..., Any]:
        things = [func.__name__]
        things.extend(args)
        key = "".join(things)
        LOG.debug(f"Rate limiter called for '{key}'.")
        if key not in LAST_CALLED:
            LOG.debug(f"Initializing entry for '{key}'.")
            LAST_CALLED[key] = 0.0

        def _rate_limited_function(*args: Any, **kwargs: Any) -> Any:
            last_called = LAST_CALLED[key]
            now = time.time()
            elapsed = now - last_called
            remaining = min_interval - elapsed
            LOG.debug(f"Rate limiter last called for '{key}' at {last_called}.")
            LOG.debug(f"Remaining cooldown time for '{key}' is {remaining}.")

            if remaining > 0 and last_called > 0.0:
                LOG.info(f"Self-enforced rate limit hit, sleeping {remaining} seconds.")
                for _ in tui.progress.bar(range(math.ceil(remaining))):
                    time.sleep(1)

            LAST_CALLED[key] = time.time()
            ret = func(*args, **kwargs)
            LOG.debug(f"Updated rate limiter last called for '{key}' to {LAST_CALLED[key]}.")
            return ret

        return _rate_limited_function

    return _decorate
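A minimal usage sketch of the decorator above. The function name and limit are hypothetical, and the module-level LAST_CALLED, LOG, tui, time, and math names from the snippet are assumed to be in scope:

# Hypothetical usage; min_interval works out to 3600 / 30 = 120 seconds.
@rate_limited(30)
def fetch_status() -> dict:
    return {"ok": True}

fetch_status()  # first call runs immediately (last_called is still 0.0)
fetch_status()  # second call sleeps ~120 s before running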
python
def create_session(self, username, prekeybundle, autotrust=False):
    """
    :param username:
    :type username: str
    :param prekeybundle:
    :type prekeybundle: PreKeyBundle
    :return:
    :rtype:
    """
    logger.debug("create_session(username=%s, prekeybundle=[omitted], autotrust=%s)" % (username, autotrust))
    session_builder = SessionBuilder(self._store, self._store, self._store, self._store, username, 1)
    try:
        session_builder.processPreKeyBundle(prekeybundle)
    except UntrustedIdentityException as ex:
        if autotrust:
            self.trust_identity(ex.getName(), ex.getIdentityKey())
        else:
            raise exceptions.UntrustedIdentityException(ex.getName(), ex.getIdentityKey())
java
public void clientConnected(RespokeClient client) {
    if (null != pushToken) {
        registerPushServices();
    }

    if (!factoryStaticInitialized) {
        // Perform a one-time WebRTC global initialization
        PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true, VideoRendererGui.getEGLContext());
        factoryStaticInitialized = true;
    }
}
java
private DiscountCurve createDiscountCurve(String discountCurveName) {
    DiscountCurve discountCurve = model.getDiscountCurve(discountCurveName);
    if (discountCurve == null) {
        discountCurve = DiscountCurveInterpolation.createDiscountCurveFromDiscountFactors(
                discountCurveName,
                new double[] { 0.0 },
                new double[] { 1.0 });
        model = model.addCurves(discountCurve);
    }
    return discountCurve;
}
java
public String getStringAttribute(String name, String defaultValue) {
    return getValue(stringAttributes, name, defaultValue);
}
python
def vb_list_machines(**kwargs):
    '''
    Which machines does the hypervisor have

    @param kwargs: Passed to vb_xpcom_to_attribute_dict to filter the attributes
    @type kwargs: dict
    @return: Untreated dicts of the machines known to the hypervisor
    @rtype: [{}]
    '''
    manager = vb_get_manager()
    machines = manager.getArray(vb_get_box(), 'machines')
    return [
        vb_xpcom_to_attribute_dict(machine, 'IMachine', **kwargs)
        for machine in machines
    ]
python
def from_config(cls, cp, model, nprocesses=1, use_mpi=False):
    """Loads the sampler from the given config file.

    For generating the temperature ladder to be used by emcee_pt, either
    the number of temperatures (provided by the option 'ntemps') or the
    path to a file storing inverse temperature values (provided under a
    subsection inverse-temperatures-file) can be loaded from the config
    file. If the latter, the file should be of hdf format, having an
    attribute named 'betas' storing the list of inverse temperature values
    to be provided to emcee_pt. If the former, emcee_pt will construct the
    ladder with "ntemps" geometrically spaced temperatures.
    """
    section = "sampler"
    # check name
    assert cp.get(section, "name") == cls.name, (
        "name in section [sampler] must match mine")
    # get the number of walkers to use
    nwalkers = int(cp.get(section, "nwalkers"))
    if cp.has_option(section, "ntemps") and \
            cp.has_option(section, "inverse-temperatures-file"):
        raise ValueError("Must specify either ntemps or "
                         "inverse-temperatures-file, not both.")
    if cp.has_option(section, "inverse-temperatures-file"):
        # get the path of the file containing inverse temperatures values.
        inverse_temperatures_file = cp.get(section,
                                           "inverse-temperatures-file")
        with h5py.File(inverse_temperatures_file, "r") as fp:
            try:
                betas = numpy.array(fp.attrs['betas'])
                ntemps = betas.shape[0]
            except KeyError:
                raise AttributeError("No attribute called betas")
    else:
        # get the number of temperatures
        betas = None
        ntemps = int(cp.get(section, "ntemps"))
    # get the checkpoint interval, if it's specified
    checkpoint_interval = cls.checkpoint_from_config(cp, section)
    checkpoint_signal = cls.ckpt_signal_from_config(cp, section)
    # get the loglikelihood function
    logl = get_optional_arg_from_config(cp, section, 'logl-function')
    obj = cls(model, ntemps, nwalkers, betas=betas,
              checkpoint_interval=checkpoint_interval,
              checkpoint_signal=checkpoint_signal,
              loglikelihood_function=logl, nprocesses=nprocesses,
              use_mpi=use_mpi)
    # set target
    obj.set_target_from_config(cp, section)
    # add burn-in if it's specified
    obj.set_burn_in_from_config(cp)
    # set prethin options
    obj.set_thin_interval_from_config(cp, section)
    return obj
python
def read_namespaced_pod(self, name, namespace, **kwargs):  # noqa: E501
    """read_namespaced_pod  # noqa: E501

    read the specified Pod  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.read_namespaced_pod(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Pod (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.
    :param bool export: Should this value be exported. Export strips fields that a user can not specify.
    :return: V1Pod
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.read_namespaced_pod_with_http_info(name, namespace, **kwargs)  # noqa: E501
    else:
        (data) = self.read_namespaced_pod_with_http_info(name, namespace, **kwargs)  # noqa: E501
        return data
java
@NonNull
@CheckResult
@Override
public Flowable<DeleteResults<T>> asRxFlowable(@NonNull BackpressureStrategy backpressureStrategy) {
    return RxJavaUtils.createFlowable(storIOSQLite, this, backpressureStrategy);
}
python
def _decoherence_noise_model(gates, T1=30e-6, T2=30e-6, gate_time_1q=50e-9,
                             gate_time_2q=150e-09, ro_fidelity=0.95):
    """
    The default noise parameters

    - T1 = 30 us
    - T2 = 30 us
    - 1q gate time = 50 ns
    - 2q gate time = 150 ns

    are currently typical for near-term devices.

    This function will define new gates and add Kraus noise to these gates.
    It will translate the input program to use the noisy version of the gates.

    :param Sequence[Gate] gates: The gates to provide the noise model for.
    :param Union[Dict[int,float],float] T1: The T1 amplitude damping time either globally
        or in a dictionary indexed by qubit id. By default, this is 30 us.
    :param Union[Dict[int,float],float] T2: The T2 dephasing time either globally or in a
        dictionary indexed by qubit id. By default, this is also 30 us.
    :param float gate_time_1q: The duration of the one-qubit gates, namely RX(+pi/2) and
        RX(-pi/2). By default, this is 50 ns.
    :param float gate_time_2q: The duration of the two-qubit gates, namely CZ.
        By default, this is 150 ns.
    :param Union[Dict[int,float],float] ro_fidelity: The readout assignment fidelity
        :math:`F = (p(0|0) + p(1|1))/2` either globally or in a dictionary indexed by qubit id.
    :return: A NoiseModel with the appropriate Kraus operators defined.
    """
    all_qubits = set(sum(([t.index for t in g.qubits] for g in gates), []))
    if isinstance(T1, dict):
        all_qubits.update(T1.keys())
    if isinstance(T2, dict):
        all_qubits.update(T2.keys())
    if isinstance(ro_fidelity, dict):
        all_qubits.update(ro_fidelity.keys())

    if not isinstance(T1, dict):
        T1 = {q: T1 for q in all_qubits}
    if not isinstance(T2, dict):
        T2 = {q: T2 for q in all_qubits}
    if not isinstance(ro_fidelity, dict):
        ro_fidelity = {q: ro_fidelity for q in all_qubits}

    noisy_identities_1q = {
        q: damping_after_dephasing(T1.get(q, INFINITY), T2.get(q, INFINITY), gate_time_1q)
        for q in all_qubits
    }
    noisy_identities_2q = {
        q: damping_after_dephasing(T1.get(q, INFINITY), T2.get(q, INFINITY), gate_time_2q)
        for q in all_qubits
    }
    kraus_maps = []
    for g in gates:
        targets = tuple(t.index for t in g.qubits)
        key = (g.name, tuple(g.params))
        if g.name in NO_NOISE:
            continue
        matrix, _ = get_noisy_gate(g.name, g.params)

        if len(targets) == 1:
            noisy_I = noisy_identities_1q[targets[0]]
        else:
            if len(targets) != 2:
                raise ValueError("Noisy gates on more than 2Q not currently supported")

            # note this ordering of the tensor factors is necessary due to how the QVM orders
            # the wavefunction basis
            noisy_I = tensor_kraus_maps(noisy_identities_2q[targets[1]],
                                        noisy_identities_2q[targets[0]])
        kraus_maps.append(KrausModel(g.name, tuple(g.params), targets,
                                     combine_kraus_maps(noisy_I, [matrix]),
                                     # FIXME (Nik): compute actual avg gate fidelity for this
                                     # simple noise model
                                     1.0))
    aprobs = {}
    for q, f_ro in ro_fidelity.items():
        aprobs[q] = np.array([[f_ro, 1. - f_ro],
                              [1. - f_ro, f_ro]])
    return NoiseModel(kraus_maps, aprobs)
python
def select_graphic_rendition(self, *attrs):
    """Set display attributes.

    :param list attrs: a list of display attributes to set.
    """
    replace = {}

    # Fast path for resetting all attributes.
    if not attrs or attrs == (0, ):
        self.cursor.attrs = self.default_char
        return
    else:
        attrs = list(reversed(attrs))

    while attrs:
        attr = attrs.pop()
        if attr == 0:
            # Reset all attributes.
            replace.update(self.default_char._asdict())
        elif attr in g.FG_ANSI:
            replace["fg"] = g.FG_ANSI[attr]
        elif attr in g.BG_ANSI:
            replace["bg"] = g.BG_ANSI[attr]
        elif attr in g.TEXT:
            attr = g.TEXT[attr]
            replace[attr[1:]] = attr.startswith("+")
        elif attr in g.FG_AIXTERM:
            replace.update(fg=g.FG_AIXTERM[attr], bold=True)
        elif attr in g.BG_AIXTERM:
            replace.update(bg=g.BG_AIXTERM[attr], bold=True)
        elif attr in (g.FG_256, g.BG_256):
            key = "fg" if attr == g.FG_256 else "bg"
            try:
                n = attrs.pop()
                if n == 5:    # 256 colours.
                    m = attrs.pop()
                    replace[key] = g.FG_BG_256[m]
                elif n == 2:  # 24-bit colours.
                    # This is somewhat non-standard but is nonetheless
                    # supported in quite a few terminals. See discussion
                    # here https://gist.github.com/XVilka/8346728.
                    replace[key] = "{0:02x}{1:02x}{2:02x}".format(
                        attrs.pop(), attrs.pop(), attrs.pop())
            except IndexError:
                pass

    self.cursor.attrs = self.cursor.attrs._replace(**replace)
python
def file_fingerprint(fullpath):
    """ Get a metadata fingerprint for a file """
    stat = os.stat(fullpath)
    return ','.join([str(value) for value in
                     [stat.st_ino, stat.st_mtime, stat.st_size]
                     if value])
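A short usage sketch for file_fingerprint as a cheap change detector; the path is hypothetical, and os is assumed to be imported as in the snippet's module:

fp_before = file_fingerprint("example.txt")
# ... something may rewrite the file here ...
fp_after = file_fingerprint("example.txt")
if fp_before != fp_after:
    print("file changed (inode, mtime, or size differs)")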
java
public Collection storeObjects(Collection objects) {
    try {
        /* One possibility of storing objects is to use the current
         * transaction associated with the container */
        Transaction tx = odmg.currentTransaction();
        for (Iterator iterator = objects.iterator(); iterator.hasNext();) {
            tx.lock(iterator.next(), Transaction.WRITE);
        }
    } catch (LockNotGrantedException e) {
        log.error("Failure while storing objects " + objects, e);
        throw new EJBException("Failure while storing objects", e);
    }
    return objects;
}
python
def do_mag_map(date, lon_0=0, alt=0, file="", mod="cals10k", resolution='low'):
    """
    Returns lists of declination, inclination and intensities for a lat/lon
    grid for the desired model and date.

    Parameters:
    _________________
    date = Required date in decimal years (Common Era, negative for Before Common Era)

    Optional Parameters:
    ______________
    mod = model to use ('arch3k','cals3k','pfm9k','hfm10k','cals10k.2',
          'shadif14k','cals10k.1b','custom')
    file = l m g h formatted file for custom model
    lon_0 = central longitude for Hammer projection
    alt = altitude
    resolution = ['low','high'] default is low

    Returns:
    ______________
    Bdec = list of declinations
    Binc = list of inclinations
    B = list of total field intensities in nT
    Br = list of radial field intensities
    lons = list of longitudes evaluated
    lats = list of latitudes evaluated
    """
    if resolution == 'low':
        incr = 10  # we can vary to the resolution of the model
    elif resolution == 'high':
        incr = 2  # we can vary to the resolution of the model
    if lon_0 == 180:
        lon_0 = 179.99
    if lon_0 > 180:
        lon_0 = lon_0 - 360.
    # get some parameters for our arrays of lat/lon
    lonmax = (lon_0 + 180.) % 360 + incr
    lonmin = (lon_0 - 180.)
    latmax = 90 + incr
    # make a 1D array of longitudes (like elons)
    lons = np.arange(lonmin, lonmax, incr)
    # make a 1D array of latitudes (like elats)
    lats = np.arange(-90, latmax, incr)
    # set up some containers for the field elements
    B = np.zeros((len(lats), len(lons)))
    Binc = np.zeros((len(lats), len(lons)))
    Bdec = np.zeros((len(lats), len(lons)))
    Brad = np.zeros((len(lats), len(lons)))
    if mod == 'custom' and file != '':
        gh = []
        lmgh = np.loadtxt(file).transpose()
        gh.append(lmgh[2][0])
        for i in range(1, lmgh.shape[1]):
            gh.append(lmgh[2][i])
            if lmgh[1][i] != 0:
                gh.append(lmgh[3][i])
    for j in range(len(lats)):  # step through the latitudes
        for i in range(len(lons)):  # and the longitudes
            # get the field elements
            if mod == 'custom':
                x, y, z, f = docustom(lons[i], lats[j], alt, gh)
            else:
                x, y, z, f = doigrf(
                    lons[i], lats[j], alt, date, mod=mod, file=file)
            # turn them into polar coordinates
            Dec, Inc, Int = cart2dir([x, y, z])
            if mod != 'custom':
                # convert the intensity to microtesla (from nT)
                B[j][i] = Int * 1e-3
            else:
                B[j][i] = Int
            Binc[j][i] = Inc  # store the inclination value
            if Dec > 180:
                Dec = Dec - 360.
            Bdec[j][i] = Dec  # store the declination value
            if mod != 'custom':
                Brad[j][i] = z * 1e-3
            else:
                Brad[j][i] = z
    return Bdec, Binc, B, Brad, lons, lats
java
public boolean getIsXml() {
    if (m_isXml == null) {
        m_isXml = Boolean.valueOf(
            CmsResourceTypeXmlPage.isXmlPage(this) || CmsResourceTypeXmlContent.isXmlContent(this));
    }
    return m_isXml.booleanValue();
}
java
public static boolean isRegistered(@Nonnull Item item) {
    ItemDeletion instance = instance();
    if (instance == null) {
        return false;
    }
    instance.lock.readLock().lock();
    try {
        return instance.registrations.contains(item);
    } finally {
        instance.lock.readLock().unlock();
    }
}
java
public static void renameSubreportsInMaster(List<JcrFile> jasperFiles, String id) {
    if (jasperFiles.size() > 1) {
        JcrFile masterFile = jasperFiles.get(0);
        try {
            String masterContent = new String(masterFile.getDataProvider().getBytes(), "UTF-8");
            List<String> subreportsContent = new ArrayList<String>();
            for (int i = 1, size = jasperFiles.size(); i < size; i++) {
                subreportsContent.add(new String(jasperFiles.get(i).getDataProvider().getBytes(), "UTF-8"));
            }
            for (int i = 1, size = jasperFiles.size(); i < size; i++) {
                String name = jasperFiles.get(i).getName();
                String oldName = FilenameUtils.getBaseName(name) + "." + JASPER_COMPILED_EXT;
                String newName = getUnique(name, id) + "." + JASPER_COMPILED_EXT;
                masterContent = masterContent.replaceAll(oldName, newName);
                for (int j = 1; j < size; j++) {
                    if (j != i) {
                        subreportsContent.set(j - 1, subreportsContent.get(j - 1).replaceAll(oldName, newName));
                    }
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Subreport " + name + ": " + oldName + " > " + newName);
                    // LOG.debug("master = " + master);
                }
            }
            masterFile.setDataProvider(new JcrDataProviderImpl(masterContent.getBytes("UTF-8")));
            for (int i = 1, size = jasperFiles.size(); i < size; i++) {
                jasperFiles.get(i).setDataProvider(new JcrDataProviderImpl(subreportsContent.get(i - 1).getBytes("UTF-8")));
            }
        } catch (UnsupportedEncodingException e) {
            LOG.error("Error inside JasperUtil.renameSubreportsInMaster: " + e.getMessage(), e);
            e.printStackTrace();
        }
    }
}
python
def _to_dict(self):
    """Return a json dictionary representing this model."""
    _dict = {}
    if hasattr(self, 'collection_id') and self.collection_id is not None:
        _dict['collection_id'] = self.collection_id
    if hasattr(self, 'status') and self.status is not None:
        _dict['status'] = self.status
    return _dict
python
def adjust_text(x, y, texts, ax=None, expand_text=(1.2, 1.2),
                expand_points=(1.2, 1.2), autoalign=True, va='center',
                ha='center', force_text=1., force_points=1., lim=100,
                precision=0, only_move={}, text_from_text=True,
                text_from_points=True, save_steps=False, save_prefix='',
                save_format='png', add_step_numbers=True, draggable=True,
                repel_from_axes=False, min_arrow_sep=0.0, *args, **kwargs):
    """Iteratively adjusts the locations of texts.

    First moves all texts that are outside the axes limits inside. Then in
    each iteration moves all texts away from each other and from points. In
    the end hides texts and substitutes them with annotations to link them
    to the respective points.

    Args:
        x (seq): x-coordinates of labelled points
        y (seq): y-coordinates of labelled points
        texts (list): a list of text.Text objects to adjust
        ax (obj): axes object with the plot; if not provided is determined
            by plt.gca()
        expand_text (seq): a tuple/list/... with 2 numbers (x, y) to expand
            texts when repelling them from each other; default (1.2, 1.2)
        expand_points (seq): a tuple/list/... with 2 numbers (x, y) to expand
            texts when repelling them from points; default (1.2, 1.2)
        autoalign (bool): If True, the best alignment of all texts will be
            determined automatically before running the iterative adjustment;
            if 'x' will only align horizontally, if 'y' - vertically;
            overrides va and ha
        va (str): vertical alignment of texts
        ha (str): horizontal alignment of texts
        force_text (float): the repel force from texts is multiplied by this
            value; default 1.0
        force_points (float): the repel force from points is multiplied by
            this value; default 1.0
        lim (int): limit of number of iterations
        precision (float): up to which sum of all overlaps along both x and y
            to iterate; may need to increase for complicated situations;
            default 0, so no overlaps with anything.
        only_move (dict): a dict to restrict movement of texts to only
            certain axes. Valid keys are 'points' and 'text'; for each of
            them valid values are 'x', 'y' and 'xy'. This way you can forbid
            moving texts along either of the axes due to overlaps with
            points, but let it happen if there is an overlap with texts:
            only_move={'points': 'y', 'text': 'xy'}. Default: {}, so
            everything is allowed.
        text_from_text (bool): whether to repel texts from each other;
            default True
        text_from_points (bool): whether to repel texts from points; default
            True; can be helpful to switch off in extremely crowded plots
        save_steps (bool): whether to save intermediate steps as images;
            default False
        save_prefix (str): a path and/or prefix to the saved steps;
            default ''
        save_format (str): a format to save the steps into; default 'png'
        *args and **kwargs: any arguments will be fed into plt.annotate
            after all the optimization is done just for plotting
        add_step_numbers (bool): whether to add step numbers as titles to
            the images of saving steps
        draggable (bool): whether to make the annotations draggable;
            default True
    """
    if ax is None:
        ax = plt.gca()
    r = ax.get_figure().canvas.get_renderer()
    orig_xy = [text.get_position() for text in texts]
    orig_x = [xy[0] for xy in orig_xy]
    orig_y = [xy[1] for xy in orig_xy]
    for text in texts:
        text.set_va(va)
        text.set_ha(ha)
    if save_steps:
        if add_step_numbers:
            plt.title('0a')
        plt.savefig(save_prefix + '0a.' + save_format, format=save_format)
    if autoalign:
        if autoalign is not True:
            texts = optimally_align_text(x, y, texts, direction=autoalign,
                                         expand=expand_points, renderer=r,
                                         ax=ax)
        else:
            texts = optimally_align_text(orig_x, orig_y, texts,
                                         expand=expand_points, renderer=r,
                                         ax=ax)
    if save_steps:
        if add_step_numbers:
            plt.title('0b')
        plt.savefig(save_prefix + '0b.' + save_format, format=save_format)
    if repel_from_axes is True:
        texts = repel_text_from_axes(texts, ax, renderer=r,
                                     expand=expand_points)
    history = [np.inf] * 5
    for i in range(lim):
        q1, q2 = np.inf, np.inf
        if text_from_text:
            d_x_text, d_y_text, q1 = repel_text(texts, renderer=r, ax=ax,
                                                expand=expand_text)
        else:
            d_x_text, d_y_text, q1 = [0] * len(texts), [0] * len(texts), 0
        if text_from_points:
            d_x_points, d_y_points, q2 = repel_text_from_points(
                x, y, texts, ax=ax, renderer=r, expand=expand_points)
        else:
            d_x_points, d_y_points, q2 = [0] * len(texts), [0] * len(texts), 0
        if only_move:
            if 'text' in only_move:
                if 'x' not in only_move['text']:
                    d_x_text = np.zeros_like(d_x_text)
                if 'y' not in only_move['text']:
                    d_y_text = np.zeros_like(d_y_text)
            if 'points' in only_move:
                if 'x' not in only_move['points']:
                    d_x_points = np.zeros_like(d_x_points)
                if 'y' not in only_move['points']:
                    d_y_points = np.zeros_like(d_y_points)
        dx = np.array(d_x_text) + np.array(d_x_points)
        dy = np.array(d_y_text) + np.array(d_y_points)
        q = round(np.sum(np.array([q1, q2])[np.array([q1, q2]) < np.inf]), 5)
        if q > precision and q < np.max(history):
            history.pop(0)
            history.append(q)
            move_texts(texts, dx * force_text, dy * force_points,
                       bboxes=get_bboxes(texts, r, (1, 1)), ax=ax)
            if save_steps:
                if add_step_numbers:
                    plt.title(i + 1)
                plt.savefig(save_prefix + str(i + 1) + '.' + save_format,
                            format=save_format)
        else:
            break

    bboxes = get_bboxes(texts, r, (1, 1))
    originLW = kwargs["arrowprops"]["lw"]
    for j, text in enumerate(texts):
        cx, cy = get_midpoint(bboxes[j])
        one = (orig_xy[j][0] - cx) ** 2
        two = (orig_xy[j][1] - cy) ** 2
        sep = (one + two) ** 0.5
        print(text.get_text(), sep)
        try:
            if sep < min_arrow_sep:
                kwargs["arrowprops"]["lw"] = 0.
            else:
                kwargs["arrowprops"]["lw"] = originLW
        except Exception as e:
            print(e)
        a = ax.annotate(text.get_text(), xy=(orig_xy[j]),
                        xytext=text.get_position(), *args, **kwargs)
        a.__dict__.update(text.__dict__)
        if draggable:
            a.draggable()
        texts[j].remove()
    if save_steps:
        if add_step_numbers:
            plt.title(i + 1)
        plt.savefig(save_prefix + str(i + 1) + '.' + save_format,
                    format=save_format)
java
private Integer getIntegerTimeInMinutes(Date date) {
    Integer result = null;
    if (date != null) {
        Calendar cal = DateHelper.popCalendar(date);
        int time = cal.get(Calendar.HOUR_OF_DAY) * 60;
        time += cal.get(Calendar.MINUTE);
        DateHelper.pushCalendar(cal);
        result = Integer.valueOf(time);
    }
    return result;
}
python
def auto2unicode(text):
    """
    This function tries to identify the encoding from the available
    encodings. If it finds one, then it will convert the text into a
    unicode string.

    Author : Arulalan.T

    04.08.2014

    """
    _all_unique_encodes_, _all_common_encodes_ = _get_unique_common_encodes()
    # get unique word which falls under any one of available encodes from
    # user passed text lines
    unique_chars = _get_unique_ch(text, _all_common_encodes_)
    # count common encode chars
    clen = len(_all_common_encodes_)
    msg = "Sorry, couldn't find encode :-(\n"
    msg += 'Need more words to find unique encode out side of %d ' % clen
    msg += 'common compound characters'
    if not unique_chars:
        print(msg)
        return ''
    # end of if not unique_chars:

    for encode_name, encode_keys in _all_unique_encodes_:
        if not len(encode_keys):
            continue
        for ch in encode_keys:
            # check whether the encode char is present in the word
            if ch in unique_chars:
                # found the encode
                print("Found encode :", encode_name)
                encode = _all_encodes_[encode_name]
                return encode2unicode(text, encode)
            # end of if ch in unique_chars:
        # end of for ch in encode_keys:
    else:
        print(msg)
        return ''
python
def loadValues(self, values):
    """
    Loads the values from the input dictionary to the widget.

    :param      values | <dict>
    """
    table = self.tableType()
    if table:
        schema = table.schema()
    else:
        schema = None

    process = []
    for widget in self.findChildren(QWidget):
        prop = widget.property('columnName')
        if not prop:
            continue

        order = widget.property('columnOrder')
        if order:
            order = unwrapVariant(order)
        else:
            order = 10000000

        process.append((order, widget, prop))

    process.sort()

    for order, widget, prop in process:
        columnName = nativestring(unwrapVariant(prop, ''))
        if not columnName:
            continue

        if isinstance(widget, XEnumBox) and schema:
            column = schema.column(columnName)
            if column.enum() is not None:
                widget.setEnum(column.enum())

        if columnName in values:
            projexui.setWidgetValue(widget, values.get(columnName))
java
protected void recoveryCompleted(Transaction transaction) throws ObjectManagerException {
    throw new InvalidStateException(this,
                                    InternalTransaction.stateTerminated,
                                    InternalTransaction.stateNames[InternalTransaction.stateTerminated]);
}
java
@Override
public void visitCode() {
    if (injectedTraceAnnotationVisitor == null && classAdapter.isInjectedTraceAnnotationRequired()) {
        AnnotationVisitor av = visitAnnotation(INJECTED_TRACE_TYPE.getDescriptor(), true);
        av.visitEnd();
    }

    super.visitCode();

    // Label the entry to the method so we can update the local var
    // debug data and indicate that we're referencing them during
    // method entry
    methodEntryLabel = new Label();
    visitLabel(methodEntryLabel);

    if (!waitingForSuper) {
        // This must be done before all instruction callbacks, but it must
        // also be done before the visitLabel() callback, which might have
        // an associated UNINITIALIZED entry in the stack map frame that is
        // supposed to be pointing at a NEW opcode.
        processMethodEntry();
    }
}
java
public static CommerceWarehouseItem fetchByCommerceWarehouseId_First(
        long commerceWarehouseId,
        OrderByComparator<CommerceWarehouseItem> orderByComparator) {
    return getPersistence()
        .fetchByCommerceWarehouseId_First(commerceWarehouseId, orderByComparator);
}
java
private boolean hoistIfExtern(CompilerInput input) {
    if (input.getHasExternsAnnotation()) {
        // If the input file is explicitly marked as an externs file, then move it out of the main
        // JS root and put it with the other externs.
        externsRoot.addChildToBack(input.getAstRoot(this));
        JSModule module = input.getModule();
        if (module != null) {
            module.remove(input);
        }
        externs.add(input);
        return true;
    }
    return false;
}
python
def findattr(self, name, resolved=True):
    """
    Find an attribute type definition.

    @param name: An attribute name.
    @type name: basestring
    @param resolved: A flag indicating that the fully resolved type
        should be returned.
    @type resolved: boolean
    @return: The found schema I{type}
    @rtype: L{xsd.sxbase.SchemaObject}
    """
    name = '@%s' % name
    parent = self.top().resolved
    if parent is None:
        result, ancestry = self.query(name, node)
    else:
        result, ancestry = self.getchild(name, parent)
    if result is None:
        return result
    if resolved:
        result = result.resolve()
    return result
python
def masking_noise(data, sess, v):
    """Apply masking noise to data in X.

    In other words a fraction v of elements of X (chosen at random) is
    forced to zero.

    :param data: array_like, Input data
    :param sess: TensorFlow session
    :param v: fraction of elements to distort, float
    :return: transformed data
    """
    data_noise = data.copy()
    rand = tf.random_uniform(data.shape)
    data_noise[sess.run(tf.nn.relu(tf.sign(v - rand))).astype(np.bool)] = 0
    return data_noise
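A minimal usage sketch, assuming the TF1-style session API the snippet targets (tf.random_uniform and np.bool both predate TF2 and NumPy 1.24, so this only runs against those older versions):

import numpy as np
import tensorflow as tf

data = np.random.rand(4, 8).astype(np.float32)
with tf.Session() as sess:
    noisy = masking_noise(data, sess, v=0.3)  # roughly 30% of entries zeroed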
python
def parse_lines(stream, separator=None):
    """
    Takes each line of a stream, creating a generator that yields tuples of
    (line, row) - where row is the line split by separator (or by whitespace
    if separator is None).

    :param stream:
    :param separator: (optional)
    :return: generator
    """
    separator = None if separator is None else str(separator)
    for line in stream:
        line = line.rstrip('\r\n')
        row = [interpret_segment(i) for i in line.split(separator)]
        yield line, row
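A short usage sketch; interpret_segment is not shown in the snippet, so it is stubbed here as the identity function:

import io

interpret_segment = lambda s: s  # stand-in for the real helper

stream = io.StringIO("a b c\n1 2 3\n")
for line, row in parse_lines(stream):
    print(line, row)  # e.g. "a b c" ['a', 'b', 'c']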
java
public void declareInternalData(int maxRows, int maxCols) {
    this.maxRows = maxRows;
    this.maxCols = maxCols;

    U_tran = new DMatrixRMaj(maxRows, maxRows);
    Qm = new DMatrixRMaj(maxRows, maxRows);

    r_row = new double[maxCols];
}
python
def ensure_coordinator_ready(self):
    """Block until the coordinator for this group is known (and we have
    an active connection -- java client uses unsent queue).
    """
    with self._client._lock, self._lock:
        while self.coordinator_unknown():
            # Prior to 0.8.2 there was no group coordinator
            # so we will just pick a node at random and treat
            # it as the "coordinator"
            if self.config['api_version'] < (0, 8, 2):
                self.coordinator_id = self._client.least_loaded_node()
                if self.coordinator_id is not None:
                    self._client.maybe_connect(self.coordinator_id)
                continue

            future = self.lookup_coordinator()
            self._client.poll(future=future)

            if future.failed():
                if future.retriable():
                    if getattr(future.exception, 'invalid_metadata', False):
                        log.debug('Requesting metadata for group coordinator request: %s', future.exception)
                        metadata_update = self._client.cluster.request_update()
                        self._client.poll(future=metadata_update)
                    else:
                        time.sleep(self.config['retry_backoff_ms'] / 1000)
                else:
                    raise future.exception
java
public String getSelectionInfo() {
    StringBuilder result = new StringBuilder();
    result.append("SelectionInfo: " + this.getClass().toString() + "\n");
    result.append("Page:" + CITInfo(pageHandling) + "\n");
    result.append("FirstParagraph:" + CITInfo(firstParagraphHandling) + "\n");
    for (String key : sectionHandling.keySet()) {
        final String uss = SectionType.USER_SECTION.toString();
        if (key.startsWith(uss))
            result.append(uss + "[" + key.substring(uss.length()) + "]:\n");
        else
            result.append(key + ":\n");
        result.append(SITInfo(sectionHandling.get(key)) + "\n");
    }
    return result.toString();
}
java
public int getStatementCount() {
    int ret = 0;
    for (final StatementGroup sg : statementGroups) {
        ret += sg.getAllStatements().size();
    }
    return ret;
}
python
def columnise(rows, padding=2):
    """Format rows of entries into aligned columns, returning one string per row."""
    strs = []
    maxwidths = {}

    for row in rows:
        for i, e in enumerate(row):
            se = str(e)
            nse = len(se)
            w = maxwidths.get(i, -1)
            if nse > w:
                maxwidths[i] = nse

    for row in rows:
        s = ''
        for i, e in enumerate(row):
            se = str(e)
            if i < len(row) - 1:
                n = maxwidths[i] + padding - len(se)
                se += ' ' * n
            s += se
        strs.append(s)
    return strs
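Example input and output for columnise (the values are hypothetical):

rows = [
    ("name", "size", "type"),
    ("alpha", 1, "dir"),
    ("beta", 2048, "file"),
]
for line in columnise(rows):
    print(line)
# each column is left-aligned and padded to its widest entry plus the
# padding, e.g. "alpha  1     dir"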
java
@Override
public Response attachStatement(String CorpNum, int ItemCode, String MgtKey,
        int SubItemCode, String SubMgtKey) throws PopbillException {
    DocRequest request = new DocRequest();
    request.ItemCode = SubItemCode;
    request.MgtKey = SubMgtKey;

    String PostData = toJsonString(request);

    return httppost("/Statement/" + ItemCode + "/" + MgtKey + "/AttachStmt/",
            CorpNum, PostData, null, Response.class);
}
java
@Override
public IEntityGroup newInstance(Class entityType) throws GroupsException {
    log.warn("Unsupported method accessed: SmartLdapGroupStore.newInstance");
    throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE);
}
python
def get_status(api_subject):
    '''
    Gets a string representing the status of the Api subject (role or
    service) based on the health summary and health checks.
    '''
    summary = api_subject.healthSummary
    if summary is None:
        return None
    # status string always starts with "<nagios code>: <summary>"
    status = "%s: %s" % (NAGIOS_CODE_MESSAGES[CM_STATE_CODES[summary]], summary)
    if summary != "GOOD" and summary != "DISABLED":
        # if the summary is CONCERNING or BAD, then append the health checks
        for health_check in api_subject.healthChecks:
            if health_check['summary'] != "GOOD" and health_check['summary'] != "DISABLED":
                status = ("%s, %s=%s" % (status, health_check['name'],
                                         health_check['summary']))
    return status
java
public static void applyRegisteredControls(final Request request, final boolean useRequestValues) {
    Set<String> controls = getRegisteredSubordinateControls();
    if (controls == null) {
        return;
    }

    // Process Controls
    for (String controlId : controls) {
        // Find the Component for this ID
        ComponentWithContext controlWithContext = WebUtilities.getComponentById(controlId, true);
        if (controlWithContext == null) {
            LOG.warn("Subordinate control for id " + controlId + " is no longer in the tree.");
            continue;
        }

        if (!(controlWithContext.getComponent() instanceof WSubordinateControl)) {
            LOG.warn("Component for id " + controlId + " is not a subordinate control.");
            continue;
        }

        WSubordinateControl control = (WSubordinateControl) controlWithContext.getComponent();
        UIContext uic = controlWithContext.getContext();
        UIContextHolder.pushContext(uic);
        try {
            if (useRequestValues) {
                control.applyTheControls(request);
            } else {
                control.applyTheControls();
            }
        } finally {
            UIContextHolder.popContext();
        }
    }
}
java
public ServiceFuture<PublicIPAddressInner> beginUpdateTagsAsync(String resourceGroupName,
        String publicIpAddressName, Map<String, String> tags,
        final ServiceCallback<PublicIPAddressInner> serviceCallback) {
    return ServiceFuture.fromResponse(
        beginUpdateTagsWithServiceResponseAsync(resourceGroupName, publicIpAddressName, tags),
        serviceCallback);
}
python
def flux_components_bottom(self):
    '''Compute the contributions to the downwelling flux to surface
    due to emissions from each level.'''
    N = self.lev.size
    atmComponents = np.zeros_like(self.Tatm)
    flux_down_top = np.zeros_like(self.Ts)
    # same comment as above... would be nice to vectorize
    for n in range(N):
        emission = np.zeros_like(self.emission)
        emission[..., n] = self.emission[..., n]
        this_flux_down = self.trans.flux_down(flux_down_top, emission)
        atmComponents[..., n] = this_flux_down[..., 0]
    return atmComponents
python
def check_levels(imls, imt, min_iml=1E-10):
    """
    Raise a ValueError if the given levels are invalid.

    :param imls: a list of intensity measure levels
    :param imt: the intensity measure type
    :param min_iml: minimum intensity measure level (default 1E-10)

    >>> check_levels([0.1, 0.2], 'PGA')  # ok
    >>> check_levels([], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: No imls for PGA: []
    >>> check_levels([0.2, 0.1], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: The imls for PGA are not sorted: [0.2, 0.1]
    >>> check_levels([0.2, 0.2], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: Found duplicated levels for PGA: [0.2, 0.2]
    """
    if len(imls) < 1:
        raise ValueError('No imls for %s: %s' % (imt, imls))
    elif imls != sorted(imls):
        raise ValueError('The imls for %s are not sorted: %s' % (imt, imls))
    elif len(distinct(imls)) < len(imls):
        raise ValueError("Found duplicated levels for %s: %s" % (imt, imls))
    elif imls[0] == 0 and imls[1] <= min_iml:
        raise ValueError("The min_iml %s=%s is larger than the second level "
                         "for %s" % (imt, min_iml, imls))
    elif imls[0] == 0 and imls[1] > min_iml:  # apply the cutoff
        imls[0] = min_iml
python
def on_disconnect(self, client, userdata, result_code):
    """
    Callback when the MQTT client is disconnected. In this case, the
    server waits five seconds before trying to reconnect.

    :param client: the client being disconnected.
    :param userdata: unused.
    :param result_code: result code.
    """
    self.log_info("Disconnected with result code " + str(result_code))
    self.state_handler.set_state(State.goodbye)
    time.sleep(5)
    self.thread_handler.run(target=self.start_blocking)
python
def calc_distance_to_border(polygons, template_raster, dst_raster,
                            overwrite=False, keep_interim_files=False):
    """Calculate the distance of each raster cell (in and outside the
    polygons) to the next polygon border.

    Arguments:
        polygons {str} -- Filename of a geopandas-readable file with polygon features.
        template_raster {str} -- Filename of a rasterio-readable file.
        dst_raster {str} -- Destination filename for the distance to polygon
            border raster file (tif).

    Keyword Arguments:
        overwrite {bool} -- Overwrite files if they exist? (default: {False})
        keep_interim_files {bool} -- Keep the interim line vector and raster
            files (default: {False})

    Returns:
        int -- 0 on success.
    """
    if Path(dst_raster).exists() and not overwrite:
        print(f"Returning 0 - File exists: {dst_raster}")
        return 0

    with rasterio.open(template_raster) as tmp:
        crs = tmp.crs

    dst_raster = Path(dst_raster)
    dst_raster.parent.mkdir(exist_ok=True, parents=True)
    tempdir = Path(tempfile.mkdtemp(prefix=f"TEMPDIR_{dst_raster.stem}_",
                                    dir=dst_raster.parent))
    interim_file_lines_vector = tempdir / "interim_sample_vector_dataset_lines.shp"
    interim_file_lines_raster = tempdir / "interim_sample_vector_dataset_lines.tif"

    exit_code = convert_polygons_to_lines(polygons,
                                          interim_file_lines_vector,
                                          crs=crs,
                                          add_allone_col=True)
    rasterize(src_vector=str(interim_file_lines_vector),
              burn_attribute="ALLONE",
              src_raster_template=str(template_raster),
              dst_rasterized=str(interim_file_lines_raster),
              gdal_dtype=1)
    cmd = f"{PROXIMITY_PATH} " \
          f"{str(Path(interim_file_lines_raster).absolute())} " \
          f"{str(Path(dst_raster).absolute())} " \
          f"-ot Float32 -distunits PIXEL -values 1 -maxdist 255"
    subprocess.check_call(cmd, shell=True)

    if not keep_interim_files:
        shutil.rmtree(tempdir)
    else:
        print(f"Interim files are in {tempdir}")
    return 0
python
def post(self, request, *args, **kwargs):
    """
    Handles POST requests.
    """
    return self.subscribe(request, *args, **kwargs)
python
def _set_show_mpls_ldp(self, v, load=False):
    """
    Setter method for show_mpls_ldp, mapped from YANG variable
    /brocade_mpls_rpc/show_mpls_ldp (rpc)

    If this variable is read-only (config: false) in the source YANG file,
    then _set_show_mpls_ldp is considered as a private method. Backends
    looking to populate this variable should do so via calling
    thisObj._set_show_mpls_ldp() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=show_mpls_ldp.show_mpls_ldp, is_leaf=True,
                         yang_name="show-mpls-ldp", rest_name="show-mpls-ldp",
                         parent=self, path_helper=self._path_helper,
                         extmethods=self._extmethods, register_paths=False,
                         extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsSummary'}},
                         namespace='urn:brocade.com:mgmt:brocade-mpls',
                         defining_module='brocade-mpls', yang_type='rpc',
                         is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """show_mpls_ldp must be of a type compatible with rpc""",
            'defined-type': "rpc",
            'generated-type': """YANGDynClass(base=show_mpls_ldp.show_mpls_ldp, is_leaf=True, yang_name="show-mpls-ldp", rest_name="show-mpls-ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsSummary'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
        })

    self.__show_mpls_ldp = t
    if hasattr(self, '_set'):
        self._set()
java
private void createSet(char[] customCharsArray) {
    // If there are custom chars then I expand the font texture twice
    if (customCharsArray != null && customCharsArray.length > 0) {
        textureWidth *= 2;
    }

    // In any case this should be done in another way. A texture of size 512x512
    // can maintain only 256 characters with a resolution of 32x32. The texture
    // size should be calculated dynamically by looking at the character sizes.
    try {
        BufferedImage imgTemp = new BufferedImage(textureWidth, textureHeight, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = (Graphics2D) imgTemp.getGraphics();

        g.setColor(new Color(255, 255, 255, 1));
        g.fillRect(0, 0, textureWidth, textureHeight);

        int rowHeight = 0;
        int positionX = 0;
        int positionY = 0;

        int customCharsLength = (customCharsArray != null) ? customCharsArray.length : 0;

        for (int i = 0; i < 256 + customCharsLength; i++) {
            // get 0-255 characters and then custom characters
            char ch = (i < 256) ? (char) i : customCharsArray[i - 256];

            BufferedImage fontImage = getFontImage(ch);

            IntObject newIntObject = new IntObject();
            newIntObject.width = fontImage.getWidth();
            newIntObject.height = fontImage.getHeight();

            if (positionX + newIntObject.width >= textureWidth) {
                positionX = 0;
                positionY += rowHeight;
                rowHeight = 0;
            }

            newIntObject.storedX = positionX;
            newIntObject.storedY = positionY;

            if (newIntObject.height > fontHeight) {
                fontHeight = newIntObject.height;
            }

            if (newIntObject.height > rowHeight) {
                rowHeight = newIntObject.height;
            }

            // Draw it here
            g.drawImage(fontImage, positionX, positionY, null);

            positionX += newIntObject.width;

            if (i < 256) { // standard characters
                charArray[i] = newIntObject;
            } else { // custom characters
                customChars.put(new Character(ch), newIntObject);
            }

            fontImage = null;
        }

        fontTexture = BufferedImageUtil.getTexture(font.toString(), imgTemp);
    } catch (IOException e) {
        System.err.println("Failed to create font.");
        e.printStackTrace();
    }
}
java
public static Object[] getEnumValues(Class<? extends Enum<?>> clazz) {
    Object[] enums = clazz.getEnumConstants();
    if (enums == null) {
        Method[] mthds = clazz.getDeclaredMethods();
        Method mthd = null;
        for (Method mth : mthds) {
            if (mth.getName().equals("values")) {
                mthd = mth;
                break;
            }
        }
        if (mthd != null) {
            try {
                enums = (Object[]) mthd.invoke(null);
            } catch (Throwable e) {
                throw new RuntimeException(e);
            }
        }
    }
    return enums;
}
python
def prepare_search_body(self, should_terms=None, must_terms=None,
                        must_not_terms=None, search_text='',
                        start=None, end=None):
    """
    Prepare body for elasticsearch query

    Search parameters
    ^^^^^^^^^^^^^^^^^
    These parameters are dictionaries and have the format:
        <term>: [<value 1>, <value 2> ...]

    should_terms: it resembles logical OR
    must_terms: it resembles logical AND
    must_not_terms: it resembles logical NOT

    search_text : string
        Text for FTS (full text search)

    start, end : datetime
        Filter for event creation time
    """
    self.body = self.SearchBody()
    self.body.set_should_terms(should_terms)
    self.body.set_must_terms(must_terms)
    self.body.set_must_not_terms(must_not_terms)
    self.body.set_search_text(search_text)
    self.body.set_timestamp_filter(start, end)
    self.body.prepare()
java
@Override
public URL getURL() throws IOException {
    URL url = this.servletContext.getResource(this.path);
    if (url == null) {
        throw new FileNotFoundException(
            getDescription() + " cannot be resolved to URL because it does not exist");
    }
    return url;
}
python
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_rx_disc_solicitations(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    fcoe_get_interface = ET.Element("fcoe_get_interface")
    config = fcoe_get_interface
    output = ET.SubElement(fcoe_get_interface, "output")
    fcoe_intf_list = ET.SubElement(output, "fcoe-intf-list")
    fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, "fcoe-intf-fcoe-port-id")
    fcoe_intf_fcoe_port_id_key.text = kwargs.pop('fcoe_intf_fcoe_port_id')
    fcoe_intf_rx_disc_solicitations = ET.SubElement(fcoe_intf_list, "fcoe-intf-rx-disc-solicitations")
    fcoe_intf_rx_disc_solicitations.text = kwargs.pop('fcoe_intf_rx_disc_solicitations')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
@GwtIncompatible("incompatible method")
public static long getFragmentInHours(final Date date, final int fragment) {
    return getFragment(date, fragment, TimeUnit.HOURS);
}
python
def debug(self, request, message, extra_tags='', fail_silently=False):
    """Add a message with the ``DEBUG`` level."""
    add(self.target_name, request, constants.DEBUG, message,
        extra_tags=extra_tags, fail_silently=fail_silently)
python
def request(self, method, url, query_params=None, headers=None,
            body=None, post_params=None):
    """
    :param method: http request method
    :param url: http request url
    :param query_params: query parameters in the url
    :param headers: http request headers
    :param body: request json body, for `application/json`
    :param post_params: request post parameters,
        `application/x-www-form-urlencoded` and `multipart/form-data`
    """
    method = method.upper()
    assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS']

    if post_params and body:
        raise ValueError(
            "body parameter cannot be used with post_params parameter."
        )

    post_params = post_params or {}
    headers = headers or {}

    if 'Content-Type' not in headers:
        headers['Content-Type'] = 'application/json'

    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            if query_params:
                url += '?' + urlencode(query_params)
            if headers['Content-Type'] == 'application/json':
                request_body = None
                if body is not None:
                    request_body = json.dumps(body)
                r = self.pool_manager.request(method, url,
                                              body=request_body,
                                              headers=headers)
            if headers['Content-Type'] == 'application/x-www-form-urlencoded':
                r = self.pool_manager.request(method, url,
                                              fields=post_params,
                                              encode_multipart=False,
                                              headers=headers)
            if headers['Content-Type'] == 'multipart/form-data':
                # must del headers['Content-Type'], or the correct
                # Content-Type which is generated by urllib3 will be
                # overwritten.
                del headers['Content-Type']
                r = self.pool_manager.request(method, url,
                                              fields=post_params,
                                              encode_multipart=True,
                                              headers=headers)
        # For `GET`, `HEAD`
        else:
            r = self.pool_manager.request(method, url,
                                          fields=query_params,
                                          headers=headers,
                                          preload_content=False)
    except urllib3.exceptions.SSLError as e:
        msg = "{0}\n{1}".format(type(e).__name__, str(e))
        raise ApiException(status=0, reason=msg)

    r = RESTResponse(r, method)

    # log response body
    logger.debug("response body: %s" % r.data)

    if r.status not in range(200, 206):
        raise ApiException(http_resp=r)

    return r
python
def rename_dimension(self, old_name, new_name):
    """Returns a copy where one dimension is renamed."""
    if old_name not in self.dimension_names:
        raise ValueError("Shape %s does not have dimension named %s"
                         % (self, old_name))
    return Shape(
        [Dimension(new_name, d.size) if d.name == old_name else d
         for d in self.dims])
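A hedged usage sketch, assuming the Shape and Dimension constructors from the surrounding library (the names and sizes here are illustrative):

shape = Shape([Dimension("batch", 32), Dimension("hidden", 512)])
renamed = shape.rename_dimension("hidden", "d_model")
# renamed contains ("batch", 32) and ("d_model", 512); the original
# shape is unchanged, since a new Shape is returned.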
java
@Override
public SofaResponse invoke(SofaRequest request) throws SofaRpcException {
    SofaResponse response = null;
    Throwable throwable = null;
    try {
        RpcInternalContext.pushContext();
        RpcInternalContext context = RpcInternalContext.getContext();
        context.setProviderSide(false);
        // Decorate the request
        decorateRequest(request);
        try {
            // Fire the client-start-invoke event
            if (EventBus.isEnable(ClientStartInvokeEvent.class)) {
                EventBus.post(new ClientStartInvokeEvent(request));
            }
            // Get the result
            response = cluster.invoke(request);
        } catch (SofaRpcException e) {
            throwable = e;
            throw e;
        } finally {
            // Fire the client-end-invoke event
            if (!request.isAsync()) {
                if (EventBus.isEnable(ClientEndInvokeEvent.class)) {
                    EventBus.post(new ClientEndInvokeEvent(request, response, throwable));
                }
            }
        }
        // Decorate the response
        decorateResponse(response);
        return response;
    } finally {
        RpcInternalContext.removeContext();
        RpcInternalContext.popContext();
    }
}
java
@Override
public void visitCode(Code obj) {
    collectionGroups.clear();
    groupToIterator.clear();
    loops.clear();
    buildVariableEndScopeMap();
    super.visitCode(obj);
}
python
def custom_repo(distro, args, cd_conf, rlogger, install_ceph=None):
    """
    A custom repo install helper that will go through config checks to
    retrieve repos (and any extra repos defined) and install those.

    ``cd_conf`` is the object built from argparse that holds the flags and
    information needed to determine what metadata from the configuration is
    to be used.
    """
    default_repo = cd_conf.get_default_repo()
    components = detect_components(args, distro)
    if args.release in cd_conf.get_repos():
        LOG.info('will use repository from conf: %s' % args.release)
        default_repo = args.release
    elif default_repo:
        LOG.info('will use default repository: %s' % default_repo)

    # At this point we know there is a cd_conf and that it has custom
    # repos; make sure we were able to detect an actual repo
    if not default_repo:
        LOG.warning('a ceph-deploy config was found with repos '
                    'but could not default to one')
    else:
        options = dict(cd_conf.items(default_repo))
        options['install_ceph'] = False if install_ceph is False else True
        extra_repos = cd_conf.get_list(default_repo, 'extra-repos')
        rlogger.info('adding custom repository file')
        try:
            distro.repo_install(
                distro,
                default_repo,
                options.pop('baseurl'),
                options.pop('gpgkey'),
                components=components,
                **options
            )
        except KeyError as err:
            raise RuntimeError('missing required key: %s in config section: %s' % (err, default_repo))

        for xrepo in extra_repos:
            rlogger.info('adding extra repo file: %s.repo' % xrepo)
            options = dict(cd_conf.items(xrepo))
            try:
                distro.repo_install(
                    distro,
                    xrepo,
                    options.pop('baseurl'),
                    options.pop('gpgkey'),
                    components=components,
                    **options
                )
            except KeyError as err:
                raise RuntimeError('missing required key: %s in config section: %s' % (err, xrepo))
java
public static void main(String[] args) throws RunnerException {
    new Runner(new OptionsBuilder()
        .include(WILDCARD + SuperClassInvocationBenchmark.class.getSimpleName() + WILDCARD)
        .include(WILDCARD + StubInvocationBenchmark.class.getSimpleName() + WILDCARD)
        .include(WILDCARD + ClassByImplementationBenchmark.class.getSimpleName() + WILDCARD)
        .include(WILDCARD + ClassByExtensionBenchmark.class.getSimpleName() + WILDCARD)
        .include(WILDCARD + TrivialClassCreationBenchmark.class.getSimpleName() + WILDCARD)
        .forks(0) // Should rather be 1 but there seems to be a bug in JMH.
        .build()).run();
}
java
public <T> void restartableFirst(int restartableId, final Func0<Observable<T>> observableFactory,
        final Action2<View, T> onNext, @Nullable final Action2<View, Throwable> onError) {

    restartable(restartableId, new Func0<Subscription>() {
        @Override
        public Subscription call() {
            return observableFactory.call()
                .compose(RxPresenter.this.<T>deliverFirst())
                .subscribe(split(onNext, onError));
        }
    });
}
python
def is_resource_protected(self, request, **kwargs):
    """
    Returns true if and only if the resource's URL is *not* exempt and
    *is* protected.
    """
    exempt_urls = self.get_exempt_url_patterns()
    protected_urls = self.get_protected_url_patterns()
    path = request.path_info.lstrip('/')

    path_is_exempt = any(m.match(path) for m in exempt_urls)
    if path_is_exempt:
        return False

    path_is_protected = any(m.match(path) for m in protected_urls)
    if path_is_protected:
        return True

    return False
python
def minimize(func, x0, data=None, method=None, lower_bounds=None,
             upper_bounds=None, constraints_func=None, nmr_observations=None,
             cl_runtime_info=None, options=None):
    r"""Minimization of one or more variables.

    For an easy wrapper of function maximization, see :func:`maximize`.

    All boundary conditions are enforced using the penalty method. That is,
    we optimize the objective function:

    .. math::

        F(x) = f(x) + \mu \sum \max(0, g_i(x))^2

    where :math:`F(x)` is the new objective function, :math:`f(x)` is the old
    objective function, :math:`g_i` are the boundary functions defined as
    :math:`g_i(x) \leq 0` and :math:`\mu` is the penalty weight.

    The penalty weight is by default :math:`\mu = 1e20` and can be set using
    the ``options`` dictionary as ``penalty_weight``.

    Args:
        func (mot.lib.cl_function.CLFunction): A CL function with the
            signature:

            .. code-block:: c

                double <func_name>(local const mot_float_type* const x,
                                   void* data,
                                   local mot_float_type* objective_list);

            The objective list needs to be filled when the provided pointer
            is not null. It should contain the function values for each
            observation. This list is used by non-linear least-squares
            routines, and will be squared by the least-square optimizer.
            This is only used by the ``Levenberg-Marquardt`` routine.

        x0 (ndarray): Initial guess. Array of real elements of size (n, p),
            for 'n' problems and 'p' independent variables.
        data (mot.lib.kernel_data.KernelData): the kernel data we will load.
            This is returned to the likelihood function as the
            ``void* data`` pointer.
        method (str): Type of solver. Should be one of:

            - 'Levenberg-Marquardt'
            - 'Nelder-Mead'
            - 'Powell'
            - 'Subplex'

            If not given, defaults to 'Powell'.
        lower_bounds (tuple): per parameter a lower bound, if given, the
            optimizer ensures ``a <= x`` with a the lower bound and x the
            parameter. If not given, -infinity is assumed for all
            parameters. Each tuple element can either be a scalar or a
            vector. If a vector is given the first dimension length should
            match that of the parameters.
        upper_bounds (tuple): per parameter an upper bound, if given, the
            optimizer ensures ``x >= b`` with b the upper bound and x the
            parameter. If not given, +infinity is assumed for all
            parameters. Each tuple element can either be a scalar or a
            vector. If a vector is given the first dimension length should
            match that of the parameters.
        constraints_func (mot.optimize.base.ConstraintFunction): function to
            compute (inequality) constraints. Should hold a CL function
            with the signature:

            .. code-block:: c

                void <func_name>(local const mot_float_type* const x,
                                 void* data,
                                 local mot_float_type* constraints);

            Where ``constraints_values`` is filled as:

            .. code-block:: c

                constraints[i] = g_i(x)

            That is, for each constraint function :math:`g_i`, formulated
            as :math:`g_i(x) <= 0`, we should return the function value of
            :math:`g_i`.
        nmr_observations (int): the number of observations returned by the
            optimization function. This is only needed for the
            ``Levenberg-Marquardt`` method.
        cl_runtime_info (mot.configuration.CLRuntimeInfo): the CL runtime
            information
        options (dict): A dictionary of solver options. All methods accept
            the following generic options:

            - patience (int): Maximum number of iterations to perform.
            - penalty_weight (float): the weight of the penalty term for
              the boundary conditions

    Returns:
        mot.optimize.base.OptimizeResults: The optimization result
        represented as a ``OptimizeResult`` object. Important attributes
        are: ``x`` the solution array.
    """
    if not method:
        method = 'Powell'
    cl_runtime_info = cl_runtime_info or CLRuntimeInfo()

    if len(x0.shape) < 2:
        x0 = x0[..., None]

    lower_bounds = _bounds_to_array(lower_bounds or np.ones(x0.shape[1]) * -np.inf)
    upper_bounds = _bounds_to_array(upper_bounds or np.ones(x0.shape[1]) * np.inf)

    if method == 'Powell':
        return _minimize_powell(func, x0, cl_runtime_info,
                                lower_bounds, upper_bounds,
                                constraints_func=constraints_func,
                                data=data, options=options)
    elif method == 'Nelder-Mead':
        return _minimize_nmsimplex(func, x0, cl_runtime_info,
                                   lower_bounds, upper_bounds,
                                   constraints_func=constraints_func,
                                   data=data, options=options)
    elif method == 'Levenberg-Marquardt':
        return _minimize_levenberg_marquardt(func, x0, nmr_observations,
                                             cl_runtime_info,
                                             lower_bounds, upper_bounds,
                                             constraints_func=constraints_func,
                                             data=data, options=options)
    elif method == 'Subplex':
        return _minimize_subplex(func, x0, cl_runtime_info,
                                 lower_bounds, upper_bounds,
                                 constraints_func=constraints_func,
                                 data=data, options=options)

    raise ValueError('Could not find the specified method "{}".'.format(method))
java
public synchronized Map<String, String> getConnectionListeners() {
    Map<String, String> result = new TreeMap<>();

    for (Map.Entry<ConnectionListener, Exception> entry : listeners.entrySet()) {
        String id = Integer.toHexString(System.identityHashCode(entry.getKey()));

        StringBuilder sb = new StringBuilder();
        for (StackTraceElement ste : entry.getValue().getStackTrace()) {
            sb = sb.append(ste.getClassName());
            sb = sb.append(":");
            sb = sb.append(ste.getMethodName());
            sb = sb.append(":");
            sb = sb.append(ste.getLineNumber());
            sb = sb.append(newLine);
        }

        result.put(id, sb.toString());
    }

    return result;
}
java
protected KieBase getKnowledgeBase(String name, String version) throws ActivityException {
    return getKnowledgeBase(name, version, null);
}
java
public void build(final Class<?> clazz, final Field field, final DeviceImpl device, final Object businessObject)
        throws DevFailed {
    xlogger.entry();
    BuilderUtils.checkStatic(field);

    Method getter;
    final String stateName = field.getName();
    final String getterName = BuilderUtils.GET + stateName.substring(0, 1).toUpperCase(Locale.ENGLISH)
            + stateName.substring(1);
    try {
        getter = clazz.getMethod(getterName);
    } catch (final NoSuchMethodException e) {
        throw DevFailedUtils.newDevFailed(e);
    }
    if (getter.getParameterTypes().length != 0) {
        throw DevFailedUtils.newDevFailed(DevFailedUtils.TANGO_BUILD_FAILED, getter + " must not have a parameter");
    }
    logger.debug("Has a status : {}", field.getName());
    if (getter.getReturnType() != String.class) {
        throw DevFailedUtils.newDevFailed(DevFailedUtils.TANGO_BUILD_FAILED,
                getter + " must have a return type of " + String.class);
    }

    Method setter;
    final String setterName = BuilderUtils.SET + stateName.substring(0, 1).toUpperCase(Locale.ENGLISH)
            + stateName.substring(1);
    try {
        setter = clazz.getMethod(setterName, String.class);
    } catch (final NoSuchMethodException e) {
        throw DevFailedUtils.newDevFailed(e);
    }

    device.setStatusImpl(new StatusImpl(businessObject, getter, setter));
    final Status annot = field.getAnnotation(Status.class);
    if (annot.isPolled()) {
        device.addAttributePolling(DeviceImpl.STATUS_NAME, annot.pollingPeriod());
    }
    xlogger.exit();
}
python
def _get_bases(type_):
    # type: (type) -> Tuple[type, type]
    """Get the base and meta classes to use in creating a subclass.

    Args:
        type_: The type to subclass.

    Returns:
        A tuple containing two values: a base class, and a metaclass.
    """
    try:
        class _(type_):  # type: ignore
            """Check if type_ is subclassable."""
        BaseClass = type_
    except TypeError:
        BaseClass = object

    class MetaClass(_ValidationMeta, BaseClass.__class__):  # type: ignore
        """Use the type_ meta and include base validation functionality."""

    return BaseClass, MetaClass
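An illustrative use of _get_bases; the behaviour of _ValidationMeta is not shown in the snippet, so only the base-class selection is demonstrated here:

# int is subclassable, so it is returned as the base class.
BaseClass, MetaClass = _get_bases(int)
assert BaseClass is int

# bool cannot be subclassed, so the fallback base is object.
BaseClass, MetaClass = _get_bases(bool)
assert BaseClass is object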
python
def repeat(self, target, sender, **kwargs):
    "will repeat whatever you say"
    if target.startswith("#"):
        self.message(target, kwargs["msg"])
    else:
        self.message(sender, kwargs["msg"])
java
public void save(final InputStream templateXlsIn, final OutputStream xlsOut, final Object beanObj)
        throws XlsMapperException, IOException {
    saver.save(templateXlsIn, xlsOut, beanObj);
}
python
def afx_adafactor():
    """Adafactor with recommended learning rate schedule."""
    hparams = afx_adam()
    hparams.optimizer = "Adafactor"
    hparams.learning_rate_schedule = "rsqrt_decay"
    hparams.learning_rate_warmup_steps = 10000
    return hparams
python
def request(self, action, data=None, headers=None, method='GET'):
    """ Append the REST headers to every request """
    request_headers = {
        "Authorization": "Bearer " + self.token,
        "Content-Type": "application/json",
        "X-Version": "1",
        "Accept": "application/json"
    }
    # Merge any caller-supplied headers instead of silently discarding them
    if headers:
        request_headers.update(headers)
    return Transport.request(self, action, data if data is not None else {},
                             request_headers, method)
java
public JsonObject exportForFts() { JsonObject result = JsonObject.create(); for (MutationToken token : tokens) { String tokenKey = token.vbucketID() + "/" + token.vbucketUUID(); Long seqno = result.getLong(tokenKey); if (seqno == null || seqno < token.sequenceNumber()) { result.put(tokenKey, token.sequenceNumber()); } } return result; }
java
public String getPositionInString(ModificationFeature mf) { Set vals = SITE_ACC.getValueFromBean(mf); if (!vals.isEmpty()) { int x = ((Integer) vals.iterator().next()); if (x > 0) return "@" + x; } vals = INTERVAL_BEGIN_ACC.getValueFromBean(mf); if (!vals.isEmpty()) { int begin = ((Integer) vals.iterator().next()); vals = INTERVAL_END_ACC.getValueFromBean(mf); if (!vals.isEmpty()) { int end = ((Integer) vals.iterator().next()); if (begin > 0 && end > 0 && begin <= end) { if (begin == end) return "@" + begin; else return "@" + "[" + begin + "-" + end + "]"; } } } return ""; }
java
@Override public final void sample() { long count = _count.getAndSet(0); long time = _time.getAndSet(0); if (count == 0) _value = 0; else _value = time / (double) count; }
java
private void readTableBlock(int startIndex, int blockLength) { for (int index = startIndex; index < (startIndex + blockLength - 11); index++) { if (matchPattern(TABLE_BLOCK_PATTERNS, index)) { int offset = index + 7; int nameLength = FastTrackUtility.getInt(m_buffer, offset); offset += 4; String name = new String(m_buffer, offset, nameLength, CharsetHelper.UTF16LE).toUpperCase(); FastTrackTableType type = REQUIRED_TABLES.get(name); if (type != null) { m_currentTable = new FastTrackTable(type, this); m_tables.put(type, m_currentTable); } else { m_currentTable = null; } m_currentFields.clear(); break; } } }
java
private void fixAdditionalKeyBindings(GameSettings settings) {
    if (this.additionalKeys == null) {
        return; // No extra keybindings to add.
    }

    // The keybindings are stored in GameSettings as a java built-in array.
    // There is no way to append to such arrays, so instead we create a new
    // array of the correct length, copy across the current keybindings,
    // add our own ones, and set the new array back into the GameSettings:
    KeyBinding[] bindings = (KeyBinding[]) ArrayUtils.addAll(settings.keyBindings, this.additionalKeys.toArray());
    settings.keyBindings = bindings;
}
python
def cmd_iter(
        self,
        tgt,
        fun,
        arg=(),
        timeout=None,
        tgt_type='glob',
        ret='',
        kwarg=None,
        **kwargs):
    '''
    Execute a single command via the salt-ssh subsystem and return a
    generator

    .. versionadded:: 2015.5.0
    '''
    ssh = self._prep_ssh(
            tgt,
            fun,
            arg,
            timeout,
            tgt_type,
            kwarg,
            **kwargs)
    # Use a distinct loop variable so the ``ret`` parameter is not shadowed
    for chunk_ret in ssh.run_iter(jid=kwargs.get('jid', None)):
        yield chunk_ret
python
async def delete_chat_sticker_set(self, chat_id: typing.Union[base.Integer, base.String]) -> base.Boolean: """ Use this method to delete a group sticker set from a supergroup. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Use the field can_set_sticker_set optionally returned in getChat requests to check if the bot can use this method. Source: https://core.telegram.org/bots/api#deletechatstickerset :param chat_id: Unique identifier for the target chat or username of the target supergroup :type chat_id: :obj:`typing.Union[base.Integer, base.String]` :return: Returns True on success :rtype: :obj:`base.Boolean` """ payload = generate_payload(**locals()) result = await self.request(api.Methods.DELETE_CHAT_STICKER_SET, payload) return result
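A minimal usage sketch, assuming `bot` is an instance of the client class above; the chat id is a made-up supergroup identifier:

async def drop_group_stickers(bot):
    # The bot must be an administrator with the appropriate rights
    # in this (hypothetical) supergroup.
    ok = await bot.delete_chat_sticker_set(chat_id=-1001234567890)
    assert ok is True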
java
@SneakyThrows public static DataSource createDataSource(final File yamlFile) { YamlRootEncryptRuleConfiguration config = YamlEngine.unmarshal(yamlFile, YamlRootEncryptRuleConfiguration.class); return EncryptDataSourceFactory.createDataSource(config.getDataSource(), new EncryptRuleConfigurationYamlSwapper().swap(config.getEncryptRule())); }
java
@Override
public double getWeight(double distance, double max, double stddev) {
    if(max <= 0) {
        return 1.0;
    }
    double relativedistance = distance / max;
    // the scaling was picked such that getWeight(a,a,0) is 0.1,
    // since erfc(1.1630871536766736) == 0.1
    return NormalDistribution.erfc(1.1630871536766736 * relativedistance);
}
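The scaling constant can be sanity-checked numerically. A quick sketch assuming SciPy is available (the Java code above uses ELKI's own NormalDistribution.erfc):

from scipy.special import erfc, erfcinv

print(erfc(1.1630871536766736))   # ~0.1, so getWeight(a, a, 0) == 0.1
print(erfcinv(0.1))               # ~1.1630871536766736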
java
private int endCheckLoop() { Node n; int rv = END_UNREACHED; // To find the loop body, we look at the second to last node of the // loop node, which should be the predicate that the loop should // satisfy. // The target of the predicate is the loop-body for all 4 kinds of // loops. for (n = first; n.next != last; n = n.next) { /* skip */ } if (n.type != Token.IFEQ) return END_DROPS_OFF; // The target's next is the loop body block rv = ((Jump)n).target.next.endCheck(); // check to see if the loop condition is true if (n.first.type == Token.TRUE) rv &= ~END_DROPS_OFF; // look for effect of breaks rv |= getIntProp(CONTROL_BLOCK_PROP, END_UNREACHED); return rv; }
java
private void cleanUp(final DataSet ds) { Set<String> indexedKeys = Sets.newHashSet(); for (String key : ds.getDataView().keySet()) { // Search for indexed entries if (key.contains("#")) { // Remember key (without index) indexedKeys.add(key.substring(0, key.indexOf("#"))); } } // Remove all indexes for (String indexedKey : indexedKeys) { ds.removeValue(indexedKey); } }
python
def _dlinear_seaborn_(self, label=None, style=None, opts=None): """ Returns a Seaborn linear regression plot with marginal distribution """ color, size = self._get_color_size(style) try: fig = sns.jointplot(self.x, self.y, color=color, size=size, data=self.df, kind="reg") fig = self._set_with_height(fig, opts) return fig except Exception as e: self.err(e, self.dlinear_, "Can not draw linear regression chart with distribution")
python
def get_col_row_tot_array_from_data_record_array(array): # TODO: max ToT '''Convert raw data array to column, row, and ToT array. Parameters ---------- array : numpy.array Raw data array. Returns ------- Tuple of arrays. ''' def get_col_row_tot_1_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), np.right_shift(np.bitwise_and(value, 0x000000F0), 4) def get_col_row_tot_2_array_from_data_record_array(value): return np.right_shift(np.bitwise_and(value, 0x00FE0000), 17), np.add(np.right_shift(np.bitwise_and(value, 0x0001FF00), 8), 1), np.bitwise_and(value, 0x0000000F) col_row_tot_1_array = np.column_stack(get_col_row_tot_1_array_from_data_record_array(array)) col_row_tot_2_array = np.column_stack(get_col_row_tot_2_array_from_data_record_array(array)) # interweave array here col_row_tot_array = np.vstack((col_row_tot_1_array.T, col_row_tot_2_array.T)).reshape((3, -1), order='F').T # http://stackoverflow.com/questions/5347065/interweaving-two-numpy-arrays # remove ToT > 14 (late hit, no hit) from array, remove row > 336 in case we saw hit in row 336 (no double hit possible) try: col_row_tot_array_filtered = col_row_tot_array[col_row_tot_array[:, 2] < 14] # [np.logical_and(col_row_tot_array[:,2]<14, col_row_tot_array[:,1]<=336)] except IndexError: # logging.warning('Array is empty') return np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')), np.array([], dtype=np.dtype('>u4')) return col_row_tot_array_filtered[:, 0], col_row_tot_array_filtered[:, 1], col_row_tot_array_filtered[:, 2]
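The vstack/reshape interleaving trick used above is easier to see on small stand-in arrays; a minimal NumPy sketch:

import numpy as np

a = np.array([[1, 1, 1], [2, 2, 2]])   # first hit of each data record
b = np.array([[3, 3, 3], [4, 4, 4]])   # second hit of each data record

# Rows of `a` and `b` alternate in the result: a[0], b[0], a[1], b[1]
interleaved = np.vstack((a.T, b.T)).reshape((3, -1), order='F').T
# -> [[1 1 1], [3 3 3], [2 2 2], [4 4 4]]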
python
def change_host_snapshot_command(self, host, snapshot_command):
    """Modify host snapshot command

    Format of the line that triggers function call::

    CHANGE_HOST_SNAPSHOT_COMMAND;<host_name>;<snapshot_command>

    :param host: host to modify snapshot command
    :type host: alignak.objects.host.Host
    :param snapshot_command: snapshot command line
    :type snapshot_command: str
    :return: None
    """
    host.modified_attributes |= DICT_MODATTR["MODATTR_EVENT_HANDLER_COMMAND"].value
    data = {"commands": self.commands, "call": snapshot_command}
    host.change_snapshot_command(data)
    self.send_an_element(host.get_update_status_brok())
java
protected int getSelectedTextColor(Context ctx) { return ColorHolder.color(getSelectedTextColor(), ctx, R.attr.material_drawer_selected_text, R.color.material_drawer_selected_text); }
java
public synchronized XAttribute get(Object key) { if(size == 0) { return null; } else { try { XAttributeMap map = deserialize(); return map.get(key); } catch (IOException e) { e.printStackTrace(); return null; } } }
java
public void validateUpdateFile(Long configId, String fileName) { // // config // Config config = valideConfigExist(configId); // // value // try { if (!config.getName().equals(fileName)) { throw new Exception(); } } catch (Exception e) { throw new FieldException("value", "conf.file.name.not.equal", e); } }
java
@Override public String getLocalAddr() { try { collaborator.preInvoke(componentMetaData); return request.getLocalAddr(); } finally { collaborator.postInvoke(); } }
java
private boolean onLoad(Cursor cursor, T entity) { if (cursor == null) { if (log.isLoggable(Level.FINEST)) { log.finest(L.l("{0} load returned null", _entityInfo)); } _entityInfo.loadFail(entity); return false; } else { _entityInfo.load(cursor, entity); if (log.isLoggable(Level.FINER)) { log.finer("loaded " + entity); } return true; } }
python
def parse_cif_structure(self): """Parse a `StructureData` from the cleaned `CifData` returned by the `CifSelectCalculation`.""" from aiida_codtools.workflows.functions.primitive_structure_from_cif import primitive_structure_from_cif if self.ctx.cif.has_unknown_species: self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_UNKNOWN_SPECIES self.report(self.ctx.exit_code.message) return if self.ctx.cif.has_undefined_atomic_sites: self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_UNDEFINED_ATOMIC_SITES self.report(self.ctx.exit_code.message) return if self.ctx.cif.has_attached_hydrogens: self.ctx.exit_code = self.exit_codes.ERROR_CIF_HAS_ATTACHED_HYDROGENS self.report(self.ctx.exit_code.message) return parse_inputs = { 'cif': self.ctx.cif, 'parse_engine': self.inputs.parse_engine, 'site_tolerance': self.inputs.site_tolerance, 'symprec': self.inputs.symprec, } try: structure, node = primitive_structure_from_cif.run_get_node(**parse_inputs) except Exception: # pylint: disable=broad-except self.ctx.exit_code = self.exit_codes.ERROR_CIF_STRUCTURE_PARSING_FAILED self.report(self.ctx.exit_code.message) return if node.is_failed: self.ctx.exit_code = self.exit_codes(node.exit_status) # pylint: disable=too-many-function-args self.report(self.ctx.exit_code.message) else: self.ctx.structure = structure
java
@Override
public int compare(BugInstance lhs, BugInstance rhs) {
    // Attributes of BugInstance.
    // Compare abbreviation
    // Compare class and method annotations (ignoring line numbers).
    // Compare field annotations.

    int cmp;

    BugPattern lhsPattern = lhs.getBugPattern();
    BugPattern rhsPattern = rhs.getBugPattern();

    // Compare by abbrev instead of type. The specific bug type can change
    // (e.g., "definitely null" to "null on simple path"). Also, we often
    // change bug pattern types from one version of FindBugs to the next.
    //
    // Source line and field name are still matched precisely, so this
    // shouldn't cause loss of precision.
    if ((cmp = lhsPattern.getAbbrev().compareTo(rhsPattern.getAbbrev())) != 0) {
        return cmp;
    }
    if (isExactBugPatternMatch() && (cmp = lhsPattern.getType().compareTo(rhsPattern.getType())) != 0) {
        return cmp;
    }
    if (comparePriorities) {
        cmp = lhs.getPriority() - rhs.getPriority();
        if (cmp != 0) {
            return cmp;
        }
    }

    Iterator<BugAnnotation> lhsIter = new FilteringAnnotationIterator(lhs.annotationIterator());
    Iterator<BugAnnotation> rhsIter = new FilteringAnnotationIterator(rhs.annotationIterator());

    while (lhsIter.hasNext() && rhsIter.hasNext()) {
        BugAnnotation lhsAnnotation = lhsIter.next();
        BugAnnotation rhsAnnotation = rhsIter.next();
        Class<? extends BugAnnotation> lhsClass;

        while (true) {
            // Different annotation types obviously cannot be equal,
            // so just compare by class name.
            lhsClass = lhsAnnotation.getClass();
            Class<? extends BugAnnotation> rhsClass = rhsAnnotation.getClass();
            if (lhsClass == rhsClass) {
                break;
            }
            if (lhsClass == LocalVariableAnnotation.class
                    && !((LocalVariableAnnotation) lhsAnnotation).isSignificant() && lhsIter.hasNext()) {
                lhsAnnotation = lhsIter.next();
            } else if (rhsClass == LocalVariableAnnotation.class
                    && !((LocalVariableAnnotation) rhsAnnotation).isSignificant() && rhsIter.hasNext()) {
                rhsAnnotation = rhsIter.next();
            } else {
                return lhsClass.getName().compareTo(rhsClass.getName());
            }
        }

        if (lhsClass == ClassAnnotation.class) {
            // ClassAnnotations should have their class names rewritten to
            // handle moved and renamed classes.
String lhsClassName = classNameRewriter.rewriteClassName(((ClassAnnotation) lhsAnnotation).getClassName()); String rhsClassName = classNameRewriter.rewriteClassName(((ClassAnnotation) rhsAnnotation).getClassName()); cmp = lhsClassName.compareTo(rhsClassName); } else if (lhsClass == MethodAnnotation.class) { // Rewrite class names in MethodAnnotations MethodAnnotation lhsMethod = ClassNameRewriterUtil.convertMethodAnnotation(classNameRewriter, (MethodAnnotation) lhsAnnotation); MethodAnnotation rhsMethod = ClassNameRewriterUtil.convertMethodAnnotation(classNameRewriter, (MethodAnnotation) rhsAnnotation); cmp = lhsMethod.compareTo(rhsMethod); } else if (lhsClass == FieldAnnotation.class) { // Rewrite class names in FieldAnnotations FieldAnnotation lhsField = ClassNameRewriterUtil.convertFieldAnnotation(classNameRewriter, (FieldAnnotation) lhsAnnotation); FieldAnnotation rhsField = ClassNameRewriterUtil.convertFieldAnnotation(classNameRewriter, (FieldAnnotation) rhsAnnotation); cmp = lhsField.compareTo(rhsField); } else if (lhsClass == StringAnnotation.class) { String lhsString = ((StringAnnotation) lhsAnnotation).getValue(); String rhsString = ((StringAnnotation) rhsAnnotation).getValue(); cmp = lhsString.compareTo(rhsString); } else if (lhsClass == LocalVariableAnnotation.class) { String lhsName = ((LocalVariableAnnotation) lhsAnnotation).getName(); String rhsName = ((LocalVariableAnnotation) rhsAnnotation).getName(); if ("?".equals(lhsName) || "?".equals(rhsName)) { continue; } cmp = lhsName.compareTo(rhsName); } else if (lhsClass == TypeAnnotation.class) { String lhsType = ((TypeAnnotation) lhsAnnotation).getTypeDescriptor(); String rhsType = ((TypeAnnotation) rhsAnnotation).getTypeDescriptor(); lhsType = ClassNameRewriterUtil.rewriteSignature(classNameRewriter, lhsType); rhsType = ClassNameRewriterUtil.rewriteSignature(classNameRewriter, rhsType); cmp = lhsType.compareTo(rhsType); } else if (lhsClass == IntAnnotation.class) { int lhsValue = ((IntAnnotation) lhsAnnotation).getValue(); int rhsValue = ((IntAnnotation) rhsAnnotation).getValue(); cmp = lhsValue - rhsValue; } else if (isBoring(lhsAnnotation)) { throw new IllegalStateException("Impossible"); } else { throw new IllegalStateException("Unknown annotation type: " + lhsClass.getName()); } if (cmp != 0) { return cmp; } } if (interestingNext(rhsIter)) { return -1; } else if (interestingNext(lhsIter)) { return 1; } else { return 0; } }
java
protected void configureTwitterClient(final Collection<BaseClient> properties) { val twitter = pac4jProperties.getTwitter(); if (StringUtils.isNotBlank(twitter.getId()) && StringUtils.isNotBlank(twitter.getSecret())) { val client = new TwitterClient(twitter.getId(), twitter.getSecret(), twitter.isIncludeEmail()); configureClient(client, twitter); LOGGER.debug("Created client [{}] with identifier [{}]", client.getName(), client.getKey()); properties.add(client); } }
java
public void updateDataSegmentPosition(long iPosition, final int iDataSegmentId, final long iDataSegmentPosition) throws IOException { iPosition = iPosition * RECORD_SIZE; acquireExclusiveLock(); try { final long[] pos = fileSegment.getRelativePosition(iPosition); final OFile f = fileSegment.files[(int) pos[0]]; long p = pos[1]; f.writeShort(p, (short) iDataSegmentId); f.writeLong(p += OBinaryProtocol.SIZE_SHORT, iDataSegmentPosition); } finally { releaseExclusiveLock(); } }
java
@NonNull public RequestCreator networkPolicy(@NonNull NetworkPolicy policy, @NonNull NetworkPolicy... additional) { data.networkPolicy(policy, additional); return this; }
python
def activate_shepherd(driver): """ Allows you to use Shepherd Tours with SeleniumBase http://github.hubspot.com/shepherd/docs/welcome/ """ shepherd_js = constants.Shepherd.MIN_JS sh_theme_arrows_css = constants.Shepherd.THEME_ARROWS_CSS sh_theme_arrows_fix_css = constants.Shepherd.THEME_ARR_FIX_CSS sh_theme_default_css = constants.Shepherd.THEME_DEFAULT_CSS sh_theme_dark_css = constants.Shepherd.THEME_DARK_CSS sh_theme_sq_css = constants.Shepherd.THEME_SQ_CSS sh_theme_sq_dark_css = constants.Shepherd.THEME_SQ_DK_CSS tether_js = constants.Tether.MIN_JS spinner_css = constants.Messenger.SPINNER_CSS sh_style = style_sheet.sh_style_test backdrop_style = style_sheet.sh_backdrop_style activate_bootstrap(driver) js_utils.wait_for_ready_state_complete(driver) js_utils.wait_for_angularjs(driver) js_utils.add_css_style(driver, backdrop_style) js_utils.wait_for_ready_state_complete(driver) js_utils.wait_for_angularjs(driver) for x in range(4): js_utils.add_css_link(driver, spinner_css) js_utils.add_css_link(driver, sh_theme_arrows_css) js_utils.add_css_link(driver, sh_theme_arrows_fix_css) js_utils.add_css_link(driver, sh_theme_default_css) js_utils.add_css_link(driver, sh_theme_dark_css) js_utils.add_css_link(driver, sh_theme_sq_css) js_utils.add_css_link(driver, sh_theme_sq_dark_css) js_utils.add_js_link(driver, tether_js) js_utils.add_js_link(driver, shepherd_js) time.sleep(0.1) for x in range(int(settings.MINI_TIMEOUT * 2.0)): # Shepherd needs a small amount of time to load & activate. try: driver.execute_script(sh_style) # Verify Shepherd has loaded js_utils.wait_for_ready_state_complete(driver) js_utils.wait_for_angularjs(driver) driver.execute_script(sh_style) # Need it twice for ordering js_utils.wait_for_ready_state_complete(driver) js_utils.wait_for_angularjs(driver) time.sleep(0.05) return except Exception: time.sleep(0.15) raise_unable_to_load_jquery_exception(driver)
python
def Pack(self): """Returns this message in an over-the-wire format.""" return struct.pack(self.format, self.command, self.arg0, self.arg1, len(self.data), self.checksum, self.magic)
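For context, a sketch of what such a packed message can look like under the six-field little-endian layout ('<6I') commonly used for ADB message headers; the actual `self.format` is defined elsewhere in the class, so the format string and field values here are assumptions:

import struct

CNXN = 0x4e584e43                        # 'CNXN' as a little-endian uint32
header = struct.pack('<6I',
                     CNXN,               # command
                     0x01000000,         # arg0: protocol version
                     256 * 1024,         # arg1: max message size
                     0,                  # len(data) for an empty payload
                     0,                  # checksum of the empty payload
                     CNXN ^ 0xFFFFFFFF)  # magic: command XOR 0xFFFFFFFF
assert len(header) == 24                 # six uint32 fields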
python
def MX(domain, resolve=False, nameserver=None): ''' Return a list of lists for the MX of ``domain``. If the 'resolve' argument is True, resolve IPs for the servers. It's limited to one IP, because although in practice it's very rarely a round robin, it is an acceptable configuration and pulling just one IP lets the data be similar to the non-resolved version. If you think an MX has multiple IPs, don't use the resolver here, resolve them in a separate step. CLI Example: .. code-block:: bash salt ns1 dnsutil.MX google.com ''' if _has_dig(): return __salt__['dig.MX'](domain, resolve, nameserver) return 'This function requires dig, which is not currently available'
java
public void marshall(CreateApplicationRequest createApplicationRequest, ProtocolMarshaller protocolMarshaller) { if (createApplicationRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createApplicationRequest.getName(), NAME_BINDING); protocolMarshaller.marshall(createApplicationRequest.getTags(), TAGS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
java
public static boolean isKnownByteFormat( BufferedImage image ) { int type = image.getType(); return type != BufferedImage.TYPE_BYTE_INDEXED && type != BufferedImage.TYPE_BYTE_BINARY && type != BufferedImage.TYPE_CUSTOM; }
python
def purge_metadata(self, force=False): """Instance-based version of ProcessMetadataManager.purge_metadata_by_name() that checks for process liveness before purging metadata. :param bool force: If True, skip process liveness check before purging metadata. :raises: `ProcessManager.MetadataError` when OSError is encountered on metadata dir removal. """ if not force and self.is_alive(): raise ProcessMetadataManager.MetadataError('cannot purge metadata for a running process!') super(ProcessManager, self).purge_metadata_by_name(self._name)
python
def get_instructions(self):
    """
    Get all instructions from a basic block.

    :returns: the instructions in the current basic block
    :rtype: list
    """
    tmp_ins = []
    idx = 0
    for i in self.method.get_instructions():
        if self.start <= idx < self.end:
            tmp_ins.append(i)
        idx += i.get_length()
    return tmp_ins