Dataset columns:
  language — string, 2 classes (java, python)
  func_code_string — string, length 63 to 466k characters
java
protected final void printCommandLine(@Nonnull String[] cmd, @CheckForNull FilePath workDir) {
    StringBuilder buf = new StringBuilder();
    if (workDir != null) {
        buf.append('[');
        if (showFullPath)
            buf.append(workDir.getRemote());
        else
            buf.append(workDir.getRemote().replaceFirst("^.+[/\\\\]", ""));
        buf.append("] ");
    }
    buf.append('$');
    for (String c : cmd) {
        buf.append(' ');
        if (c.indexOf(' ') >= 0) {
            if (c.indexOf('"') >= 0)
                buf.append('\'').append(c).append('\'');
            else
                buf.append('"').append(c).append('"');
        } else {
            buf.append(c);
        }
    }
    listener.getLogger().println(buf.toString());
    listener.getLogger().flush();
}
python
def load_data(self, sess, inputs, state_inputs):
    """Bulk loads the specified inputs into device memory.

    The shape of the inputs must conform to the shapes of the input
    placeholders this optimizer was constructed with.

    The data is split equally across all the devices. If the data is not
    evenly divisible by the batch size, excess data will be discarded.

    Args:
        sess: TensorFlow session.
        inputs: List of arrays matching the input placeholders, of shape
            [BATCH_SIZE, ...].
        state_inputs: List of RNN input arrays. These arrays have size
            [BATCH_SIZE / MAX_SEQ_LEN, ...].

    Returns:
        The number of tuples loaded per device.
    """
    if log_once("load_data"):
        logger.info(
            "Training on concatenated sample batches:\n\n{}\n".format(
                summarize({
                    "placeholders": self.loss_inputs,
                    "inputs": inputs,
                    "state_inputs": state_inputs
                })))

    feed_dict = {}
    assert len(self.loss_inputs) == len(inputs + state_inputs), \
        (self.loss_inputs, inputs, state_inputs)

    # Let's suppose we have the following input data, and 2 devices:
    # 1 2 3 4 5 6 7                               <- state inputs shape
    # A A A B B B C C C D D D E E E F F F G G G   <- inputs shape
    # The data is truncated and split across devices as follows:
    # |---| seq len = 3
    # |---------------------------------| seq batch size = 6 seqs
    # |----------------| per device batch size = 9 tuples

    if len(state_inputs) > 0:
        smallest_array = state_inputs[0]
        seq_len = len(inputs[0]) // len(state_inputs[0])
        self._loaded_max_seq_len = seq_len
    else:
        smallest_array = inputs[0]
        self._loaded_max_seq_len = 1

    sequences_per_minibatch = (
        self.max_per_device_batch_size // self._loaded_max_seq_len * len(
            self.devices))
    if sequences_per_minibatch < 1:
        logger.warn(
            ("Target minibatch size is {}, however the rollout sequence "
             "length is {}, hence the minibatch size will be raised to "
             "{}.").format(self.max_per_device_batch_size,
                           self._loaded_max_seq_len,
                           self._loaded_max_seq_len * len(self.devices)))
        sequences_per_minibatch = 1

    if len(smallest_array) < sequences_per_minibatch:
        # Dynamically shrink the batch size if insufficient data
        sequences_per_minibatch = make_divisible_by(
            len(smallest_array), len(self.devices))

    if log_once("data_slicing"):
        logger.info(
            ("Divided {} rollout sequences, each of length {}, among "
             "{} devices.").format(
                 len(smallest_array), self._loaded_max_seq_len,
                 len(self.devices)))

    if sequences_per_minibatch < len(self.devices):
        raise ValueError(
            "Must load at least 1 tuple sequence per device. Try "
            "increasing `sgd_minibatch_size` or reducing `max_seq_len` "
            "to ensure that at least one sequence fits per device.")
    self._loaded_per_device_batch_size = (sequences_per_minibatch // len(
        self.devices) * self._loaded_max_seq_len)

    if len(state_inputs) > 0:
        # First truncate the RNN state arrays to sequences_per_minibatch
        state_inputs = [
            make_divisible_by(arr, sequences_per_minibatch)
            for arr in state_inputs
        ]
        # Then truncate the data inputs to match
        inputs = [arr[:len(state_inputs[0]) * seq_len] for arr in inputs]
        assert len(state_inputs[0]) * seq_len == len(inputs[0]), \
            (len(state_inputs[0]), sequences_per_minibatch, seq_len,
             len(inputs[0]))
        for ph, arr in zip(self.loss_inputs, inputs + state_inputs):
            feed_dict[ph] = arr
        truncated_len = len(inputs[0])
    else:
        for ph, arr in zip(self.loss_inputs, inputs + state_inputs):
            truncated_arr = make_divisible_by(arr, sequences_per_minibatch)
            feed_dict[ph] = truncated_arr
            truncated_len = len(truncated_arr)

    sess.run([t.init_op for t in self._towers], feed_dict=feed_dict)

    self.num_tuples_loaded = truncated_len
    tuples_per_device = truncated_len // len(self.devices)
    assert tuples_per_device > 0, "No data loaded?"
    assert tuples_per_device % self._loaded_per_device_batch_size == 0
    return tuples_per_device
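The `make_divisible_by` helper used above is internal to the optimizer module; here is a minimal sketch of its assumed behavior (round an int down, or truncate an array, to a multiple of n), not the library's actual code:

def make_divisible_by(a, n):
    # ints are rounded down, arrays are truncated, to a multiple of n
    if isinstance(a, int):
        return a - a % n
    return a[:len(a) - len(a) % n]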
java
public final void setItsUser(final UserJetty pUser) {
    this.itsUser = pUser;
    if (this.itsId == null) {
        this.itsId = new IdUserRoleJetty();
    }
    this.itsId.setItsUser(this.itsUser);
}
python
def remove_links(self):
    """Remove links from our bin."""
    for link in self.list_exes():
        link = path.join(ENV_BIN, path.basename(link))
        print_pretty("<FG_BLUE>Removing link {}...<END>".format(link))
        os.remove(link)
python
def popError(text, title="Lackey Error"):
    """ Creates an error dialog with the specified text. """
    root = tk.Tk()
    root.withdraw()
    tkMessageBox.showerror(title, text)
python
def disconnect(self):
    ''' Disconnect from the serial port '''
    if self._poll_stop_event:
        self._poll_stop_event.set()
    if self._driver:
        if self.status != 'idle':
            self.deactivate()
        self._driver.disconnect()
java
public static void setPreferredRadius(double radius) {
    assert !Double.isNaN(radius);
    final Preferences prefs = Preferences.userNodeForPackage(MapElementConstants.class);
    if (prefs != null) {
        prefs.putDouble("RADIUS", radius); //$NON-NLS-1$
        try {
            prefs.flush();
        } catch (BackingStoreException exception) {
            //
        }
    }
}
java
@Override
public SecurityCookieImpl preInvoke(EJBRequestData request) throws EJBAccessDeniedException {
    Subject invokedSubject = subjectManager.getInvocationSubject();
    Subject callerSubject = subjectManager.getCallerSubject();
    EJBMethodMetaData methodMetaData = request.getEJBMethodMetaData();
    if (ejbSecConfig.getUseUnauthenticatedForExpiredCredentials()) {
        invokedSubject = setNullSubjectWhenExpired(invokedSubject);
        callerSubject = setNullSubjectWhenExpired(callerSubject);
    }
    Subject originalInvokedSubject = invokedSubject;
    Subject originalCallerSubject = callerSubject;
    // SecurityCookieImpl securityCookie = new SecurityCookieImpl(invokedSubject, callerSubject);
    if (setUnauthenticatedSubjectIfNeeded(invokedSubject, callerSubject)) {
        invokedSubject = subjectManager.getInvocationSubject();
        callerSubject = subjectManager.getCallerSubject();
    }
    Subject subjectToAuthorize = (invokedSubject == null) ? callerSubject : invokedSubject;
    if (!isInternalUnprotectedMethod(methodMetaData)) {
        eah.authorizeEJB(request, subjectToAuthorize);
    }
    performDelegation(methodMetaData, subjectToAuthorize);
    subjectManager.setCallerSubject(subjectToAuthorize);
    SecurityCookieImpl securityCookie = new SecurityCookieImpl(originalInvokedSubject, originalCallerSubject,
            subjectManager.getInvocationSubject(), subjectToAuthorize);
    return securityCookie;
}
java
public <A extends Annotation> List<A> getAnnotations(final Class<A> annoClass) {
    Objects.requireNonNull(annoClass, "annoClass should not be null.");
    return getAnnotationsByType(expandedAnnos, annoClass);
}
java
public SecretBundle setSecret(String vaultBaseUrl, String secretName, String value) {
    return setSecretWithServiceResponseAsync(vaultBaseUrl, secretName, value).toBlocking().single().body();
}
python
def parse_file(self, file_or_fname):
    """ Parse a file or a filename """
    with self._context():
        if hasattr(file_or_fname, 'read'):
            self.filename = getattr(
                file_or_fname, 'name', file_or_fname.__class__.__name__)
            self.p.ParseFile(file_or_fname)
        else:
            self.filename = file_or_fname
            with open(file_or_fname, 'rb') as f:
                self.p.ParseFile(f)
    return self._root
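Both call styles, sketched under the assumption of a parser instance `p` exposing this method (names are illustrative):

# by filename
root = p.parse_file('config.xml')
# by file object (opened in binary mode, as the method does internally)
with open('config.xml', 'rb') as fh:
    root = p.parse_file(fh)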
python
def num_rewards(self):
    """Returns the number of distinct rewards.

    Returns:
      Returns None if the reward range is infinite or the processed rewards
      aren't discrete, otherwise returns the number of distinct rewards.
    """
    # Pre-conditions: reward range is finite.
    #               : processed rewards are discrete.
    if not self.is_reward_range_finite:
        tf.logging.error("Infinite reward range, `num_rewards` returning None")
        return None
    if not self.is_processed_rewards_discrete:
        tf.logging.error(
            "Processed rewards are not discrete, `num_rewards` returning None")
        return None

    min_reward, max_reward = self.reward_range
    return max_reward - min_reward + 1
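A worked example with hypothetical values, showing why the count is inclusive on both ends:

min_reward, max_reward = -1, 2           # hypothetical finite, discrete range
assert max_reward - min_reward + 1 == 4  # the rewards {-1, 0, 1, 2}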
java
@Override
public Collection<Bean> delete(String schemaName, Collection<String> instanceIds) throws AbortRuntimeException {
    init();
    Map<BeanId, Bean> deleted = list(schemaName, instanceIds);
    Set<HBeanRow> rows = new HashSet<>();
    for (String id : instanceIds) {
        rows.add(new HBeanRow(BeanId.create(id, schemaName), uids));
    }
    try {
        rows = table.getEager(rows).getRows();
        ArrayList<BeanId> predecessors = new ArrayList<>();
        for (HBeanRow row : rows) {
            for (BeanId id : row.getPredecessorsBeanIds()) {
                predecessors.add(id);
            }
        }
        if (predecessors.size() > 0) {
            throw Events.CFG302_CANNOT_DELETE_BEAN(predecessors);
        }
    } catch (HBeanNotFoundException e) {
        throw Events.CFG304_BEAN_DOESNT_EXIST(e.getNotFound().iterator().next());
    }
    table.delete(rows);
    return deleted.values();
}
java
private void buildDeprecatedSection(MarkupDocBuilder markupDocBuilder, PathOperation operation) {
    if (BooleanUtils.isTrue(operation.getOperation().isDeprecated())) {
        markupDocBuilder.block(DEPRECATED_OPERATION, MarkupBlockStyle.EXAMPLE, null, MarkupAdmonition.CAUTION);
    }
}
python
def fields(depth, Rp, Rm, Gam, lrec, lsrc, zsrc, ab, TM, use_ne_eval):
    r"""Calculate Pu+, Pu-, Pd+, Pd-.

    .. math::

        P^{u\pm}_s, P^{d\pm}_s, \bar{P}^{u\pm}_s, \bar{P}^{d\pm}_s;
        P^{u\pm}_{s-1}, P^{u\pm}_n, \bar{P}^{u\pm}_{s-1}, \bar{P}^{u\pm}_n;
        P^{d\pm}_{s+1}, P^{d\pm}_n, \bar{P}^{d\pm}_{s+1}, \bar{P}^{d\pm}_n

    This function corresponds to equations 81/82, 95/96, 103/104, A-8/A-9,
    A-24/A-25, and A-32/A-33 in [HuTS15]_, and loosely to the corresponding
    files ``Pdownmin.F90``, ``Pdownplus.F90``, ``Pupmin.F90``, and
    ``Pdownmin.F90``.

    This function is called from the function :mod:`kernel.greenfct`.
    """
    # Variables
    nlsr = abs(lsrc-lrec)+1   # nr of layers btw and incl. src and rec layer
    rsrcl = 0                 # src-layer in reflection (Rp/Rm), first if down
    izrange = range(2, nlsr)
    isr = lsrc
    last = depth.size-1

    # Booleans if src in first or last layer; swapped if up=True
    first_layer = lsrc == 0
    last_layer = lsrc == depth.size-1

    # Depths; dp and dm are swapped if up=True
    if lsrc != depth.size-1:
        ds = depth[lsrc+1]-depth[lsrc]
        dp = depth[lsrc+1]-zsrc
    dm = zsrc-depth[lsrc]

    # Rm and Rp; swapped if up=True
    Rmp = Rm
    Rpm = Rp

    # Boolean if plus or minus has to be calculated
    plusset = [13, 23, 33, 14, 24, 34, 15, 25, 35]
    if TM:
        plus = ab in plusset
    else:
        plus = ab not in plusset

    # Sign-switches
    pm = 1     # + if plus=True, - if plus=False
    if not plus:
        pm = -1
    pup = -1   # + if up=True, - if up=False
    mupm = 1   # + except if up=True and plus=False

    # Gamma of source layer
    iGam = Gam[:, :, lsrc, :]

    # Calculate down- and up-going fields
    for up in [False, True]:

        # No upgoing field if rec is in last layer or below src
        if up and (lrec == depth.size-1 or lrec > lsrc):
            Pu = np.zeros(iGam.shape, dtype=complex)
            continue
        # No downgoing field if rec is in first layer or above src
        if not up and (lrec == 0 or lrec < lsrc):
            Pd = np.zeros(iGam.shape, dtype=complex)
            continue

        # Swaps if up=True
        if up:
            if not last_layer:
                dp, dm = dm, dp
            else:
                dp = dm
            Rmp, Rpm = Rpm, Rmp
            first_layer, last_layer = last_layer, first_layer
            rsrcl = nlsr-1  # src-layer in refl. (Rp/Rm), last (nlsr-1) if up
            izrange = range(nlsr-2)
            isr = lrec
            last = 0
            pup = 1
            if not plus:
                mupm = -1

        # Calculate Pu+, Pu-, Pd+, Pd-
        if lsrc == lrec:  # rec in src layer; Eqs 81/82, A-8/A-9

            if last_layer:  # If src/rec are in top (up) or bottom (down) layer
                if use_ne_eval:
                    P = use_ne_eval("Rmp*exp(-iGam*dm)")
                else:
                    P = Rmp*np.exp(-iGam*dm)
            else:           # If src and rec are in any layer in between
                if use_ne_eval:
                    Pstr = "(exp(-iGam*dm) + pm*Rpm*exp(-iGam*(ds+dp)))"
                    Pstr += "* Rmp/(1-Rmp*Rpm*exp(-2*iGam*ds))"
                    P = use_ne_eval(Pstr)
                else:
                    P = np.exp(-iGam*dm) + pm*Rpm*np.exp(-iGam*(ds+dp))
                    P *= Rmp/(1 - Rmp*Rpm*np.exp(-2*iGam*ds))

        else:  # rec above (up) / below (down) src layer
               # Eqs 95/96,   A-24/A-25 for rec above src layer
               # Eqs 103/104, A-32/A-33 for rec below src layer

            # First compute P_{s-1} (up) / P_{s+1} (down)
            iRpm = Rpm[:, :, rsrcl, :]
            if first_layer:  # If src is in bottom (up) / top (down) layer
                if use_ne_eval:
                    P = use_ne_eval("(1 + iRpm)*mupm*exp(-iGam*dp)")
                else:
                    P = (1 + iRpm)*mupm*np.exp(-iGam*dp)
            else:
                iRmp = Rmp[:, :, rsrcl, :]
                if use_ne_eval:
                    Pstr = "(mupm*exp(-iGam*dp) + "
                    Pstr += "pm*mupm*iRmp*exp(-iGam*(ds+dm)))"
                    Pstr += "*(1 + iRpm)/(1 - iRmp*iRpm * exp(-2*iGam*ds))"
                    P = use_ne_eval(Pstr)
                else:
                    P = mupm*np.exp(-iGam*dp)
                    P += pm*mupm*iRmp*np.exp(-iGam * (ds+dm))
                    P *= (1 + iRpm)/(1 - iRmp*iRpm * np.exp(-2*iGam*ds))

            # If up or down and src is in last but one layer
            if up or (not up and lsrc+1 < depth.size-1):
                ddepth = depth[lsrc+1-1*pup]-depth[lsrc-1*pup]
                iRpm = Rpm[:, :, rsrcl-1*pup, :]
                miGam = Gam[:, :, lsrc-1*pup, :]
                if use_ne_eval:
                    P = use_ne_eval("P/(1 + iRpm*exp(-2*miGam * ddepth))")
                else:
                    P /= (1 + iRpm*np.exp(-2*miGam * ddepth))

            # Second compute P for all other layers
            if nlsr > 2:
                for iz in izrange:
                    ddepth = depth[isr+iz+pup+1]-depth[isr+iz+pup]
                    iRpm = Rpm[:, :, iz+pup, :]
                    piGam = Gam[:, :, isr+iz+pup, :]
                    if use_ne_eval:
                        P = use_ne_eval("P*(1 + iRpm)*exp(-piGam * ddepth)")
                    else:
                        P *= (1 + iRpm)*np.exp(-piGam * ddepth)

                    # If rec/src NOT in first/last layer (up/down)
                    if isr+iz != last:
                        ddepth = depth[isr+iz+1] - depth[isr+iz]
                        iRpm = Rpm[:, :, iz, :]
                        piGam2 = Gam[:, :, isr+iz, :]
                        if use_ne_eval:
                            Pstr = "P/(1 + iRpm*exp(-2*piGam2 * ddepth))"
                            P = use_ne_eval(Pstr)
                        else:
                            P /= 1 + iRpm*np.exp(-2*piGam2 * ddepth)

        # Store P in Pu/Pd
        if up:
            Pu = P
        else:
            Pd = P

    # Return fields (up- and downgoing)
    return Pu, Pd
python
def delay(sig):
    """ Simple feedforward delay effect """
    smix = Streamix()
    sig = thub(sig, 3)  # Auto-copy 3 times (remove this line if using feedback)
    smix.add(0, sig)
    # To get a feedback delay, use "smix.copy()" below instead of both "sig"
    smix.add(280 * ms, .1 * sig)  # You can also try other constants
    smix.add(220 * ms, .1 * sig)
    return smix
python
def _apply_discrete_colormap(arr, cmap):
    """
    Apply discrete colormap.

    Parameters
    ----------
    arr : numpy.ndarray
        1D image array to convert.
    cmap : dict
        Discrete ColorMap dictionary,
        e.g. { 1: [255, 255, 255], 2: [255, 0, 0] }

    Returns
    -------
    arr : numpy.ndarray
    """
    res = np.zeros((arr.shape[1], arr.shape[2], 3), dtype=np.uint8)
    for k, v in cmap.items():
        res[arr[0] == k] = v
    return np.transpose(res, [2, 0, 1])
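A minimal usage sketch (numpy only; the array values are illustrative):

import numpy as np

arr = np.array([[[1, 2], [2, 1]]], dtype=np.uint8)   # (band, row, col)
cmap = {1: [255, 255, 255], 2: [255, 0, 0]}          # value -> RGB
rgb = _apply_discrete_colormap(arr, cmap)
assert rgb.shape == (3, 2, 2)                        # RGB in (band, row, col)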
python
def mapping_get(uri, mapping):
    """Look up the URI in the given mapping and return the result.

    Throws KeyError if no matching mapping was found.
    """
    ln = localname(uri)
    # 1. try to match URI keys
    for k, v in mapping.items():
        if k == uri:
            return v
    # 2. try to match local names
    for k, v in mapping.items():
        if k == ln:
            return v
    # 3. try to match local names with * prefix
    # try to match longest first, so sort the mapping by key length
    l = list(mapping.items())
    l.sort(key=lambda i: len(i[0]), reverse=True)
    for k, v in l:
        if k[0] == '*' and ln.endswith(k[1:]):
            return v
    raise KeyError(uri)
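An illustrative lookup, assuming `localname` returns the URI fragment (or the last path segment):

mapping = {
    'http://example.org/vocab#Person': 'person',  # stage 1: full-URI match
    'Organization': 'org',                        # stage 2: local-name match
    '*Event': 'event',                            # stage 3: wildcard suffix
}
mapping_get('http://example.org/vocab#ConcertEvent', mapping)  # -> 'event'
mapping_get('http://example.org/vocab#Unknown', mapping)       # KeyError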
python
def randomizer_bin_und(R, alpha, seed=None):
    '''
    This function randomizes a binary undirected network, while preserving
    the degree distribution. The function directly searches for rewirable
    edge pairs (rather than trying to rewire edge pairs at random), and
    hence avoids long loops and works especially well in dense matrices.

    Parameters
    ----------
    R : NxN np.ndarray
        binary undirected connection matrix
    alpha : float
        fraction of edges to rewire
    seed : hashable, optional
        If None (default), use the np.random's global random state to
        generate random numbers. Otherwise, use a new np.random.RandomState
        instance seeded with the given value.

    Returns
    -------
    R : NxN np.ndarray
        randomized network
    '''
    rng = get_rng(seed)
    R = binarize(R, copy=True)  # binarize
    if not np.all(R == R.T):
        raise BCTParamError(
            'randomizer_bin_und only takes undirected matrices')

    ax = len(R)
    nr_poss_edges = (np.dot(ax, ax) - ax) / 2  # find maximum possible edges

    savediag = np.diag(R)
    np.fill_diagonal(R, np.inf)  # replace diagonal with high value

    # if there are more edges than non-edges, invert the matrix to reduce
    # computation time. "invert" means swap meaning of 0 and 1, not matrix
    # inversion
    i, j = np.where(np.triu(R, 1))
    k = len(i)
    if k > nr_poss_edges / 2:
        swap = True
        R = np.logical_not(R)
        np.fill_diagonal(R, np.inf)
        i, j = np.where(np.triu(R, 1))
        k = len(i)
    else:
        swap = False

    # exclude fully connected nodes
    fullnodes = np.where((np.sum(np.triu(R, 1), axis=0) +
                          np.sum(np.triu(R, 1), axis=1).T) == (ax - 1))
    if np.size(fullnodes):
        R[fullnodes, :] = 0
        R[:, fullnodes] = 0
        np.fill_diagonal(R, np.inf)
        i, j = np.where(np.triu(R, 1))
        k = len(i)

    if k == 0 or k >= (nr_poss_edges - 1):
        raise BCTParamError("No possible randomization")

    for it in range(k):
        if rng.random_sample() > alpha:
            continue  # rewire alpha% of edges

        a = i[it]
        b = j[it]  # it is the chosen edge from a<->b

        alliholes, = np.where(R[:, a] == 0)  # find where each end can connect
        alljholes, = np.where(R[:, b] == 0)

        # we can only use edges with connection to neither node
        i_intersect = np.intersect1d(alliholes, alljholes)
        # find which of these nodes are connected
        ii, jj = np.where(R[np.ix_(i_intersect, i_intersect)])

        # if there is an edge to switch
        if np.size(ii):
            # choose one randomly
            nummates = np.size(ii)
            mate = rng.randint(nummates)

            # randomly orient the second edge
            if rng.random_sample() > .5:
                c = i_intersect[ii[mate]]
                d = i_intersect[jj[mate]]
            else:
                d = i_intersect[ii[mate]]
                c = i_intersect[jj[mate]]

            # swap the edges
            R[a, b] = 0
            R[c, d] = 0
            R[b, a] = 0
            R[d, c] = 0
            R[a, c] = 1
            R[b, d] = 1
            R[c, a] = 1
            R[d, b] = 1

            # update the edge index (this is inefficient)
            for m in range(k):
                if i[m] == d and j[m] == c:
                    i.setflags(write=True)
                    j.setflags(write=True)
                    i[it] = c
                    j[m] = b
                elif i[m] == c and j[m] == d:
                    i.setflags(write=True)
                    j.setflags(write=True)
                    j[it] = c
                    i[m] = b

    # restore fullnodes
    if np.size(fullnodes):
        R[fullnodes, :] = 1
        R[:, fullnodes] = 1

    # restore inversion
    if swap:
        R = np.logical_not(R)

    # restore diagonal
    np.fill_diagonal(R, 0)
    R += savediag

    return np.array(R, dtype=int)
python
def exclude_items(items, any_all=any, ignore_case=False,
                  normalize_values=False, **kwargs):
    """Exclude items by matching metadata.

    Note:
        Metadata values are lowercased when ``normalize_values`` is ``True``,
        so ``ignore_case`` is automatically set to ``True``.

    Parameters:
        items (list): A list of item dicts or filepaths.
        any_all (callable): A callable to determine if any or all filters
            must match to exclude items.
            Expected values :obj:`any` (default) or :obj:`all`.
        ignore_case (bool): Perform case-insensitive matching.
            Default: ``False``
        normalize_values (bool): Normalize metadata values to remove common
            differences between sources.
            Default: ``False``
        kwargs (list): Lists of values to match the given metadata field.

    Yields:
        dict: The next item to be included.

    Example:
        >>> from google_music_utils import exclude_items
        >>> list(exclude_items(song_list, any_all=all, ignore_case=True,
        ...                    normalize_values=True, artist=['Beck'],
        ...                    album=['Golden Feelings']))
    """
    if kwargs:
        match = functools.partial(
            _match_item, any_all=any_all, ignore_case=ignore_case,
            normalize_values=normalize_values, **kwargs)
        return filterfalse(match, items)
    else:
        return iter(items)
java
public S getNext() {
    S result = null;
    QueryParameters params = innerGetNext();
    try {
        result = processor.toBean(params, this.type);
    } catch (MjdbcException ex) {
        throw new MjdbcRuntimeException(ex);
    }
    return result;
}
java
public PrecedenceOrderStep<ConfigFactory> withSources(
        NamedConfigSource firstSource,
        NamedConfigSource secondSource,
        NamedConfigSource... moreSources
) {
    return withSources(listOfTwoOrMore(firstSource, secondSource, moreSources));
}
java
private void processSecurityIdentity(BeanMetaData bmd) {
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled()) {
        Tr.entry(tc, "processSecurityIdentity");
    }
    EnterpriseBean ejbBean = bmd.wccm.enterpriseBean;
    boolean runAsSet = false;
    if (ejbBean != null) {
        // get runAs identity
        SecurityIdentity secIdentity = ejbBean.getSecurityIdentity();
        if (secIdentity != null) {
            runAsSet = true;
            if (secIdentity.isUseCallerIdentity()) {
                bmd.ivUseCallerIdentity = true;
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "RunAs set to Caller Identity ");
            } else {
                bmd.ivRunAs = secIdentity.getRunAs().getRoleName();
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "RunAs set to " + bmd.ivRunAs);
            }
        } // if (secIdentity != null)
    } // if (ejbBean != null)

    // Only check for @RunAs annotation if it is not set by XML. //446358
    if (!runAsSet) {
        // check for @RunAs annotation
        RunAs runAs = bmd.enterpriseBeanClass.getAnnotation(javax.annotation.security.RunAs.class);
        if (runAs != null) {
            bmd.ivRunAs = runAs.value();
        }
    } // if (!runAsSet)

    if (isTraceOn && tc.isEntryEnabled()) {
        Tr.exit(tc, "processSecurityIdentity: useCallerIdentity = " + bmd.ivUseCallerIdentity
                + ": Use Role = " + bmd.ivRunAs);
    }
}
java
public void addHeader(@Nonnull @Nonempty final String sName, @Nullable final String sValue) {
    if (sValue != null)
        _addHeader(sName, sValue);
}
java
public static void add(ClassVisitor cw, ClassInfo classInfo, boolean typeQueryRootBean) {
    List<FieldInfo> fields = classInfo.getFields();
    if (fields != null) {
        for (FieldInfo field : fields) {
            field.writeMethod(cw, typeQueryRootBean);
        }
    }
}
java
public FessMessages addConstraintsRangeMessage(String property, String min, String max) {
    assertPropertyNotNull(property);
    add(property, new UserMessage(CONSTRAINTS_Range_MESSAGE, min, max));
    return this;
}
java
public static boolean areBooleanEquals(Boolean boolean1, Boolean boolean2) {
    if (boolean1 != null && !boolean1.equals(boolean2)) {
        return false;
    } else if (boolean2 != null && !boolean2.equals(boolean1)) {
        return false;
    }
    return true;
}
java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    try {
        boolean isFragment = ContextUtils.isFragment(fragment);
        boolean isSupportFragment = ContextUtils.isSupportFragment(fragment);
        if (isFragment || isSupportFragment) {
            Layout layout = TypeUtils.getAnnotation(fragment, Layout.class);
            if (layout != null)
                return inflater.inflate(layout.value(), null);
        } else {
            Set<Class<?>> applicableContexts = new HashSet<Class<?>>();
            applicableContexts.add(Fragment.class);
            applicableContexts.add(android.support.v4.app.Fragment.class);
            throw new IllegalContextException(fragment, applicableContexts);
        }
    } catch (Exception e) {
        String errorContext = new StringBuilder()
                .append("Layout resolution failed on ")
                .append(fragment.getClass().getName())
                .append(". ")
                .toString();
        Log.e(IckleSupportFragment.class.getSimpleName(), errorContext, e);
    }
    return null;
}
java
@Override
protected Promise call(String name, Tree params, Options opts, PacketStream stream, Context parent,
        String targetID, int remaining) {
    EndpointKey endpointKey = null;
    ErrorCounter errorCounter = null;
    try {

        // Get the first recommended Endpoint and Error Counter
        ActionEndpoint action = (ActionEndpoint) serviceRegistry.getAction(name, targetID);
        String nodeID = action.getNodeID();
        endpointKey = new EndpointKey(nodeID, name);
        errorCounter = getErrorCounter(endpointKey);

        // Check availability of the Endpoint (if endpoint isn't targeted)
        if (targetID == null) {
            LinkedHashSet<String> nodeIDs = new LinkedHashSet<>(maxSameNodes * 2);
            int sameNodeCounter = 0;
            long now;
            if (errorCounter == null) {
                now = 0;
            } else {
                now = System.currentTimeMillis();
            }
            for (int i = 0; i < maxTries; i++) {
                if (errorCounter == null || errorCounter.isAvailable(now)) {
                    // Endpoint is available
                    break;
                }

                // Store nodeID
                if (!nodeIDs.add(nodeID)) {
                    sameNodeCounter++;
                    if (sameNodeCounter >= maxSameNodes) {
                        // The "maxSameNodes" limit is reached
                        break;
                    }
                }

                // Try to choose another endpoint
                action = (ActionEndpoint) serviceRegistry.getAction(name, null);
                nodeID = action.getNodeID();
                endpointKey = new EndpointKey(nodeID, name);
                errorCounter = getErrorCounter(endpointKey);
            }
        }

        // Create new Context
        Context ctx = contextFactory.create(name, params, opts, stream, parent);

        // Invoke Endpoint
        final ErrorCounter currentCounter = errorCounter;
        final EndpointKey currentKey = endpointKey;
        return Promise.resolve(action.handler(ctx)).then(rsp -> {

            // Reset error counter
            if (currentCounter != null) {
                currentCounter.reset();
            }

            // Return response
            return rsp;

        }).catchError(cause -> {

            // Increment error counter
            increment(currentCounter, currentKey, cause, System.currentTimeMillis());

            // Retry
            return retry(cause, name, params, opts, stream, parent, targetID, remaining);

        });
    } catch (Throwable cause) {

        // Increment error counter
        increment(errorCounter, endpointKey, cause, System.currentTimeMillis());

        // Retry
        return retry(cause, name, params, opts, stream, parent, targetID, remaining);
    }
}
java
public TokenResult prepareLogin(String username) throws Exception {
    username = encode(username);
    Api apiResult = null;
    TokenResult token = new TokenResult();
    token.tokenName = "lgtoken";
    token.tokenMode = TokenMode.token1_19;
    // see https://github.com/WolfgangFahl/Mediawiki-Japi/issues/31
    if (this.isVersion128()) {
        apiResult = this.getQueryResult("&meta=tokens&type=login");
        super.handleError(apiResult);
        token.token = apiResult.getQuery().getTokens().getLogintoken();
    } else {
        apiResult = getActionResult("login", "&lgname=" + username, null, null);
        super.handleError(apiResult);
        Login login = apiResult.getLogin();
        token.token = login.getToken();
    }
    return token;
}
java
protected List<INode> getUserModules(List<? extends INode> ast) {
    List<INode> userModules = new LinkedList<INode>();
    for (INode node : ast) {
        if (!getInfo().getDeclAssistant().isLibrary(node)) {
            userModules.add(node);
        }
    }
    return userModules;
}
python
def tryimport(modname, pipiname=None, ensure=False):
    """
    CommandLine:
        python -m utool.util_import --test-tryimport

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_tests import *  # NOQA
        >>> import utool as ut
        >>> modname = 'pyfiglet'
        >>> pipiname = 'git+https://github.com/pwaller/pyfiglet'
        >>> pyfiglet = ut.tryimport(modname, pipiname)
        >>> assert pyfiglet is None or isinstance(pyfiglet, types.ModuleType), 'unknown error'

    Example2:
        >>> # UNSTABLE_DOCTEST
        >>> # disabled because not everyone has access to being a super user
        >>> from utool.util_tests import *  # NOQA
        >>> import utool as ut
        >>> modname = 'lru'
        >>> pipiname = 'git+https://github.com/amitdev/lru-dict'
        >>> lru = ut.tryimport(modname, pipiname, ensure=True)
        >>> assert isinstance(lru, types.ModuleType), 'did not ensure lru'
    """
    if pipiname is None:
        pipiname = modname
    try:
        if util_inject.PRINT_INJECT_ORDER:
            if modname not in sys.modules:
                util_inject.noinject(modname, N=2, via='ut.tryimport')
        module = __import__(modname)
        return module
    except ImportError as ex:
        import utool as ut
        base_pipcmd = 'pip install %s' % pipiname
        sudo = not ut.WIN32 and not ut.in_virtual_env()
        if sudo:
            pipcmd = 'sudo ' + base_pipcmd
        else:
            pipcmd = base_pipcmd
        msg = 'unable to find module %s. Please install: %s' % (modname, pipcmd)
        print(msg)
        ut.printex(ex, msg, iswarning=True)
        if ensure:
            raise AssertionError(
                'Ensure is dangerous behavior and is no longer supported.')
            # raise NotImplementedError('not ensuring')
            # Legacy (now unreachable) auto-install path:
            ut.cmd(base_pipcmd, sudo=sudo)
            module = tryimport(modname, pipiname, ensure=False)
            if module is None:
                raise AssertionError(
                    'Cannot ensure modname=%r please install using %r' % (
                        modname, pipcmd))
            return module
        return None
java
public String processRequest(HttpServletRequest request) {
    Map<String, Object> reqMap = MessageUtil.parseXml(request, getToken(), getAppId(), getAESKey());
    String fromUserName = (String) reqMap.get("FromUserName");
    String toUserName = (String) reqMap.get("ToUserName");
    String msgType = (String) reqMap.get("MsgType");
    LOG.debug("Received message, message type: {}", msgType);
    BaseMsg msg = null;
    if (msgType.equals(ReqType.EVENT)) {
        String eventType = (String) reqMap.get("Event");
        String ticket = (String) reqMap.get("Ticket");
        QrCodeEvent qrCodeEvent = null;
        if (isNotBlank(ticket)) {
            String eventKey = (String) reqMap.get("EventKey");
            LOG.debug("eventKey:{}", eventKey);
            LOG.debug("ticket:{}", ticket);
            qrCodeEvent = new QrCodeEvent(eventKey, ticket);
            buildBasicEvent(reqMap, qrCodeEvent);
            if (eventType.equals(EventType.SCAN)) {
                msg = handleQrCodeEvent(qrCodeEvent);
                if (isNull(msg)) {
                    msg = processEventHandle(qrCodeEvent);
                }
            }
        }
        if (eventType.equals(EventType.SUBSCRIBE)) {
            BaseEvent event = new BaseEvent();
            if (qrCodeEvent != null) {
                event = qrCodeEvent;
            } else {
                buildBasicEvent(reqMap, event);
            }
            msg = handleSubscribe(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (eventType.equals(EventType.UNSUBSCRIBE)) {
            BaseEvent event = new BaseEvent();
            buildBasicEvent(reqMap, event);
            msg = handleUnsubscribe(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (eventType.equals(EventType.CLICK)) {
            String eventKey = (String) reqMap.get("EventKey");
            LOG.debug("eventKey:{}", eventKey);
            MenuEvent event = new MenuEvent(eventKey);
            buildBasicEvent(reqMap, event);
            msg = handleMenuClickEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (eventType.equals(EventType.VIEW)) {
            String eventKey = (String) reqMap.get("EventKey");
            LOG.debug("eventKey:{}", eventKey);
            MenuEvent event = new MenuEvent(eventKey);
            buildBasicEvent(reqMap, event);
            msg = handleMenuViewEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (eventType.equals(EventType.LOCATION)) {
            double latitude = Double.parseDouble((String) reqMap.get("Latitude"));
            double longitude = Double.parseDouble((String) reqMap.get("Longitude"));
            double precision = Double.parseDouble((String) reqMap.get("Precision"));
            LocationEvent event = new LocationEvent(latitude, longitude, precision);
            buildBasicEvent(reqMap, event);
            msg = handleLocationEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (EventType.SCANCODEPUSH.equals(eventType) || EventType.SCANCODEWAITMSG.equals(eventType)) {
            String eventKey = (String) reqMap.get("EventKey");
            Map<String, Object> scanCodeInfo = (Map<String, Object>) reqMap.get("ScanCodeInfo");
            String scanType = (String) scanCodeInfo.get("ScanType");
            String scanResult = (String) scanCodeInfo.get("ScanResult");
            ScanCodeEvent event = new ScanCodeEvent(eventKey, scanType, scanResult);
            buildBasicEvent(reqMap, event);
            msg = handleScanCodeEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (EventType.PICPHOTOORALBUM.equals(eventType) || EventType.PICSYSPHOTO.equals(eventType)
                || EventType.PICWEIXIN.equals(eventType)) {
            String eventKey = (String) reqMap.get("EventKey");
            Map<String, Object> sendPicsInfo = (Map<String, Object>) reqMap.get("SendPicsInfo");
            int count = Integer.parseInt((String) sendPicsInfo.get("Count"));
            List<Map> picList = (List) sendPicsInfo.get("PicList");
            SendPicsInfoEvent event = new SendPicsInfoEvent(eventKey, count, picList);
            buildBasicEvent(reqMap, event);
            msg = handlePSendPicsInfoEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (EventType.TEMPLATESENDJOBFINISH.equals(eventType)) {
            String msgId = (String) reqMap.get("MsgID");
            String status = (String) reqMap.get("Status");
            TemplateMsgEvent event = new TemplateMsgEvent(msgId, status);
            buildBasicEvent(reqMap, event);
            msg = handleTemplateMsgEvent(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        } else if (EventType.MASSSENDJOBFINISH.equals(eventType)) {
            String msgId = (String) reqMap.get("MsgID");
            String status = (String) reqMap.get("Status");
            Integer totalCount = Integer.valueOf(String.valueOf(reqMap.get("TotalCount")));
            Integer filterCount = Integer.valueOf(String.valueOf(reqMap.get("FilterCount")));
            Integer sentCount = Integer.valueOf(String.valueOf(reqMap.get("SentCount")));
            Integer errorCount = Integer.valueOf(String.valueOf(reqMap.get("ErrorCount")));
            SendMessageEvent event = new SendMessageEvent(msgId, status, totalCount, filterCount, sentCount, errorCount);
            buildBasicEvent(reqMap, event);
            msg = callBackAllMessage(event);
            if (isNull(msg)) {
                msg = processEventHandle(event);
            }
        }
    } else {
        if (msgType.equals(ReqType.TEXT)) {
            String content = (String) reqMap.get("Content");
            LOG.debug("Text message content: {}", content);
            TextReqMsg textReqMsg = new TextReqMsg(content);
            buildBasicReqMsg(reqMap, textReqMsg);
            msg = handleTextMsg(textReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(textReqMsg);
            }
        } else if (msgType.equals(ReqType.IMAGE)) {
            String picUrl = (String) reqMap.get("PicUrl");
            String mediaId = (String) reqMap.get("MediaId");
            ImageReqMsg imageReqMsg = new ImageReqMsg(picUrl, mediaId);
            buildBasicReqMsg(reqMap, imageReqMsg);
            msg = handleImageMsg(imageReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(imageReqMsg);
            }
        } else if (msgType.equals(ReqType.VOICE)) {
            String format = (String) reqMap.get("Format");
            String mediaId = (String) reqMap.get("MediaId");
            String recognition = (String) reqMap.get("Recognition");
            VoiceReqMsg voiceReqMsg = new VoiceReqMsg(mediaId, format, recognition);
            buildBasicReqMsg(reqMap, voiceReqMsg);
            msg = handleVoiceMsg(voiceReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(voiceReqMsg);
            }
        } else if (msgType.equals(ReqType.VIDEO)) {
            String thumbMediaId = (String) reqMap.get("ThumbMediaId");
            String mediaId = (String) reqMap.get("MediaId");
            VideoReqMsg videoReqMsg = new VideoReqMsg(mediaId, thumbMediaId);
            buildBasicReqMsg(reqMap, videoReqMsg);
            msg = handleVideoMsg(videoReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(videoReqMsg);
            }
        } else if (msgType.equals(ReqType.SHORT_VIDEO)) {
            String thumbMediaId = (String) reqMap.get("ThumbMediaId");
            String mediaId = (String) reqMap.get("MediaId");
            VideoReqMsg videoReqMsg = new VideoReqMsg(mediaId, thumbMediaId);
            buildBasicReqMsg(reqMap, videoReqMsg);
            msg = hadnleShortVideoMsg(videoReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(videoReqMsg);
            }
        } else if (msgType.equals(ReqType.LOCATION)) {
            double locationX = Double.parseDouble((String) reqMap.get("Location_X"));
            double locationY = Double.parseDouble((String) reqMap.get("Location_Y"));
            int scale = Integer.parseInt((String) reqMap.get("Scale"));
            String label = (String) reqMap.get("Label");
            LocationReqMsg locationReqMsg = new LocationReqMsg(locationX, locationY, scale, label);
            buildBasicReqMsg(reqMap, locationReqMsg);
            msg = handleLocationMsg(locationReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(locationReqMsg);
            }
        } else if (msgType.equals(ReqType.LINK)) {
            String title = (String) reqMap.get("Title");
            String description = (String) reqMap.get("Description");
            String url = (String) reqMap.get("Url");
            LOG.debug("Link message URL: {}", url);
            LinkReqMsg linkReqMsg = new LinkReqMsg(title, description, url);
            buildBasicReqMsg(reqMap, linkReqMsg);
            msg = handleLinkMsg(linkReqMsg);
            if (isNull(msg)) {
                msg = processMessageHandle(linkReqMsg);
            }
        }
    }
    String result = "";
    if (nonNull(msg)) {
        msg.setFromUserName(toUserName);
        msg.setToUserName(fromUserName);
        result = msg.toXml();
        if (StrUtil.isNotBlank(getAESKey())) {
            try {
                WXBizMsgCrypt pc = new WXBizMsgCrypt(getToken(), getAESKey(), getAppId());
                result = pc.encryptMsg(result, request.getParameter("timestamp"), request.getParameter("nonce"));
                LOG.debug("Ciphertext after encryption: {}", result);
            } catch (AesException e) {
                LOG.error("Encryption exception", e);
            }
        }
    }
    return result;
}
python
def import_project(controller, project_id, stream, location=None, name=None, keep_compute_id=False):
    """
    Import a project contained in a zip file.

    You need to handle OSError exceptions.

    :param controller: GNS3 Controller
    :param project_id: ID of the project to import
    :param stream: A io.BytesIO of the zipfile
    :param location: Directory for the project if None put in the default directory
    :param name: Wanted project name, generate one from the .gns3 if None
    :param keep_compute_id: If true do not touch the compute id
    :returns: Project
    """
    if location and ".gns3" in location:
        raise aiohttp.web.HTTPConflict(text="The destination path should not contain .gns3")

    try:
        with zipfile.ZipFile(stream) as myzip:
            try:
                topology = json.loads(myzip.read("project.gns3").decode())

                # We import the project on top of an existing project (snapshots)
                if topology["project_id"] == project_id:
                    project_name = topology["name"]
                else:
                    # If the project name is already used we generate a new one
                    if name:
                        project_name = controller.get_free_project_name(name)
                    else:
                        project_name = controller.get_free_project_name(topology["name"])
            except KeyError:
                raise aiohttp.web.HTTPConflict(text="Can't import topology, the .gns3 is corrupted or missing")

            if location:
                path = location
            else:
                projects_path = controller.projects_directory()
                path = os.path.join(projects_path, project_id)
            try:
                os.makedirs(path, exist_ok=True)
            except UnicodeEncodeError:
                raise aiohttp.web.HTTPConflict(text="The project name contains non supported or invalid characters")
            myzip.extractall(path)

            topology = load_topology(os.path.join(path, "project.gns3"))
            topology["name"] = project_name
            # To avoid unexpected behavior (project start without manual operations just after import)
            topology["auto_start"] = False
            topology["auto_open"] = False
            topology["auto_close"] = True

            # Generate a new node id
            node_old_to_new = {}
            for node in topology["topology"]["nodes"]:
                if "node_id" in node:
                    node_old_to_new[node["node_id"]] = str(uuid.uuid4())
                    _move_node_file(path, node["node_id"], node_old_to_new[node["node_id"]])
                    node["node_id"] = node_old_to_new[node["node_id"]]
                else:
                    node["node_id"] = str(uuid.uuid4())

            # Update link to use new id
            for link in topology["topology"]["links"]:
                link["link_id"] = str(uuid.uuid4())
                for node in link["nodes"]:
                    node["node_id"] = node_old_to_new[node["node_id"]]

            # Generate new drawings id
            for drawing in topology["topology"]["drawings"]:
                drawing["drawing_id"] = str(uuid.uuid4())

            # Modify the compute id of the node depending of compute capacity
            if not keep_compute_id:
                # For some VM type we move them to the GNS3 VM if possible
                # unless it's a linux host without GNS3 VM
                if not sys.platform.startswith("linux") or controller.has_compute("vm"):
                    for node in topology["topology"]["nodes"]:
                        if node["node_type"] in ("docker", "qemu", "iou", "nat"):
                            node["compute_id"] = "vm"
                else:
                    # Round-robin through available compute resources.
                    compute_nodes = itertools.cycle(controller.computes)
                    for node in topology["topology"]["nodes"]:
                        node["compute_id"] = next(compute_nodes)

            compute_created = set()
            for node in topology["topology"]["nodes"]:
                if node["compute_id"] != "local":
                    # Project created on the remote GNS3 VM?
                    if node["compute_id"] not in compute_created:
                        compute = controller.get_compute(node["compute_id"])
                        yield from compute.post("/projects", data={
                            "name": project_name,
                            "project_id": project_id,
                        })
                        compute_created.add(node["compute_id"])

                    yield from _move_files_to_compute(compute, project_id, path,
                                                      os.path.join("project-files", node["node_type"], node["node_id"]))

            # And we dump the updated .gns3
            dot_gns3_path = os.path.join(path, project_name + ".gns3")
            # We change the project_id to avoid erasing the project
            topology["project_id"] = project_id
            with open(dot_gns3_path, "w+") as f:
                json.dump(topology, f, indent=4)
            os.remove(os.path.join(path, "project.gns3"))

            if os.path.exists(os.path.join(path, "images")):
                _import_images(controller, path)

            project = yield from controller.load_project(dot_gns3_path, load=False)
            return project
    except zipfile.BadZipFile:
        raise aiohttp.web.HTTPConflict(text="Can't import topology, the file is corrupted or not a GNS3 project (invalid zip)")
python
def listen(self):
    """
    Listens to messages
    """
    with Consumer(self.connection, queues=self.queue,
                  on_message=self.on_message, auto_declare=False):
        for _ in eventloop(self.connection, timeout=1, ignore_timeouts=True):
            pass
java
public static boolean isMatch(Method javaMethod, String name, Class<?>[] param) {
    if (!javaMethod.getName().equals(name))
        return false;
    Class<?>[] mparam = javaMethod.getParameterTypes();
    return isMatch(mparam, param);
}
java
public ListThingGroupsResult withThingGroups(GroupNameAndArn... thingGroups) {
    if (this.thingGroups == null) {
        setThingGroups(new java.util.ArrayList<GroupNameAndArn>(thingGroups.length));
    }
    for (GroupNameAndArn ele : thingGroups) {
        this.thingGroups.add(ele);
    }
    return this;
}
python
def launch_cif_clean(cif_filter, cif_select, group_cif_raw, group_cif_clean, group_structure,
                     group_workchain, node, max_entries, skip_check, parse_engine, daemon):
    """Run the `CifCleanWorkChain` on the entries in a group with raw imported CifData nodes.

    It will use the `cif_filter` and `cif_select` scripts of `cod-tools` to clean the input
    cif file. Additionally, if the `group-structure` option is passed, the workchain will also
    attempt to use the given parse engine to parse the cleaned `CifData` to obtain the structure
    and then use SeeKpath to find the primitive structure, which, if successful, will be added
    to the `group-structure` group.
    """
    # pylint: disable=too-many-arguments,too-many-locals,too-many-statements,too-many-branches
    import inspect
    from datetime import datetime

    from aiida import orm
    from aiida.engine import launch
    from aiida.plugins import DataFactory, WorkflowFactory
    from aiida_codtools.common.cli import echo_utc
    from aiida_codtools.common.resources import get_default_options
    from aiida_codtools.common.utils import get_input_node

    CifData = DataFactory('cif')  # pylint: disable=invalid-name
    CifCleanWorkChain = WorkflowFactory('codtools.cif_clean')  # pylint: disable=invalid-name

    # Collect the dictionary of not None parameters passed to the launch script and print to screen
    local_vars = locals()
    launch_parameters = {}
    for arg in inspect.getargspec(launch_cif_clean.callback).args:  # pylint: disable=deprecated-method
        if arg in local_vars and local_vars[arg]:
            launch_parameters[arg] = local_vars[arg]

    click.echo('=' * 80)
    click.echo('Starting on {}'.format(datetime.utcnow().isoformat()))
    click.echo('Launch parameters: {}'.format(launch_parameters))
    click.echo('-' * 80)

    if group_cif_raw is not None:

        # Get CifData nodes that should actually be submitted according to the input filters
        builder = orm.QueryBuilder()
        builder.append(orm.Group, filters={'id': {'==': group_cif_raw.pk}}, tag='group')

        if skip_check:
            builder.append(CifData, with_group='group', project=['*'])
        else:
            # Get CifData nodes that already have an associated workchain node
            # in the `group_workchain` group.
            submitted = orm.QueryBuilder()
            submitted.append(orm.WorkChainNode, tag='workchain')
            submitted.append(orm.Group, filters={'id': {'==': group_workchain.pk}}, with_node='workchain')
            submitted.append(orm.CifData, with_outgoing='workchain', tag='data', project=['id'])
            submitted_nodes = set(pk for entry in submitted.all() for pk in entry)

            if submitted_nodes:
                filters = {'id': {'!in': submitted_nodes}}
            else:
                filters = {}

            # Get all CifData nodes that are not included in the submitted node list
            builder.append(CifData, with_group='group', filters=filters, project=['*'])

        if max_entries is not None:
            builder.limit(int(max_entries))

        nodes = [entry[0] for entry in builder.all()]

    elif node is not None:
        nodes = [node]
    else:
        raise click.BadParameter('you have to specify either --group-cif-raw or --node')

    counter = 0

    node_cif_filter_parameters = get_input_node(orm.Dict, {
        'fix-syntax-errors': True,
        'use-c-parser': True,
        'use-datablocks-without-coordinates': True,
    })

    node_cif_select_parameters = get_input_node(orm.Dict, {
        'canonicalize-tag-names': True,
        'dont-treat-dots-as-underscores': True,
        'invert': True,
        'tags': '_publ_author_name,_citation_journal_abbrev',
        'use-c-parser': True,
    })

    node_options = get_input_node(orm.Dict, get_default_options())
    node_parse_engine = get_input_node(orm.Str, parse_engine)
    node_site_tolerance = get_input_node(orm.Float, 5E-4)
    node_symprec = get_input_node(orm.Float, 5E-3)

    for cif in nodes:

        inputs = {
            'cif': cif,
            'cif_filter': cif_filter,
            'cif_select': cif_select,
            'cif_filter_parameters': node_cif_filter_parameters,
            'cif_select_parameters': node_cif_select_parameters,
            'options': node_options,
            'parse_engine': node_parse_engine,
            'site_tolerance': node_site_tolerance,
            'symprec': node_symprec,
        }

        if group_cif_clean is not None:
            inputs['group_cif'] = group_cif_clean

        if group_structure is not None:
            inputs['group_structure'] = group_structure

        if daemon:
            workchain = launch.submit(CifCleanWorkChain, **inputs)
            echo_utc('CifData<{}> submitting: {}<{}>'.format(cif.pk, CifCleanWorkChain.__name__, workchain.pk))
        else:
            echo_utc('CifData<{}> running: {}'.format(cif.pk, CifCleanWorkChain.__name__))
            _, workchain = launch.run_get_node(CifCleanWorkChain, **inputs)

        if group_workchain is not None:
            group_workchain.add_nodes([workchain])

        counter += 1

        if max_entries is not None and counter >= max_entries:
            break

    click.echo('-' * 80)
    click.echo('Submitted {} new workchains'.format(counter))
    click.echo('Stopping on {}'.format(datetime.utcnow().isoformat()))
    click.echo('=' * 80)
java
public static Object toUUId(Object o, Object defaultValue) {
    String str = toString(o, null);
    if (str == null)
        return defaultValue;
    if (!Decision.isUUId(str))
        return defaultValue;
    return str;
}
java
@Override
public boolean writeEvent(AuditEvent event) throws HandlerException {
    StringBuilder elements = new StringBuilder();
    for (Field element : event.getFields()) {
        elements.append(element.getName() + " " + element.getType() + ":" + element.getValue() + ", ");
    }
    try (Connection conn = getConnection()) {
        try (PreparedStatement statement = conn.prepareStatement(insertQuery)) {
            statement.setString(1, event.getUuid().toString());
            statement.setTimestamp(2, new Timestamp(event.getTimestamp().getTime()));
            statement.setString(3, event.getActor());
            statement.setString(4, event.getOrigin());
            statement.setString(5, event.getAction());
            statement.setString(6, elements.toString());
            return statement.execute();
        }
    } catch (SQLException e) {
        throw new HandlerException("SQL Exception", DatabaseAuditHandler.class, e);
    }
}
python
def FALSE(classical_reg):
    """
    Produce a FALSE instruction.

    :param classical_reg: A classical register to modify.
    :return: An instruction object representing the equivalent MOVE.
    """
    warn("`FALSE a` has been deprecated. Use `MOVE a 0` instead.")
    if isinstance(classical_reg, int):
        classical_reg = Addr(classical_reg)
    return MOVE(classical_reg, 0)
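Usage sketch (the printed form of the returned instruction depends on the pyquil version, so it is only indicated in a comment):

inst = FALSE(0)   # warns: `FALSE a` has been deprecated. Use `MOVE a 0` instead.
# inst is the MOVE instruction that sets classical register 0 to 0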
python
def _write_abstract_named_entity(self):
    """ This method generates AbstractNamedEntity class js file. """
    filename = "%sAbstractNamedEntity.js" % (self._class_prefix)
    superclass_name = "%sEntity" % (self._class_prefix)

    # write will write a file using a template.
    # mandatory params: destination directory, destination file name,
    # template file name
    # optional params: whatever is needed from inside the Jinja template
    self.write(destination=self.abstract_directory,
               filename=filename,
               template_name="abstract_named_entity.js.tpl",
               class_prefix=self._class_prefix,
               superclass_name=superclass_name)
java
private void processInboxResponse(final JSONObject response) {
    if (getConfig().isAnalyticsOnly()) {
        getConfigLogger().verbose(getAccountId(),
                "CleverTap instance is configured to analytics only, not processing inbox messages");
        return;
    }
    getConfigLogger().verbose(getAccountId(), "Inbox: Processing response");
    if (!response.has("inbox_notifs")) {
        getConfigLogger().verbose(getAccountId(), "Inbox: Response JSON object doesn't contain the inbox key");
        return;
    }
    try {
        _processInboxMessages(response.getJSONArray("inbox_notifs"));
    } catch (Throwable t) {
        getConfigLogger().verbose(getAccountId(), "InboxResponse: Failed to parse response", t);
    }
}
java
public static Method getListenerMethod(String invokedMethodName, boolean isBefore) {
    if (isBefore) {
        return beforeLifecycleMethodsByMethodName.get(invokedMethodName);
    } else {
        return afterLifecycleMethodsByMethodName.get(invokedMethodName);
    }
}
java
public AbstractBuilder getMethodBuilder(ClassWriter classWriter) {
    return MethodBuilder.getInstance(context, classWriter.getTypeElement(),
            writerFactory.getMethodWriter(classWriter));
}
python
def set_data(self, pos=None, symbol='o', size=10., edge_width=1.,
             edge_width_rel=None, edge_color='black', face_color='white',
             scaling=False):
    """ Set the data used to display this visual.

    Parameters
    ----------
    pos : array
        The array of locations to display each symbol.
    symbol : str
        The style of symbol to draw (see Notes).
    size : float or array
        The symbol size in px.
    edge_width : float | None
        The width of the symbol outline in pixels.
    edge_width_rel : float | None
        The width as a fraction of marker size. Exactly one of
        `edge_width` and `edge_width_rel` must be supplied.
    edge_color : Color | ColorArray
        The color used to draw each symbol outline.
    face_color : Color | ColorArray
        The color used to draw each symbol interior.
    scaling : bool
        If set to True, markers scale when re-zooming.

    Notes
    -----
    Allowed style strings are: disc, arrow, ring, clobber, square, diamond,
    vbar, hbar, cross, tailed_arrow, x, triangle_up, triangle_down,
    and star.
    """
    assert (isinstance(pos, np.ndarray) and
            pos.ndim == 2 and pos.shape[1] in (2, 3))
    if (edge_width is not None) + (edge_width_rel is not None) != 1:
        raise ValueError('exactly one of edge_width and edge_width_rel '
                         'must be non-None')
    if edge_width is not None:
        if edge_width < 0:
            raise ValueError('edge_width cannot be negative')
    else:
        if edge_width_rel < 0:
            raise ValueError('edge_width_rel cannot be negative')
    self.symbol = symbol
    self.scaling = scaling

    edge_color = ColorArray(edge_color).rgba
    if len(edge_color) == 1:
        edge_color = edge_color[0]

    face_color = ColorArray(face_color).rgba
    if len(face_color) == 1:
        face_color = face_color[0]

    n = len(pos)
    data = np.zeros(n, dtype=[('a_position', np.float32, 3),
                              ('a_fg_color', np.float32, 4),
                              ('a_bg_color', np.float32, 4),
                              ('a_size', np.float32, 1),
                              ('a_edgewidth', np.float32, 1)])
    data['a_fg_color'] = edge_color
    data['a_bg_color'] = face_color
    if edge_width is not None:
        data['a_edgewidth'] = edge_width
    else:
        data['a_edgewidth'] = size * edge_width_rel
    data['a_position'][:, :pos.shape[1]] = pos
    data['a_size'] = size
    self.shared_program['u_antialias'] = self.antialias  # XXX make prop
    self._data = data
    self._vbo.set_data(data)
    self.shared_program.bind(self._vbo)
    self.update()
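A usage sketch, assuming `markers` is an instance of this (vispy-style) markers visual; the data values are illustrative:

import numpy as np

pos = np.random.rand(100, 2).astype(np.float32)  # 100 points in 2D
markers.set_data(pos, symbol='disc', size=8.,
                 edge_width=1., edge_color='black', face_color='red')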
java
public CQLSSTableWriter rawAddRow(ByteBuffer... values) throws InvalidRequestException, IOException {
    return rawAddRow(Arrays.asList(values));
}
python
def _compile_prefixes(self):
    '''
    Create a dict of all OS prefixes and their compiled regexs
    '''
    self.compiled_prefixes = {}
    for dev_os, os_config in self.config.items():
        if not os_config:
            continue
        self.compiled_prefixes[dev_os] = []
        for prefix in os_config.get('prefixes', []):
            values = prefix.get('values', {})
            line = prefix.get('line', '')
            if prefix.get('__python_fun__'):
                self.compiled_prefixes[dev_os].append({
                    '__python_fun__': prefix['__python_fun__'],
                    '__python_mod__': prefix['__python_mod__']
                })
                # if python profiler defined for this prefix,
                # no need to go further, but jump to the next prefix
                continue
            # Add 'pri' and 'message' to the line, and values
            line = '{{pri}}{}{{message}}'.format(line)
            # PRI https://tools.ietf.org/html/rfc5424#section-6.2.1
            values['pri'] = r'\<(\d+)\>'
            values['message'] = '(.*)'
            # We will now figure out which position each value is in so we
            # can use it with the match statement
            position = {}
            for key in values.keys():
                position[line.find('{' + key + '}')] = key
            sorted_position = {}
            for i, elem in enumerate(sorted(position.items())):
                sorted_position[elem[1]] = i + 1
            # Escape the line, then remove the escape for the curly brackets
            # so they can be used when formatting
            escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
            # Replace a whitespace with \s+
            escaped = escaped.replace(r'\ ', r'\s+')
            self.compiled_prefixes[dev_os].append({
                'prefix': re.compile(escaped.format(**values)),
                'prefix_positions': sorted_position,
                'raw_prefix': escaped.format(**values),
                'values': values
            })
python
def _callback_new_block(self, latest_block: Dict):
    """Called once a new block is detected by the alarm task.

    Note:
        This should be called only once per block, otherwise there will be
        duplicated `Block` state changes in the log.

        Therefore this method should be called only once a new block is
        mined with the corresponding block data from the AlarmTask.
    """
    # User facing APIs, which have on-chain side-effects, force polled the
    # blockchain to update the node's state. This force poll is used to
    # provide a consistent view to the user, e.g. a channel open call waits
    # for the transaction to be mined and force polled the event to update
    # the node's state. This pattern introduced a race with the alarm task
    # and the task which served the user request, because the events are
    # returned only once per filter. The lock below is to protect against
    # these races (introduced by the commit
    # 3686b3275ff7c0b669a6d5e2b34109c3bdf1921d)
    with self.event_poll_lock:
        latest_block_number = latest_block['number']

        # Handle testing with private chains. The block number can be
        # smaller than confirmation_blocks
        confirmed_block_number = max(
            GENESIS_BLOCK_NUMBER,
            latest_block_number - self.config['blockchain']['confirmation_blocks'],
        )
        confirmed_block = self.chain.client.web3.eth.getBlock(confirmed_block_number)

        # These state changes will be processed with a block_number which is
        # /larger/ than the ChainState's block_number.
        for event in self.blockchain_events.poll_blockchain_events(confirmed_block_number):
            on_blockchain_event(self, event)

        # On restart the Raiden node will re-create the filters with the
        # ethereum node. These filters will have the from_block set to the
        # value of the latest Block state change. To avoid missing events
        # the Block state change is dispatched only after all of the events
        # have been processed.
        #
        # This means on some corner cases a few events may be applied
        # twice, this will happen if the node crashed and some events have
        # been processed but the Block state change has not been
        # dispatched.
        state_change = Block(
            block_number=confirmed_block_number,
            gas_limit=confirmed_block['gasLimit'],
            block_hash=BlockHash(bytes(confirmed_block['hash'])),
        )

        # Note: It's important to /not/ block here, because this function
        # can be called from the alarm task greenlet, which should not
        # starve.
        self.handle_and_track_state_change(state_change)
java
public void annotate(Run<?, ?> build, Entry change, MarkupText text) {
    if (build instanceof AbstractBuild && Util.isOverridden(ChangeLogAnnotator.class, getClass(),
            "annotate", AbstractBuild.class, Entry.class, MarkupText.class)) {
        annotate((AbstractBuild) build, change, text);
    } else {
        Logger.getLogger(ChangeLogAnnotator.class.getName()).log(Level.WARNING,
                "You must override the newer overload of annotate from {0}", getClass().getName());
    }
}
python
def is_ipv4_filter(ip, options=None):
    '''
    Returns a bool telling if the value passed to it was a valid IPv4
    address.

    ip
        The IP address.

    net: False
        Consider IP addresses followed by netmask.

    options
        CSV of options regarding the nature of the IP address.
        E.g.: loopback, multicast, private etc.
    '''
    _is_ipv4 = _is_ipv(ip, 4, options=options)
    return isinstance(_is_ipv4, six.string_types)
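Illustrative calls (the plain-address results follow from the definition above; the behavior of `options` is delegated to the internal `_is_ipv` helper, so the last line is an assumption):

is_ipv4_filter('192.168.0.1')   # True
is_ipv4_filter('fe80::1')       # False, IPv6
is_ipv4_filter('127.0.0.1', options='loopback')   # assumed True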
java
public long getTimeInMillis(final Calendar startInstant) {
    Calendar cal = (Calendar) startInstant.clone();
    addTo(cal);
    return getCalendarTimeInMillis(cal) - getCalendarTimeInMillis(startInstant);
}
java
public void setVectorCounters(java.util.Collection<SummarizedCounter> vectorCounters) {
    if (vectorCounters == null) {
        this.vectorCounters = null;
        return;
    }
    this.vectorCounters = new java.util.ArrayList<SummarizedCounter>(vectorCounters);
}
python
def lookup_bg_color(self, bg_color):
    """
    Return the color for use in the
    `windll.kernel32.SetConsoleTextAttribute` API call.

    :param bg_color: Background as text. E.g. 'ffffff' or 'red'
    """
    # Background.
    if bg_color in BG_ANSI_COLORS:
        return BG_ANSI_COLORS[bg_color]
    else:
        return self._color_indexes(bg_color)[1]
java
static String makeXmlName(String name) {
    if (name.length() == 0)
        name = "_";
    if (!XmlChars.isNameStart(name.charAt(0))) {
        if (name.length() > 1 && XmlChars.isNameStart(name.charAt(1)))
            name = name.substring(1);
        else
            name = '_' + name;
    }
    int i = 1;
    while (i < name.length()) {
        if (XmlChars.isNameChar(name.charAt(i)))
            i++;
        else
            name = name.substring(0, i) + name.substring(i + 1);
    }
    return name;
}
python
def plot_input(ace_model, fname='ace_input.png'):
    """Plot the transforms."""
    if not plt:
        raise ImportError('Cannot plot without the matplotlib package')
    plt.rcParams.update({'font.size': 8})
    plt.figure()
    # integer division so plt.subplot receives an int under Python 3
    num_cols = len(ace_model.x) // 2 + 1
    for i in range(len(ace_model.x)):
        plt.subplot(num_cols, 2, i + 1)
        plt.plot(ace_model.x[i], ace_model.y, '.')
        plt.xlabel('x{0}'.format(i))
        plt.ylabel('y')
    plt.tight_layout()
    if fname:
        plt.savefig(fname)
    else:
        plt.show()
java
public void marshall(DescribeUploadBufferRequest describeUploadBufferRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeUploadBufferRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeUploadBufferRequest.getGatewayARN(), GATEWAYARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public void tagSubtree(Area root) {
    tagSingleNode(root);
    for (int i = 0; i < root.getChildCount(); i++)
        tagSubtree(root.getChildAt(i));
}

/**
 * Applies all the taggers to a single tree node.
 * @param area the tree node
 */
public void tagSingleNode(Area area) {
    for (Tagger t : taggers) {
        float support = t.belongsTo(area);
        if (support > MIN_SUPPORT)
            area.addTag(t.getTag(), support);
    }
}

/**
 * Checks if two logical nodes are joinable. For this, they must
 * <ul>
 * <li>Be made of neighboring area nodes (nothing between them)
 * <li>Have the same style
 * <li>Have the same tags or the second one may be a continuation of the first one
 * <li>None of the tags of the second node may refuse joining
 * </ul>
 * @param l1 the first logical node
 * @param l2 the second logical node
 * @return <code>true</code> if the nodes may be joined
 */
/*public boolean isJoinable(LogicalNode l1, LogicalNode l2) {
    Set<Tag> set1 = l1.getTags();
    Set<Tag> set2 = l2.getTags();
    AreaNode a1 = l1.getLastAreaNode();
    AreaNode a2 = l2.getFirstAreaNode();
    if ((a1 != null && a2 != null && a1.getNextSibling() == a2) && //must be adjacent areas
        l1.getFirstAreaNode().hasSameStyle(l2.getFirstAreaNode())) //require the same style
    {
        for (Tag tag : set2) //check if the second area does not refuse joining
            if (!tag.allowsJoining())
                return false;
        for (Tag tag : set1) {
            if ((set2.isEmpty() && tag.allowsContinutation(l2.getFirstAreaNode())) //no tags in set2 but a2 can be a continuation of the previous area
                || (tag.allowsJoining() && set2.contains(tag))) //both have the same joinable tag
                return true;
        }
    }
    return false;
}*/

public static AbstractSequenceClassifier<?> getSharedClassifier() {
    if (sharedClassifier == null) {
        log.info("Loading resource {}", TreeTagger.class.getResource("/3class.gz"));
        try {
            InputStream is = new GZIPInputStream(TreeTagger.class.getResourceAsStream("/3class.gz"));
            sharedClassifier = CRFClassifier.getClassifier(is);
        } catch (IOException | ClassCastException | ClassNotFoundException e) {
            System.err.println("Load failed: " + e.getMessage());
        }
    }
    return sharedClassifier;
}
}
python
def query(self, sql, *args, **kwargs): """Executes an SQL SELECT query, returning a result set as a Statement object. :param sql: query to execute :param args: parameters iterable :param kwargs: parameters iterable :return: result set as a Statement object :rtype: pydbal.statement.Statement """ self.ensure_connected() stmt = Statement(self) stmt.execute(sql, *args, **kwargs) return stmt
python
def imagetransformerpp_base_5l_8h_big_uncond_dr00_dan_g_bs1(): """For 256x256.""" hparams = imagetransformerpp_base_10l_8h_big_uncond_dr03_dan_g() # TODO(trandustin): I forgot to set this in the runs! Maybe it's not used in # image transformer training implementation? # hparams.img_len = 256 hparams.max_length = 66000 # allow for 256x256 hparams.batch_size = 1 hparams.num_decoder_layers = 5 hparams.hidden_size = 128 hparams.filter_size = 128 hparams.attention_key_channels = 64 hparams.attention_value_channels = 64 hparams.layer_prepostprocess_dropout = 0.0 return hparams
java
public static void registerUrlTypes(String...typesToSkip) { final List<Vfs.UrlType> urlTypes = Lists.newArrayList(); // include a list of file extensions / filenames to be recognized urlTypes.add(new EmptyIfFileEndingsUrlType(typesToSkip)); urlTypes.addAll(Arrays.asList(Vfs.DefaultUrlTypes.values())); Vfs.setDefaultURLTypes(urlTypes); }
java
public FromHostPrimitiveResult < T > fromHost(Class < T > javaClass, CobolContext cobolContext, byte[] hostData, int start) { int bytesLen = getBytesLen(); // For strings it is acceptable that host is sending over less data // than expected. This happens when trailing low values are chopped // off by transports if (hostData.length < start + bytesLen) { bytesLen = hostData.length - start; } return fromHostInternal(javaClass, cobolContext, hostData, start, bytesLen); }
java
public Vector getMidpoint(Vector other) { double x = (this.x + other.x) / 2; double y = (this.y + other.y) / 2; double z = (this.z + other.z) / 2; return new Vector(x, y, z); }
python
def __install_node_dependencies(kudu_client): """Installs Node.js dependencies at `site/wwwroot/` for Node.js bots. This method is only called when the detected bot is a Node.js bot. :return: Dictionary with results of the HTTP Kudu request """ if not kudu_client._KuduClient__initialized: # pylint:disable=protected-access kudu_client._KuduClient__initialize() # pylint:disable=protected-access payload = { 'command': 'npm install', 'dir': r'site\wwwroot' } response = requests.post(kudu_client._KuduClient__scm_url + '/api/command', data=json.dumps(payload), # pylint:disable=protected-access headers=kudu_client._KuduClient__get_application_json_headers()) # pylint:disable=protected-access HttpResponseValidator.check_response_status(response) return response.json()
python
def embed(self, width=600, height=650): """ Embed a viewer into a Jupyter notebook. """ from IPython.display import IFrame return IFrame(self.url, width, height)
java
public PythonKeyedStream key_by(KeySelector<PyObject, PyKey> selector) throws IOException { return new PythonKeyedStream(stream.keyBy(new PythonKeySelector(selector))); }
java
public BinaryClassificationFMeasure evaluate(String corpus) { Instance[] instanceList = readInstance(corpus, model.featureMap); return evaluate(instanceList); }
java
public void set(Object data, int iOpenMode) throws DBException, RemoteException { BaseTransport transport = this.createProxyTransport(SET); transport.addParam(DATA, data); transport.addParam(OPEN_MODE, iOpenMode); Object strReturn = transport.sendMessageAndGetReply(); Object objReturn = transport.convertReturnObject(strReturn); this.checkDBException(objReturn); }
java
private static void computeCentroid(double[] centroid, Relation<? extends NumberVector> relation, DBIDs ids) { Arrays.fill(centroid, 0); int dim = centroid.length; for(DBIDIter it = ids.iter(); it.valid(); it.advance()) { NumberVector v = relation.get(it); for(int i = 0; i < dim; i++) { centroid[i] += v.doubleValue(i); } } timesEquals(centroid, 1. / ids.size()); }
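The same sum-then-scale centroid, sketched in Python with NumPy for a plain 2-D array of vectors (names are illustrative, not the ELKI API):

import numpy as np

def compute_centroid(vectors):
    # vectors: iterable of equal-length numeric sequences; accumulate
    # per-dimension sums, then scale by 1/n, as in the Java loop.
    arr = np.asarray(list(vectors), dtype=float)
    return arr.sum(axis=0) / len(arr)

assert np.allclose(compute_centroid([[0.0, 0.0], [2.0, 4.0]]), [1.0, 2.0])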
java
protected boolean updateStatusFile(List<File> createdFiles) { boolean success; final Properties statusProperties = new Properties(); try { for (File createdFile : createdFiles) { try { statusProperties.setProperty( getProjectRelativePath(createdFile), calculateChecksum(createdFile)); } catch (IOException e) { getLog().warn( "Checksum calculation failed: " + e.getMessage()); } } final FileWriter statusWriter = new FileWriter(statusFile); try { statusProperties.store(statusWriter, "Sculptor created the following " + createdFiles.size() + " files"); success = true; } finally { statusWriter.close(); } } catch (IOException e) { getLog().warn("Updating status file failed: " + e.getMessage()); success = false; } return success; }
java
public void setIpRoutesInfo(java.util.Collection<IpRouteInfo> ipRoutesInfo) { if (ipRoutesInfo == null) { this.ipRoutesInfo = null; return; } this.ipRoutesInfo = new com.amazonaws.internal.SdkInternalList<IpRouteInfo>(ipRoutesInfo); }
java
List<Integer> convexhull(List<Float> pts) { int npts = pts.size() / 3; List<Integer> out = new ArrayList<>(); // Find lower-leftmost point. int hull = 0; for (int i = 1; i < npts; ++i) { float[] a = new float[] { pts.get(i * 3), pts.get(i * 3 + 1), pts.get(i * 3 + 2) }; float[] b = new float[] { pts.get(hull * 3), pts.get(hull * 3 + 1), pts.get(hull * 3 + 2) }; if (cmppt(a, b)) { hull = i; } } // Gift wrap hull. int endpt = 0; do { out.add(hull); endpt = 0; for (int j = 1; j < npts; ++j) { float[] a = new float[] { pts.get(hull * 3), pts.get(hull * 3 + 1), pts.get(hull * 3 + 2) }; float[] b = new float[] { pts.get(endpt * 3), pts.get(endpt * 3 + 1), pts.get(endpt * 3 + 2) }; float[] c = new float[] { pts.get(j * 3), pts.get(j * 3 + 1), pts.get(j * 3 + 2) }; if (hull == endpt || left(a, b, c)) { endpt = j; } } hull = endpt; } while (endpt != out.get(0)); return out; }
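The same gift-wrapping idea, sketched in Python for 2-D points; this assumes "left" means strictly counter-clockwise, whereas the Java version works on two components of packed 3-D floats via its own cmppt/left helpers.

def convex_hull(pts):
    # pts: list of (x, y) tuples; returns hull vertex indices in order.
    def left(a, b, c):
        # True when c lies strictly left of the directed line a -> b.
        return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]) > 0

    hull = min(range(len(pts)), key=lambda i: pts[i])  # lower-leftmost point
    out = []
    while True:
        out.append(hull)
        endpt = 0
        for j in range(1, len(pts)):
            if hull == endpt or left(pts[hull], pts[endpt], pts[j]):
                endpt = j
        hull = endpt
        if endpt == out[0]:  # wrapped back to the start
            return out

assert convex_hull([(0, 0), (1, 0), (1, 1), (0, 1)]) == [0, 3, 2, 1]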
java
public void reload() { providers = new HashSet<Metadata<S>>(); for (URL serviceFile : loadServiceFiles()) { loadServiceFile(serviceFile); } }
python
def add_section(self, name):
    """Append a section to the model.

    Arguments:
        name (str): Name of section

    """

    assert isinstance(name, str)

    # Skip existing sections
    for section in self.sections:
        if section.name == name:
            return section

    item = defaults["common"].copy()
    item["name"] = name
    item["itemType"] = "section"

    item = self.add_item(item)
    self.sections.append(item)

    return item
java
public String objectToString() { try { checkForDao(); } catch (SQLException e) { throw new IllegalArgumentException(e); } @SuppressWarnings("unchecked") T t = (T) this; return dao.objectToString(t); }
java
public List<Attribute> collectAttributes(QualifiedName context, QualifiedName qualifiedName, Types.ProvType type) {
    List<Attribute> attributes = new ArrayList<Attribute>();
    List<Statement> statements = collators.get(context).get(qualifiedName);
    for (Statement statement : statements) {
        QualifiedName predQ = convertURIToQualifiedName(statement.getPredicate());
        Value value = statement.getObject();
        if (statement.getPredicate().equals(RDF.TYPE)) {
            Object obj = valueToObject(statement.getObject());
            if (obj != null) {
                Value vobj = statement.getObject();
                boolean sameAsType = false;
                if (vobj instanceof URI) {
                    // TODO: Nasty.
                    URI uri = (URI) (vobj);
                    String uriVal = uri.getNamespace() + uri.getLocalName();
                    sameAsType = uriVal.equals(types.find(type));
                }
                if (!sameAsType) {
                    if (statement.getObject() instanceof Resource) {
                        QualifiedName typeQ = convertResourceToQualifiedName((Resource) statement.getObject());
                        if (isProvURI(typeQ) && DM_TYPES.indexOf(typeQ) == -1) {
                            // Skip PROV-URI types that are not PROV-DM types.
                        } else {
                            attributes.add(pFactory.newAttribute(name.PROV_TYPE, typeQ, name.PROV_QUALIFIED_NAME));
                        }
                    } else if (statement.getObject() instanceof Literal) {
                        Literal lit = (Literal) statement.getObject();
                        Attribute attr = newAttribute(lit, name.PROV_TYPE);
                        attributes.add(attr);
                    }
                }
            } else {
                System.out.println("Value " + value + " was not a suitable type");
            }
        }
        if (predQ.equals(onto.QualifiedName_PROVO_hadRole)) {
            Value obj = statement.getObject();
            Attribute attr = newAttributeForValue(obj, name.PROV_ROLE);
            attributes.add(attr);
        }
        if (predQ.equals(onto.QualifiedName_PROVO_atLocation)) {
            Value obj = statement.getObject();
            Attribute attr = newAttributeForValue(obj, name.PROV_LOCATION);
            attributes.add(attr);
        }
        if (predQ.equals(onto.QualifiedName_RDFS_LABEL)) {
            Literal lit = (Literal) (statement.getObject());
            Attribute attr = newAttribute(lit, name.PROV_LABEL);
            attributes.add(attr);
        }
        if (predQ.equals(onto.QualifiedName_PROVO_value)) {
            Attribute attr = newAttributeForValue(value, name.PROV_VALUE);
            attributes.add(attr);
        }
        if (!isProvURI(predQ)) {
            if (!predQ.equals(onto.QualifiedName_RDF_TYPE)
                    && !predQ.equals(onto.QualifiedName_RDFS_LABEL)) {
                Attribute attr = newAttributeForValue(value, predQ);
                attributes.add(attr);
            }
        }
    }
    return attributes;
}
java
public static Codec<long[], LongGene> ofVector( final LongRange domain, final int length ) { requireNonNull(domain); require.positive(length); return Codec.of( Genotype.of(LongChromosome.of(domain, length)), gt -> gt.getChromosome().as(LongChromosome.class).toArray() ); }
java
@Override protected void _from(ObjectInput in) throws IOException, ClassNotFoundException { // XXX this can be improved final int size = in.readInt(); for (int i = 0; i < size; i++) { final int termIndex = in.readInt(); final int parametersSize = in.readInt(); List<Integer> parameters = new ArrayList<Integer>(parametersSize); for (int j = 0; j < parametersSize; j++) { parameters.add(in.readInt()); } addTermParameterMapping(termIndex, parameters); } }
java
public static void doMetaUpdateVersionsOnStores(AdminClient adminClient, List<StoreDefinition> oldStoreDefs, List<StoreDefinition> newStoreDefs) { Set<String> storeNamesUnion = new HashSet<String>(); Map<String, StoreDefinition> oldStoreDefinitionMap = new HashMap<String, StoreDefinition>(); Map<String, StoreDefinition> newStoreDefinitionMap = new HashMap<String, StoreDefinition>(); List<String> storesChanged = new ArrayList<String>(); for(StoreDefinition storeDef: oldStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); oldStoreDefinitionMap.put(storeName, storeDef); } for(StoreDefinition storeDef: newStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); newStoreDefinitionMap.put(storeName, storeDef); } for(String storeName: storeNamesUnion) { StoreDefinition oldStoreDef = oldStoreDefinitionMap.get(storeName); StoreDefinition newStoreDef = newStoreDefinitionMap.get(storeName); if(oldStoreDef == null && newStoreDef != null || oldStoreDef != null && newStoreDef == null || oldStoreDef != null && newStoreDef != null && !oldStoreDef.equals(newStoreDef)) { storesChanged.add(storeName); } } System.out.println("Updating metadata version for the following stores: " + storesChanged); try { adminClient.metadataMgmtOps.updateMetadataversion(adminClient.getAdminClientCluster() .getNodeIds(), storesChanged); } catch(Exception e) { System.err.println("Error while updating metadata version for the specified store."); } }
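The store-diffing step above reduces to a symmetric comparison of two name-to-definition maps; a minimal Python sketch of just that part:

def changed_stores(old_defs, new_defs):
    # old_defs / new_defs: dicts mapping store name -> definition.
    # A store changed if it was added, removed, or redefined.
    names = set(old_defs) | set(new_defs)
    return sorted(n for n in names if old_defs.get(n) != new_defs.get(n))

assert changed_stores({'a': 1, 'b': 2}, {'b': 3, 'c': 4}) == ['a', 'b', 'c']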
python
def _find_id(self, result, uid): """ This method performs a depth-first search for the given uid in the dictionary of results. """ # if the result is a list if isinstance(result, list): # if the list has a valid entry if any([self._find_id(value, uid) for value in result]): # then we're done return True # otherwise results could be dictionaries if isinstance(result, dict): # the children of the result that are lists list_children = [value for value in result.values() if isinstance(value, list)] # go to every value that is a list for value in list_children: # if the value is a match if self._find_id(value, uid): # we're done return True # the children of the result that are dicts dict_children = [value for value in result.values() if isinstance(value, dict)] # perform the check on every child that is a dict for value in dict_children: # if the child is a match if self._find_id(value, uid): # we're done return True # if there are no values that are lists and there is an id key if not list_children and not dict_children and 'id' in result: # the value of the remote id field result_id = result['id'] # we've found a match if the id field matches (cast to match type) return result_id == type(result_id)(uid) # we didn't find the result return False
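The subtle part is the final comparison: uid is cast to the type of the remote id before comparing, so a string id matches an integer uid. A tiny runnable illustration:

result_id = '42'   # remote id field, a string
uid = 42           # uid being searched for, an int
assert result_id == type(result_id)(uid)   # '42' == str(42)
assert 42 == type(42)('42')                # also matches when the id is an int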
python
def copy(self, deep=True): """ Make a copy of this object's indices and data. When ``deep=True`` (default), a new object will be created with a copy of the calling object's data and indices. Modifications to the data or indices of the copy will not be reflected in the original object (see notes below). When ``deep=False``, a new object will be created without copying the calling object's data or index (only references to the data and index are copied). Any changes to the data of the original will be reflected in the shallow copy (and vice versa). Parameters ---------- deep : bool, default True Make a deep copy, including a copy of the data and the indices. With ``deep=False`` neither the indices nor the data are copied. Returns ------- copy : Series, DataFrame or Panel Object type matches caller. Notes ----- When ``deep=True``, data is copied but actual Python objects will not be copied recursively, only the reference to the object. This is in contrast to `copy.deepcopy` in the Standard Library, which recursively copies object data (see examples below). While ``Index`` objects are copied when ``deep=True``, the underlying numpy array is not copied for performance reasons. Since ``Index`` is immutable, the underlying data can be safely shared and a copy is not needed. Examples -------- >>> s = pd.Series([1, 2], index=["a", "b"]) >>> s a 1 b 2 dtype: int64 >>> s_copy = s.copy() >>> s_copy a 1 b 2 dtype: int64 **Shallow copy versus default (deep) copy:** >>> s = pd.Series([1, 2], index=["a", "b"]) >>> deep = s.copy() >>> shallow = s.copy(deep=False) Shallow copy shares data and index with original. >>> s is shallow False >>> s.values is shallow.values and s.index is shallow.index True Deep copy has own copy of data and index. >>> s is deep False >>> s.values is deep.values or s.index is deep.index False Updates to the data shared by shallow copy and original is reflected in both; deep copy remains unchanged. >>> s[0] = 3 >>> shallow[1] = 4 >>> s a 3 b 4 dtype: int64 >>> shallow a 3 b 4 dtype: int64 >>> deep a 1 b 2 dtype: int64 Note that when copying an object containing Python objects, a deep copy will copy the data, but will not do so recursively. Updating a nested data object will be reflected in the deep copy. >>> s = pd.Series([[1, 2], [3, 4]]) >>> deep = s.copy() >>> s[0][0] = 10 >>> s 0 [10, 2] 1 [3, 4] dtype: object >>> deep 0 [10, 2] 1 [3, 4] dtype: object """ data = self._data.copy(deep=deep) return self._constructor(data).__finalize__(self)
java
public ServiceFuture<Void> validatePurchaseInformationAsync(AppServiceCertificateOrderInner appServiceCertificateOrder, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(validatePurchaseInformationWithServiceResponseAsync(appServiceCertificateOrder), serviceCallback); }
python
def from_array(array): """ Deserialize a new ShippingOption from a given dictionary. :return: new ShippingOption instance. :rtype: ShippingOption """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") from pytgbot.api_types.sendable.payments import LabeledPrice data = {} data['id'] = u(array.get('id')) data['title'] = u(array.get('title')) data['prices'] = LabeledPrice.from_array_list(array.get('prices'), list_level=1) instance = ShippingOption(**data) instance._raw = array return instance
python
def validate_metadata_token(self, claims, endpoint): """ If the token endpoint is used in the grant type, the value of this parameter MUST be the same as the value of the "grant_type" parameter passed to the token endpoint defined in the grant type definition. """ self._grant_types.extend(endpoint._grant_types.keys()) claims.setdefault("token_endpoint_auth_methods_supported", ["client_secret_post", "client_secret_basic"]) self.validate_metadata(claims, "token_endpoint_auth_methods_supported", is_list=True) self.validate_metadata(claims, "token_endpoint_auth_signing_alg_values_supported", is_list=True) self.validate_metadata(claims, "token_endpoint", is_required=True, is_url=True)
python
def lookUpFieldType(field_type):
    """ Converts ArcGIS REST field types to geoprocessing field type strings
        Input:
           field_type - string - type of field as string
        Output:
           geoprocessing field type as string
    """
    # Map each esri field type to its geoprocessing equivalent; anything
    # unrecognized (including esriFieldTypeGlobalID) falls back to TEXT.
    field_type_map = {
        "esriFieldTypeDate": "DATE",
        "esriFieldTypeInteger": "LONG",
        "esriFieldTypeSmallInteger": "SHORT",
        "esriFieldTypeDouble": "DOUBLE",
        "esriFieldTypeString": "TEXT",
        "esriFieldTypeBlob": "BLOB",
        "esriFieldTypeSingle": "FLOAT",
        "esriFieldTypeRaster": "RASTER",
        "esriFieldTypeGUID": "GUID",
        "esriFieldTypeGlobalID": "TEXT",
    }
    return field_type_map.get(field_type, "TEXT")
java
protected void handleException(Throwable t) { if (LOG.isErrorEnabled()) { LOG.error(Messages.get().getBundle().key(Messages.LOG_ERR_JSP_BEAN_0), t); } if (!(m_isSupressingExceptions || getRequestContext().getCurrentProject().isOnlineProject())) { if (LOG.isDebugEnabled()) { // no stack trace needed since it was already logged with the "error" log message above LOG.debug( Messages.get().getBundle().key(Messages.LOG_DEBUG_INTERRUPTED_EXCEPTION_1, getClass().getName())); } String uri = null; Throwable u = getController().getThrowable(); if (u != null) { uri = getController().getThrowableResourceUri(); } else { uri = getRequestContext().getUri(); } throw new CmsRuntimeException( Messages.get().container(Messages.ERR_RUNTIME_1, (uri != null) ? uri : getClass().getName()), t); } }
java
private void checkMallocedChunk(int mem, int s) { if (VALIDATING) { int p = memToChunk(mem); int sz = head(p) & ~INUSE_BITS; checkInUseChunk(p); if (sz < MIN_CHUNK_SIZE) throw new AssertionError("Allocated chunk " + p + " is too small"); if ((sz & CHUNK_ALIGN_MASK) != 0) throw new AssertionError("Chunk size " + sz + " of " + p + " is not correctly aligned"); if (sz < s) throw new AssertionError("Allocated chunk " + p + " is smaller than requested [" + sz + "<" + s + "]"); if (sz > (s + MIN_CHUNK_SIZE)) { throw new AssertionError("Allocated chunk " + p + " is too large (should have been split off) [" + sz + ">>" + s + "]"); } } }
python
def _fit_gpu(self, Ciu_host, Cui_host, show_progress=True): """ specialized training on the gpu. copies inputs to/from cuda device """ if not implicit.cuda.HAS_CUDA: raise ValueError("No CUDA extension has been built, can't train on GPU.") if self.dtype == np.float64: log.warning("Factors of dtype float64 aren't supported with gpu fitting. " "Converting factors to float32") self.item_factors = self.item_factors.astype(np.float32) self.user_factors = self.user_factors.astype(np.float32) Ciu = implicit.cuda.CuCSRMatrix(Ciu_host) Cui = implicit.cuda.CuCSRMatrix(Cui_host) X = implicit.cuda.CuDenseMatrix(self.user_factors.astype(np.float32)) Y = implicit.cuda.CuDenseMatrix(self.item_factors.astype(np.float32)) solver = implicit.cuda.CuLeastSquaresSolver(self.factors) log.debug("Running %i ALS iterations", self.iterations) with tqdm.tqdm(total=self.iterations, disable=not show_progress) as progress: for iteration in range(self.iterations): s = time.time() solver.least_squares(Cui, X, Y, self.regularization, self.cg_steps) progress.update(.5) solver.least_squares(Ciu, Y, X, self.regularization, self.cg_steps) progress.update(.5) if self.fit_callback: self.fit_callback(iteration, time.time() - s) if self.calculate_training_loss: loss = solver.calculate_loss(Cui, X, Y, self.regularization) progress.set_postfix({"loss": loss}) if self.calculate_training_loss: log.info("Final training loss %.4f", loss) X.to_host(self.user_factors) Y.to_host(self.item_factors)
java
private static final int count_flip(final int[] perm_flip) { // cache first element, avoid swapping perm[0] and perm[k] int v0 = perm_flip[0]; int tmp; int flip_count = 0; do { for (int i = 1, j = v0 - 1; i < j; ++i, --j) { tmp = perm_flip[i]; perm_flip[i] = perm_flip[j]; perm_flip[j] = tmp; } tmp = perm_flip[v0]; perm_flip[v0] = v0; v0 = tmp; flip_count++; } while (v0 != 0); // first element == '1' ? return flip_count; }
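Equivalently, the optimized in-place Java loop is a plain sequence of prefix reversals; a straightforward Python rendering of the fannkuch kernel (assuming a 0-based permutation, as in the Java code — note the Java do-while assumes perm[0] != 0 on entry, while this sketch simply returns 0 for the identity):

def count_flip(perm):
    # Reverse the prefix perm[0..perm[0]] until a 0 surfaces at the front,
    # counting the flips.
    perm = list(perm)
    flips = 0
    while perm[0] != 0:
        k = perm[0]
        perm[:k + 1] = perm[k::-1]  # reverse prefix of length k + 1
        flips += 1
    return flips

assert count_flip([0, 1, 2]) == 0
assert count_flip([2, 1, 0]) == 1
assert count_flip([1, 0, 2]) == 1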
java
@Override public int compareTo (CharsetMatch other) { int compareResult = 0; if (this.fConfidence > other.fConfidence) { compareResult = 1; } else if (this.fConfidence < other.fConfidence) { compareResult = -1; } return compareResult; }
python
def reindex_model_on_save(sender, document, **kwargs): '''(Re/Un)Index Mongo document on post_save''' if current_app.config.get('AUTO_INDEX'): reindex.delay(document)
java
@Override public void printStack(final StringBuilder sb, final StackTraceElement[] trace) { for (final StackTraceElement element : trace) { sb.append("\tat ").append(element).append('\n'); } }
java
public static void pushImageTag(DockerClient docker, String imageName, List<String> imageTags, Log log, boolean skipPush) throws MojoExecutionException, DockerException, IOException, InterruptedException { if (skipPush) { log.info("Skipping docker push"); return; } // tags should not be empty if you have specified the option to push tags if (imageTags.isEmpty()) { throw new MojoExecutionException("You have used option \"pushImageTag\" but have" + " not specified an \"imageTag\" in your" + " docker-maven-client's plugin configuration"); } final CompositeImageName compositeImageName = CompositeImageName.create(imageName, imageTags); for (final String imageTag : compositeImageName.getImageTags()) { final String imageNameWithTag = compositeImageName.getName() + ":" + imageTag; log.info("Pushing " + imageNameWithTag); docker.push(imageNameWithTag, new AnsiProgressHandler()); } }
python
def join(self, other):
    """
    Try to join a rectangle to this one. If the result is also a
    rectangle, the operation is successful and this rectangle is modified
    to the union.

    Arguments:
        other (Rectangle): Rectangle to join

    Returns:
        bool: True when successfully joined, False otherwise
    """
    if self.contains(other):
        return True

    if other.contains(self):
        self.x = other.x
        self.y = other.y
        self.width = other.width
        self.height = other.height
        return True

    if not self.intersects(other, edges=True):
        return False

    # Other rectangle is Up/Down from this
    if self.left == other.left and self.width == other.width:
        y_min = min(self.bottom, other.bottom)
        y_max = max(self.top, other.top)
        self.y = y_min
        self.height = y_max - y_min
        return True

    # Other rectangle is Right/Left from this
    if self.bottom == other.bottom and self.height == other.height:
        x_min = min(self.left, other.left)
        x_max = max(self.right, other.right)
        self.x = x_min
        self.width = x_max - x_min
        return True

    return False
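For example, joining two horizontally adjacent rectangles with equal bottom and height reduces to the min/max arithmetic of the last branch; a sketch with bare numbers (the Rectangle class itself is not reproduced here):

# A = (x=0, y=0, w=2, h=1) -> left=0, right=2
# B = (x=2, y=0, w=3, h=1) -> left=2, right=5
x_min = min(0, 2)
x_max = max(0 + 2, 2 + 3)
assert (x_min, x_max - x_min) == (0, 5)   # joined: x=0, width=5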
python
def create_translation_field(model, field_name, lang, empty_value): """ Translation field factory. Returns a ``TranslationField`` based on a fieldname and a language. The list of supported fields can be extended by defining a tuple of field names in the projects settings.py like this:: MODELTRANSLATION_CUSTOM_FIELDS = ('MyField', 'MyOtherField',) If the class is neither a subclass of fields in ``SUPPORTED_FIELDS``, nor in ``CUSTOM_FIELDS`` an ``ImproperlyConfigured`` exception will be raised. """ if empty_value not in ('', 'both', None, NONE): raise ImproperlyConfigured('%s is not a valid empty_value.' % empty_value) field = model._meta.get_field(field_name) cls_name = field.__class__.__name__ if not (isinstance(field, SUPPORTED_FIELDS) or cls_name in mt_settings.CUSTOM_FIELDS): raise ImproperlyConfigured( '%s is not supported by modeltranslation.' % cls_name) translation_class = field_factory(field.__class__) return translation_class(translated_field=field, language=lang, empty_value=empty_value)
python
def has_legend(self, bool_value): """ Add, remove, or leave alone the ``<c:legend>`` child element depending on current state and *bool_value*. If *bool_value* is |True| and no ``<c:legend>`` element is present, a new default element is added. When |False|, any existing legend element is removed. """ if bool(bool_value) is False: self._remove_legend() else: if self.legend is None: self._add_legend()
java
public void addCluster() { System.out.printf("%nAdding cluster: %s to instance: %s%n", CLUSTER, instanceId); // [START bigtable_create_cluster] try { adminClient.createCluster( CreateClusterRequest.of(instanceId, CLUSTER) .setZone("us-central1-c") .setServeNodes(3) .setStorageType(StorageType.SSD)); System.out.printf("Cluster: %s created successfully%n", CLUSTER); } catch (AlreadyExistsException e) { System.err.println("Failed to add cluster, already exists: " + e.getMessage()); } // [END bigtable_create_cluster] }
python
def remove(attributes, properties):
    """Returns a property set which includes all the elements
    in 'properties' that do not have attributes listed in 'attributes'."""
    if isinstance(attributes, basestring):
        attributes = [attributes]
    assert is_iterable_typed(attributes, basestring)
    assert is_iterable_typed(properties, basestring)
    result = []
    for e in properties:
        attributes_new = feature.attributes(get_grist(e))
        has_common_features = False
        for a in attributes_new:
            if a in attributes:
                has_common_features = True
                break

        if not has_common_features:
            # 'result += e' would extend the list with the individual
            # characters of the string; append the property as a whole.
            result.append(e)

    return result
python
def linear_interpolate(p1, p2, fraction): '''Returns the point p satisfying: p1 + fraction * (p2 - p1)''' return np.array((p1[0] + fraction * (p2[0] - p1[0]), p1[1] + fraction * (p2[1] - p1[1]), p1[2] + fraction * (p2[2] - p1[2])))
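Since the endpoints are indexable triples, the per-component arithmetic collapses to one vectorized expression; a sketch assuming the points convert cleanly to float arrays:

import numpy as np

def linear_interpolate_vec(p1, p2, fraction):
    # p1 + fraction * (p2 - p1), applied to all components at once
    p1 = np.asarray(p1, dtype=float)
    p2 = np.asarray(p2, dtype=float)
    return p1 + fraction * (p2 - p1)

assert np.allclose(linear_interpolate_vec((0, 0, 0), (2, 4, 6), 0.5), (1, 2, 3))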
java
public void printScreen(PrintWriter out, ResourceBundle reg) throws DBException { int iPrintOptions = this.getScreenField().getPrintOptions(); this.getScreenField().printControl(out, iPrintOptions); this.getScreenField().printData(out, iPrintOptions); }