Columns: language — stringclasses (2 values); func_code_string — stringlengths (63 to 466k)
python
def dict_to_html_attrs(dict_):
    """Serialize a dict as a string of space-separated HTML attributes."""
    res = ' '.join('%s="%s"' % (k, v) for k, v in dict_.items())
    return res
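A quick usage sketch for the function above; note that values are interpolated verbatim (no HTML escaping), and key order follows dict insertion order:

    # Illustrative call; the attribute names are arbitrary.
    print(dict_to_html_attrs({'class': 'btn', 'href': '/home'}))
    # class="btn" href="/home"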
java
private void insert(ArrayList<String> list, String input) {
    if (!list.contains(input)) {
        if (input.equals("a") || input.equals("rdf:type")) {
            list.add(0, input);
        } else {
            list.add(input);
        }
    }
}
python
def original_unescape(self, s):
    """Since we need to use this sometimes."""
    if isinstance(s, basestring):
        return unicode(HTMLParser.unescape(self, s))
    elif isinstance(s, list):
        return [unicode(HTMLParser.unescape(self, item)) for item in s]
    else:
        return s
java
public static <T extends Tree> Matcher<T> isVoidType() {
    return new Matcher<T>() {
        @Override
        public boolean matches(T t, VisitorState state) {
            Type type = getType(t);
            return type != null && state.getTypes().isSameType(type, state.getSymtab().voidType);
        }
    };
}
python
def parse_reaction_table_file(path, f, default_compartment):
    """Parse a tab-separated file containing reaction IDs and properties.

    The reaction properties are parsed according to the header which
    specifies which property is contained in each column.
    """
    context = FilePathContext(path)

    for lineno, row in enumerate(csv.DictReader(f, delimiter=str('\t'))):
        if 'id' not in row or row['id'].strip() == '':
            raise ParseError('Expected `id` column in table')

        props = {key: value for key, value in iteritems(row) if value != ''}
        if 'equation' in props:
            props['equation'] = parse_reaction_equation_string(
                props['equation'], default_compartment)

        mark = FileMark(context, lineno + 2, 0)
        yield ReactionEntry(props, mark)
python
def _get_speed(element_duration, wpm, word_ref=WORD):
    """Return (element_duration, wpm) when element_duration and/or code speed wpm is given.

    >>> _get_speed(0.2, None)
    (0.2, 5.999999999999999)
    >>> _get_speed(None, 15)
    (0.08, 15)
    >>> _get_speed(None, None)
    (0.08, 15)
    """
    seconds_per_dot = _seconds_per_dot(word_ref)
    if element_duration is None and wpm is None:
        # element_duration = 1
        # wpm = seconds_per_dot / element_duration
        wpm = WPM
        element_duration = wpm_to_duration(wpm, output='float', word_ref=WORD) / 1000.0
        return element_duration, wpm
    elif element_duration is not None and wpm is None:
        wpm = seconds_per_dot / element_duration
        return element_duration, wpm
    elif element_duration is None and wpm is not None:
        element_duration = wpm_to_duration(wpm, output='float', word_ref=WORD) / 1000.0
        return element_duration, wpm
    else:
        raise NotImplementedError("Can't set both element_duration and wpm")
python
def monitor(options={}):
    """Starts syncing with W&B if you're in Jupyter.

    Displays your W&B charts live in a Jupyter notebook.
    It's currently a context manager for legacy reasons.
    """
    try:
        from IPython.display import display
    except ImportError:
        def display(stuff):
            return None

    class Monitor():
        def __init__(self, options={}):
            if os.getenv("WANDB_JUPYTER"):
                display(jupyter.Run())
            else:
                self.rm = False
                termerror("wandb.monitor is only functional in Jupyter notebooks")

        def __enter__(self):
            termlog("DEPRECATED: with wandb.monitor(): is deprecated, "
                    "just call wandb.monitor() to see live results.")
            pass

        def __exit__(self, *args):
            pass

    return Monitor(options)
java
private void writeInitializers(
    GinjectorBindings bindings,
    StringBuilder initializeEagerSingletonsBody,
    StringBuilder initializeStaticInjectionsBody,
    SourceWriteUtil sourceWriteUtil,
    SourceWriter writer) {
  if (bindings.hasEagerSingletonBindingInSubtree()) {
    sourceWriteUtil.writeMethod(writer,
        "public void initializeEagerSingletons()",
        initializeEagerSingletonsBody.toString());
  }
  if (bindings.hasStaticInjectionRequestInSubtree()) {
    sourceWriteUtil.writeMethod(writer,
        "public void initializeStaticInjections()",
        initializeStaticInjectionsBody.toString());
  }
}
java
public void validate() {
    if (null != failedNodesfile
            && failedNodesfile.getName().startsWith("${")
            && failedNodesfile.getName().endsWith("}")) {
        failedNodesfile = null;
    }
}
python
def cmdSubstituteLines(self, cmd, count):
    """S"""
    lineIndex = self._qpart.cursorPosition[0]
    availableCount = len(self._qpart.lines) - lineIndex
    effectiveCount = min(availableCount, count)

    _globalClipboard.value = self._qpart.lines[lineIndex:lineIndex + effectiveCount]
    with self._qpart:
        del self._qpart.lines[lineIndex:lineIndex + effectiveCount]
        self._qpart.lines.insert(lineIndex, '')
        self._qpart.cursorPosition = (lineIndex, 0)
        self._qpart._indenter.autoIndentBlock(self._qpart.textCursor().block())
    self._saveLastEditSimpleCmd(cmd, count)
    self.switchMode(Insert)
python
def return_dat(self, chan, begsam, endsam):
    """Return the data as 2D numpy.ndarray.

    Parameters
    ----------
    chan : int or list
        index (indices) of the channels to read
    begsam : int
        index of the first sample
    endsam : int
        index of the last sample

    Returns
    -------
    numpy.ndarray
        A 2d matrix, with dimension chan X samples. To save memory, the
        data are memory-mapped, and you cannot change the values on disk.

    Notes
    -----
    When asking for an interval outside the data boundaries, it returns NaN
    for those values.
    """
    data = memmap(self.filename, dtype=self.dtype, mode='r', order='F',
                  shape=(self.n_chan, self.n_samples), offset=self.head)
    dat = data[chan, max((begsam, 0)):min((endsam, self.n_samples))].astype(float64)
    dat = (dat + self.offset[chan, :]) * self.gain[chan, :]

    if begsam < 0:
        pad = empty((dat.shape[0], 0 - begsam))
        pad.fill(NaN)
        dat = c_[pad, dat]

    if endsam >= self.n_samples:
        pad = empty((dat.shape[0], endsam - self.n_samples))
        pad.fill(NaN)
        dat = c_[dat, pad]

    return dat
java
public static void premain(String agentArgs, @Nonnull Instrumentation inst) {
    kvs = splitCommaColonStringToKV(agentArgs);

    Logger.setLoggerImplType(getLogImplTypeFromAgentArgs(kvs));
    final Logger logger = Logger.getLogger(TtlAgent.class);

    try {
        logger.info("[TtlAgent.premain] begin, agentArgs: " + agentArgs + ", Instrumentation: " + inst);
        final boolean disableInheritable = isDisableInheritableForThreadPool();

        final List<JavassistTransformlet> transformletList = new ArrayList<JavassistTransformlet>();
        transformletList.add(new TtlExecutorTransformlet(disableInheritable));
        transformletList.add(new TtlForkJoinTransformlet(disableInheritable));
        if (isEnableTimerTask()) transformletList.add(new TtlTimerTaskTransformlet());

        final ClassFileTransformer transformer = new TtlTransformer(transformletList);
        inst.addTransformer(transformer, true);
        logger.info("[TtlAgent.premain] addTransformer " + transformer.getClass() + " success");

        logger.info("[TtlAgent.premain] end");
        ttlAgentLoaded = true;
    } catch (Exception e) {
        String msg = "Fail to load TtlAgent , cause: " + e.toString();
        logger.log(Level.SEVERE, msg, e);
        throw new IllegalStateException(msg, e);
    }
}
java
public void prepareSmallElement(Widget widget) {
    m_hasSmallElements = true;
    widget.addStyleName(
        org.opencms.ade.containerpage.client.ui.css.I_CmsLayoutBundle.INSTANCE.containerpageCss().smallElement());
}
java
private Future<Boolean> _add(String key, int exp, CachedData value, EVCacheLatch latch) throws Exception {
    if (enableChunking.get()) throw new EVCacheException(
        "This operation is not supported as chunking is enabled on this EVCacheClient.");
    if (addCounter == null) addCounter = EVCacheMetricsFactory.getCounter(serverGroup.getName() + "-AddCall");

    final MemcachedNode node = evcacheMemcachedClient.getEVCacheNode(key);
    if (!ensureWriteQueueSize(node, key)) return getDefaultFuture();

    addCounter.increment();
    if (shouldHashKey()) {
        final String hKey = getHashedKey(key);
        final CachedData cVal = getEVCacheValue(key, value, exp);
        return evcacheMemcachedClient.add(hKey, exp, cVal, null, latch);
    } else {
        return evcacheMemcachedClient.add(key, exp, value, null, latch);
    }
}
python
def set_metadata(self, queue, metadata, clear=False):
    """
    Accepts a dictionary and adds that to the specified queue's metadata.
    If the 'clear' argument is passed as True, any existing metadata is
    replaced with the new metadata.
    """
    return self._manager.set_metadata(queue, metadata, clear=clear)
python
def is_suburi(self, base, test):
    """Check if test is below base in a URI tree.

    Both args must be URIs in reduced form.
    """
    if base == test:
        return True
    if base[0] != test[0]:
        return False
    common = posixpath.commonprefix((base[1], test[1]))
    if len(common) == len(base[1]):
        return True
    return False
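The path check reduces to a character-wise common-prefix test. A standalone sketch of that core logic, assuming reduced-form URIs are (authority, path) pairs:

    import posixpath

    base = ('example.com', '/docs/')
    test = ('example.com', '/docs/guide.html')
    # Same authority, and the base path is a full prefix of the test path.
    common = posixpath.commonprefix((base[1], test[1]))
    print(len(common) == len(base[1]))  # True: test lies below base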
java
private static <T> T nullForNotFound(IOException exception) {
    ComputeException serviceException = translate(exception);
    if (serviceException.getCode() == HTTP_NOT_FOUND) {
        return null;
    }
    throw serviceException;
}
python
def channel_close(
        self,
        registry_address: PaymentNetworkID,
        token_address: TokenAddress,
        partner_address: Address,
        retry_timeout: NetworkTimeout = DEFAULT_RETRY_TIMEOUT,
):
    """Close a channel opened with `partner_address` for the given `token_address`.

    Race condition, this can fail if channel was closed externally.
    """
    self.channel_batch_close(
        registry_address=registry_address,
        token_address=token_address,
        partner_addresses=[partner_address],
        retry_timeout=retry_timeout,
    )
python
def searchNs(self, node, nameSpace):
    """Search a Ns registered under a given name space for a document.

    Recurse on the parents until it finds the defined namespace or
    return None otherwise. @nameSpace can be None, this is a search
    for the default namespace. We don't allow crossing entity
    boundaries. If you don't declare the namespace within those you
    will be in trouble! A warning is generated to cover this case.
    """
    if node is None:
        node__o = None
    else:
        node__o = node._o
    ret = libxml2mod.xmlSearchNs(self._o, node__o, nameSpace)
    if ret is None:
        raise treeError('xmlSearchNs() failed')
    __tmp = xmlNs(_obj=ret)
    return __tmp
java
public ServiceFuture<VaultInner> getByResourceGroupAsync(
        String resourceGroupName, String vaultName, final ServiceCallback<VaultInner> serviceCallback) {
    return ServiceFuture.fromResponse(
        getByResourceGroupWithServiceResponseAsync(resourceGroupName, vaultName), serviceCallback);
}
java
public static List<String> getParameterList(String parameterString) {
    List<String> parameterList = new ArrayList<>();
    StringTokenizer tok = new StringTokenizer(parameterString, ",");
    while (tok.hasMoreElements()) {
        String param = tok.nextToken().trim();
        parameterList.add(cutOffSingleQuotes(param));
    }

    List<String> postProcessed = new ArrayList<>();
    for (int i = 0; i < parameterList.size(); i++) {
        int next = i + 1;
        String processed = parameterList.get(i);
        if (processed.startsWith("'") && !processed.endsWith("'")) {
            while (next < parameterList.size()) {
                if (parameterString.contains(processed + ", " + parameterList.get(next))) {
                    processed += ", " + parameterList.get(next);
                } else if (parameterString.contains(processed + "," + parameterList.get(next))) {
                    processed += "," + parameterList.get(next);
                } else if (parameterString.contains(processed + " , " + parameterList.get(next))) {
                    processed += " , " + parameterList.get(next);
                } else {
                    processed += parameterList.get(next);
                }
                i++;
                if (parameterList.get(next).endsWith("'")) {
                    break;
                } else {
                    next++;
                }
            }
        }
        postProcessed.add(cutOffSingleQuotes(processed));
    }
    return postProcessed;
}
python
def heatmaps_to_keypoints(maps, rois):
    """Extract predicted keypoint locations from heatmaps.

    Returns a tuple of xy predictions with shape (#rois, #keypoints, 3),
    holding (x, y, 1) per keypoint, and the heatmap scores at the argmax
    locations with shape (#rois, #keypoints).
    """
    # This function converts a discrete image coordinate in a HEATMAP_SIZE x
    # HEATMAP_SIZE image to a continuous keypoint coordinate. We maintain
    # consistency with keypoints_to_heatmap_labels by using the conversion from
    # Heckbert 1990: c = d + 0.5, where d is a discrete coordinate and c is a
    # continuous coordinate.
    offset_x = rois[:, 0]
    offset_y = rois[:, 1]
    widths = rois[:, 2] - rois[:, 0]
    heights = rois[:, 3] - rois[:, 1]
    widths = np.maximum(widths, 1)
    heights = np.maximum(heights, 1)
    widths_ceil = np.ceil(widths)
    heights_ceil = np.ceil(heights)

    # NCHW to NHWC for use with OpenCV
    maps = np.transpose(maps, [0, 2, 3, 1])
    min_size = 0  # cfg.KRCNN.INFERENCE_MIN_SIZE
    num_keypoints = maps.shape[3]
    xy_preds = np.zeros((len(rois), 3, num_keypoints), dtype=np.float32)
    end_scores = np.zeros((len(rois), num_keypoints), dtype=np.float32)
    for i in range(len(rois)):
        if min_size > 0:
            roi_map_width = int(np.maximum(widths_ceil[i], min_size))
            roi_map_height = int(np.maximum(heights_ceil[i], min_size))
        else:
            roi_map_width = widths_ceil[i]
            roi_map_height = heights_ceil[i]
        width_correction = widths[i] / roi_map_width
        height_correction = heights[i] / roi_map_height
        roi_map = cv2.resize(
            maps[i], (roi_map_width, roi_map_height),
            interpolation=cv2.INTER_CUBIC)
        # Bring back to CHW
        roi_map = np.transpose(roi_map, [2, 0, 1])
        # roi_map_probs = scores_to_probs(roi_map.copy())
        w = roi_map.shape[2]
        pos = roi_map.reshape(num_keypoints, -1).argmax(axis=1)
        x_int = pos % w
        y_int = (pos - x_int) // w
        # assert (roi_map_probs[k, y_int, x_int] ==
        #         roi_map_probs[k, :, :].max())
        x = (x_int + 0.5) * width_correction
        y = (y_int + 0.5) * height_correction
        xy_preds[i, 0, :] = x + offset_x[i]
        xy_preds[i, 1, :] = y + offset_y[i]
        xy_preds[i, 2, :] = 1
        end_scores[i, :] = roi_map[np.arange(num_keypoints), y_int, x_int]

    return np.transpose(xy_preds, [0, 2, 1]), end_scores
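The core index arithmetic above — recovering 2-D coordinates from a flattened argmax, then applying the Heckbert half-pixel shift — can be checked in isolation (a standalone sketch, not part of the original module):

    import numpy as np

    roi_map = np.zeros((1, 4, 5), dtype=np.float32)  # (keypoints, H, W)
    roi_map[0, 2, 3] = 1.0                           # peak at row 2, col 3
    w = roi_map.shape[2]
    pos = roi_map.reshape(1, -1).argmax(axis=1)      # flat argmax per keypoint
    x_int = pos % w
    y_int = (pos - x_int) // w
    print(x_int, y_int)                  # [3] [2]
    print(x_int + 0.5, y_int + 0.5)      # continuous coords before scaling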
python
def add_params(self, **kw):
    """
    Add [possibly many] parameters to the track.

    Parameters will be checked against known UCSC parameters and their
    supported formats. E.g.::

        add_params(color='128,0,0', visibility='dense')
    """
    for k, v in kw.items():
        if (k not in self.params) and (k not in self.specific_params):
            raise ParameterError('"%s" is not a valid parameter for %s'
                                 % (k, self.__class__.__name__))
        try:
            self.params[k].validate(v)
        except KeyError:
            self.specific_params[k].validate(v)

    self._orig_kwargs.update(kw)
    self.kwargs = self._orig_kwargs.copy()
java
public <T> void setHintValue(Hint<T> hint, T value) {
    if (value == null) {
        throw new IllegalArgumentException("Null value passed.");
    }
    hintValues.put(hint, value);
}
java
public void flush() {
    for (Iterator<Map.Entry<OFileMMap, OMMapBufferEntry[]>> it = bufferPoolPerFile.entrySet().iterator(); it.hasNext();) {
        OFileMMap file;
        final Map.Entry<OFileMMap, OMMapBufferEntry[]> mapEntry = it.next();
        file = mapEntry.getKey();
        lockManager.acquireLock(Thread.currentThread(), file, OLockManager.LOCK.EXCLUSIVE);
        try {
            if (file.isClosed()) {
                OMMapBufferEntry[] notFlushed = EMPTY_BUFFER_ENTRIES;
                for (OMMapBufferEntry entry : mapEntry.getValue()) {
                    if (!removeEntry(entry)) {
                        notFlushed = addEntry(notFlushed, entry);
                    }
                }
                if (notFlushed.length == 0) {
                    it.remove();
                } else {
                    mapEntry.setValue(notFlushed);
                }
            }
        } finally {
            lockManager.releaseLock(Thread.currentThread(), file, OLockManager.LOCK.EXCLUSIVE);
        }
    }
}
python
def transpose(self):
    """Transpose operation of self.

    Returns
    -------
    Matrix : Matrix
        transpose of self
    """
    if not self.isdiagonal:
        return type(self)(x=self.__x.copy().transpose(),
                          row_names=self.col_names,
                          col_names=self.row_names,
                          autoalign=self.autoalign)
    else:
        return type(self)(x=self.__x.copy(),
                          row_names=self.row_names,
                          col_names=self.col_names,
                          isdiagonal=True,
                          autoalign=self.autoalign)
python
def subplots_adjust(fig=None, inches=1):
    """Enforce margin to be equal around figure, starting at subplots.

    .. note::
        You probably should be using wt.artists.create_figure instead.

    See Also
    --------
    wt.artists.plot_margins
        Visualize margins, for debugging / layout.
    wt.artists.create_figure
        Convenience method for creating well-behaved figures.
    """
    if fig is None:
        fig = plt.gcf()
    size = fig.get_size_inches()
    vert = inches / size[0]
    horz = inches / size[1]
    fig.subplots_adjust(vert, horz, 1 - vert, 1 - horz)
java
@NonNull
public static PaymentIntentParams createConfirmPaymentIntentWithPaymentMethodCreateParams(
        @Nullable PaymentMethodCreateParams paymentMethodCreateParams,
        @NonNull String clientSecret,
        @NonNull String returnUrl,
        boolean savePaymentMethod) {
    return new PaymentIntentParams()
            .setPaymentMethodCreateParams(paymentMethodCreateParams)
            .setClientSecret(clientSecret)
            .setReturnUrl(returnUrl)
            .setSavePaymentMethod(savePaymentMethod);
}
python
def _update_tree_feature_weights(X, feature_names, clf, feature_weights):
    """Update tree feature weights using decision path method."""
    tree_value = clf.tree_.value
    if tree_value.shape[1] == 1:
        squeeze_axis = 1
    else:
        assert tree_value.shape[2] == 1
        squeeze_axis = 2
    tree_value = np.squeeze(tree_value, axis=squeeze_axis)
    tree_feature = clf.tree_.feature
    _, indices = clf.decision_path(X).nonzero()
    if isinstance(clf, DecisionTreeClassifier):
        norm = lambda x: x / x.sum()
    else:
        norm = lambda x: x
    feature_weights[feature_names.bias_idx] += norm(tree_value[0])
    for parent_idx, child_idx in zip(indices, indices[1:]):
        assert tree_feature[parent_idx] >= 0
        feature_idx = tree_feature[parent_idx]
        diff = norm(tree_value[child_idx]) - norm(tree_value[parent_idx])
        feature_weights[feature_idx] += diff
java
private File resolveIndexDirectoryPath() throws IOException {
    if (StringUtils.isEmpty(indexDirectoryPath)) {
        indexDirectoryPath = System.getProperty("java.io.tmpdir") + appContext.getApplicationName();
    }
    File dir = new File(indexDirectoryPath, getClass().getPackage().getName());
    if (!dir.exists() && !dir.mkdirs()) {
        throw new IOException("Failed to create help search index directory.");
    }
    log.info("Help search index located at " + dir);
    return dir;
}
java
private void determineChangesSince(File file, long lastScanTime) {
    try {
        if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
            log.info("Firing file changed event " + file);
        }
        listener.fileChanged(file);
        if (file.isDirectory()) {
            File[] filesOfInterest = file.listFiles(new RecentChangeFilter(lastScanTime));
            for (File f : filesOfInterest) {
                if (f.isDirectory()) {
                    determineChangesSince(f, lastScanTime);
                } else {
                    if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
                        log.info("Observed last modification time change for " + f
                                + " (lastScanTime=" + lastScanTime + ")");
                        log.info("Firing file changed event " + file);
                    }
                    listener.fileChanged(f);
                }
            }
        }
    } catch (Throwable t) {
        if (log.isLoggable(Level.SEVERE)) {
            log.log(Level.SEVERE, "FileWatcher caught serious error, see cause", t);
        }
    }
}
python
def select(*signals: Signal, **kwargs) -> List[Signal]:
    """
    Allows the current process to wait for multiple concurrent signals. Waits
    until one of the signals turns on, at which point this signal is returned.

    :param timeout:
        If this parameter is not ``None``, it is taken as a delay at the end
        of which the process times out, and stops waiting on the set of
        :py:class:`Signal` objects. In such a situation, a
        :py:class:`Timeout` exception is raised on the process.
    """
    class CleanUp(Interrupt):
        pass

    timeout = kwargs.get("timeout", None)
    if not isinstance(timeout, (float, int, type(None))):
        raise ValueError("The timeout keyword parameter can be either None or a number.")

    def wait_one(signal: Signal, common: Signal) -> None:
        try:
            signal.wait()
            common.turn_on()
        except CleanUp:
            pass

    # We simply set up multiple sub-processes respectively waiting for one of
    # the signals. Once one of them has fired, the others will all run no-op
    # eventually, so no need for any explicit clean-up.
    common = Signal(name=local.name + "-selector").turn_off()
    if _logger is not None:
        _log(INFO, "select", "select", "select", signals=[sig.name for sig in signals])
    procs = []
    for signal in signals:
        procs.append(add(wait_one, signal, common))
    try:
        common.wait(timeout)
    finally:
        for proc in procs:
            # Clean up the support processes.
            proc.interrupt(CleanUp())
    return [signal for signal in signals if signal.is_on]
python
def interact(self, escape_character=chr(29), input_filter=None, output_filter=None):
    '''This gives control of the child process to the interactive user (the
    human at the keyboard). Keystrokes are sent to the child process, and
    the stdout and stderr output of the child process is printed. This
    simply echoes the child stdout and child stderr to the real stdout and
    it echoes the real stdin to the child stdin. When the user types the
    escape_character this method will return None. The escape_character
    will not be transmitted. The default for escape_character is entered
    as ``Ctrl - ]``, the very same as BSD telnet. To prevent escaping,
    escape_character may be set to None.

    If a logfile is specified, then the data sent and received from the
    child process in interact mode is duplicated to the given log.

    You may pass in optional input and output filter functions. These
    functions should take a string and return a string. The output_filter
    will be passed all the output from the child process. The input_filter
    will be passed all the keyboard input from the user. The input_filter
    is run BEFORE the check for the escape_character.

    Note that if you change the window size of the parent the SIGWINCH
    signal will not be passed through to the child. If you want the child
    window size to change when the parent's window size changes then do
    something like the following example::

        import pexpect, struct, fcntl, termios, signal, sys

        def sigwinch_passthrough(sig, data):
            s = struct.pack("HHHH", 0, 0, 0, 0)
            a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(),
                termios.TIOCGWINSZ, s))
            if not p.closed:
                p.setwinsize(a[0], a[1])

        # Note this 'p' is global and used in sigwinch_passthrough.
        p = pexpect.spawn('/bin/bash')
        signal.signal(signal.SIGWINCH, sigwinch_passthrough)
        p.interact()
    '''
    # Flush the buffer.
    self.write_to_stdout(self.buffer)
    self.stdout.flush()
    self._buffer = self.buffer_type()
    mode = tty.tcgetattr(self.STDIN_FILENO)
    tty.setraw(self.STDIN_FILENO)
    if escape_character is not None and PY3:
        escape_character = escape_character.encode('latin-1')
    try:
        self.__interact_copy(escape_character, input_filter, output_filter)
    finally:
        tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode)
python
def _exec_check_pointers(executable):
    """Checks the specified executable for the pointer condition that not
    all members of the derived type have had their values set.

    Returns (offending parameter names, dict mapping parameter name to its
    offending members).
    """
    oparams = []
    pmembers = {}
    xassigns = map(lambda x: x.lower().strip(), executable.external_assignments())

    def add_offense(pname, member):
        """Adds the specified member as an offender under the specified parameter."""
        if pname not in oparams:
            oparams.append(pname)
        if pname not in pmembers:
            pmembers[pname] = [member]
        else:
            pmembers[pname].append(member)

    def check_buried(executable, pname, member):
        """Checks whether the member has its value changed by one of the
        dependency subroutines in the executable.
        """
        for d in executable.dependencies:
            if pname in d.argnames:
                pindex = d.argnames.index(pname)
                dtarget = d.target
                if dtarget is not None:
                    mparam = dtarget.ordered_parameters[pindex]

    for pname, param in executable.parameters.items():
        if param.direction == "(out)" and param.is_custom:
            utype = param.customtype
            if utype is None:
                continue

            for mname, member in utype.members.items():
                key = "{}%{}".format(pname, mname).lower().strip()
                if key not in xassigns:
                    # We also need to check the dependency calls to other,
                    # buried subroutines.
                    compname = "{}%{}".format(pname, mname).lower()
                    if executable.changed(compname) is None:
                        add_offense(pname, member)

    return (oparams, pmembers)
java
public JobSchedulePatchOptions withIfUnmodifiedSince(DateTime ifUnmodifiedSince) {
    if (ifUnmodifiedSince == null) {
        this.ifUnmodifiedSince = null;
    } else {
        this.ifUnmodifiedSince = new DateTimeRfc1123(ifUnmodifiedSince);
    }
    return this;
}
python
def _refresh(self, session, stopping=False):
    '''Get this task's current state.

    This must be called under the registry's lock. It updates the
    :attr:`finished` and :attr:`failed` flags and the :attr:`data`
    dictionary based on the current state in the registry. In the
    normal case, nothing will change and this function will return
    normally. If it turns out that the work unit is already finished,
    the state of this object will change before
    :exc:`rejester.exceptions.LostLease` is raised.

    :param session: locked registry session
    :param stopping: don't raise if the work unit is finished
    :raises rejester.exceptions.LostLease: if this worker is no longer
      doing this work unit
    '''
    data = session.get(
        WORK_UNITS_ + self.work_spec_name + _FINISHED, self.key)
    if data is not None:
        self.finished = True
        self.data = data
        if not stopping:
            raise LostLease('work unit is already finished')
        return
    self.finished = False

    data = session.get(
        WORK_UNITS_ + self.work_spec_name + _FAILED, self.key)
    if data is not None:
        self.failed = True
        self.data = data
        if not stopping:
            raise LostLease('work unit has already failed')
        return
    self.failed = False

    # (You need a pretty specific sequence of events to get here)
    data = session.get(
        WORK_UNITS_ + self.work_spec_name + _BLOCKED, self.key)
    if data is not None:
        self.data = data
        raise LostLease('work unit now blocked by others')

    worker_id = session.get(
        WORK_UNITS_ + self.work_spec_name + '_locks', self.key)
    if worker_id != self.worker_id:
        raise LostLease('work unit claimed by %r', worker_id)

    # NB: We could check the priority here, but don't.
    # If at this point we're technically overtime but nobody
    # else has started doing work yet, since we're under the
    # global lock, we can get away with finishing whatever
    # transition we were going to try to do.
    data = session.get(
        WORK_UNITS_ + self.work_spec_name, self.key)
    if data is None:
        raise NoSuchWorkUnitError('work unit is gone')

    # Since we should still own the work unit, any changes
    # in data should be on our end; do not touch it
    return
java
public org.osmdroid.views.overlay.Polygon toPolygon(Polygon polygon) {
    org.osmdroid.views.overlay.Polygon newPolygon = new org.osmdroid.views.overlay.Polygon();
    List<GeoPoint> pts = new ArrayList<>();
    List<List<GeoPoint>> holes = new ArrayList<>();
    List<LineString> rings = polygon.getRings();
    if (!rings.isEmpty()) {
        Double z = null;

        // Add the polygon points
        LineString polygonLineString = rings.get(0);
        for (Point point : polygonLineString.getPoints()) {
            GeoPoint latLng = toLatLng(point);
            pts.add(latLng);
        }

        // Add the holes
        for (int i = 1; i < rings.size(); i++) {
            LineString hole = rings.get(i);
            List<GeoPoint> holeLatLngs = new ArrayList<GeoPoint>();
            for (Point point : hole.getPoints()) {
                GeoPoint latLng = toLatLng(point);
                holeLatLngs.add(latLng);
                if (point.hasZ()) {
                    z = (z == null) ? point.getZ() : Math.max(z, point.getZ());
                }
            }
            holes.add(holeLatLngs);
        }
    }
    newPolygon.setPoints(pts);
    newPolygon.setHoles(holes);
    if (polygonOptions != null) {
        newPolygon.setFillColor(polygonOptions.getFillColor());
        newPolygon.setStrokeColor(polygonOptions.getStrokeColor());
        newPolygon.setStrokeWidth(polygonOptions.getStrokeWidth());
        newPolygon.setTitle(polygonOptions.getTitle());
    }
    return newPolygon;
}
java
@Deprecated
public @Nonnull String getMessage(String key) {
    BugPattern bugPattern = DetectorFactoryCollection.instance().lookupBugPattern(key);
    if (bugPattern == null) {
        return L10N.getLocalString("err.missing_pattern", "Error: missing bug pattern for key") + " " + key;
    }
    return bugPattern.getAbbrev() + ": " + bugPattern.getLongDescription();
}
java
@Nullable
public T getItem(final int position) {
    if (position < 0 || position >= mObjects.size()) {
        return null;
    }
    return mObjects.get(position);
}
python
def scan_for_field(self, field_key):
    '''
    Scan for a field in the container and its enclosed fields.

    :param field_key: name of field to look for
    :return: field with name that matches field_key, None if not found
    '''
    if field_key == self.get_name():
        return self
    if field_key in self._fields_dict:
        return self._fields_dict[field_key]
    for field in self._fields:
        if isinstance(field, Container):
            resolved = field.scan_for_field(field_key)
            if resolved:
                return resolved
    return None
python
def _is_interactive(self):
    '''Prevent middlewares and orders from working outside live mode.'''
    return not (self.realworld and (dt.date.today() > self.datetime.date()))
java
public Group addGroup(String name, String path, String description, Visibility visibility,
        Boolean lfsEnabled, Boolean requestAccessEnabled, Integer parentId) throws GitLabApiException {

    Form formData = new GitLabApiForm()
            .withParam("name", name)
            .withParam("path", path)
            .withParam("description", description)
            .withParam("visibility", visibility)
            .withParam("lfs_enabled", lfsEnabled)
            .withParam("request_access_enabled", requestAccessEnabled)
            .withParam("parent_id", isApiVersion(ApiVersion.V3) ? null : parentId);
    Response response = post(Response.Status.CREATED, formData, "groups");
    return (response.readEntity(Group.class));
}
java
List<String> argNames(int size) {
    List<String> argNames = new ArrayList<>();
    for (int i = 0; i < size; i++) {
        argNames.add(StubKind.FACTORY_METHOD_ARG.format(i));
    }
    return argNames;
}
python
def group_members(self, group, limit=99999):
    """
    Get members of a group.

    :param group:
    :param limit: OPTIONAL: The limit of the number of users to return,
        this may be restricted by fixed system limits.
        Default by built-in method: 99999
    :return:
    """
    url = 'rest/api/1.0/admin/groups/more-members'
    params = {}
    if limit:
        params['limit'] = limit
    if group:
        params['context'] = group
    return (self.get(url, params=params) or {}).get('values')
java
public static boolean getBooleanProperty(final String key) {
    return (getPropertyOrNull(key) == null)
            ? false
            : Boolean.valueOf(getPropertyOrNull(key)).booleanValue();
}
java
public static <T extends Exception> org.hamcrest.Matcher<T> hasMessageThat(Matcher<String> stringMatcher) {
    return new ExceptionMessageMatcher<>(stringMatcher);
}
java
public MessageProcessor createMessageProcessor(@NonNull MessageToSend message,
        @Nullable List<Attachment> attachments, @NonNull String conversationId,
        @NonNull String profileId) {
    return new MessageProcessor(conversationId, profileId, message, attachments, maxPartSize, log);
}
python
def create_read_replica(name, source_name, db_instance_class=None,
                        availability_zone=None, port=None,
                        auto_minor_version_upgrade=None, iops=None,
                        option_group_name=None, publicly_accessible=None,
                        tags=None, db_subnet_group_name=None,
                        storage_type=None, copy_tags_to_snapshot=None,
                        monitoring_interval=None, monitoring_role_arn=None,
                        region=None, key=None, keyid=None, profile=None):
    '''
    Create an RDS read replica

    CLI example to create an RDS read replica::

        salt myminion boto_rds.create_read_replica replicaname source_name
    '''
    # ``backup_retention_period`` is referenced below but is not one of this
    # function's parameters; the guard is preserved from the source as-is.
    if not backup_retention_period:
        raise SaltInvocationError('backup_retention_period is required')
    res = __salt__['boto_rds.exists'](source_name, tags, region, key, keyid, profile)
    if not res.get('exists'):
        return {'exists': bool(res),
                'message': 'RDS instance source {0} does not exists.'.format(source_name)}

    res = __salt__['boto_rds.exists'](name, tags, region, key, keyid, profile)
    if res.get('exists'):
        return {'exists': bool(res),
                'message': 'RDS replica instance {0} already exists.'.format(name)}

    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        kwargs = {}
        for key in ('OptionGroupName', 'MonitoringRoleArn'):
            if locals()[key] is not None:
                kwargs[key] = str(locals()[key])  # future lint: disable=blacklisted-function

        for key in ('MonitoringInterval', 'Iops', 'Port'):
            if locals()[key] is not None:
                kwargs[key] = int(locals()[key])

        for key in ('CopyTagsToSnapshot', 'AutoMinorVersionUpgrade'):
            if locals()[key] is not None:
                kwargs[key] = bool(locals()[key])

        taglist = _tag_doc(tags)

        rds_replica = conn.create_db_instance_read_replica(
            DBInstanceIdentifier=name,
            SourceDBInstanceIdentifier=source_name,
            DBInstanceClass=db_instance_class,
            AvailabilityZone=availability_zone,
            PubliclyAccessible=publicly_accessible,
            Tags=taglist,
            DBSubnetGroupName=db_subnet_group_name,
            StorageType=storage_type,
            **kwargs)

        return {'exists': bool(rds_replica)}
    except ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
java
public static void main(String[] args) throws IOException {
    if (args.length < 1) {
        System.out.println("Usage: java opennlp.maxent.io.OldFormatGISModelReader model_name_prefix (new_model_name)");
        System.exit(0);
    }

    int nameIndex = 0;
    String infilePrefix = args[nameIndex];
    String outfile;
    // Guard against indexing past the end when only the prefix is given.
    if (args.length > nameIndex + 1)
        outfile = args[nameIndex + 1];
    else
        outfile = infilePrefix + ".bin.gz";

    GISModelReader reader = new OldFormatGISModelReader(infilePrefix);
    new SuffixSensitiveGISModelWriter(reader.getModel(), new File(outfile)).persist();
}
java
@Nonnull
public static List<SCMDecisionHandler> listShouldPollVetos(@Nonnull Item item) {
    List<SCMDecisionHandler> result = new ArrayList<>();
    for (SCMDecisionHandler handler : all()) {
        if (!handler.shouldPoll(item)) {
            result.add(handler);
        }
    }
    return result;
}
python
def _collect_unrecognized_values(self, scheme, data, ancestors):
    """
    Looks for values that aren't defined in the scheme and returns a dict
    with any unrecognized values found.

    :param scheme: A :dict:, The scheme defining the validations.
    :param data: A :dict: user supplied for this specific property.
    :param ancestors: A :OrderedDict: that provides a history of its ancestors.
    :rtype: A :dict: of unrecognized configuration properties.
    """
    if not isinstance(ancestors, OrderedDict):
        raise TypeError("ancestors must be an OrderedDict. type: {0} was passed.".format(type(ancestors)))
    if not isinstance(scheme, dict):
        raise TypeError('scheme must be a dict. type: {0} was passed'.format(type(scheme)))

    unrecognized_values = {}
    if isinstance(data, dict):
        pruned_scheme = [key for key in scheme.keys()
                         if key not in RESERVED_SCHEME_KEYS and key[0] not in RESERVED_SCHEME_KEYS]
        for key, value in six.iteritems(data):
            if key in pruned_scheme:
                continue
            unrecognized_values[key] = value

        validations = scheme.get('is')
        if validations and 'one_of' in validations:
            for nested_scheme in validations['one_of']:
                if isinstance(nested_scheme, dict):
                    updated_scheme = self._update_scheme(nested_scheme, ancestors)
                    pruned_scheme = [key for key in updated_scheme.keys()
                                     if key not in RESERVED_SCHEME_KEYS and key[0] not in RESERVED_SCHEME_KEYS]
                    for key in pruned_scheme:
                        if key in unrecognized_values:
                            del unrecognized_values[key]
                else:
                    # TODO: maybe return an error?
                    pass
    return unrecognized_values
python
def data_available(dataset_name=None):
    """Check if the data set is available on the local machine already."""
    dr = data_resources[dataset_name]
    if 'dirs' in dr:
        for dirs, files in zip(dr['dirs'], dr['files']):
            for dir, file in zip(dirs, files):
                if not os.path.exists(os.path.join(data_path, dataset_name, dir, file)):
                    return False
    else:
        for file_list in dr['files']:
            for file in file_list:
                if not os.path.exists(os.path.join(data_path, dataset_name, file)):
                    return False
    return True
java
public void addValueSet(ValueSet theValueSet) {
    Validate.notBlank(theValueSet.getUrl(), "theValueSet.getUrl() must not return a value");
    myValueSets.put(theValueSet.getUrl(), theValueSet);
}
java
@SafeVarargs
public final DataStream<T> union(DataStream<T>... streams) {
    List<StreamTransformation<T>> unionedTransforms = new ArrayList<>();
    unionedTransforms.add(this.transformation);

    for (DataStream<T> newStream : streams) {
        if (!getType().equals(newStream.getType())) {
            throw new IllegalArgumentException("Cannot union streams of different types: "
                    + getType() + " and " + newStream.getType());
        }
        unionedTransforms.add(newStream.getTransformation());
    }
    return new DataStream<>(this.environment, new UnionTransformation<>(unionedTransforms));
}
java
public static short[] trimToCapacity(short[] array, int maxCapacity) {
    if (array.length > maxCapacity) {
        short[] oldArray = array;
        array = new short[maxCapacity];
        System.arraycopy(oldArray, 0, array, 0, maxCapacity);
    }
    return array;
}
python
def _get_order_clause(archive_table):
    """Returns an ascending order clause on the versioned unique constraint
    as well as the version column.
    """
    order_clause = [
        sa.asc(getattr(archive_table, col_name))
        for col_name in archive_table._version_col_names
    ]
    order_clause.append(sa.asc(archive_table.version_id))
    return order_clause
java
public Observable<PolicySetDefinitionInner> getAsync(String policySetDefinitionName) {
    return getWithServiceResponseAsync(policySetDefinitionName)
            .map(new Func1<ServiceResponse<PolicySetDefinitionInner>, PolicySetDefinitionInner>() {
                @Override
                public PolicySetDefinitionInner call(ServiceResponse<PolicySetDefinitionInner> response) {
                    return response.body();
                }
            });
}
python
def marker_split(block):
    """Yield (marker, value) pairs from a text block (i.e. a list of lines).

    :param block: text block consisting of newline-separated lines, as will
        be the case for files read using "rU" mode.
    :return: generator of (marker, value) pairs.
    """
    marker = None
    value = []

    for line in block.split('\n'):
        line = line.strip()
        if line.startswith('\\_'):
            continue  # we simply ignore SFM header fields
        match = MARKER_PATTERN.match(line)
        if match:
            if marker:
                yield marker, '\n'.join(value)
            marker = match.group('marker')
            value = [line[match.end():]]
        else:
            value.append(line)
    if marker:
        yield marker, ('\n'.join(value)).strip()
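A usage sketch. MARKER_PATTERN is module-level state not shown above, so the regex here is an assumed stand-in with the required named group:

    import re

    # Assumed stand-in for the module's MARKER_PATTERN (must expose a 'marker' group).
    MARKER_PATTERN = re.compile(r'\\(?P<marker>[A-Za-z][A-Za-z0-9_]*) ?')

    block = "\\lx banana\n\\ge fruit\nsecond gloss line\n\\_sh v3.0 400 MDF"
    for marker, value in marker_split(block):
        print(marker, repr(value))
    # lx 'banana'
    # ge 'fruit\nsecond gloss line'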
java
public static MutableFst importFst(String basename, Semiring semiring) {
    Optional<MutableSymbolTable> maybeInputs = importSymbols(basename + INPUT_SYMS);
    Optional<MutableSymbolTable> maybeOutputs = importSymbols(basename + OUTPUT_SYMS);
    Optional<MutableSymbolTable> maybeStates = importSymbols(basename + STATES_SYMS);
    CharSource cs = asCharSource(Resources.getResource(basename + FST_TXT), Charsets.UTF_8);
    return convertFrom(cs, maybeInputs, maybeOutputs, maybeStates, semiring);
}
java
@Override
public Collection<String> wordsNearestSum(Collection<String> positive, Collection<String> negative, int top) {
    INDArray words = Nd4j.create(lookupTable.layerSize());
    // Set<String> union = SetUtils.union(new HashSet<>(positive), new HashSet<>(negative));
    for (String s : positive)
        words.addi(lookupTable.vector(s));

    for (String s : negative)
        words.addi(lookupTable.vector(s).mul(-1));

    return wordsNearestSum(words, top);
}
python
def get_tetrahedra_integration_weight(omegas, tetrahedra_omegas, function='I'):
    """Return integration weights.

    Parameters
    ----------
    omegas : float or list of float values
        Energy(s) at which the integration weight(s) are computed.
    tetrahedra_omegas : ndarray of list of list
        Energies at vertices of 24 tetrahedra
        shape=(24, 4)
        dtype='double'
    function : str, 'I' or 'J'
        'J' is for integration and 'I' is for its derivative.
    """
    if isinstance(omegas, float):
        return phonoc.tetrahedra_integration_weight(
            omegas,
            np.array(tetrahedra_omegas, dtype='double', order='C'),
            function)
    else:
        integration_weights = np.zeros(len(omegas), dtype='double')
        phonoc.tetrahedra_integration_weight_at_omegas(
            integration_weights,
            np.array(omegas, dtype='double'),
            np.array(tetrahedra_omegas, dtype='double', order='C'),
            function)
        return integration_weights
python
def weighted_choice(self, probabilities, key):
    """Makes a weighted choice between several options.

    Probabilities is a list of 2-tuples, (probability, option). The
    probabilities don't need to add up to anything, they are
    automatically scaled.
    """
    try:
        choice = self.values[key].lower()
    except KeyError:
        # override not set.
        return super(RecordingParameters, self)\
            .weighted_choice(probabilities, key)

    # Find the matching key (case insensitive)
    for probability, option in probabilities:
        if str(option).lower() == choice:
            return option

    # for function or class-type choices, also check __name__
    for probability, option in probabilities:
        if option.__name__.lower() == choice:
            return option

    assert False, "Invalid value provided"
java
public ClusterInner getByResourceGroup(String resourceGroupName, String clusterName) {
    return getByResourceGroupWithServiceResponseAsync(resourceGroupName, clusterName)
            .toBlocking().single().body();
}
java
public void put(Entity entity) {
    int bytesHere = EntityTranslator.convertToPb(entity).getSerializedSize();

    // Do this before the add so that we guarantee that size is never > sizeLimit
    if (putsBytes + bytesHere >= params.getBytesLimit()) {
        flushPuts();
    }

    putsBytes += bytesHere;
    puts.add(entity);

    if (puts.size() >= params.getCountLimit()) {
        flushPuts();
    }
}
java
private String getEs6ModuleNameFromImportNode(NodeTraversal t, Node n) {
    String importName = n.getLastChild().getString();
    boolean isNamespaceImport = importName.startsWith("goog:");
    if (isNamespaceImport) {
        // Allow importing Closure namespace objects (e.g. from goog.provide or goog.module) as
        //   import ... from 'goog:my.ns.Object'.
        // These are rewritten to plain namespace object accesses.
        return importName.substring("goog:".length());
    } else {
        ModuleLoader.ModulePath modulePath =
            t.getInput()
                .getPath()
                .resolveJsModule(importName, n.getSourceFileName(), n.getLineno(), n.getCharno());
        if (modulePath == null) {
            // The module loader issues an error.
            // Fall back to assuming the module is a file path.
            modulePath = t.getInput().getPath().resolveModuleAsPath(importName);
        }
        return modulePath.toModuleName();
    }
}
java
public static String[] intersect(String[] arr1, String[] arr2) {
    Map<String, Boolean> map = new HashMap<String, Boolean>();
    LinkedList<String> list = new LinkedList<String>();
    for (String str : arr1) {
        if (!map.containsKey(str)) {
            map.put(str, Boolean.FALSE);
        }
    }
    for (String str : arr2) {
        if (map.containsKey(str)) {
            map.put(str, Boolean.TRUE);
        }
    }
    for (Entry<String, Boolean> e : map.entrySet()) {
        if (e.getValue().equals(Boolean.TRUE)) {
            list.add(e.getKey());
        }
    }
    String[] result = {};
    return list.toArray(result);
}
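For reference, the same order-insensitive intersection is a few lines with Python sets (a sketch, not taken from the source repository):

    def intersect(arr1, arr2):
        # Like the Java version above, duplicates collapse and output order is unspecified.
        return list(set(arr1) & set(arr2))

    print(sorted(intersect(["a", "b", "c"], ["b", "c", "d"])))  # ['b', 'c']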
java
@Override
public Character unmarshal(String object) {
    if (object.length() == 1) {
        return Character.valueOf(object.charAt(0));
    } else {
        throw new IllegalArgumentException("Only single character String can be unmarshalled to a Character");
    }
}
java
@Pure
@SuppressWarnings("resource")
public static InputStream getResourceAsStream(Class<?> classname, String path) {
    if (classname == null) {
        return null;
    }
    InputStream is = getResourceAsStream(classname.getClassLoader(), classname.getPackage(), path);
    if (is == null) {
        is = getResourceAsStream(classname.getClassLoader(), path);
    }
    return is;
}
python
def disable(self):
    """Disables the plotters in this list."""
    for arr in self:
        if arr.psy.plotter:
            arr.psy.plotter.disabled = True
python
def set_linetrace_on_frame(f, localtrace=None):
    """
    Non-portable function to modify linetracing.

    Remember to enable global tracing with :py:func:`sys.settrace`,
    otherwise no effect!
    """
    traceptr, _, _ = get_frame_pointers(f)
    if localtrace is not None:
        # Need to incref to avoid the frame causing a double-delete
        ctypes.pythonapi.Py_IncRef(localtrace)
        # Not sure if this is the best way to do this, but it works.
        addr = id(localtrace)
    else:
        addr = 0
    traceptr.contents = ctypes.py_object.from_address(addr)
python
def _initialize_components(self, kwargs):
    """initialize the various components using the supplied \*\*kwargs

    Parameters
    ----------
    kwargs: dict
        \*\*kwargs dict as received by __init__()
    """
    tomodir = None
    # load/assign grid
    if 'tomodir' in kwargs:
        # load grid
        tomodir = kwargs.get('tomodir')
        print('importing tomodir {}'.format(tomodir))
        assert os.path.isdir(tomodir)
        grid = CRGrid.crt_grid(
            tomodir + os.sep + 'grid' + os.sep + 'elem.dat',
            tomodir + os.sep + 'grid' + os.sep + 'elec.dat',
        )
        self.grid = grid
    elif 'grid' in kwargs:
        self.grid = kwargs.get('grid')
    elif 'elem_file' in kwargs and 'elec_file' in kwargs:
        grid = CRGrid.crt_grid()
        grid.load_grid(
            kwargs['elem_file'],
            kwargs['elec_file'],
        )
        self.grid = grid
    else:
        raise Exception(
            'You must provide either a grid instance or ' +
            'elem_file/elec_file file paths'
        )

    crmod_cfg = kwargs.get('crmod_cfg', CRcfg.crmod_config())
    self.crmod_cfg = crmod_cfg

    crtomo_cfg = kwargs.get('crtomo_cfg', CRcfg.crtomo_config())
    self.crtomo_cfg = crtomo_cfg

    parman = kwargs.get('parman', pM.ParMan(self.grid))
    self.parman = parman

    nodeman = kwargs.get('nodeman', nM.NodeMan(self.grid))
    self.nodeman = nodeman

    configs_abmn = kwargs.get('configs_abmn', None)
    config = cConf.ConfigManager(
        nr_of_electrodes=self.grid.nr_of_electrodes
    )
    if configs_abmn is not None:
        config.add_to_configs(configs_abmn)
    self.configs = config

    config_file = kwargs.get('config_file', None)
    if config_file is not None:
        self.configs.load_crmod_config(config_file)

    voltage_file = kwargs.get('volt_file', None)
    if voltage_file is not None:
        cids = self.configs.load_crmod_volt(voltage_file)
        self.assignments['measurements'] = cids

    self.plot = PlotManager.plotManager(
        grid=self.grid,
        nm=self.nodeman,
        pm=self.parman,
    )

    # if we load from a tomodir, also load configs and inversion results
    if tomodir is not None:
        print('importing tomodir results')
        # forward configurations
        config_file = tomodir + os.sep + 'config' + os.sep + 'config.dat'
        if os.path.isfile(config_file):
            self.configs.load_crmod_config(config_file)
        # load inversion results
        self.read_inversion_results(tomodir)
java
public SipTransaction resendWithAuthorization(ResponseEvent event) {
    Response response = event.getResponse();
    int status = response.getStatusCode();

    if ((status == Response.UNAUTHORIZED) || (status == Response.PROXY_AUTHENTICATION_REQUIRED)) {
        try {
            // modify the request to include user authorization info and resend
            synchronized (dialogLock) {
                Request msg = getLastSentNotify();

                msg = phone.processAuthChallenge(response, msg);
                if (msg == null) {
                    setErrorMessage("PresenceNotifySender: Error responding to authentication challenge: "
                            + phone.getErrorMessage());
                    return null;
                }

                // bump up the sequence number
                CSeqHeader hdr = (CSeqHeader) msg.getHeader(CSeqHeader.NAME);
                long cseq = hdr.getSeqNumber();
                hdr.setSeqNumber(cseq + 1);

                // send the message
                SipTransaction transaction = phone.sendRequestWithTransaction(msg, false, dialog);
                if (transaction == null) {
                    setErrorMessage("Error resending NOTIFY with authorization: " + phone.getErrorMessage());
                    return null;
                }

                dialog = transaction.getClientTransaction().getDialog();
                setLastSentNotify(msg);

                LOG.trace("Resent REQUEST: {}", msg);
                return transaction;
            }
        } catch (Exception ex) {
            setErrorMessage("Exception resending NOTIFY with authorization: " + ex.getClass().getName()
                    + ": " + ex.getMessage());
        }
    }

    return null;
}
python
def dicom2db(file_path, file_type, is_copy, step_id, db_conn, sid_by_patient=False,
             pid_in_vid=False, visit_in_path=False, rep_in_path=False):
    """Extract some meta-data from a DICOM file and store in a DB.

    Arguments:
    :param file_path: File path.
    :param file_type: File type (should be 'DICOM').
    :param is_copy: Indicate if this file is a copy.
    :param step_id: Step ID
    :param db_conn: Database connection.
    :param sid_by_patient: Rarely, a data set might use study IDs which are unique by patient
        (not for the whole study). E.g.: LREN data. In such a case, you have to enable this flag.
        This will use PatientID + StudyID as a session ID.
    :param pid_in_vid: Rarely, a data set might mix patient IDs and visit IDs. E.g.: LREN data.
        In such a case, you have to enable this flag. This will try to split PatientID into
        VisitID and PatientID.
    :param visit_in_path: Enable this flag to get the visit ID from the folder hierarchy instead
        of DICOM meta-data (e.g. can be useful for PPMI).
    :param rep_in_path: Enable this flag to get the repetition ID from the folder hierarchy
        instead of DICOM meta-data (e.g. can be useful for PPMI).
    :return: A dictionary containing the following IDs: participant_id, visit_id, session_id,
        sequence_type_id, sequence_id, repetition_id, file_id.
    """
    global conn
    conn = db_conn
    tags = dict()
    logging.info("Extracting DICOM headers from '%s'" % file_path)

    try:
        dcm = dicom.read_file(file_path)
        dataset = db_conn.get_dataset(step_id)

        tags['participant_id'] = _extract_participant(dcm, dataset, pid_in_vid)
        if visit_in_path:
            tags['visit_id'] = _extract_visit_from_path(
                dcm, file_path, pid_in_vid, sid_by_patient, dataset, tags['participant_id'])
        else:
            tags['visit_id'] = _extract_visit(
                dcm, dataset, tags['participant_id'], sid_by_patient, pid_in_vid)
        tags['session_id'] = _extract_session(dcm, tags['visit_id'])
        tags['sequence_type_id'] = _extract_sequence_type(dcm)
        tags['sequence_id'] = _extract_sequence(tags['session_id'], tags['sequence_type_id'])
        if rep_in_path:
            tags['repetition_id'] = _extract_repetition_from_path(
                dcm, file_path, tags['sequence_id'])
        else:
            tags['repetition_id'] = _extract_repetition(dcm, tags['sequence_id'])
        tags['file_id'] = extract_dicom(
            file_path, file_type, is_copy, tags['repetition_id'], step_id)
    except InvalidDicomError:
        logging.warning("%s is not a DICOM file !" % step_id)
    except IntegrityError:
        # TODO: properly deal with concurrency problems
        logging.warning("A problem occurred with the DB ! A rollback will be performed...")
        conn.db_session.rollback()

    return tags
java
public static Map<String, Object> from(Pair... pairs) {
    Map<String, Object> map = new HashMap<String, Object>(pairs.length);
    for (Pair p : pairs) {
        map.put(p.key, p.value);
    }
    return map;
}
python
def init(name, languages, run):
    """Initializes your CONFIG_FILE for the current submission."""
    contents = [file_name for file_name in glob.glob("*.*")
                if file_name != "brains.yaml"]

    with open(CONFIG_FILE, "w") as output:
        output.write(yaml.safe_dump({
            "run": run,
            "name": name,
            "languages": languages,
            # automatically insert all root files into contents
            "contents": contents,
        }, default_flow_style=False))

    print ""
    cprint("Automatically including the following files in brain contents:", "cyan")
    for file_name in contents:
        print "\t", file_name
    print ""
    cprint("done! brains.yaml created", 'green')
python
def _parse_message(self, data):
    """
    Parses the raw message from the device.

    :param data: message data
    :type data: string

    :raises: :py:class:`~alarmdecoder.util.InvalidMessageError`
    """
    try:
        _, values = data.split(':')
        self.serial_number, self.value = values.split(',')
        self.value = int(self.value, 16)

        is_bit_set = lambda b: self.value & (1 << (b - 1)) > 0

        # Bit 1 = unknown
        self.battery = is_bit_set(2)
        self.supervision = is_bit_set(3)
        # Bit 4 = unknown
        self.loop[2] = is_bit_set(5)
        self.loop[1] = is_bit_set(6)
        self.loop[3] = is_bit_set(7)
        self.loop[0] = is_bit_set(8)
    except ValueError:
        raise InvalidMessageError('Received invalid message: {0}'.format(data))
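The bit layout decoded above can be sanity-checked standalone; the value 0x02 below is a hypothetical status word, not taken from real device output:

    value = 0x02  # hypothetical status word
    is_bit_set = lambda b: value & (1 << (b - 1)) > 0
    print(is_bit_set(2))  # True  -> battery flag
    print(is_bit_set(3))  # False -> supervision flag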
java
@Override
public void handleApplicationNotification(final PreloaderNotification n) {
    if (n instanceof MessageNotification) {
        this.messageText.setText(((MessageNotification) n).getMessage());
    } else if (n instanceof ProgressNotification) {
        handleProgressNotification((ProgressNotification) n);
    }
}
python
def add_locus(self, inlocus):
    """Adds a locus to our loci, but does not yet update our locus sets."""
    if self.use_direction == True and inlocus.use_direction == False:
        sys.stderr.write("ERROR if using the direction in Loci, then every "
                         "locus added needs use_direction to be True\n")
        sys.exit()
    self.loci.append(inlocus)
    return
python
def _get_term_object(filter_name, term_name, pillar_key='acl', pillarenv=None,
                     saltenv=None, merge_pillar=True, **term_fields):
    '''
    Return an instance of the ``_Term`` class given the term options.
    '''
    log.debug('Generating config for term %s under filter %s', term_name, filter_name)
    term = _Term()
    term.name = term_name
    term_opts = {}
    if merge_pillar:
        term_opts = get_term_pillar(filter_name, term_name,
                                    pillar_key=pillar_key,
                                    saltenv=saltenv,
                                    pillarenv=pillarenv)
        log.debug('Merging with pillar data:')
        log.debug(term_opts)
        term_opts = _clean_term_opts(term_opts)
        log.debug('Cleaning up pillar data:')
        log.debug(term_opts)
    log.debug('Received processing opts:')
    log.debug(term_fields)
    log.debug('Cleaning up processing opts:')
    term_fields = _clean_term_opts(term_fields)
    log.debug(term_fields)
    log.debug('Final term opts:')
    term_opts.update(term_fields)
    log.debug(term_fields)
    for field, value in six.iteritems(term_opts):
        # setting the field attributes to the term instance of _Term
        setattr(term, field, value)
    log.debug('Term config:')
    log.debug(six.text_type(term))
    return term
python
def fetch(url, binary, outfile, noprint, rendered):
    '''
    Fetch a specified URL's content, and output it to the console.
    '''
    with chrome_context.ChromeContext(binary=binary) as cr:
        resp = cr.blocking_navigate_and_get_source(url)

        if rendered:
            resp['content'] = cr.get_rendered_page_source()
            resp['binary'] = False
            resp['mimie'] = 'text/html'

    if not noprint:
        if resp['binary'] is False:
            print(resp['content'])
        else:
            print("Response is a binary file")
            print("Cannot print!")

    if outfile:
        with open(outfile, "wb") as fp:
            if resp['binary']:
                fp.write(resp['content'])
            else:
                fp.write(resp['content'].encode("UTF-8"))
java
public TaskLevelPolicyChecker getTaskLevelPolicyChecker(TaskState taskState, int index) throws Exception {
    return TaskLevelPolicyCheckerBuilderFactory.newPolicyCheckerBuilder(taskState, index).build();
}
java
boolean hasPointFeatures() {
    for (int geometry = getFirstGeometry(); geometry != -1; geometry = getNextGeometry(geometry)) {
        if (!Geometry.isMultiPath(getGeometryType(geometry)))
            return true;
    }
    return false;
}
python
def sync_remote_to_local(force="no"):
    """
    Replace your local db with your remote

    Example: sync_remote_to_local:force=yes
    """
    assert "local_wp_dir" in env, "Missing local_wp_dir in env"

    if force != "yes":
        message = "This will replace your local database with your "\
                  "remote, are you sure [y/n]"
        answer = prompt(message, "y")
        if answer != "y":
            logger.info("Sync stopped")
            return

    init_tasks()  # Bootstrap fabrik

    remote_file = "sync_%s.sql" % int(time.time()*1000)
    remote_path = "/tmp/%s" % remote_file

    with env.cd(paths.get_current_path()):
        env.run("wp db export %s" % remote_path)

    local_wp_dir = env.local_wp_dir
    local_path = "/tmp/%s" % remote_file

    # Download sync file
    get(remote_path, local_path)

    with lcd(local_wp_dir):
        elocal("wp db import %s" % local_path)

    # Cleanup
    env.run("rm %s" % remote_path)
    elocal("rm %s" % local_path)
java
public static long fastLongMix(long k) {
    // phi = 2^64 / goldenRatio
    final long phi = 0x9E3779B97F4A7C15L;
    long h = k * phi;
    h ^= h >>> 32;
    return h ^ (h >>> 16);
}
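A Python rendering of the same mix, assuming 64-bit wraparound is emulated with an explicit mask (Python ints are unbounded, unlike Java's long):

    MASK64 = (1 << 64) - 1
    PHI = 0x9E3779B97F4A7C15  # 2**64 / golden ratio

    def fast_long_mix(k: int) -> int:
        h = (k * PHI) & MASK64  # multiply with 64-bit overflow semantics
        h ^= h >> 32            # fold high bits into low bits
        return h ^ (h >> 16)

    print(hex(fast_long_mix(1)))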
python
def deparse_code2str(code, out=sys.stdout, version=None,
                     debug_opts=DEFAULT_DEBUG_OPTS,
                     code_objects={}, compile_mode='exec',
                     is_pypy=IS_PYPY, walker=SourceWalker):
    """Return the deparsed text for a Python code object.

    `out` is where any intermediate output for assembly or tree
    output will be sent.
    """
    return deparse_code(version, code, out,
                        showasm=debug_opts.get('asm', None),
                        showast=debug_opts.get('tree', None),
                        showgrammar=debug_opts.get('grammar', None),
                        code_objects=code_objects,
                        compile_mode=compile_mode,
                        is_pypy=is_pypy,
                        walker=walker).text
python
def merge_path_config(configs, config_dir_override):
    """
    Given a list of PathConfig objects, merges them into a single PathConfig,
    giving priority in the order of the configs (first has highest priority).
    """
    config_dir = None
    log_dir = None
    data_dir = None
    key_dir = None
    policy_dir = None

    for config in reversed(configs):
        if config.config_dir is not None:
            config_dir = config.config_dir
        if config.log_dir is not None:
            log_dir = config.log_dir
        if config.data_dir is not None:
            data_dir = config.data_dir
        if config.key_dir is not None:
            key_dir = config.key_dir
        if config.policy_dir is not None:
            policy_dir = config.policy_dir

    if config_dir_override is not None:
        config_dir = config_dir_override

    return PathConfig(
        config_dir=config_dir,
        log_dir=log_dir,
        data_dir=data_dir,
        key_dir=key_dir,
        policy_dir=policy_dir)
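A usage sketch; PathConfig is defined elsewhere in the package, so the namedtuple below only stands in for the real class:

    from collections import namedtuple

    # Stand-in for the real PathConfig class (assumed field set).
    PathConfig = namedtuple(
        'PathConfig', 'config_dir log_dir data_dir key_dir policy_dir')

    high = PathConfig('/etc/app', None, None, None, None)
    low = PathConfig('/opt/app', '/var/log/app', None, None, None)

    # Iterating in reverse lets earlier configs overwrite later ones,
    # which is how "first has highest priority" is implemented.
    merged = merge_path_config([high, low], config_dir_override=None)
    print(merged.config_dir, merged.log_dir)  # /etc/app /var/log/app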
java
@PostMapping(value = "/{entityTypeId}/{id}", params = "_method=GET", produces = APPLICATION_JSON_VALUE)
@ResponseBody
public Map<String, Object> retrieveEntity(
        @PathVariable("entityTypeId") String entityTypeId,
        @PathVariable("id") String untypedId,
        @Valid @RequestBody EntityTypeRequest request) {
    Set<String> attributesSet = toAttributeSet(request != null ? request.getAttributes() : null);
    Map<String, Set<String>> attributeExpandSet = toExpandMap(request != null ? request.getExpand() : null);

    EntityType meta = dataService.getEntityType(entityTypeId);
    Object id = getTypedValue(untypedId, meta.getIdAttribute());

    Entity entity = dataService.findOneById(entityTypeId, id);
    if (entity == null) {
        throw new UnknownEntityException(meta, untypedId);
    }

    return getEntityAsMap(entity, meta, attributesSet, attributeExpandSet);
}
python
def select_row(self, steps):
    """Select row in list widget based on a number of steps with direction.

    Steps can be positive (next rows) or negative (previous rows).
    """
    row = self.current_row() + steps
    if 0 <= row < self.count():
        self.set_current_row(row)
python
def from_python_file(cls, python_file, lambdas_path, json_filename: str, stem: str):
    """Builds GrFN object from Python file."""
    with open(python_file, "r") as f:
        pySrc = f.read()
    return cls.from_python_src(pySrc, lambdas_path, json_filename, stem)
python
def error_map_source(self, kwargs_source, x_grid, y_grid, cov_param):
    """
    Variance of the linear source reconstruction in the source plane
    coordinates, computed by the diagonal elements of the covariance matrix
    of the source reconstruction as a sum of the errors of the basis set.

    :param kwargs_source: keyword arguments of source model
    :param x_grid: x-axis of positions to compute error map
    :param y_grid: y-axis of positions to compute error map
    :param cov_param: covariance matrix of linear inversion parameters
    :return: diagonal covariance errors at the positions (x_grid, y_grid)
    """
    error_map = np.zeros_like(x_grid)
    basis_functions, n_source = self.SourceModel.functions_split(
        x_grid, y_grid, kwargs_source)
    basis_functions = np.array(basis_functions)

    if cov_param is not None:
        for i in range(len(error_map)):
            error_map[i] = basis_functions[:, i].T.dot(
                cov_param[:n_source, :n_source]).dot(basis_functions[:, i])
    return error_map
python
def _generate_struct_class_reflection_attributes(self, ns, data_type):
    """
    Generates two class attributes:
      * _all_field_names_: Set of all field names including inherited fields.
      * _all_fields_: List of tuples, where each tuple is (name, validator).

    If a struct has enumerated subtypes, then two additional attributes are
    generated:
      * _field_names_: Set of all field names excluding inherited fields.
      * _fields_: List of tuples, where each tuple is (name, validator),
        excluding inherited fields.

    These are needed because serializing a struct with enumerated subtypes
    requires knowing the fields defined in each level of the hierarchy.
    """
    class_name = class_name_for_data_type(data_type)
    if data_type.parent_type:
        parent_type_class_name = class_name_for_data_type(
            data_type.parent_type, ns)
    else:
        parent_type_class_name = None

    for field in data_type.fields:
        field_name = fmt_var(field.name)
        validator_name = generate_validator_constructor(ns, field.data_type)
        full_validator_name = '{}._{}_validator'.format(class_name, field_name)
        self.emit('{} = {}'.format(full_validator_name, validator_name))
        if field.redactor:
            self._generate_redactor(full_validator_name, field.redactor)

    # Generate `_all_field_names_` and `_all_fields_` for every omitted
    # caller (and public). As an edge case, we union omitted callers with
    # None when the object has no public fields, since we still need to
    # generate the public attributes (`_field_names_`, etc.).
    child_omitted_callers = data_type.get_all_omitted_callers() | {None}
    parent_omitted_callers = data_type.parent_type.get_all_omitted_callers() if \
        data_type.parent_type else set()

    for omitted_caller in sorted(child_omitted_callers | parent_omitted_callers, key=str):
        is_public = omitted_caller is None
        map_name_prefix = '' if is_public else '_{}'.format(omitted_caller)
        caller_in_parent = data_type.parent_type and (
            is_public or omitted_caller in parent_omitted_callers)

        # generate `_all_field_names_`
        names_map_name = '{}_field_names_'.format(map_name_prefix)
        all_names_map_name = '_all{}_field_names_'.format(map_name_prefix)
        if data_type.is_member_of_enumerated_subtypes_tree():
            if is_public or omitted_caller in child_omitted_callers:
                self.generate_multiline_list(
                    [
                        "'%s'" % field.name
                        for field in data_type.fields
                        if field.omitted_caller == omitted_caller
                    ],
                    before='{}.{} = set('.format(class_name, names_map_name),
                    after=')',
                    delim=('[', ']'),
                    compact=False)
            if caller_in_parent:
                self.emit('{0}.{3} = {1}.{3}.union({0}.{2})'
                          .format(class_name, parent_type_class_name,
                                  names_map_name, all_names_map_name))
            else:
                self.emit('{0}.{2} = {0}.{1}'.format(
                    class_name, names_map_name, all_names_map_name))
        else:
            if caller_in_parent:
                before = '{0}.{1} = {2}.{1}.union(set('.format(
                    class_name, all_names_map_name, parent_type_class_name)
                after = '))'
            else:
                before = '{}.{} = set('.format(class_name, all_names_map_name)
                after = ')'

            items = [
                "'%s'" % field.name
                for field in data_type.fields
                if field.omitted_caller == omitted_caller
            ]
            self.generate_multiline_list(
                items, before=before, after=after, delim=('[', ']'),
                compact=False)

        # generate `_all_fields_`
        fields_map_name = '{}_fields_'.format(map_name_prefix)
        all_fields_map_name = '_all{}_fields_'.format(map_name_prefix)
        if data_type.is_member_of_enumerated_subtypes_tree():
            items = []
            for field in data_type.fields:
                if field.omitted_caller != omitted_caller:
                    continue
                var_name = fmt_var(field.name)
                validator_name = '{}._{}_validator'.format(class_name, var_name)
                items.append("('{}', {})".format(var_name, validator_name))
self.generate_multiline_list( items, before='{}.{} = '.format(class_name, fields_map_name), delim=('[', ']'), compact=False, ) if caller_in_parent: self.emit('{0}.{3} = {1}.{3} + {0}.{2}'.format( class_name, parent_type_class_name, fields_map_name, all_fields_map_name)) else: self.emit('{0}.{2} = {0}.{1}'.format( class_name, fields_map_name, all_fields_map_name)) else: if caller_in_parent: before = '{0}.{2} = {1}.{2} + '.format( class_name, parent_type_class_name, all_fields_map_name) else: before = '{}.{} = '.format(class_name, all_fields_map_name) items = [] for field in data_type.fields: if field.omitted_caller != omitted_caller: continue var_name = fmt_var(field.name) validator_name = '{}._{}_validator'.format( class_name, var_name) items.append("('{}', {})".format(var_name, validator_name)) self.generate_multiline_list( items, before=before, delim=('[', ']'), compact=False) self.emit()
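For orientation, a hedged sketch of the kind of code the routine above emits for a plain struct (no enumerated subtypes); the struct names and validators are illustrative, not taken from a real spec:

# Illustrative output for a struct FileMetadata(name, size)
# inheriting from Metadata:
FileMetadata._name_validator = bv.String()
FileMetadata._size_validator = bv.UInt64()
FileMetadata._all_field_names_ = Metadata._all_field_names_.union(set([
    'name',
    'size',
]))
FileMetadata._all_fields_ = Metadata._all_fields_ + [
    ('name', FileMetadata._name_validator),
    ('size', FileMetadata._size_validator),
]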
java
public static Class getTypeClass(Type type) {
    Class typeClass = null;
    if (type instanceof Class) {
        typeClass = (Class) type;
    } else if (type instanceof ParameterizedType) {
        typeClass = (Class) ((ParameterizedType) type).getRawType();
    } else if (type instanceof GenericArrayType) {
        Class<?> arrayComponent = getTypeClass(((GenericArrayType) type).getGenericComponentType());
        if (arrayComponent != null) {
            typeClass = Array.newInstance(arrayComponent, 0).getClass();
        }
    }
    return typeClass;
}
java
public OAuthAccessToken getAccessToken(InputStream inputStream) throws JinxException { JinxUtils.validateParams(inputStream); Properties legacyTokenProperties = loadLegacyTokenProperties(inputStream); Map<String, String> params = new TreeMap<>(); params.put("method", "flickr.auth.oauth.getAccessToken"); params.put("api_key", jinx.getApiKey()); params.put("auth_token", legacyTokenProperties.getProperty("token")); params.put("format", "json"); params.put("nojsoncallback", "1"); params.put("api_sig", sign(params, jinx.getApiSecret())); StringBuilder sb = new StringBuilder(JinxConstants.REST_ENDPOINT).append('?'); for (String key : params.keySet()) { sb.append(key).append('=').append(params.get(key)).append('&'); } sb.deleteCharAt(sb.lastIndexOf("&")); BufferedReader in = null; StringBuilder json = new StringBuilder(); try { HttpURLConnection request = (HttpURLConnection) new URL(sb.toString()).openConnection(); request.connect(); in = new BufferedReader(new InputStreamReader(request.getInputStream())); String line; while ((line = in.readLine()) != null) { json.append(line); } } catch (Exception e) { throw new JinxException("Error when converting legacy token to OAuth token.", e); } finally { JinxUtils.close(in); } OAuthExchangedToken exchangedToken = new Gson().fromJson(json.toString(), OAuthExchangedToken.class); if (!exchangedToken.getStat().equals("ok")) { throw new JinxException("Flickr reported an error.", null, exchangedToken); } OAuthAccessToken oAuthAccessToken = new OAuthAccessToken(); oAuthAccessToken.setOauthToken(exchangedToken.getOAuthToken()); oAuthAccessToken.setOauthTokenSecret(exchangedToken.getOAuthTokenSecret()); oAuthAccessToken.setUsername(legacyTokenProperties.getProperty("username")); oAuthAccessToken.setFullname(legacyTokenProperties.getProperty("fullname")); oAuthAccessToken.setNsid(legacyTokenProperties.getProperty("nsid")); return oAuthAccessToken; }
python
def unassigned(data, as_json=False):
    """Return the IP addresses that are allocated but have no users assigned.

    https://sendgrid.com/docs/API_Reference/api_v3.html#ip-addresses
    The /ips rest endpoint returns information about the IP addresses
    and the usernames assigned to each IP.

    data: response.body from sg.client.ips.get()
    as_json: False -> return a list of unassigned IPs
             True  -> return a JSON object

    Example::

        sg = sendgrid.SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))
        params = {'subuser': 'test_string', 'ip': 'test_string', 'limit': 1,
                  'exclude_whitelabels': 'true', 'offset': 1}
        response = sg.client.ips.get(query_params=params)
        if response.status_code == 200:
            data = response.body
            unused = unassigned(data)
    """
    no_subusers = set()

    if not isinstance(data, list):
        return format_ret(no_subusers, as_json=as_json)

    for current in data:
        if len(current["subusers"]) == 0:
            no_subusers.add(current["ip"])

    return format_ret(no_subusers, as_json=as_json)
python
def pipe(self, other_task): """ Add a pipe listener to the execution of this task. The output of this task is required to be an iterable. Each item in the iterable will be queued as the sole argument to an execution of the listener task. Can also be written as:: pipeline = task1 | task2 """ other_task._source = self self._listeners.append(PipeListener(other_task)) return other_task
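A hedged usage sketch; the `task` decorator and task names below are illustrative stand-ins for however the surrounding framework wraps callables:

@task
def list_urls():
    return ['https://a.example', 'https://b.example']

@task
def crawl(url):
    print('crawling', url)

# Each URL returned by list_urls is queued as the sole argument
# to an execution of crawl; the two forms are equivalent.
pipeline = list_urls.pipe(crawl)
pipeline = list_urls | crawl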
python
def handle(self): """Parse the RPC, make the call, and pickle up the return value.""" data = cPickle.load(self.rfile) # pylint: disable=E1101 method = data.pop('method') try: retval = getattr(self, 'do_{0}'.format(method))(**data) except Exception as e: # All exceptions should be passed to the client retval = e cPickle.dump(retval, self.wfile, # pylint: disable=E1101 cPickle.HIGHEST_PROTOCOL)
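For context, a minimal sketch of a matching client under the same wire protocol (pickle a dict whose 'method' key selects a do_* handler, then read back the pickled reply); host, port, and function name are assumptions, not part of the original module:

import cPickle
import socket

def call_remote(host, port, method, **kwargs):
    sock = socket.create_connection((host, port))
    wfile = sock.makefile('wb')
    rfile = sock.makefile('rb')
    # Send the request dict with the method name embedded.
    cPickle.dump(dict(kwargs, method=method), wfile,
                 cPickle.HIGHEST_PROTOCOL)
    wfile.flush()
    retval = cPickle.load(rfile)
    if isinstance(retval, Exception):
        raise retval  # the server passes exceptions back verbatim
    return retval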
python
def mtz(n,c): """mtz: Miller-Tucker-Zemlin's model for the (asymmetric) traveling salesman problem (potential formulation) Parameters: - n: number of nodes - c[i,j]: cost for traversing arc (i,j) Returns a model, ready to be solved. """ model = Model("atsp - mtz") x,u = {},{} for i in range(1,n+1): u[i] = model.addVar(lb=0, ub=n-1, vtype="C", name="u(%s)"%i) for j in range(1,n+1): if i != j: x[i,j] = model.addVar(vtype="B", name="x(%s,%s)"%(i,j)) for i in range(1,n+1): model.addCons(quicksum(x[i,j] for j in range(1,n+1) if j != i) == 1, "Out(%s)"%i) model.addCons(quicksum(x[j,i] for j in range(1,n+1) if j != i) == 1, "In(%s)"%i) for i in range(1,n+1): for j in range(2,n+1): if i != j: model.addCons(u[i] - u[j] + (n-1)*x[i,j] <= n-2, "MTZ(%s,%s)"%(i,j)) model.setObjective(quicksum(c[i,j]*x[i,j] for (i,j) in x), "minimize") model.data = x,u return model
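A small hedged example of driving the model above with PySCIPOpt, on a 4-city instance with made-up arc costs:

n = 4
c = {(i, j): abs(i - j) + 1
     for i in range(1, n + 1) for j in range(1, n + 1) if i != j}

model = mtz(n, c)
model.optimize()
x, u = model.data
# Arcs selected in the optimal tour.
tour = [(i, j) for (i, j) in x if model.getVal(x[i, j]) > 0.5]
print("cost:", model.getObjVal(), "arcs:", sorted(tour))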
python
def discriminator(self, imgs, y):
    """Return (b, 1) logits."""
    yv = y
    y = tf.reshape(y, [-1, 1, 1, 10])
    # Stride-2 convolutions halve the spatial size (28 -> 14 -> 7),
    # which is what the label tiles below assume.
    with argscope(Conv2D, kernel_size=5, strides=2):
        l = (LinearWrap(imgs)
             .ConcatWith(tf.tile(y, [1, 28, 28, 1]), 3)
             .Conv2D('conv0', 11)
             .tf.nn.leaky_relu()
             .ConcatWith(tf.tile(y, [1, 14, 14, 1]), 3)
             .Conv2D('conv1', 74)
             .BatchNorm('bn1')
             .tf.nn.leaky_relu()
             .apply(batch_flatten)
             .ConcatWith(yv, 1)
             .FullyConnected('fc1', 1024, activation=tf.identity)
             .BatchNorm('bn2')
             .tf.nn.leaky_relu()
             .ConcatWith(yv, 1)
             .FullyConnected('fct', 1, activation=tf.identity)())
    return l
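The label-conditioning trick used above (tile the one-hot label to each feature map's spatial size, then concatenate along the channel axis) can be sketched independently of tensorpack; a minimal TF 1.x-style helper, offered as a sketch rather than the original API:

import tensorflow as tf

def concat_label(feature_map, y, num_classes=10):
    """Broadcast a one-hot label over spatial dims and concat on channels."""
    h, w = feature_map.shape.as_list()[1:3]
    y_map = tf.tile(tf.reshape(y, [-1, 1, 1, num_classes]), [1, h, w, 1])
    return tf.concat([feature_map, y_map], axis=3)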
python
def _fulfills_version_string(installed_versions, version_conditions_string,
                             ignore_epoch=False, allow_updates=False):
    '''
    Returns True if any of the installed versions match the specified version
    conditions, otherwise returns False.

    installed_versions
        The installed versions

    version_conditions_string
        The string containing all version conditions, e.g.
        1.2.3-4
        >=1.2.3-4
        >=1.2.3-4, <2.3.4-5
        >=1.2.3-4, <2.3.4-5, !=1.2.4-1

    ignore_epoch : False
        When a package version contains a non-zero epoch (e.g.
        ``1:3.14.159-2.el7``) and a specific version of a package is desired,
        set this option to ``True`` to ignore the epoch when comparing
        versions.

    allow_updates : False
        Allow the package to be updated outside Salt's control (e.g. auto
        updates on Windows). This means a package on the Minion can have a
        newer version than the latest available in the repository without
        enforcing a re-installation of the package. (Only applicable if only
        one strict version condition is specified, e.g. version: 2.0.6~ubuntu3)
    '''
    version_conditions = _parse_version_string(version_conditions_string)
    for installed_version in installed_versions:
        fulfills_all = True
        for operator, version_string in version_conditions:
            if allow_updates and len(version_conditions) == 1 and operator == '==':
                operator = '>='
            fulfills_all = fulfills_all and _fulfills_version_spec(
                [installed_version], operator, version_string,
                ignore_epoch=ignore_epoch)
        if fulfills_all:
            return True
    return False
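A hedged illustration of how the condition string is interpreted, assuming the private helpers above compare versions Debian-style:

# Every condition must hold for a single installed version; True is
# returned as soon as one installed version satisfies all of them.
_fulfills_version_string(['1.2.3-4', '2.0.0-1'],
                         '>=1.2.3-4, <2.3.4-5')   # -> True
_fulfills_version_string(['2.3.4-5'],
                         '>=1.2.3-4, <2.3.4-5')   # -> False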
java
private static BitMatrix encodeLowLevel(DefaultPlacement placement, SymbolInfo symbolInfo,
                                        int width, int height) {
    int symbolWidth = symbolInfo.getSymbolDataWidth();
    int symbolHeight = symbolInfo.getSymbolDataHeight();

    ByteMatrix matrix = new ByteMatrix(symbolInfo.getSymbolWidth(), symbolInfo.getSymbolHeight());

    int matrixY = 0;

    for (int y = 0; y < symbolHeight; y++) {
      // Fill the top edge of each data region with alternating 0 / 1
      int matrixX;
      if ((y % symbolInfo.matrixHeight) == 0) {
        matrixX = 0;
        for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) {
          matrix.set(matrixX, matrixY, (x % 2) == 0);
          matrixX++;
        }
        matrixY++;
      }
      matrixX = 0;
      for (int x = 0; x < symbolWidth; x++) {
        // Fill the left edge of each data region with solid 1
        if ((x % symbolInfo.matrixWidth) == 0) {
          matrix.set(matrixX, matrixY, true);
          matrixX++;
        }
        matrix.set(matrixX, matrixY, placement.getBit(x, y));
        matrixX++;
        // Fill the right edge of each data region with alternating 0 / 1
        if ((x % symbolInfo.matrixWidth) == symbolInfo.matrixWidth - 1) {
          matrix.set(matrixX, matrixY, (y % 2) == 0);
          matrixX++;
        }
      }
      matrixY++;
      // Fill the bottom edge of each data region with solid 1
      if ((y % symbolInfo.matrixHeight) == symbolInfo.matrixHeight - 1) {
        matrixX = 0;
        for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) {
          matrix.set(matrixX, matrixY, true);
          matrixX++;
        }
        matrixY++;
      }
    }

    return convertByteMatrixToBitMatrix(matrix, width, height);
}