language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
// Equality test used during map lookups: delegates to the queried key's equals().
// A null keyInMap is safe — equals(null) returns false by the Object.equals contract.
protected boolean keysEqual(@NotNull Object queriedKey, @Nullable K keyInMap) { return queriedKey.equals(keyInMap); }
java
/**
 * Reads the entire stream as UTF-8 text, line by line.
 * <p>Every line in the result is terminated with {@code "\n"}, including the
 * last one; original terminators ({@code \r\n} vs {@code \n}) are normalized.
 *
 * @param inputStream the stream to consume; always closed, even on error
 *                    (the original closed it only on the success path)
 * @return the decoded text
 * @throws IOException if reading fails
 */
public static String readFromInputStreamIntoString(InputStream inputStream) throws IOException {
    StringBuilder text = new StringBuilder();
    // try-with-resources closes the reader (and the wrapped stream) even when
    // readLine() throws; StandardCharsets.UTF_8 avoids the checked-exception
    // prone "UTF-8" string constant.
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(inputStream, java.nio.charset.StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            text.append(line).append('\n');
        }
    }
    return text.toString();
}
java
/**
 * Converts a solver solution into a reconfiguration plan over the source model.
 * All node transitions are inserted before any VM transitions, so host state
 * changes are scheduled first. The consistency asserts only run with -ea
 * enabled (hence the squid:S3346 suppression).
 *
 * @param s   the solver solution
 * @param src the source model the plan starts from
 * @return the resulting plan
 * @throws SchedulerException if an action cannot be inserted
 */
@Override
@SuppressWarnings("squid:S3346")
public ReconfigurationPlan buildReconfigurationPlan(Solution s, Model src) throws SchedulerException {
    ReconfigurationPlan plan = new DefaultReconfigurationPlan(src);
    for (NodeTransition action : nodeActions) {
        action.insertActions(s, plan);
    }
    for (VMTransition action : vmActions) {
        action.insertActions(s, plan);
    }
    assert plan.isApplyable() : "The following plan cannot be applied:\n" + plan;
    assert checkConsistency(s, plan);
    return plan;
}
java
/**
 * Registers a listener that is notified of connection state changes.
 *
 * @param listener consumer invoked with each new connection state
 * @return a Closeable whose close() unregisters the listener again
 */
public Closeable onConnectionStateChange(final Consumer<BitfinexConnectionStateEnum> listener) {
    connectionStateConsumers.offer(listener);
    return () -> connectionStateConsumers.remove(listener);
}
java
/**
 * Recovers the element enum class of a {@code java.util.EnumSet} by reading
 * its private {@code elementType} field through the cached reflective handle.
 *
 * @param enumSet the EnumSet instance to inspect
 * @return the enum class the set was created for
 * @throws RuntimeException if the field handle could not be initialised
 *         (reflection denied at class-init time) or the field read fails
 */
static Class<?> getElementTypeFromEnumSet(Object enumSet) {
    // Null handle means the reflective lookup failed during initialisation.
    if (__elementTypeFromEnumSet == null) {
        throw new RuntimeException(
            "Could not access (reflection) the private "
                + "field *elementType* (enumClass) from: class java.util.EnumSet");
    }
    try {
        return (Class<?>) __elementTypeFromEnumSet.get(enumSet);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
java
// Evaluates the XPath expression against the node and coerces the result to a Number.
public Number getNumber(Object node, String expression) { return (Number) evalXPath(expression, node, XPathConstants.NUMBER); }
python
def _cls_lookup_dist(cls):
    """Resolve the distribution that provided ``cls``.

    Naive heuristic: probe every dotted prefix of the class's module path
    (longest first) against ``find_pkg_dist``, assuming the module path
    embeds the name of the providing package.
    """
    frags = cls.__module__.split('.')
    for length in range(len(frags), 0, -1):
        dist = find_pkg_dist('.'.join(frags[:length]))
        if dist:
            return dist
java
/**
 * Formats the given date with {@code Z_DATE_FORMAT}.
 *
 * @param date the date to format; may be {@code null}
 * @return the formatted date, or an empty string when {@code date} is {@code null}
 */
public static String dateToString(final Date date) {
    return (date == null) ? "" : Z_DATE_FORMAT.format(date);
}
java
/**
 * Navigates the browser back one page in its history. A pass is reported on
 * success; on any error a failure is reported (with the exception message)
 * and the method returns without rethrowing.
 */
public void goBack() {
    String action = "Going back one page";
    String expected = "Previous page from browser history is loaded";
    try {
        driver.navigate().back();
    } catch (Exception e) {
        // Report and bail out: the step is marked failed instead of throwing.
        reporter.fail(action, expected, "Browser was unable to go back one page. " + e.getMessage());
        log.warn(e);
        return;
    }
    reporter.pass(action, expected, expected);
}
python
def _call(self, x, out=None): """Project ``x`` onto the subspace.""" if out is None: out = x[self.index].copy() else: out.assign(x[self.index]) return out
java
/**
 * Obtains a new {@link Transaction} from the ObjectManagerState. Transaction
 * pacing is applied first: if the log is full the caller is delayed so a
 * checkpoint can free log space before the transaction is handed out.
 */
public final Transaction getTransaction()
    throws ObjectManagerException {
    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
        trace.entry(this, cclass, "getTransaction");

    // If the log is full introduce a delay for a checkpoint before allowing the
    // application to proceed.
    objectManagerState.transactionPacing();

    Transaction transaction = objectManagerState.getTransaction();

    if (Tracing.isAnyTracingEnabled() && trace.isEntryEnabled())
        trace.exit(this, cclass, "getTransaction", "returns transaction=" + transaction + "(Transaction)");
    return transaction;
}
python
def log_level(level_string):
    """Translate a string into a numeric logging level.

    Accepts either a decimal string (returned as an int) or a level name
    such as ``'debug'``/``'DEBUG'`` (resolved against the ``logging`` module).

    >>> log_level('DEBUG') == logging.DEBUG
    True
    >>> log_level('30') == logging.WARNING
    True
    """
    return (int(level_string)
            if level_string.isdigit()
            else getattr(logging, level_string.upper()))
java
// Convenience overload: looks up the code generator registered for the class and delegates.
public static void dynamicCodeGenerate(OutputStream os, Class cls, Charset charset) throws IOException { dynamicCodeGenerate(os, cls, charset, getCodeGenerator(cls)); }
python
def getTotalDiscountedPrice(self):
    """Compute the total discounted price, VAT included.

    Falls back to 0 when either the discounted price or the VAT rate is
    missing/falsy. VAT is expressed as a percentage of the price.

    :return: discounted price plus VAT, as a float
    """
    # `x or 0` replaces the pre-2.5 `x and x or 0` conditional idiom.
    price = float(self.getDiscountedPrice() or 0)
    vat = float(self.getVAT() or 0)
    return price + price * vat / 100
python
def get_subfolder_queries(store, label_store, folders, fid, sid):
    '''Returns [unicode].

    Collects the queries that can be handed to "other" search engines,
    derived from the subtopics of the subfolder identified by ``fid/sid``.
    Only subtopics of type ``text`` or ``manual`` contribute their data.
    '''
    return [data
            for _cid, _subid, _url, stype, data
            in subtopics(store, folders, fid, sid)
            if stype in ('text', 'manual')]
python
def worker_start(obj, queues, name, celery_args):
    """ Start a worker process.

    \b
    CELERY_ARGS: Additional Celery worker command line arguments.
    """
    # ``queues`` is a comma-separated string; each entry becomes a Celery queue.
    try:
        start_worker(queues=queues.split(','),
                     config=obj['config'],
                     name=name,
                     celery_args=celery_args)
    except DataStoreNotConnected:
        # Friendly CLI message instead of a traceback when the store is down.
        click.echo(_style(obj['show_color'],
                          'Cannot connect to the Data Store server. Is the server running?',
                          fg='red', bold=True))
python
def _create_layers(self, n_classes): """Create the layers of the model from self.layers. :param n_classes: number of classes :return: self """ next_layer_feed = tf.reshape(self.input_data, [-1, self.original_shape[0], self.original_shape[1], self.original_shape[2]]) prev_output_dim = self.original_shape[2] # this flags indicates whether we are building the first dense layer first_full = True self.W_vars = [] self.B_vars = [] for i, l in enumerate(self.layers.split(',')): node = l.split('-') node_type = node[0] if node_type == 'conv2d': # ################### # # Convolutional Layer # # ################### # # fx, fy = shape of the convolutional filter # feature_maps = number of output dimensions fx, fy, feature_maps, stride = int(node[1]),\ int(node[2]), int(node[3]), int(node[4]) print('Building Convolutional layer with %d input channels\ and %d %dx%d filters with stride %d' % (prev_output_dim, feature_maps, fx, fy, stride)) # Create weights and biases W_conv = self.weight_variable( [fx, fy, prev_output_dim, feature_maps]) b_conv = self.bias_variable([feature_maps]) self.W_vars.append(W_conv) self.B_vars.append(b_conv) # Convolution and Activation function h_conv = tf.nn.relu( self.conv2d(next_layer_feed, W_conv, stride) + b_conv) # keep track of the number of output dims of the previous layer prev_output_dim = feature_maps # output node of the last layer next_layer_feed = h_conv elif node_type == 'maxpool': # ################# # # Max Pooling Layer # # ################# # ksize = int(node[1]) print('Building Max Pooling layer with size %d' % ksize) next_layer_feed = self.max_pool(next_layer_feed, ksize) elif node_type == 'full': # ####################### # # Densely Connected Layer # # ####################### # if first_full: # first fully connected layer dim = int(node[1]) shp = next_layer_feed.get_shape() tmpx = shp[1].value tmpy = shp[2].value fanin = tmpx * tmpy * prev_output_dim print('Building fully connected layer with %d in units\ and %d out units' % 
(fanin, dim)) W_fc = self.weight_variable([fanin, dim]) b_fc = self.bias_variable([dim]) self.W_vars.append(W_fc) self.B_vars.append(b_fc) h_pool_flat = tf.reshape(next_layer_feed, [-1, fanin]) h_fc = tf.nn.relu(tf.add( tf.matmul(h_pool_flat, W_fc), b_fc)) h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob) prev_output_dim = dim next_layer_feed = h_fc_drop first_full = False else: # not first fully connected layer dim = int(node[1]) W_fc = self.weight_variable([prev_output_dim, dim]) b_fc = self.bias_variable([dim]) self.W_vars.append(W_fc) self.B_vars.append(b_fc) h_fc = tf.nn.relu(tf.add( tf.matmul(next_layer_feed, W_fc), b_fc)) h_fc_drop = tf.nn.dropout(h_fc, self.keep_prob) prev_output_dim = dim next_layer_feed = h_fc_drop elif node_type == 'softmax': # ############# # # Softmax Layer # # ############# # print('Building softmax layer with %d in units and\ %d out units' % (prev_output_dim, n_classes)) W_sm = self.weight_variable([prev_output_dim, n_classes]) b_sm = self.bias_variable([n_classes]) self.W_vars.append(W_sm) self.B_vars.append(b_sm) self.mod_y = tf.add(tf.matmul(next_layer_feed, W_sm), b_sm)
java
/**
 * Whether {@code n} is a function declaration: a named function node sitting
 * in a declaration-parent position. An unnamed function in a declaration
 * parent ({@code export default function() {...}}) is treated as an expression.
 */
public static boolean isFunctionDeclaration(Node n) {
    // Note: There is currently one case where an unnamed function has a declaration parent.
    // `export default function() {...}`
    // In this case we consider the function to be an expression.
    return n.isFunction() && isDeclarationParent(n.getParent()) && isNamedFunction(n);
}
python
def use(self, middleware=None, path='/', method_mask=HTTPMethod.ALL): """ Use the middleware (a callable with parameters res, req, next) upon requests match the provided path. A None path matches every request. Returns the middleware so this method may be used as a decorator. Args: middleware (callable): A function with signature '(req, res)' to be called with every request which matches path. path (str or regex): Object used to test the requests path. If it matches, either by equality or a successful regex match, the middleware is called with the req/res pair. method_mask (Optional[HTTPMethod]): Filters requests by HTTP method. The HTTPMethod enum behaves as a bitmask, so multiple methods may be joined by `+` or `\|`, removed with `-`, or toggled with `^` (e.g. `HTTPMethod.GET + HTTPMethod.POST`, `HTTPMethod.ALL - HTTPMethod.DELETE`). Returns: Returns the provided middleware; a requirement for this method to be used as a decorator. """ # catch decorator pattern if middleware is None: return lambda mw: self.use(mw, path, method_mask) if hasattr(middleware, '__growler_router'): router = getattr(middleware, '__growler_router') if isinstance(router, (types.MethodType,)): router = router() self.add_router(path, router) elif isinstance(type(middleware), RouterMeta): router = middleware._RouterMeta__growler_router() self.add_router(path, router) elif hasattr(middleware, '__iter__'): for mw in middleware: self.use(mw, path, method_mask) else: log.info("{} Using {} on path {}", id(self), middleware, path) self.middleware.add(path=path, func=middleware, method_mask=method_mask) return middleware
python
def array_from_pair_dictionary(
        pair_dict,
        array_fn,
        dtype="float32",
        square_result=False):
    """Convert a dict keyed by ``(row, column)`` pairs into a 2-D array.

    Parameters
    ----------
    pair_dict : dict
        Maps key pairs to values.
    array_fn : function
        Called with ``(shape, dtype)``; must return an empty array.
    dtype : dtype
        NumPy dtype of the result array.
    square_result : bool
        When True, rows and columns both index the union of the key sets.

    Returns the filled array together with the sorted row key list and the
    sorted column key list.
    """
    row_key_set, column_key_set = pair_dict_key_sets(pair_dict)

    if square_result:
        # Shared key universe: identical ordering and index map on both axes.
        all_keys = sorted(row_key_set.union(column_key_set))
        row_key_list = column_key_list = all_keys
        index_of = {key: i for i, key in enumerate(all_keys)}
        row_key_indices = column_key_indices = index_of
    else:
        row_key_list = sorted(row_key_set)
        column_key_list = sorted(column_key_set)
        row_key_indices = {key: i for i, key in enumerate(row_key_list)}
        column_key_indices = {key: i for i, key in enumerate(column_key_list)}

    shape = (len(row_key_indices), len(column_key_indices))
    result = array_fn(shape, dtype)

    for (row_key, column_key), value in pair_dict.items():
        result[row_key_indices[row_key], column_key_indices[column_key]] = value

    return result, row_key_list, column_key_list
java
/**
 * Collects all feature structures of the given type from every view of the
 * CAS, in view-iteration order.
 *
 * @param jCas the CAS whose views are scanned
 * @param type the UIMA type to select
 * @return all matching feature structures across all views
 * @throws RuntimeException wrapping any CASException raised while iterating views
 */
public static <T extends TOP> Collection<T> selectFromAllViews(JCas jCas, Class<T> type) {
    Collection<T> result = new ArrayList<T>();
    try {
        Iterator<JCas> viewIterator = jCas.getViewIterator();
        while (viewIterator.hasNext()) {
            JCas next = viewIterator.next();
            result.addAll(JCasUtil.select(next, type));
        }
        return result;
    } catch (CASException ex) {
        throw new RuntimeException(ex);
    }
}
python
def _init_map(self):
    """stub

    Initializes the record map by delegating to each mixin record type in
    turn (multiple choice, files, feedback) and finally to the superclass.
    The explicit per-class calls are required because the mixins are not
    chained through ``super()`` themselves.
    """
    MultiChoiceAnswerFormRecord._init_map(self)
    FilesAnswerFormRecord._init_map(self)
    FeedbackAnswerFormRecord._init_map(self)
    super(MultiChoiceFeedbackAndFilesAnswerFormRecord, self)._init_map()
java
/**
 * Classifies a session as accept- or connect-side from the type of its
 * owning service.
 *
 * @param session the session to classify
 * @return ACCEPT_DIRECTION for acceptors, CONNECT_DIRECTION for connectors,
 *         otherwise UNKNOWN_DIRECTION
 */
public static String getSessionDirection(IoSessionEx session) {
    final IoServiceEx service = session.getService();
    if (service instanceof IoAcceptorEx || service instanceof AbstractBridgeAcceptor) {
        return ACCEPT_DIRECTION;
    }
    if (service instanceof IoConnectorEx || service instanceof AbstractBridgeConnector) {
        return CONNECT_DIRECTION;
    }
    return UNKNOWN_DIRECTION;
}
java
/**
 * Renders the error page. In production (non-dev) mode only the HTTP error
 * code and plain content are sent, so no stack trace leaks to the client.
 * In dev mode the exception (if any) is logged and its stack trace is
 * HTML-formatted into the error template for easier debugging.
 */
@Override
public void render() {
    // Fixed: `!isDevMode` replaces the yoda-style `false == isDevMode`.
    if (!HuluSetting.isDevMode) {
        Response.sendError(this.errorCode, this.errorContent);
        return;
    }
    if (null != e) {
        StaticLog.error(e);
        if (null == this.errorContent) {
            this.errorContent = StrUtil.EMPTY;
        }
        // Indent stack frames and convert newlines for HTML display.
        String stacktraceContent = ExceptionUtil.stacktraceToString(e)
            .replace("\tat", "&nbsp;&nbsp;&nbsp;&nbsp;\tat")
            .replace("\n", "<br/>\n");
        this.errorContent = StrUtil.format(TEMPLATE_ERROR,
            this.getErrorCode(),
            Request.getServletRequest().getRequestURI(),
            this.errorContent,
            stacktraceContent);
    }
    Response.setStatus(errorCode);
    Response.write(errorContent, Response.CONTENT_TYPE_HTML);
}
python
def get_instance(self):
    """Get the Streams instance that owns this view.

    Returns:
        Instance: Streams instance owning this view.
    """
    response = self.rest_client.make_request(self.instance)
    return Instance(response, self.rest_client)
python
def repository_contributors(self, **kwargs):
    """Return the list of contributors for the project.

    Args:
        all (bool): If True, return all the items, without pagination
        per_page (int): Number of items to retrieve per request
        page (int): ID of the page to return (starts with page 1)
        as_list (bool): If set to False and no pagination option is
            defined, return a generator instead of a list
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabGetError: If the server failed to perform the request

    Returns:
        list: The contributors
    """
    path = '/projects/{0}/repository/contributors'.format(self.get_id())
    return self.manager.gitlab.http_list(path, **kwargs)
python
def sort(m, reverse=False):
    """Return sorted m (default: ascending order).

    A ``matrix`` input yields a ``matrix`` result; any other iterable
    yields a plain sorted list.
    """
    was_matrix = type(m) == matrix
    ordered = sorted(list(m) if was_matrix else m, reverse=reverse)
    return matrix(ordered) if was_matrix else ordered
java
/**
 * Copies the (x, y) components of every SFVec2f into the caller-supplied
 * array, packed pairwise: element i occupies slots 2*i and 2*i + 1.
 *
 * @param valueDestination destination array; must have length >= size() * 2
 */
public void getValue(float[] valueDestination) {
    // Fixed two bugs: the original reassigned the parameter to a fresh array
    // (so the caller never received any data), and it indexed with a stride
    // of 3 for 2-component vectors (wrong layout and potential
    // ArrayIndexOutOfBoundsException on its own allocation).
    for (int i = 0; i < size(); i++) {
        SFVec2f sfVec2f = value.get(i);
        valueDestination[i * 2] = sfVec2f.x;
        valueDestination[i * 2 + 1] = sfVec2f.y;
    }
}
java
// Waits for the next stanza result using the connection's default reply timeout.
public <P extends Stanza> P nextResultOrThrow() throws NoResponseException, XMPPErrorException, InterruptedException, NotConnectedException { return nextResultOrThrow(connection.getReplyTimeout()); }
java
/**
 * Deletes legacy event subscriptions held by the parent execution when the
 * current activity is a receive task nested inside a multi-instance body.
 * Only subscriptions equal to the given one are removed, so a boundary event
 * on the MI body with the same message name is left untouched.
 */
public static void removeLegacySubscriptionOnParent(ExecutionEntity execution, EventSubscriptionEntity eventSubscription) {
    ActivityImpl activity = execution.getActivity();
    if (activity == null) {
        return;
    }
    ActivityBehavior behavior = activity.getActivityBehavior();
    ActivityBehavior parentBehavior = (ActivityBehavior) (activity.getFlowScope() != null ? activity.getFlowScope().getActivityBehavior() : null);
    if (behavior instanceof ReceiveTaskActivityBehavior && parentBehavior instanceof MultiInstanceActivityBehavior) {
        List<EventSubscriptionEntity> parentSubscriptions = execution.getParent().getEventSubscriptions();
        for (EventSubscriptionEntity subscription : parentSubscriptions) {
            // distinguish a boundary event on the mi body with the same message name from the receive task subscription
            if (areEqualEventSubscriptions(subscription, eventSubscription)) {
                subscription.delete();
            }
        }
    }
}
java
// Sets this matrix to a pure rotation described by the axis-angle (delegates to the scalar overload).
public Matrix3d rotation(AxisAngle4d axisAngle) { return rotation(axisAngle.angle, axisAngle.x, axisAngle.y, axisAngle.z); }
python
def packet(self):
    """Returns a string containing the packet's bytes

    No further parts should be added to the packet once this is done.

    On first call the record sections are serialized, then the header fields
    are prepended with insert_short(0, ...) in reverse order, so the final
    byte order is: id (or zero for multicast), flags, then the four section
    counts, followed by the sections themselves.
    """
    if not self.finished:
        self.finished = 1
        for question in self.questions:
            self.write_question(question)
        for answer, time in self.answers:
            self.write_record(answer, time)
        for authority in self.authorities:
            self.write_record(authority, 0)
        for additional in self.additionals:
            self.write_record(additional, 0)

        # Each insert goes to the front of self.data, hence reverse header order.
        self.insert_short(0, len(self.additionals))
        self.insert_short(0, len(self.authorities))
        self.insert_short(0, len(self.answers))
        self.insert_short(0, len(self.questions))
        self.insert_short(0, self.flags)
        if self.multicast:
            # Multicast DNS uses a zero transaction id.
            self.insert_short(0, 0)
        else:
            self.insert_short(0, self.id)
    return b''.join(self.data)
python
def check(text):
    """Suggest the preferred forms."""
    err = "hedging.misc"
    msg = "Hedging. Just say it."
    hedges = [
        "I would argue that",
        ", so to speak",
        "to a certain degree",
    ]
    return existence_check(text, hedges, err, msg)
java
/**
 * Checks that the text of the located element equals the expected text (or
 * the value resolved from a crypto key). Lookup failures and mismatches are
 * reported as Result.Failure through the page's callback; values carrying
 * the crypto prefix are masked in log output.
 *
 * @param pageElement the element to check
 * @param textOrKey literal expected text, or a crypto-prefixed key
 * @throws TechnicalException on technical problems resolving the value
 * @throws FailureException when the comparison fails
 */
protected void checkText(PageElement pageElement, String textOrKey) throws TechnicalException, FailureException {
    WebElement webElement = null;
    String value = getTextOrKey(textOrKey);
    try {
        webElement = Context.waitUntil(ExpectedConditions.presenceOfElementLocated(Utilities.getLocator(pageElement)));
    } catch (final Exception e) {
        new Result.Failure<>(e.getMessage(), Messages.getMessage(Messages.FAIL_MESSAGE_UNABLE_TO_FIND_ELEMENT), true, pageElement.getPage().getCallBack());
    }
    final String innerText = webElement == null ? null : webElement.getText();
    // Mask secure values so secrets never reach the log.
    logger.info("checkText() expected [{}] and found [{}].", textOrKey.startsWith(cryptoService.getPrefix()) ? SECURE_MASK : value, innerText);
    if (!value.equals(innerText)) {
        new Result.Failure<>(innerText, Messages.format(Messages.getMessage(Messages.FAIL_MESSAGE_WRONG_EXPECTED_VALUE), pageElement, textOrKey.startsWith(cryptoService.getPrefix()) ? SECURE_MASK : value, pageElement.getPage().getApplication()), true, pageElement.getPage().getCallBack());
    }
}
python
def raises_gathered(error_type):
    '''For use in tests. Many tests expect a single error to be thrown, and
    want it to be of a specific type. This is a helper method for when that
    type is inside a gathered exception.

    NOTE(review): generator-based (it yields), so it is presumably wrapped
    with a @contextmanager decorator at the definition site — confirm.
    '''
    container = RaisesGatheredContainer()
    try:
        yield container
    except GatheredExceptions as e:
        # Make sure there is exactly one exception.
        if len(e.exceptions) != 1:
            raise
        inner = e.exceptions[0]
        # Make sure the exception is the right type.
        if not isinstance(inner, error_type):
            raise
        # Success: expose the inner exception to the caller.
        container.exception = inner
java
/**
 * Checks whether the annotation mirror represents the annotation type with
 * the given fully qualified class name.
 *
 * @param annotationMirror the mirror to inspect; must not be null
 * @param fqcn the fully qualified class name to compare against; must not be null
 * @return true if the mirror's annotation type prints exactly as that name
 */
public static boolean isAnnotationMirrorOfType(AnnotationMirror annotationMirror, String fqcn) {
    assert annotationMirror != null;
    assert fqcn != null;
    final String typeName = annotationMirror.getAnnotationType().toString();
    return typeName.equals(fqcn);
}
java
public static boolean intersectsLineSegment(Coordinate a, Coordinate b, Coordinate c, Coordinate d) { // check single-point segment: these never intersect if ((a.getX() == b.getX() && a.getY() == b.getY()) || (c.getX() == d.getX() && c.getY() == d.getY())) { return false; } double c1 = cross(a, c, a, b); double c2 = cross(a, b, c, d); if (c1 == 0 && c2 == 0) { // colinear, only intersecting if overlapping (touch is ok) double xmin = Math.min(a.getX(), b.getX()); double ymin = Math.min(a.getY(), b.getY()); double xmax = Math.max(a.getX(), b.getX()); double ymax = Math.max(a.getY(), b.getY()); // check first point of last segment in bounding box of first segment if (c.getX() > xmin && c.getX() < xmax && c.getY() > ymin && c.getY() < ymax) { return true; // check last point of last segment in bounding box of first segment } else if (d.getX() > xmin && d.getX() < xmax && d.getY() > ymin && d.getY() < ymax) { return true; // check same segment } else { return c.getX() >= xmin && c.getX() <= xmax && c.getY() >= ymin && c.getY() <= ymax & d.getX() >= xmin && d.getX() <= xmax && d.getY() >= ymin && d.getY() <= ymax; } } if (c2 == 0) { // segments are parallel but not colinear return false; } // not parallel, classical test double u = c1 / c2; double t = cross(a, c, c, d) / c2; return (t > 0) && (t < 1) && (u > 0) && (u < 1); }
java
/**
 * Hadoop-style concat implemented with GCS compose. Because compose
 * overwrites its destination while Hadoop concat appends, the target itself
 * is prepended to every partition of sources. Sources are partitioned into
 * groups of MAX_COMPOSE_OBJECTS - 1 so each compose call (target + group)
 * stays within the compose limit.
 *
 * @param trg the target path appended to
 * @param psrcs the source paths; must be non-empty and must not contain the target
 * @throws IOException on storage failures
 */
@Override
public void concat(Path trg, Path[] psrcs) throws IOException {
    logger.atFine().log("GHFS.concat: %s, %s", trg, lazy(() -> Arrays.toString(psrcs)));
    checkArgument(psrcs.length > 0, "psrcs must have at least one source");
    URI trgPath = getGcsPath(trg);
    List<URI> srcPaths = Arrays.stream(psrcs).map(this::getGcsPath).collect(toImmutableList());
    checkArgument(!srcPaths.contains(trgPath), "target must not be contained in sources");
    List<List<URI>> partitions = Lists.partition(srcPaths, GoogleCloudStorage.MAX_COMPOSE_OBJECTS - 1);
    logger.atFine().log("GHFS.concat: %s, %d partitions", trg, partitions.size());
    for (List<URI> partition : partitions) {
        // We need to include the target in the list of sources to compose since
        // the GCS FS compose operation will overwrite the target, whereas the Hadoop
        // concat operation appends to the target.
        List<URI> sources = Lists.newArrayList(trgPath);
        sources.addAll(partition);
        logger.atFine().log("GHFS.concat compose: %s, %s", trgPath, sources);
        getGcsFs().compose(sources, trgPath, CreateFileOptions.DEFAULT_CONTENT_TYPE);
    }
    logger.atFine().log("GHFS.concat:=> ");
}
java
/**
 * Registers a phase, moving it to the end of the phase order if it was
 * already present (remove-then-add keeps the list duplicate-free).
 *
 * @param phase the phase name
 * @return this builder, for chaining
 */
public SimulationBuilder phase(String phase) { phases.remove(phase); phases.add(phase); return this; }
java
public static boolean isValidDoubleBondConfiguration(IAtomContainer container, IBond bond) { //org.openscience.cdk.interfaces.IAtom[] atoms = bond.getAtoms(); List<IAtom> connectedAtoms = container.getConnectedAtomsList(bond.getBegin()); IAtom from = null; for (IAtom connectedAtom : connectedAtoms) { if (!connectedAtom.equals(bond.getEnd())) { from = connectedAtom; } } boolean[] array = new boolean[container.getBondCount()]; for (int i = 0; i < array.length; i++) { array[i] = true; } if (isStartOfDoubleBond(container, bond.getBegin(), from, array) && isEndOfDoubleBond(container, bond.getEnd(), bond.getBegin(), array) && !bond.getFlag(CDKConstants.ISAROMATIC)) { return (true); } else { return (false); } }
java
/**
 * Creates a mutable set containing the given items (duplicates collapse).
 *
 * @param items the items to add; must not be null
 * @param <T> the element type
 * @return a new mutable HashSet holding the distinct items
 */
@SafeVarargs  // items array is only read, never stored or exposed
public static <T> Set<T> asSet(T... items) {
    Set<T> set = new HashSet<T>();
    // Collections.addAll replaces the hand-rolled loop.
    java.util.Collections.addAll(set, items);
    return set;
}
python
def open(self, mode='rb'):
    """Open the underlying file for reading via the storage backend.

    :param mode: file mode forwarded to the storage backend.
    :rtype: django.core.files.storage.File
    """
    return self.storage.open(self.relative_name, mode=mode)
python
def _load_config(self):
    """Load per-account configuration from subdirectories of ``self._conf_dir``.

    Each subdirectory whose name is a purely numeric account ID (matched by
    ``self.acct_id_re``) is loaded via ``_load_account``. Afterwards an
    account-name -> account-ID index is built for fast lookup by name.
    """
    logger.debug(
        'Listing per-account config subdirectories in %s', self._conf_dir
    )
    for acct_id in os.listdir(self._conf_dir):
        path = os.path.join(self._conf_dir, acct_id)
        # skip if not a directory
        if not os.path.isdir(path):
            continue
        # skip if doesn't match ^[0-9]+$
        if not self.acct_id_re.match(acct_id):
            continue
        # call _load_account specifying the directory name (acct ID) & path
        self._load_account(acct_id, path)
    # Once all configuration is loaded, build a dict of Account Name to
    # Account ID (``self._acct_name_to_id``) for faster access to configs
    # by name. Unnamed accounts are skipped.
    for acct_id, data in self._config.items():
        if data['name'] is None:
            continue
        self._acct_name_to_id[data['name']] = acct_id
python
def generate_pdfa(
    pdf_pages,
    output_file,
    compression,
    log,
    threads=1,
    pdf_version='1.5',
    pdfa_part='2',
):
    """Generate a PDF/A.

    The pdf_pages, a list files, will be merged into output_file.

    One or more PDF files may be merged. One of the files in this list must
    be a pdfmark file that provides Ghostscript with details on how to
    perform the PDF/A conversion. By default with we pick PDF/A-2b, but this
    works for 1 or 3.

    compression can be 'jpeg', 'lossless', or an empty string. In 'jpeg',
    Ghostscript is instructed to convert color and grayscale images to DCT
    (JPEG encoding). In 'lossless' Ghostscript is told to convert images to
    Flate (lossless/PNG). If the parameter is omitted Ghostscript is left to
    make its own decisions about how to encode images; it appears to use a
    heuristic to decide how to encode images.

    As of Ghostscript 9.25, we support passthrough JPEG which allows
    Ghostscript to avoid transcoding images entirely. (The feature was added
    in 9.23 but broken, and the 9.24 release of Ghostscript had regressions,
    so we don't support it until 9.25.)
    """
    compression_args = []
    if compression == 'jpeg':
        compression_args = [
            "-dAutoFilterColorImages=false",
            "-dColorImageFilter=/DCTEncode",
            "-dAutoFilterGrayImages=false",
            "-dGrayImageFilter=/DCTEncode",
        ]
    elif compression == 'lossless':
        compression_args = [
            "-dAutoFilterColorImages=false",
            "-dColorImageFilter=/FlateEncode",
            "-dAutoFilterGrayImages=false",
            "-dGrayImageFilter=/FlateEncode",
        ]
    else:
        compression_args = [
            "-dAutoFilterColorImages=true",
            "-dAutoFilterGrayImages=true",
        ]

    # Older versions of Ghostscript expect a leading slash in
    # sColorConversionStrategy, newer ones should not have it. See Ghostscript
    # git commit fe1c025d.
    # NOTE(review): this is a lexicographic string comparison; it misorders
    # two-digit majors (e.g. '10.0' >= '9.19' is False) — confirm version()
    # can never report Ghostscript 10+ here.
    strategy = 'RGB' if version() >= '9.19' else '/RGB'

    if version() == '9.23':
        # 9.23: new feature JPEG passthrough is broken in some cases, best to
        # disable it always
        # https://bugs.ghostscript.com/show_bug.cgi?id=699216
        compression_args.append('-dPassThroughJPEGImages=false')

    with NamedTemporaryFile(delete=True) as gs_pdf:
        # nb no need to specify ProcessColorModel when ColorConversionStrategy
        # is set; see:
        # https://bugs.ghostscript.com/show_bug.cgi?id=699392
        args_gs = (
            [
                "gs",
                "-dQUIET",
                "-dBATCH",
                "-dNOPAUSE",
                "-dCompatibilityLevel=" + str(pdf_version),
                "-dNumRenderingThreads=" + str(threads),
                "-sDEVICE=pdfwrite",
                "-dAutoRotatePages=/None",
                "-sColorConversionStrategy=" + strategy,
            ]
            + compression_args
            + [
                "-dJPEGQ=95",
                "-dPDFA=" + pdfa_part,
                "-dPDFACompatibilityPolicy=1",
                "-sOutputFile=" + gs_pdf.name,
            ]
        )
        args_gs.extend(fspath(s) for s in pdf_pages)  # Stringify Path objs
        log.debug(args_gs)
        p = run(args_gs, stdout=PIPE, stderr=STDOUT, universal_newlines=True)

        if _gs_error_reported(p.stdout):
            log.error(p.stdout)
        elif 'overprint mode not set' in p.stdout:
            # Unless someone is going to print PDF/A documents on a
            # magical sRGB printer I can't see the removal of overprinting
            # being a problem....
            log.debug(
                "Ghostscript had to remove PDF 'overprinting' from the "
                "input file to complete PDF/A conversion. "
            )
        else:
            log.debug(p.stdout)

        if p.returncode == 0:
            # Ghostscript does not change return code when it fails to create
            # PDF/A - check PDF/A status elsewhere
            copy(gs_pdf.name, fspath(output_file))
        else:
            log.error('Ghostscript PDF/A rendering failed')
            raise SubprocessOutputError()
java
// Writes the image to the output stream using the JPEG image type.
public static void writeJpg(Image image, ImageOutputStream destImageStream) throws IORuntimeException { write(image, IMAGE_TYPE_JPG, destImageStream); }
java
/**
 * Upgrades the URL scheme from {@code http} to {@code https} (case-insensitive
 * match; non-http schemes are left untouched).
 * NOTE(review): relies on PATTERN_SCHEMA capturing the scheme as group 1 and
 * the remainder as group 2 (used by the "$2" back-reference) — confirm
 * against the pattern definition.
 *
 * @return this normalizer, for method chaining
 */
public URLNormalizer secureScheme() {
    Matcher m = PATTERN_SCHEMA.matcher(url);
    if (m.find()) {
        String schema = m.group(1);
        if ("http".equalsIgnoreCase(schema)) {
            url = m.replaceFirst(schema + "s$2");
        }
    }
    return this;
}
java
public boolean validateUser(String userName, String password, CmsRole requiredRole) { boolean result = false; try { CmsUser user = m_cms.readUser(userName, password); result = OpenCms.getRoleManager().hasRole(m_cms, user.getName(), requiredRole); } catch (CmsException e) { // nothing to do } return result; }
python
def attempt(self, *kinds):
    """Consume and return the next token when it matches one of ``kinds``.

    With no ``kinds`` given, any token is accepted. Returns ``None`` when
    there is no next token or its kind is not listed. A previously recorded
    error is re-raised before anything else.
    """
    if self._error:
        raise self._error
    token = self.next_token
    matches = bool(token) and (not kinds or token.kind in kinds)
    if not matches:
        return None
    self._advance()
    return token
python
def validate_exported_interfaces(object_class, exported_intfs):
    # type: (List[str], List[str]) -> bool
    """
    Validates that the exported interfaces are all provided by the service.

    :param object_class: The specifications of a service
    :param exported_intfs: The exported specifications
    :return: True if the exported specifications are all provided by the service
    """
    # The original tested `not exported_intfs` twice; a non-empty list is
    # required, anything else (None, wrong type, empty) fails validation.
    if not isinstance(exported_intfs, list) or not exported_intfs:
        return False
    return all(exintf in object_class for exintf in exported_intfs)
python
def dump_t_coords(dataset_dir, data_dir, dataset, root=None, compress=True):
    """dump vtkjs texture coordinates

    Serializes the dataset's point texture coordinates (if any) into
    ``data_dir`` and records them under ``root['pointData']``.

    NOTE(review): when ``root`` is provided it must already contain a
    ``pointData`` dict with an ``arrays`` list, otherwise this raises
    KeyError — confirm callers guarantee that shape.
    """
    if root is None:
        root = {}
    tcoords = dataset.GetPointData().GetTCoords()
    if tcoords:
        dumped_array = dump_data_array(dataset_dir, data_dir, tcoords, {}, compress)
        # The new array is appended below, so its index equals the current length.
        root['pointData']['activeTCoords'] = len(root['pointData']['arrays'])
        root['pointData']['arrays'].append({'data': dumped_array})
python
def create(self, _attributes=None, **attributes):
    """
    Create and persist a new instance of the related model.

    The instance's foreign key is set to the parent's key before saving,
    attaching the new record to this relation's parent.

    :param _attributes: Optional dict merged over the keyword attributes
    :type _attributes: dict

    :rtype: Model
    """
    if _attributes is not None:
        attributes.update(_attributes)
    instance = self._related.new_instance(attributes)
    # Attach to the parent before persisting.
    instance.set_attribute(self.get_plain_foreign_key(), self.get_parent_key())
    instance.save()
    return instance
python
def to_dictionary(
        self, key_selector=lambda item: item.key, value_selector=list):
    """Build a dictionary from the source sequence.

    By default each Grouping's key maps to the list of its items; either
    side may be overridden with a selector. Delegates to the base
    Queryable implementation.

    Args:
        key_selector: A unary callable to extract a key from each item.
            By default the key of the Grouping.
        value_selector: A unary callable to extract a value from each
            item. By default the value is the list of items from the
            Grouping.

    Note: This method uses immediate execution.

    Raises:
        ValueError: If the Queryable is closed.
        TypeError: If key_selector is not callable.
        TypeError: If value_selector is not callable.
    """
    return super(Lookup, self).to_dictionary(key_selector, value_selector)
java
// Returns a date/time formatter for the given styles in the default FORMAT-category locale.
public final static DateFormat getDateTimeInstance(int dateStyle, int timeStyle) { return get(dateStyle, timeStyle, ULocale.getDefault(Category.FORMAT), null); }
python
def delete(self):
    """
    Delete all records matching the query.

    Warning:
        This is a destructive operation. Not every model allows deletion
        of records and several models even restrict based on status. For
        example, deleting products that have been transacted is
        restricted. Another example is sales orders which can be deleted
        only when they are draft.

        If deletion fails, a server error is thrown.
    """
    # NOTE(review): search + delete is two RPC round-trips, so records
    # created or removed in between may be missed — confirm acceptable.
    ids = self.rpc_model.search(self.domain, context=self.context)
    if ids:
        self.rpc_model.delete(ids)
java
// Removes the JS path provider from this request's include set; a no-op when no per-request set exists yet.
public static void unregisterJSIncludeFromThisRequest (@Nonnull final IJSPathProvider aJSPathProvider) { final JSResourceSet aSet = _getPerRequestSet (false); if (aSet != null) aSet.removeItem (aJSPathProvider); }
java
/**
 * Pushes the previous interval's value into the metrics record under this
 * metric's name. The interval heartbeat is advanced first (under the lock),
 * and any failure during the push is logged rather than propagated.
 */
public void pushMetric(final MetricsRecord mr) {
    lock.lock();
    try {
        intervalHeartBeat();
        try {
            mr.incrMetric(getName(), getPreviousIntervalValue());
        } catch (Exception e) {
            LOG.info("pushMetric failed for " + getName() + "\n" + StringUtils.stringifyException(e));
        }
    } finally {
        lock.unlock();
    }
}
java
/**
 * Tells whether the given option is registered in the option table.
 * Lookup is case-insensitive; a missing table means nothing is registered.
 *
 * @param option the option name to test
 * @return true when the option is present
 */
public static boolean check(String option) {
    return table != null && table.get(option.toLowerCase()) != null;
}
java
/**
 * Sifts the entry at {@code idx} up the heap until its parent no longer
 * compares greater, restoring the heap invariant. Displaced parents get
 * their {@code index} field updated immediately; the moved entry's own
 * index is written once at its final position.
 */
private void upHeap(int idx) {
    Reference<E> e = entries.array[idx];
    int iter = idx;
    while (hasParent(iter)) {
        int pidx = parent(iter);
        Reference<E> p = entries.array[pidx];
        if (compare(e, p) < 0) {
            // e belongs above p: swap them and continue from the parent slot.
            entries.array[pidx] = e;
            entries.array[iter] = p;
            p.index = iter;
            iter = parent(iter);
        } else {
            break;
        }
    }
    // Record e's final slot exactly once.
    e.index = iter;
}
java
/**
 * Performs the copy operation on the selected resource(s).
 *
 * <p>Returns {@code false} (triggering the "please wait" screen) for folder
 * or multi-resource copies that have not yet been confirmed; otherwise
 * copies every resource in the list and returns {@code true}.</p>
 *
 * @return {@code true} if the operation completed, {@code false} to show
 *     the wait screen first
 * @throws CmsException if a single-resource copy fails
 */
@Override
protected boolean performDialogOperation() throws CmsException {

    // check if the current resource is a folder for single operation
    boolean isFolder = isOperationOnFolder();
    // on folder copy display "please wait" screen, not for simple file copy
    if ((isMultiOperation() || isFolder) && !DIALOG_WAIT.equals(getParamAction())) {
        // return false, this will trigger the "please wait" screen
        return false;
    }

    // get the copy mode from request parameter value
    CmsResourceCopyMode copyMode = CmsResource.COPY_PRESERVE_SIBLING;
    try {
        copyMode = CmsResourceCopyMode.valueOf(Integer.parseInt(getParamCopymode()));
    } catch (Exception e) {
        // can usually be ignored: fall back to the default copy mode
        if (LOG.isInfoEnabled()) {
            LOG.info(e.getLocalizedMessage());
        }
    }

    // check the overwrite options: multi-operations honour the checkbox,
    // single operations require an explicit confirmation action
    boolean overwrite = Boolean.valueOf(getParamOverwrite()).booleanValue();
    overwrite = ((isMultiOperation() && overwrite) || DIALOG_CONFIRMED.equals(getParamAction()));

    // calculate the target name
    String target = getParamTarget();
    if (target == null) {
        target = "";
    }

    String storedSiteRoot = null;
    try {
        // check if a site root was added to the target name; if so,
        // temporarily switch to the root site so the target resolves
        String sitePrefix = "";
        if (OpenCms.getSiteManager().getSiteRoot(target) != null) {
            String siteRootFolder = getCms().getRequestContext().getSiteRoot();
            if (siteRootFolder.endsWith("/")) {
                siteRootFolder = siteRootFolder.substring(0, siteRootFolder.length() - 1);
            }
            sitePrefix = siteRootFolder;
            storedSiteRoot = getCms().getRequestContext().getSiteRoot();
            getCms().getRequestContext().setSiteRoot("/");
        }

        Iterator<String> i = getResourceList().iterator();
        // iterate the resources to copy
        while (i.hasNext()) {
            String resName = i.next();
            try {
                performSingleCopyOperation(resName, target, sitePrefix, copyMode, overwrite);
            } catch (CmsException e) {
                if (isMultiOperation()) {
                    // collect exceptions to create a detailed output
                    addMultiOperationException(e);
                } else {
                    // for single operation, throw the exception immediately
                    throw e;
                }
            }
        }
        // check if exceptions occurred
        checkMultiOperationException(Messages.get(), Messages.ERR_COPY_MULTI_0);
    } finally {
        // restore the site root
        if (storedSiteRoot != null) {
            getCms().getRequestContext().setSiteRoot(storedSiteRoot);
        }
    }

    return true;
}
java
/**
 * Encodes the given string as Base64 using the configured charset.
 *
 * <p>NOTE(review): despite the name this is plain Base64 <em>encoding</em>,
 * not encryption; the name is kept for API compatibility.</p>
 *
 * @param content the text to encode
 * @return the Base64 representation of {@code content}
 */
public static String encryptToBase64(String content) {
    byte[] val = content.getBytes(CHARSET);
    // java.util.Base64 (Java 8+) replaces DatatypeConverter, whose
    // javax.xml.bind module was removed from the JDK in Java 11.
    return java.util.Base64.getEncoder().encodeToString(val);
}
python
def npoints_towards(lon, lat, depth, azimuth, hdist, vdist, npoints):
    """
    Find a list of specified number of points starting from a given one
    along a great circle arc with a given azimuth measured in a given point.

    :param float lon, lat, depth:
        Coordinates of a point to start from. The first point in a resulting
        list has these coordinates.
    :param azimuth:
        A direction representing a great circle arc together with a reference
        point.
    :param hdist:
        Horizontal (geodetic) distance from reference point to the last point
        of the resulting list, in km.
    :param vdist:
        Vertical (depth) distance between reference and the last point, in km.
    :param npoints:
        Integer number of points to return. First and last points count,
        so if there have to be two intervals, ``npoints`` should be 3.
    :returns:
        Tuple of three 1d numpy arrays: longitudes, latitudes and depths
        of resulting points respectively.

    Implements "completely general but more complicated algorithm" from
    http://williams.best.vwh.net/avform.htm#LL
    """
    assert npoints > 1
    rlon, rlat = numpy.radians(lon), numpy.radians(lat)
    # Course angle in radians; the formula convention counts the angle the
    # opposite way round from the azimuth, hence 360 - azimuth.
    tc = numpy.radians(360 - azimuth)
    # Equally spaced angular distances along the arc, one per sample point.
    hdists = numpy.arange(npoints, dtype=float)
    hdists *= (hdist / EARTH_RADIUS) / (npoints - 1)
    # Equally spaced depth offsets for the same sample points.
    vdists = numpy.arange(npoints, dtype=float)
    vdists *= vdist / (npoints - 1)

    sin_dists = numpy.sin(hdists)
    cos_dists = numpy.cos(hdists)
    sin_lat = numpy.sin(rlat)
    cos_lat = numpy.cos(rlat)

    sin_lats = sin_lat * cos_dists + cos_lat * sin_dists * numpy.cos(tc)
    lats = numpy.degrees(numpy.arcsin(sin_lats))

    dlon = numpy.arctan2(numpy.sin(tc) * sin_dists * cos_lat,
                         cos_dists - sin_lat * sin_lats)
    # Normalise longitudes into the interval (-180, 180].
    lons = numpy.mod(rlon - dlon + numpy.pi, 2 * numpy.pi) - numpy.pi
    lons = numpy.degrees(lons)

    depths = vdists + depth

    # the first point should be left intact
    lons[0] = lon
    lats[0] = lat
    depths[0] = depth

    return lons, lats, depths
java
/**
 * Compiles the given Less file (read as UTF-8) into CSS.
 *
 * @param lessFile the Less source file
 * @param compress whether to produce compressed CSS output
 * @return the compiled CSS
 * @throws IOException if the file cannot be read
 */
public static String compile( File lessFile, boolean compress ) throws IOException {
    String lessData = new String( Files.readAllBytes( lessFile.toPath() ), StandardCharsets.UTF_8 );
    // The file URL is passed along so relative @import statements can resolve.
    return Less.compile( lessFile.toURI().toURL(), lessData, compress, new ReaderFactory() );
}
java
/**
 * Returns a {@link RaygunDuplicateErrorFilter}, reusing a stored instance
 * when one exists.
 *
 * <p>On the first call a new filter is created and remembered in
 * {@code instance}; a subsequent call hands out that stored filter and
 * clears the slot, so the cycle starts fresh on the call after that.
 * NOTE(review): {@code instance} is presumably a ThreadLocal-style holder —
 * confirm against its declaration.</p>
 *
 * @return the filter for the current create/consume cycle
 */
public RaygunDuplicateErrorFilter create() {
    RaygunDuplicateErrorFilter filter = instance.get();
    if (filter == null) {
        // First call of the cycle: create and remember the filter.
        filter = new RaygunDuplicateErrorFilter();
        instance.set(filter);
        return filter;
    } else {
        // Second call: return the stored filter and clear the slot.
        instance.remove();
        return filter;
    }
}
python
def send_login_signal(self, request, user, profile, client):
    """
    Send a signal that a user logged in.

    This signal should be sent only if the user was *not* logged into
    Django.

    :param request: The current HTTP request.
    :param user: The Django user that logged in.
    :param profile: The social profile associated with ``user``; its class
        is used as the signal sender.
    :param client: The API client that performed the authentication.
    """
    signals.login.send(sender=profile.__class__,
                       user=user,
                       profile=profile,
                       client=client,
                       request=request)
python
def on_connection_blocked(self, method_frame):
    """This method is called by pika if RabbitMQ sends a connection blocked
    method, to let us know we need to throttle our publishing.

    :param pika.amqp_object.Method method_frame: The blocked method frame

    """
    LOGGER.warning('Connection blocked: %s', method_frame)
    self.state = self.STATE_BLOCKED
    # Notify the registered callback (if any) that the connection can no
    # longer be used for publishing.
    if self.on_unavailable:
        self.on_unavailable(self)
python
def areaBetween(requestContext, *seriesLists):
    """
    Draws the vertical area in between the two series in seriesList. Useful
    for visualizing a range such as the minimum and maximum latency for a
    service.

    areaBetween expects **exactly one argument** that results in exactly two
    series (see example below). The order of the lower and higher values
    series does not matter. The visualization only works when used in
    conjunction with ``areaMode=stacked``.

    Most likely use case is to provide a band within which another metric
    should move. In such case applying an ``alpha()``, as in the second
    example, gives best visual results.

    Example::

        &target=areaBetween(service.latency.{min,max})&areaMode=stacked

        &target=alpha(areaBetween(service.latency.{min,max}),0.3)&areaMode=stacked

    If for instance, you need to build a seriesList, you should use the
    ``group`` function, like so::

        &target=areaBetween(group(minSeries(a.*.min),maxSeries(a.*.max)))
    """
    # Unwrap a single argument that itself contains the two series.
    if len(seriesLists) == 1:
        [seriesLists] = seriesLists
    assert len(seriesLists) == 2, ("areaBetween series argument must "
                                   "reference *exactly* 2 series")
    lower, upper = seriesLists
    # Each half may arrive as a one-element list; unwrap to the series.
    if len(lower) == 1:
        [lower] = lower
    if len(upper) == 1:
        [upper] = upper
    # The band effect: stack both series and hide the lower one so only the
    # area between them is visible.
    lower.options['stacked'] = True
    lower.options['invisible'] = True

    upper.options['stacked'] = True
    lower.name = upper.name = "areaBetween(%s)" % upper.pathExpression
    return [lower, upper]
python
async def addMachines(self, params=None):
    """
    :param params dict: Dictionary specifying the machine to add.
        All keys are optional. Keys include:

        series: string specifying the machine OS series.

        constraints: string holding machine constraints, if any. We'll
            parse this into the json friendly dict that the juju api
            expects.

        container_type: string holding the type of the container (for
            instance "lxd" or "kvm"). It is not specified for top level
            machines.

        parent_id: string holding a placeholder pointing to another
            machine change or to a unit change. This value is only
            specified in the case this machine is a container, in
            which case also ContainerType is set.

    :return: The newly added machine.
    :raises ValueError: If the controller reports an error for the machine.
    """
    params = params or {}

    # Normalize keys
    params = {normalize_key(k): params[k] for k in params.keys()}

    # Fix up values, as necessary.
    if 'parent_id' in params:
        if params['parent_id'].startswith('$addUnit'):
            # Container placed next to a unit: use that unit's machine.
            unit = self.resolve(params['parent_id'])[0]
            params['parent_id'] = unit.machine.entity_id
        else:
            params['parent_id'] = self.resolve(params['parent_id'])

    params['constraints'] = parse_constraints(
        params.get('constraints'))
    params['jobs'] = params.get('jobs', ['JobHostUnits'])

    if params.get('container_type') == 'lxc':
        # Transparently upgrade the legacy container type.
        log.warning('Juju 2.0 does not support lxc containers. '
                    'Converting containers to lxd.')
        params['container_type'] = 'lxd'

    # Submit the request.
    params = client.AddMachineParams(**params)
    results = await self.client_facade.AddMachines([params])
    error = results.machines[0].error
    if error:
        raise ValueError("Error adding machine: %s" % error.message)
    machine = results.machines[0].machine
    log.debug('Added new machine %s', machine)
    return machine
python
def _actionsFreqsAngles(self,*args,**kwargs):
    """
    NAME:
       actionsFreqsAngles (_actionsFreqsAngles)
    PURPOSE:
       evaluate the actions, frequencies, and angles
       (jr,lz,jz,Omegar,Omegaphi,Omegaz,ar,ap,az)
    INPUT:
       Either:
          a) R,vR,vT,z,vz[,phi]:
             1) floats: phase-space value for single object (phi is optional) (each can be a Quantity)
             2) numpy.ndarray: [N] phase-space values for N objects (each can be a Quantity)
          b) Orbit instance: initial condition used if that's it, orbit(t) if there is a time given as well as the second argument
       fixed_quad= (False) if True, use n=10 fixed_quad integration
       scipy.integrate.quadrature or .fixed_quad keywords
    OUTPUT:
        (jr,lz,jz,Omegar,Omegaphi,Omegaz,ar,aphi,az)
    HISTORY:
       2013-12-29 - Written - Bovy (IAS)
    """
    fixed_quad= kwargs.pop('fixed_quad',False)
    if len(args) == 5: #R,vR.vT, z, vz pragma: no cover
        # phi is mandatory here: the azimuthal angle cannot be computed
        # without it.
        raise IOError("You need to provide phi when calculating angles")
    elif len(args) == 6: #R,vR.vT, z, vz, phi
        R,vR,vT, z, vz, phi= args
    else:
        # Orbit-instance input: extract the phase-space coordinates.
        self._parse_eval_args(*args)
        R= self._eval_R
        vR= self._eval_vR
        vT= self._eval_vT
        z= self._eval_z
        vz= self._eval_vz
        phi= self._eval_phi
    # Promote scalar input to length-1 arrays so the loop below works.
    if isinstance(R,float):
        R= nu.array([R])
        vR= nu.array([vR])
        vT= nu.array([vT])
        z= nu.array([z])
        vz= nu.array([vz])
        phi= nu.array([phi])
    if self._c: #pragma: no cover
        pass
    else:
        # Angular momentum components and energy of each phase-space point.
        Lz= R*vT
        Lx= -z*vT
        Ly= z*vR-R*vz
        L2= Lx*Lx+Ly*Ly+Lz*Lz
        E= _evaluatePotentials(self._pot,R,z)+vR**2./2.+vT**2./2.+vz**2./2.
        L= nu.sqrt(L2)
        #Actions
        Jphi= Lz
        Jz= L-nu.fabs(Lz)
        #Jr requires some more work
        #Set up an actionAngleAxi object for EL and rap/rperi calculations
        axiR= nu.sqrt(R**2.+z**2.)
        axivT= L/axiR #these are really spherical coords, called axi bc they go in actionAngleAxi
        axivR= (R*vR+z*vz)/axiR
        axivz= (z*vR-R*vz)/axiR
        Jr= []
        Or= []
        Op= []
        ar= []
        az= []
        #Calculate the longitude of the ascending node
        asc= self._calc_long_asc(z,R,axivz,phi,Lz,L)
        for ii in range(len(axiR)):
            axiaA= actionAngleAxi(axiR[ii],axivR[ii],axivT[ii],
                                  pot=self._2dpot)
            (rperi,rap)= axiaA.calcRapRperi()
            EL= axiaA.calcEL()
            E, L= EL
            Jr.append(self._calc_jr(rperi,rap,E,L,fixed_quad,**kwargs))
            #Radial period
            Rmean= m.exp((m.log(rperi)+m.log(rap))/2.)
            if Jr[-1] < 10.**-9.: #Circular orbit
                Or.append(epifreq(self._pot,axiR[ii],use_physical=False))
                Op.append(omegac(self._pot,axiR[ii],use_physical=False))
            else:
                Or.append(self._calc_or(Rmean,rperi,rap,E,L,fixed_quad,**kwargs))
                Op.append(self._calc_op(Or[-1],Rmean,rperi,rap,E,L,fixed_quad,**kwargs))
            #Angles
            ar.append(self._calc_angler(Or[-1],axiR[ii],Rmean,rperi,rap,
                                        E,L,axivR[ii],fixed_quad,**kwargs))
            az.append(self._calc_anglez(Or[-1],Op[-1],ar[-1],
                                        z[ii],axiR[ii],
                                        Rmean,rperi,rap,E,L,Lz[ii],
                                        axivR[ii],axivz[ii],
                                        fixed_quad,**kwargs))
        # Retrograde orbits (vT < 0) flip the sign of the azimuthal
        # frequency and the direction of the azimuthal angle offset.
        Op= nu.array(Op)
        Oz= copy.copy(Op)
        Op[vT < 0.]*= -1.
        ap= copy.copy(asc)
        ar= nu.array(ar)
        az= nu.array(az)
        ap[vT < 0.]-= az[vT < 0.]
        ap[vT >= 0.]+= az[vT >= 0.]
        # Wrap all angles into [0, 2 pi).
        ar= ar % (2.*nu.pi)
        ap= ap % (2.*nu.pi)
        az= az % (2.*nu.pi)
        return (nu.array(Jr),Jphi,Jz,nu.array(Or),Op,Oz,
                ar,ap,az)
python
def GetFeedItems(client, feed):
  """Returns the Feed Items for a given Feed.

  Only ENABLED feed items are returned; results are fetched in pages of
  PAGE_SIZE entries.

  Args:
    client: an AdWordsClient instance.
    feed: the Feed we are retrieving Feed Items from.

  Returns:
    The Feed Items associated with the given Feed.
  """
  feed_item_service = client.GetService('FeedItemService', 'v201809')
  feed_items = []
  more_pages = True

  selector = {
      'fields': ['FeedItemId', 'AttributeValues'],
      'predicates': [
          {
              'field': 'Status',
              'operator': 'EQUALS',
              'values': ['ENABLED']
          },
          {
              'field': 'FeedId',
              'operator': 'EQUALS',
              'values': [feed['id']]
          }
      ],
      'paging': {
          'startIndex': 0,
          'numberResults': PAGE_SIZE
      }
  }

  # Page through the results until all entries have been collected.
  while more_pages:
    page = feed_item_service.get(selector)
    if 'entries' in page:
      feed_items.extend(page['entries'])
    selector['paging']['startIndex'] += PAGE_SIZE
    more_pages = selector['paging']['startIndex'] < int(page['totalNumEntries'])

  return feed_items
python
def get_var(script_path, var):
    """
    Given a script, and the name of an environment variable, returns the
    value of the environment variable.

    The script is sourced in a fresh ``bash`` process and the variable is
    echoed back, so the returned value reflects any logic in the script.

    :param script_path: Path to a shell script
    :type script_path: str or unicode
    :param var: environment variable name
    :type var: str or unicode
    :return: str
    :raises ShellScriptException: if sourcing the script writes to stderr
    """
    if not path.isfile(script_path):
        raise _noscripterror(script_path)
    # Source the script, then echo the variable with no trailing newline.
    # (Renamed from `input`, which shadowed the builtin.)
    command = '. "%s"; echo -n "$%s"\n' % (script_path, var)
    # universal_newlines=True makes the pipes text-mode: on Python 3 the
    # original binary pipes raised TypeError when given a str via
    # communicate(), and returned bytes on Python 2.
    pipe = Popen(["bash"], stdout=PIPE, stdin=PIPE, stderr=PIPE,
                 universal_newlines=True)
    stdout_data, stderr_data = pipe.communicate(input=command)
    if stderr_data:
        raise ShellScriptException(script_path, stderr_data)
    return stdout_data
java
/**
 * Scans the input forward for the first line containing {@code string} and
 * returns the first token (split on spaces, '=' and tabs) that follows it,
 * reading continuation lines if the token is on a later line.
 *
 * @param string the marker text to search for
 * @return the token following the marker, or {@code null} if none is found
 * @throws IOException if reading from the input buffer fails
 */
public String nextVASPTokenFollowing(String string) throws IOException {
    int index;
    String line;
    while (inputBuffer.ready()) {
        line = inputBuffer.readLine();
        index = line.indexOf(string);
        // Use >= 0: the previous "> 0" test silently skipped lines where
        // the marker started at column 0 (indexOf returns 0 there).
        if (index >= 0) {
            index = index + string.length();
            line = line.substring(index);
            st = new StringTokenizer(line, " =\t");
            // The value may sit on a following line; keep reading until a
            // token appears or the input is exhausted.
            while (!st.hasMoreTokens() && inputBuffer.ready()) {
                line = inputBuffer.readLine();
                st = new StringTokenizer(line, " =\t");
            }
            if (st.hasMoreTokens()) {
                fieldVal = st.nextToken();
            } else {
                fieldVal = null;
            }
            break;
        }
    }
    return fieldVal;
}
java
/**
 * Blanks the text of every registered text field. Slots in the list may
 * legitimately be {@code null} and are skipped.
 */
public void clearAll() {
    for (JTextField textField : textFields) {
        if (textField != null) {
            textField.setText("");
        }
    }
}
python
def content_transfer_encoding(self) \
        -> Optional[ContentTransferEncodingHeader]:
    """The ``Content-Transfer-Encoding`` header, or ``None`` when absent."""
    try:
        return cast(ContentTransferEncodingHeader,
                    self[b'content-transfer-encoding'][0])
    except (KeyError, IndexError):
        # KeyError: the header is not present at all;
        # IndexError: the header key exists but has no values.
        return None
java
/**
 * Returns the pixel value at {@code (x, y)} after validating the
 * coordinates against the image bounds.
 *
 * @param x column of the requested pixel
 * @param y row of the requested pixel
 * @return the pixel value
 * @throws ImageAccessException if the coordinates are outside the image
 */
public float get(int x, int y) {
    if (isInBounds(x, y)) {
        return unsafe_get(x, y);
    }
    throw new ImageAccessException("Requested pixel is out of bounds: ( " + x + " , " + y + " )");
}
python
def killBatchJobs(self, jobIDs):
    """
    Kills the given jobs, represented as Job ids, then checks they are
    dead by checking they are not in the list of issued jobs.

    Blocks until every requested job has been confirmed killed (or the
    kill queue signals shutdown with a ``None`` sentinel).
    """
    self.killLocalJobs(jobIDs)
    jobIDs = set(jobIDs)
    logger.debug('Jobs to be killed: %r', jobIDs)
    # Hand each job id to the worker responsible for performing kills.
    for jobID in jobIDs:
        self.killQueue.put(jobID)
    while jobIDs:
        # Blocks until a kill is confirmed; None means the queue is shutting
        # down and no more confirmations will arrive.
        killedJobId = self.killedJobsQueue.get()
        if killedJobId is None:
            break
        jobIDs.remove(killedJobId)
        if killedJobId in self.currentJobs:
            self.currentJobs.remove(killedJobId)
        if jobIDs:
            # NOTE(review): sleepSeconds() presumably sleeps and returns the
            # duration used in the message — confirm against its definition.
            logger.debug('Some kills (%s) still pending, sleeping %is', len(jobIDs),
                         self.sleepSeconds())
java
/**
 * Reads a process id from the first line of the given PID file.
 *
 * @param pidFile the file containing the PID on its first line
 * @return the parsed PID, or {@code null} if the file cannot be read or
 *     does not contain a parseable number
 */
private static Long getPid(File pidFile) {
    String pidLine = null;
    // try-with-resources closes both readers even on failure; the original
    // only closed the FileReader from a hand-written finally block.
    try (FileReader reader = new FileReader(pidFile);
         BufferedReader lineReader = new BufferedReader(reader)) {
        pidLine = lineReader.readLine();
        if (pidLine != null) {
            pidLine = pidLine.trim();
        }
    } catch (Exception ex) {
        log.error("Failed to read PID from file [" + pidFile.getAbsolutePath() + "]", ex);
    }
    try {
        // parseLong(null) throws, so a failed read also yields null here.
        return Long.parseLong(pidLine);
    } catch (Exception ex) {
        return null;
    }
}
java
/**
 * Returns the input handler, emitting paired entry/exit trace records when
 * tracing is enabled.
 *
 * @return the current input handler
 */
public InputHandler getInputHandler() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
        // Simple getter: entry and exit are traced back-to-back.
        SibTr.entry(tc, "getInputHandler");
        SibTr.exit(tc, "getInputHandler", inputHandler);
    }
    return inputHandler;
}
java
/**
 * Starts the cache dispatcher and the pool of network dispatchers,
 * first stopping any dispatchers that are already running.
 */
public void start() {
    stop(); // Make sure any currently running dispatchers are stopped. // Create the cache dispatcher and start it.
    mCacheDispatcher = new CacheDispatcher(mCacheQueue, mNetworkQueue, mCache, mDelivery);
    mCacheDispatcher.start();

    // Create network dispatchers (and corresponding threads) up to the pool size.
    for (int i = 0; i < mDispatchers.length; i++) {
        NetworkDispatcher networkDispatcher = new NetworkDispatcher(mNetworkQueue, mNetwork,
                mCache, mDelivery);
        mDispatchers[i] = networkDispatcher;
        networkDispatcher.start();
    }
}
python
def _calculate_filter(n, spacing, shift, fI, r_def, reim, name): r"""Calculate filter for this spacing, shift, n.""" # Base :: For this n/spacing/shift base = np.exp(spacing*(np.arange(n)-n//2) + shift) # r :: Start/end is defined by base AND r_def[0]/r_def[1] # Overdetermined system if r_def[2] > 1 r = np.logspace(np.log10(1/np.max(base)) - r_def[0], np.log10(1/np.min(base)) + r_def[1], r_def[2]*n) # k :: Get required k-values (matrix of shape (r.size, base.size)) k = base/r[:, None] # Create filter instance dlf = DigitalFilter(name.split('.')[0]) dlf.base = base dlf.factor = np.around(np.average(base[1:]/base[:-1]), 15) # Loop over transforms for f in fI: # Calculate lhs and rhs for inversion lhs = reim(f.lhs(k)) rhs = reim(f.rhs(r)*r) # Calculate filter values: Solve lhs*J=rhs using linalg.qr. # If factoring fails (qr) or if matrix is singular or square (solve) it # will raise a LinAlgError. Error is ignored and zeros are returned # instead. try: qq, rr = np.linalg.qr(lhs) J = np.linalg.solve(rr, rhs.dot(qq)) except np.linalg.LinAlgError: J = np.zeros((base.size,)) setattr(dlf, f.name, J) return dlf
java
/**
 * Dispatches the given javax.model element to the matching visitor callback
 * (field, constructor, method, enum or class) and returns the wrapping
 * Micronaut AST element, or {@code null} for unsupported element kinds.
 *
 * @param element the compiler element being visited
 * @param annotationMetadata resolved annotation metadata for the element
 * @return the Micronaut element that was visited, or {@code null}
 */
public @Nullable io.micronaut.inject.ast.Element visit(
    Element element,
    AnnotationMetadata annotationMetadata) {
    if (element instanceof VariableElement) {
        final JavaFieldElement e = new JavaFieldElement(
            (VariableElement) element,
            annotationMetadata,
            visitorContext);
        visitor.visitField(
            e,
            visitorContext
        );
        return e;
    } else if (element instanceof ExecutableElement) {
        ExecutableElement executableElement = (ExecutableElement) element;
        // "<init>" is the compiler's name for constructors.
        if (executableElement.getSimpleName().toString().equals("<init>")) {
            final JavaConstructorElement e = new JavaConstructorElement(
                executableElement,
                annotationMetadata,
                visitorContext);
            visitor.visitConstructor(
                e,
                visitorContext
            );
            return e;
        } else {
            final JavaMethodElement e = new JavaMethodElement(
                executableElement,
                annotationMetadata,
                visitorContext);
            visitor.visitMethod(
                e,
                visitorContext
            );
            return e;
        }
    } else if (element instanceof TypeElement) {
        TypeElement typeElement = (TypeElement) element;
        // Enums get a dedicated element type; all other types are classes.
        boolean isEnum = JavaModelUtils.resolveKind(typeElement, ElementKind.ENUM).isPresent();
        if (isEnum) {
            final JavaEnumElement e = new JavaEnumElement(
                typeElement,
                annotationMetadata,
                visitorContext,
                Collections.emptyList());
            visitor.visitClass(
                e,
                visitorContext
            );
            return e;
        } else {
            final JavaClassElement e = new JavaClassElement(
                typeElement,
                annotationMetadata,
                visitorContext);
            visitor.visitClass(
                e,
                visitorContext
            );
            return e;
        }
    }
    return null;
}
java
/**
 * Configures the deployment's log context from the discovered configuration
 * file, dispatching on the file type (log4j XML, log4j properties, J.U.L.
 * properties or JBoss Log Manager properties).
 *
 * @param root the deployment resource root containing the file
 * @param configFile the logging configuration file
 * @param classLoader the deployment class loader (set as TCCL for log4j)
 * @param logContext the log context to configure
 * @return a service wrapping the resulting configuration, or {@code null}
 *     when the file is a plain J.U.L. configuration
 * @throws DeploymentUnitProcessingException if configuration fails
 */
private LoggingConfigurationService configure(final ResourceRoot root, final VirtualFile configFile, final ClassLoader classLoader, final LogContext logContext) throws DeploymentUnitProcessingException {
    InputStream configStream = null;
    try {
        LoggingLogger.ROOT_LOGGER.debugf("Found logging configuration file: %s", configFile);
        // Get the filename and open the stream
        final String fileName = configFile.getName();
        configStream = configFile.openStream();
        // Check the type of the configuration file
        if (isLog4jConfiguration(fileName)) {
            // log4j reads the TCCL and the selected log context; install
            // both, and restore them in the finally block below.
            final ClassLoader current = WildFlySecurityManager.getCurrentContextClassLoaderPrivileged();
            final LogContext old = logContextSelector.getAndSet(CONTEXT_LOCK, logContext);
            try {
                WildFlySecurityManager.setCurrentContextClassLoaderPrivileged(classLoader);
                if (LOG4J_XML.equals(fileName) || JBOSS_LOG4J_XML.equals(fileName)) {
                    new DOMConfigurator().doConfigure(configStream, org.apache.log4j.JBossLogManagerFacade.getLoggerRepository(logContext));
                } else {
                    final Properties properties = new Properties();
                    properties.load(new InputStreamReader(configStream, ENCODING));
                    new org.apache.log4j.PropertyConfigurator().doConfigure(properties, org.apache.log4j.JBossLogManagerFacade.getLoggerRepository(logContext));
                }
            } finally {
                logContextSelector.getAndSet(CONTEXT_LOCK, old);
                WildFlySecurityManager.setCurrentContextClassLoaderPrivileged(current);
            }
            return new LoggingConfigurationService(null, resolveRelativePath(root, configFile));
        } else {
            // Create a properties file
            final Properties properties = new Properties();
            properties.load(new InputStreamReader(configStream, ENCODING));
            // Attempt to see if this is a J.U.L. configuration file
            if (isJulConfiguration(properties)) {
                // Plain J.U.L. files are only reported, not applied here.
                LoggingLogger.ROOT_LOGGER.julConfigurationFileFound(configFile.getName());
            } else {
                // Load non-log4j types
                final PropertyConfigurator propertyConfigurator = new PropertyConfigurator(logContext);
                propertyConfigurator.configure(properties);
                return new LoggingConfigurationService(propertyConfigurator.getLogContextConfiguration(), resolveRelativePath(root, configFile));
            }
        }
    } catch (Exception e) {
        throw LoggingLogger.ROOT_LOGGER.failedToConfigureLogging(e, configFile.getName());
    } finally {
        safeClose(configStream);
    }
    return null;
}
python
def create_historical_stream(directory, listener=None):
    """
    Uses streaming listener/cache to parse betfair historical data:
        https://historicdata.betfair.com/#/home

    :param str directory: Directory of betfair data
    :param BaseListener listener: Listener object; a default BaseListener
        is created when none is supplied

    :rtype: HistoricalStream
    """
    if not listener:
        listener = BaseListener()
    # Register the historical market subscription before streaming starts.
    listener.register_stream('HISTORICAL', 'marketSubscription')
    return HistoricalStream(directory, listener)
java
// GENERATED parser code (ANTLR/Xtext) — structure intentionally preserved.
/**
 * Entry rule for Disjunction: parses a complete Disjunction followed by EOF
 * and returns the resulting model object (or {@code null} on recovery).
 */
public final EObject entryRuleDisjunction() throws RecognitionException {
    EObject current = null;

    EObject iv_ruleDisjunction = null;

    try {
        // InternalXtext.g:1758:52: (iv_ruleDisjunction= ruleDisjunction EOF )
        // InternalXtext.g:1759:2: iv_ruleDisjunction= ruleDisjunction EOF
        {
            newCompositeNode(grammarAccess.getDisjunctionRule());
            pushFollow(FollowSets000.FOLLOW_1);
            iv_ruleDisjunction=ruleDisjunction();

            state._fsp--;

            current =iv_ruleDisjunction;
            match(input,EOF,FollowSets000.FOLLOW_2);
        }
    } catch (RecognitionException re) {
        // Standard ANTLR error recovery: resynchronise and keep the
        // skipped tokens attached to the node model.
        recover(input,re);
        appendSkippedTokens();
    } finally {
    }
    return current;
}
java
/**
 * Creates a SelectColumn from the given base columns plus any additional
 * column names appended to them.
 *
 * @param cols base column names
 * @param columns additional column names to append
 * @return the combined SelectColumn
 */
public static SelectColumn includes(String[] cols, String... columns) {
    // NOTE(review): the boolean flag presumably toggles exclude mode
    // (false = include the listed columns) — confirm against SelectColumn.
    return new SelectColumn(Utility.append(cols, columns), false);
}
python
def list_of(cls):
    """
    Returns a function that checks that each element in a
    list is of a specific type.
    """
    def _checker(candidate):
        # Must be an actual list whose every element is an instance of cls.
        if not isinstance(candidate, list):
            return False
        return all(isinstance(element, cls) for element in candidate)
    return _checker
java
/**
 * Adds a Python sink function to this data stream by wrapping it in a
 * {@link PythonSinkFunction}.
 *
 * <p>The snake_case name is intentional: this API mirrors Python naming
 * conventions for the Python streaming bridge.</p>
 *
 * @param sink_func the Python sink function to apply
 * @throws IOException on failure (presumably while serialising the
 *     function for shipping — confirm against PythonSinkFunction)
 */
@PublicEvolving
public void add_sink(SinkFunction<PyObject> sink_func) throws IOException {
    stream.addSink(new PythonSinkFunction(sink_func));
}
java
/**
 * Validates a named-output token: it must be non-empty and consist solely
 * of ASCII letters and digits.
 *
 * @param namedOutput the name to validate
 * @throws IllegalArgumentException if the name is null, empty, or contains
 *     a character outside [A-Za-z0-9]
 */
private static void checkTokenName(String namedOutput) {
    if (namedOutput == null || namedOutput.length() == 0) {
        // Fixed message: previously read "Name cannot be NULL or emtpy".
        throw new IllegalArgumentException(
            "Name cannot be NULL or empty");
    }
    for (char ch : namedOutput.toCharArray()) {
        boolean alphaNumeric = (ch >= 'A' && ch <= 'Z')
            || (ch >= 'a' && ch <= 'z')
            || (ch >= '0' && ch <= '9');
        if (!alphaNumeric) {
            // Fixed grammar: previously read "Name cannot be have a ...".
            throw new IllegalArgumentException(
                "Name cannot have a '" + ch + "' char");
        }
    }
}
python
def _convert_args(args): ''' Take a list of args, and convert any dicts inside the list to keyword args in the form of `key=value`, ready to be passed to salt-ssh ''' converted = [] for arg in args: if isinstance(arg, dict): for key in list(arg.keys()): if key == '__kwarg__': continue converted.append('{0}={1}'.format(key, arg[key])) else: converted.append(arg) return converted
java
/**
 * Centres the map on the given point; if the view has not been laid out
 * yet, the call is recorded and replayed once layout has occurred.
 *
 * @param point the new centre of the map
 */
@Override
public void setCenter(final IGeoPoint point) {
    // If no layout, delay this call
    if (!mMapView.isLayoutOccurred()) {
        mReplayController.setCenter(point);
        return;
    }
    mMapView.setExpectedCenter(point);
}
python
def parse(cls, root):
    """
    Create a new MDRef by parsing root.

    :param root: Element or ElementTree to be parsed into a MDRef.
    :raises exceptions.ParseError: If root is not an mdRef element, or a
        required attribute (MDTYPE, xlink:href, LOCTYPE) is missing or
        invalid.
    """
    if root.tag != utils.lxmlns("mets") + "mdRef":
        raise exceptions.ParseError(
            "MDRef can only parse mdRef elements with METS namespace."
        )
    # Required attributes
    mdtype = root.get("MDTYPE")
    if not mdtype:
        raise exceptions.ParseError("mdRef must have a MDTYPE")
    target = root.get(utils.lxmlns("xlink") + "href")
    if not target:
        raise exceptions.ParseError("mdRef must have an xlink:href.")
    try:
        # The href is stored URL-encoded; decode it for internal use.
        target = utils.urldecode(target)
    except ValueError:
        raise exceptions.ParseError(
            'Value "{}" (of attribute xlink:href) is not a valid'
            " URL.".format(target)
        )
    loctype = root.get("LOCTYPE")
    if not loctype:
        raise exceptions.ParseError("mdRef must have a LOCTYPE")
    # Optional attributes
    label = root.get("LABEL")
    otherloctype = root.get("OTHERLOCTYPE")
    return cls(target, mdtype, loctype, label, otherloctype)
python
def replaceVariables(path, datetime=None, pathvars=None):
    """Return absolute path(s) with path variables replaced as applicable.

    ``${name}`` placeholders are substituted from *pathvars*; a list value
    produces one output path per value. Afterwards any strftime directives
    are expanded against *datetime*.

    :param path: A path string, or a list of path strings.
    :param datetime: ``time.struct_time`` for strftime expansion; defaults
        to the current UTC time.
    :param pathvars: Mapping of variable name to a str/int value or a list
        of such values.
    :return: List of expanded paths.
    :raises TypeError: If a referenced path variable maps to a dict.
    """
    if datetime is None:
        datetime = time.gmtime()

    # if path variables are not given, set as empty list
    if pathvars is None:
        pathvars = []

    # create an init path list to loop through
    if isinstance(path, list):
        path_list = path
    else:
        path_list = [path]

    # Raw string avoids the invalid "\$" escape sequence that the original
    # non-raw pattern produced (a DeprecationWarning in Python 3.6+, and an
    # error in newer versions).
    regex = re.compile(r'\$\{(.*?)\}')

    # newpath_list accumulates the 'cleaned' paths with variables replaced
    newpath_list = []
    for p in path_list:
        newpath_list.append(p)

        # Variable replacement: find all the variables in path via the regex
        for k in regex.findall(p):
            if k in pathvars:
                v = pathvars[k]
                # Value of a variable must be a string, integer, or list
                if type(v) is dict:
                    msg = "Path variable must refer to string, integer, or list"
                    raise TypeError(msg)

                value_list = v if type(v) is list else [v]

                # Expand every path accumulated so far for each value.
                # NOTE(review): as in the original, this re-expands results
                # from earlier paths in the same call, which can duplicate
                # them when multiple input paths use list-valued variables.
                temp_list = []
                for v in value_list:
                    for newpath in newpath_list:
                        temp_list.append(newpath.replace('${%s}' % k, str(v)))
                newpath_list = temp_list

    # strftime translation over the fully variable-expanded paths
    for index, newpath in enumerate(newpath_list):
        newpath_list[index] = time.strftime(newpath, datetime)

    return newpath_list
java
/**
 * Adds a long-valued integer property to a CMIS property set by delegating
 * to the BigInteger variant.
 *
 * @param typeManager the type manager
 * @param props the property set to add to
 * @param typeId the object type id
 * @param filter the property filter
 * @param id the property id
 * @param value the property value
 */
public static void addPropertyInteger(
    CmsCmisTypeManager typeManager,
    PropertiesImpl props,
    String typeId,
    Set<String> filter,
    String id,
    long value) {

    addPropertyBigInteger(typeManager, props, typeId, filter, id, BigInteger.valueOf(value));
}
java
/**
 * Encodes the given source string as a URI component of the given type,
 * using the supplied charset to obtain the raw bytes.
 *
 * @param source the source string; may be {@code null}
 * @param encoding the charset name used to read the source bytes
 * @param type the URI component type driving the encoding rules
 * @return the encoded string (pure ASCII), or {@code null} when the source
 *     was {@code null}
 * @throws UnsupportedEncodingException if {@code encoding} is not supported
 */
static String encodeUriComponent(String source, String encoding, Type type)
        throws UnsupportedEncodingException {
    if (source == null) {
        return null;
    }
    Assert.hasLength(encoding, "Encoding must not be empty");
    byte[] bytes = encodeBytes(source.getBytes(encoding), type);
    // Percent-encoded output is ASCII by construction; the charset constant
    // avoids a name lookup and a spurious UnsupportedEncodingException path.
    return new String(bytes, java.nio.charset.StandardCharsets.US_ASCII);
}
java
/**
 * Deserialises a Hive list column into Bitcoin transaction inputs.
 *
 * <p>Each list element is a struct carrying the previous transaction hash,
 * previous output index, script length, script bytes and sequence number.
 * If any expected struct field is missing, a warning is logged and an
 * empty list is returned (the whole row is treated as invalid).</p>
 *
 * @param loi inspector for the list column
 * @param listOfInputsObject the raw list object to deserialise
 * @return the decoded inputs, or an empty list when a field is missing
 */
private List<BitcoinTransactionInput> readListOfInputsFromTable(ListObjectInspector loi, Object listOfInputsObject) {
    int listLength=loi.getListLength(listOfInputsObject);
    List<BitcoinTransactionInput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfInputsElementObjectInspector = (StructObjectInspector)loi.getListElementObjectInspector();
    for (int i=0;i<listLength;i++) {
        Object currentlistofinputsObject = loi.getListElement(listOfInputsObject,i);
        // Resolve the struct fields for the current element.
        StructField prevtransactionhashSF = listOfInputsElementObjectInspector.getStructFieldRef("prevtransactionhash");
        StructField previoustxoutindexSF = listOfInputsElementObjectInspector.getStructFieldRef("previoustxoutindex");
        StructField txinscriptlengthSF = listOfInputsElementObjectInspector.getStructFieldRef("txinscriptlength");
        StructField txinscriptSF = listOfInputsElementObjectInspector.getStructFieldRef("txinscript");
        StructField seqnoSF = listOfInputsElementObjectInspector.getStructFieldRef("seqno");
        boolean prevFieldsNull = (prevtransactionhashSF==null) || (previoustxoutindexSF==null);
        boolean inFieldsNull = (txinscriptlengthSF==null) || (txinscriptSF==null);
        boolean otherAttribNull = seqnoSF==null;
        if (prevFieldsNull || inFieldsNull || otherAttribNull) {
            // Missing field: the schema does not match; give up on the row.
            LOG.warn("Invalid BitcoinTransactionInput detected at position "+i);
            return new ArrayList<>();
        }
        // Extract the primitive values for this input.
        byte[] currentPrevTransactionHash = wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject,prevtransactionhashSF));
        long currentPreviousTxOutIndex = wloi.get(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject,previoustxoutindexSF));
        byte[] currentTxInScriptLength= wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject,txinscriptlengthSF));
        byte[] currentTxInScript= wboi.getPrimitiveJavaObject(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject,txinscriptSF));
        long currentSeqNo = wloi.get(listOfInputsElementObjectInspector.getStructFieldData(currentlistofinputsObject,seqnoSF));
        BitcoinTransactionInput currentBitcoinTransactionInput = new BitcoinTransactionInput(currentPrevTransactionHash,currentPreviousTxOutIndex,currentTxInScriptLength,currentTxInScript,currentSeqNo);
        result.add(currentBitcoinTransactionInput);
    }
    return result;
}
python
def ticket_satisfaction_rating_create(self, ticket_id, data, **kwargs):
    """Create a satisfaction rating for the given ticket.

    https://developer.zendesk.com/rest_api/docs/core/satisfaction_ratings#create-a-satisfaction-rating
    """
    template = "/api/v2/tickets/{ticket_id}/satisfaction_rating.json"
    api_path = template.format(ticket_id=ticket_id)
    return self.call(api_path, method="POST", data=data, **kwargs)
java
/**
 * Counts the selected features across all vector layers.
 *
 * @return the total number of selected features
 */
public int getNrSelectedFeatures() {
    int count = 0;
    for (VectorLayer layer : getVectorLayers()) {
        count += layer.getSelectedFeatures().size();
    }
    return count;
}
java
/**
 * Sets a floating-point kernel parameter at the given byte offset via the
 * native CUDA driver call, converting the result through checkResult.
 *
 * @param hfunc the kernel function
 * @param offset byte offset of the parameter
 * @param value the float value to set
 * @return the CUDA result code
 * @deprecated part of the legacy CUDA parameter-passing API
 */
@Deprecated
public static int cuParamSetf(CUfunction hfunc, int offset, float value) {
    return checkResult(cuParamSetfNative(hfunc, offset, value));
}
java
/**
 * Converts a DWG circle entity into a polygon SimpleFeature.
 *
 * <p>The circle is approximated by a point sequence, closed into a linear
 * ring if necessary, and wrapped in a feature carrying the geometry and the
 * source layer name.</p>
 *
 * @param typeName name for the generated feature type
 * @param layerName DWG layer the circle came from
 * @param circle the circle entity to convert
 * @param id numeric id appended to the feature id
 * @return the polygon feature for the circle
 */
public SimpleFeature convertDwgCircle( String typeName, String layerName, DwgCircle circle, int id ) {
    double[] center = circle.getCenter();
    double radius = circle.getRadius();
    // Approximate the circle as a sequence of points.
    Point2D[] ptos = GisModelCurveCalculator.calculateGisModelCircle(new Point2D.Double(
            center[0], center[1]), radius);
    CoordinateList coordList = new CoordinateList();
    for( int j = 0; j < ptos.length; j++ ) {
        Coordinate coord = new Coordinate(ptos[j].getX(), ptos[j].getY(), 0.0);
        coordList.add(coord);
    }

    // close to create a polygon
    if ((ptos[ptos.length - 1].getX() != ptos[0].getX())
            || (ptos[ptos.length - 1].getY() != ptos[0].getY())) {
        Coordinate coord = new Coordinate(ptos[0].getX(), ptos[0].getY(), 0.0);
        coordList.add(coord);
    }

    // Feature type: polygon geometry plus the originating layer name.
    SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
    b.setName(typeName);
    b.setCRS(crs);
    b.add(THE_GEOM, Polygon.class);
    b.add(LAYER, String.class);
    SimpleFeatureType type = b.buildFeatureType();
    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(type);
    LinearRing linearRing = gF.createLinearRing(coordList.toCoordinateArray());
    Geometry polygon = gF.createPolygon(linearRing, null);
    Object[] values = new Object[]{polygon, layerName};
    builder.addAll(values);
    return builder.buildFeature(typeName + "." + id);
}
python
def _rfc822(date):
    """Parse an RFC 822 date/time string and return a UTC ``datetime``.

    http://tools.ietf.org/html/rfc822#section-5

    There are some formatting differences that are accounted for:
    1. Years may be two or four digits.
    2. The month and day can be swapped.
    3. Additional timezone names are supported.
    4. A default time and timezone are assumed if only a date is present.

    Returns None when the string cannot be interpreted.
    """
    daynames = set(['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'])
    months = {
        'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6,
        'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12,
    }
    # Timezone abbreviations mapped to their hour offsets from UTC
    # (includes some single-letter military zones).
    timezonenames = {
        'ut': 0, 'gmt': 0, 'z': 0,
        'adt': -3, 'ast': -4, 'at': -4,
        'edt': -4, 'est': -5, 'et': -5,
        'cdt': -5, 'cst': -6, 'ct': -6,
        'mdt': -6, 'mst': -7, 'mt': -7,
        'pdt': -7, 'pst': -8, 'pt': -8,
        'a': -1, 'n': 1, 'm': -12, 'y': 12,
    }
    parts = date.lower().split()
    if len(parts) < 5:
        # Assume that the time and timezone are missing
        parts.extend(('00:00:00', '0000'))
    # Remove the day name
    if parts[0][:3] in daynames:
        parts = parts[1:]
    if len(parts) < 5:
        # If there are still fewer than five parts, there's not enough
        # information to interpret this
        return None
    try:
        day = int(parts[0])
    except ValueError:
        # Check if the day and month are swapped
        if months.get(parts[0][:3]):
            try:
                day = int(parts[1])
            except ValueError:
                return None
            else:
                # Swap so the month lookup below reads the month token.
                parts[1] = parts[0]
        else:
            return None
    month = months.get(parts[1][:3])
    if not month:
        return None
    try:
        year = int(parts[2])
    except ValueError:
        return None
    # Normalize two-digit years:
    # Anything in the 90's is interpreted as 1990 and on
    # Anything 89 or less is interpreted as 2089 or before
    if len(parts[2]) <= 2:
        year += (1900, 2000)[year < 90]
    timeparts = parts[3].split(':')
    # Pad missing minute/second components with zero.
    timeparts = timeparts + ([0] * (3 - len(timeparts)))
    try:
        (hour, minute, second) = map(int, timeparts)
    except ValueError:
        return None
    tzhour = 0
    tzmin = 0
    # Strip 'Etc/' from the timezone
    if parts[4].startswith('etc/'):
        parts[4] = parts[4][4:]
    # Normalize timezones that start with 'gmt':
    # GMT-05:00 => -0500
    # GMT => GMT
    if parts[4].startswith('gmt'):
        parts[4] = ''.join(parts[4][3:].split(':')) or 'gmt'
    # Handle timezones like '-0500', '+0500', and 'EST'
    if parts[4] and parts[4][0] in ('-', '+'):
        try:
            tzhour = int(parts[4][1:3])
            tzmin = int(parts[4][3:])
        except ValueError:
            return None
        if parts[4].startswith('-'):
            tzhour = tzhour * -1
            tzmin = tzmin * -1
    else:
        # Named timezone; unrecognized names fall back to UTC (offset 0).
        tzhour = timezonenames.get(parts[4], 0)
    # Create the datetime object and timezone delta objects
    try:
        stamp = datetime.datetime(year, month, day, hour, minute, second)
    except ValueError:
        return None
    delta = datetime.timedelta(0, 0, 0, 0, tzmin, tzhour)
    # Subtract the offset to convert the local timestamp to a UTC datetime.
    try:
        return stamp - delta
    except OverflowError:
        return None
java
/**
 * Fetches the next page of diagnostic categories for a deployment slot,
 * unwrapping the service response to expose only the page payload.
 *
 * @param nextPageLink the link to the next page of results
 * @return an observable emitting the next {@link Page} of
 *         {@link DiagnosticCategoryInner} objects
 */
public Observable<Page<DiagnosticCategoryInner>> listSiteDiagnosticCategoriesSlotNextAsync(final String nextPageLink) {
    // Adapter that strips the ServiceResponse wrapper, keeping only the body.
    Func1<ServiceResponse<Page<DiagnosticCategoryInner>>, Page<DiagnosticCategoryInner>> extractBody =
        new Func1<ServiceResponse<Page<DiagnosticCategoryInner>>, Page<DiagnosticCategoryInner>>() {
            @Override
            public Page<DiagnosticCategoryInner> call(ServiceResponse<Page<DiagnosticCategoryInner>> serviceResponse) {
                return serviceResponse.body();
            }
        };
    return listSiteDiagnosticCategoriesSlotNextWithServiceResponseAsync(nextPageLink).map(extractBody);
}