language: stringclasses (2 values)
func_code_string: stringlengths (63 – 466k)
python
def diamondTabularFormatToDicts(filename, fieldNames=None):
    """
    Read DIAMOND tabular (--outfmt 6) output and convert lines to
    dictionaries.

    @param filename: Either a C{str} file name or an open file pointer.
    @param fieldNames: A C{list} or C{tuple} of C{str} DIAMOND field names.
        Run 'diamond -help' to see the full list. If C{None}, a default set
        of fields will be used, as compatible with convert-diamond-to-sam.py
    @raise ValueError: If a line of C{filename} does not have the expected
        number of TAB-separated fields (i.e., len(fieldNames)). Or if
        C{fieldNames} is empty or contains duplicates.
    @return: A generator that yields C{dict}s with keys that are the DIAMOND
        field names and values as converted by DIAMOND_FIELD_CONVERTER.
    """
    fieldNames = fieldNames or FIELDS.split()
    nFields = len(fieldNames)
    if not nFields:
        raise ValueError('fieldNames cannot be empty.')
    c = Counter(fieldNames)
    if c.most_common(1)[0][1] > 1:
        raise ValueError(
            'fieldNames contains duplicated names: %s.' %
            (', '.join(sorted(x[0] for x in c.most_common() if x[1] > 1))))

    def identity(x):
        return x

    convertFunc = DIAMOND_FIELD_CONVERTER.get
    with as_handle(filename) as fp:
        for count, line in enumerate(fp, start=1):
            result = {}
            line = line[:-1]
            values = line.split('\t')
            if len(values) != nFields:
                raise ValueError(
                    'Line %d of %s had %d field values (expected %d). '
                    'To provide input for this function, DIAMOND must be '
                    'called with "--outfmt 6 %s" (without the quotes). '
                    'The offending input line was %r.' %
                    (count,
                     (filename if isinstance(filename, six.string_types)
                      else 'input'),
                     len(values), nFields, FIELDS, line))
            for fieldName, value in zip(fieldNames, values):
                value = convertFunc(fieldName, identity)(value)
                result[fieldName] = value
            yield result
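A minimal usage sketch for the generator above. The import path is an assumption (this helper appears to come from the dark-matter package), and the input file name is hypothetical.

# Hypothetical usage sketch -- the module path and file name are assumptions.
from dark.diamond.conversion import diamondTabularFormatToDicts

for hit in diamondTabularFormatToDicts('diamond-output.tsv'):
    # Each hit is a dict keyed by DIAMOND field names (e.g. 'bitscore'),
    # with values already run through DIAMOND_FIELD_CONVERTER.
    print(hit)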
java
public String saveExecutionResult(String username, String password, Vector<?> args) {
    return confluenceServiceDelegator.saveExecutionResult(username, password, args);
}
java
public void marshall(CommandFilter commandFilter, ProtocolMarshaller protocolMarshaller) {
    if (commandFilter == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(commandFilter.getKey(), KEY_BINDING);
        protocolMarshaller.marshall(commandFilter.getValue(), VALUE_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public void setIcon(Icon icon) {
    Icon old = null;
    if (iconInfo == CommandButtonIconInfo.BLANK_ICON_INFO) {
        if (icon != null) {
            // New IconInfo fires event
            setIconInfo(new CommandButtonIconInfo(icon));
        }
    } else {
        old = iconInfo.getIcon();
        this.iconInfo.setIcon(icon);
    }
    firePropertyChange(ICON_PROPERTY, old, icon);
}
java
public void marshall(DeleteBackupRequest deleteBackupRequest, ProtocolMarshaller protocolMarshaller) {
    if (deleteBackupRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(deleteBackupRequest.getBackupArn(), BACKUPARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def get_contacts_of_client_per_page(self, client_id, per_page=1000, page=1):
    """
    Get contacts of client per page

    :param client_id: the client id
    :param per_page: How many objects per page. Default: 1000
    :param page: Which page. Default: 1
    :return: list
    """
    return self._get_resource_per_page(
        resource=CONTACTS,
        per_page=per_page,
        page=page,
        params={'client_id': client_id},
    )
python
def pos_int(i):
    """
    Simple positive integer validation (zero is accepted).
    """
    try:
        if isinstance(i, string_types):
            i = int(i)
        if not isinstance(i, int) or i < 0:
            raise Exception()
    except Exception:
        raise ValueError("Not a positive integer")
    return i
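A short behavioral sketch of pos_int as defined above (string_types comes from six); note that 0 passes validation despite the name.

# Behavior sketch for pos_int (defined above).
assert pos_int(7) == 7
assert pos_int("42") == 42      # numeric strings are coerced to int
assert pos_int(0) == 0          # zero is accepted despite the name
try:
    pos_int(-3)
except ValueError as exc:
    print(exc)                  # "Not a positive integer"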
java
protected void printError(final Span references[], final Span predictions[],
        final T referenceSample, final T predictedSample, final String sentence) {
    final List<Span> falseNegatives = new ArrayList<Span>();
    final List<Span> falsePositives = new ArrayList<Span>();

    findErrors(references, predictions, falseNegatives, falsePositives);

    if (falsePositives.size() + falseNegatives.size() > 0) {
        printSamples(referenceSample, predictedSample);
        printErrors(falsePositives, falseNegatives, sentence);
    }
}
java
public void setAjaxStartEvent(ISliderAjaxEvent ajaxStartEvent) {
    this.ajaxEvents.put(SliderAjaxEvent.ajaxStartEvent, ajaxStartEvent);
    setSlideEvent(new SliderAjaxJsScopeUiEvent(this, SliderAjaxEvent.ajaxStartEvent));
}
java
public static double getGradDotDirApprox(Function fn, IntDoubleVector x, IntDoubleVector d, double c) {
    double dot = 0;
    {
        // L(\theta + c * d)
        IntDoubleVector d1 = d.copy();
        d1.scale(c);
        IntDoubleVector x1 = x.copy();
        x1.add(d1);
        dot += fn.getValue(x1);
    }
    {
        // - L(\theta - c * d)
        IntDoubleVector d1 = d.copy();
        d1.scale(-c);
        IntDoubleVector x1 = x.copy();
        x1.add(d1);
        dot -= fn.getValue(x1);
    }
    dot /= (2.0 * c);
    return dot;
}
python
def p_statement_while(p):
    'statement : WHILE LPAREN expr RPAREN while_statement'
    p[0] = ast.While(p[3], p[5], lineno=p.lineno(1))
python
def _add_hookimpl(self, hookimpl):
    """Add an implementation to the callback chain.
    """
    if hookimpl.hookwrapper:
        methods = self._wrappers
    else:
        methods = self._nonwrappers

    if hookimpl.trylast:
        methods.insert(0, hookimpl)
    elif hookimpl.tryfirst:
        methods.append(hookimpl)
    else:
        # find last non-tryfirst method
        i = len(methods) - 1
        while i >= 0 and methods[i].tryfirst:
            i -= 1
        methods.insert(i + 1, hookimpl)

    if "__multicall__" in hookimpl.argnames:
        warnings.warn(
            "Support for __multicall__ is now deprecated and will be "
            "removed in an upcoming release.",
            DeprecationWarning,
        )
        self.multicall = _legacymulticall
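A self-contained sketch of the ordering rule implemented above: trylast implementations go to the front of the list, tryfirst ones to the back, and plain ones just before the tryfirst block (pluggy calls hooks from the end of the list first). The tiny Impl tuple here is an illustrative stand-in, not pluggy's real HookImpl.

from collections import namedtuple

# Illustrative stand-in for pluggy's HookImpl; only the flags matter here.
Impl = namedtuple('Impl', 'name tryfirst trylast')

def add(methods, impl):
    if impl.trylast:
        methods.insert(0, impl)
    elif impl.tryfirst:
        methods.append(impl)
    else:
        i = len(methods) - 1
        while i >= 0 and methods[i].tryfirst:
            i -= 1
        methods.insert(i + 1, impl)

methods = []
for impl in (Impl('a', True, False), Impl('b', False, False),
             Impl('c', False, True)):
    add(methods, impl)
print([m.name for m in methods])  # ['c', 'b', 'a']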
python
def plugin_class_validation(self, plugin_class):
    """Plugin validation

    Every workbench plugin must have a dependencies list (even if it's
    empty). Every workbench plugin must have an execute method.

    Args:
        plugin_class: The loaded plugin class.

    Returns:
        True if dependencies and execute are present, else False.
    """
    try:
        getattr(plugin_class, 'dependencies')
        getattr(plugin_class, 'execute')
    except AttributeError:
        return False
    return True
python
def load(self, config):
    """Load the web list from the configuration file."""
    web_list = []

    if config is None:
        logger.debug("No configuration file available. Cannot load ports list.")
    elif not config.has_section(self._section):
        logger.debug("No [%s] section in the configuration file. Cannot load ports list." % self._section)
    else:
        logger.debug("Start reading the [%s] section in the configuration file" % self._section)

        refresh = int(config.get_value(self._section, 'refresh',
                                       default=self._default_refresh))
        timeout = int(config.get_value(self._section, 'timeout',
                                       default=self._default_timeout))

        # Read the web/url list
        for i in range(1, 256):
            new_web = {}
            postfix = 'web_%s_' % str(i)

            # Read the mandatory configuration key: url
            new_web['url'] = config.get_value(self._section, '%s%s' % (postfix, 'url'))
            if new_web['url'] is None:
                continue
            url_parse = urlparse(new_web['url'])
            if not bool(url_parse.scheme) or not bool(url_parse.netloc):
                logger.error('Bad URL (%s) in the [%s] section of configuration file.'
                             % (new_web['url'], self._section))
                continue

            # Read optional configuration keys
            # Default description is the URL without the http://
            new_web['description'] = config.get_value(self._section,
                                                      '%sdescription' % postfix,
                                                      default="%s" % url_parse.netloc)

            # Default status
            new_web['status'] = None
            new_web['elapsed'] = 0

            # Refresh rate in second
            new_web['refresh'] = refresh

            # Timeout in second
            new_web['timeout'] = int(config.get_value(self._section,
                                                      '%stimeout' % postfix,
                                                      default=timeout))

            # RTT warning
            new_web['rtt_warning'] = config.get_value(self._section,
                                                      '%srtt_warning' % postfix,
                                                      default=None)
            if new_web['rtt_warning'] is not None:
                # Convert to second
                new_web['rtt_warning'] = int(new_web['rtt_warning']) / 1000.0

            # Indice
            new_web['indice'] = 'web_' + str(i)

            # ssl_verify
            new_web['ssl_verify'] = config.get_value(self._section,
                                                     '%sssl_verify' % postfix,
                                                     default=True)
            # Proxy
            http_proxy = config.get_value(self._section,
                                          '%shttp_proxy' % postfix,
                                          default=None)
            https_proxy = config.get_value(self._section,
                                           '%shttps_proxy' % postfix,
                                           default=None)
            if https_proxy is None and http_proxy is None:
                new_web['proxies'] = None
            else:
                new_web['proxies'] = {'http': http_proxy, 'https': https_proxy}

            # Add the server to the list
            logger.debug("Add Web URL %s to the static list" % new_web['url'])
            web_list.append(new_web)

    # Ports list loaded
    logger.debug("Web list loaded: %s" % web_list)
    return web_list
java
protected JvmTypeReference inferFunctionReturnType(XtendFunction source, JvmOperation target,
        JvmOperation overriddenOperation) {
    // The return type is explicitly given
    if (source.getReturnType() != null) {
        return ensureValidType(source.eResource(), source.getReturnType());
    }

    // A super operation was detected => reuse its return type.
    if (overriddenOperation != null) {
        final JvmTypeReference type = overriddenOperation.getReturnType();
        //return cloneWithProxiesFromOtherResource(type, target);
        return this.typeReferences.createDelegateTypeReference(type);
    }

    // Return type is inferred from the operation's expression.
    final XExpression expression = source.getExpression();
    JvmTypeReference returnType = null;
    if (expression != null
            && ((!(expression instanceof XBlockExpression))
                || (!((XBlockExpression) expression).getExpressions().isEmpty()))) {
        returnType = inferFunctionReturnType(expression);
    }
    return ensureValidType(source.eResource(), returnType);
}
python
def words_for_language(language_code):
    """
    Return the math words for a language code.

    The language_code should be an ISO 639-2 language code.
    https://www.loc.gov/standards/iso639-2/php/code_list.php
    """
    word_groups = word_groups_for_language(language_code)
    words = []
    for group in word_groups:
        words.extend(word_groups[group].keys())
    return words
python
def as_issue(self):
    """
    :calls: `GET /repos/:owner/:repo/issues/:number <http://developer.github.com/v3/issues>`_
    :rtype: :class:`github.Issue.Issue`
    """
    headers, data = self._requester.requestJsonAndCheck(
        "GET",
        self.issue_url
    )
    return github.Issue.Issue(self._requester, headers, data, completed=True)
python
def set_args(self, args, unknown_args=None):
    """
    Configure job, based on the arguments provided.
    """
    if unknown_args is None:
        unknown_args = []
    self.logger.setLevel(getattr(logging, args.log_level))

    parent = hdfs.path.dirname(hdfs.path.abspath(args.output.rstrip("/")))
    self.remote_wd = hdfs.path.join(
        parent, utils.make_random_str(prefix="pydoop_submit_")
    )
    self.remote_exe = hdfs.path.join(self.remote_wd, str(uuid.uuid4()))
    self.properties[JOB_NAME] = args.job_name or 'pydoop'
    self.properties[IS_JAVA_RR] = (
        'false' if args.do_not_use_java_record_reader else 'true'
    )
    self.properties[IS_JAVA_RW] = (
        'false' if args.do_not_use_java_record_writer else 'true'
    )
    self.properties[JOB_REDUCES] = args.num_reducers
    if args.job_name:
        self.properties[JOB_NAME] = args.job_name
    self.properties.update(args.job_conf or {})
    self.__set_files_to_cache(args)
    self.__set_archives_to_cache(args)
    self.requested_env = self._env_arg_to_dict(args.set_env or [])
    self.args = args
    self.unknown_args = unknown_args
python
def flush(self, fsync=False):
    """
    Force all buffered modifications to be written to disk.

    Parameters
    ----------
    fsync : bool (default False)
        call ``os.fsync()`` on the file handle to force writing to disk.

    Notes
    -----
    Without ``fsync=True``, flushing may not guarantee that the OS writes
    to disk. With fsync, the operation will block until the OS claims the
    file has been written; however, other caching layers may still
    interfere.
    """
    if self._handle is not None:
        self._handle.flush()
        if fsync:
            try:
                os.fsync(self._handle.fileno())
            except OSError:
                pass
python
def addImg(self, img, maxShear=0.015, maxRot=100, minMatches=12,
           borderWidth=3):  # borderWidth=100
    """
    Args:
        img (path or array): image containing the same object as in
            the reference image
    Kwargs:
        maxShear (float): In order to define a good fit, reject higher
            shear values between this and the reference image
        maxRot (float): Same for rotation
        minMatches (int): Minimum of matching points found in both, this
            and the reference image
    """
    try:
        fit, img, H, H_inv, nmatched = self._fitImg(img)
    except Exception as e:
        print(e)
        return

    # CHECK WHETHER FIT IS GOOD ENOUGH:
    (translation, rotation, scale, shear) = decompHomography(H)
    print('Homography ...\n\ttranslation: %s\n\trotation: %s\n\tscale: %s\n\tshear: %s'
          % (translation, rotation, scale, shear))
    if (nmatched > minMatches
            and abs(shear) < maxShear
            and abs(rotation) < maxRot):
        print('==> img added')
        # HOMOGRAPHY:
        self.Hs.append(H)
        # INVERSE HOMOGRAPHY
        self.Hinvs.append(H_inv)
        # IMAGES WARPED TO THE BASE IMAGE
        self.fits.append(fit)

        # ADD IMAGE TO THE INITIAL flatField ARRAY:
        i = img > self.signal_ranges[-1][0]
        # remove borders (that might have erroneous light):
        i = minimum_filter(i, borderWidth)
        self._ff_mma.update(img, i)

        # create fit img mask:
        mask = fit < self.signal_ranges[-1][0]
        mask = maximum_filter(mask, borderWidth)
        # IGNORE BORDER
        r = self.remove_border_size
        if r:
            mask[:r, :] = 1
            mask[-r:, :] = 1
            mask[:, -r:] = 1
            mask[:, :r] = 1
        self._fit_masks.append(mask)

        # image added
        return fit
    return False
java
@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
        byte[] value, Delete delete) throws IOException {
    return checkAndDelete(row, family, qualifier, CompareFilter.CompareOp.EQUAL, value, delete);
}
java
public boolean isValid(ChronoElement<Long> element, long value) {
    return this.isValid(element, Long.valueOf(value));
}
java
public static <T> List<Class<?>> getPublicConcreteSubTypesOf(final Class<T> type) {
    return ClassGraphFacade.getPublicConcreteSubTypesOf(type);
}
python
def json_or_jsonp(func):
    """Wrap response in JSON or JSONP style"""
    @wraps(func)
    def _(*args, **kwargs):
        mimetype = 'application/javascript'
        callback = request.args.get('callback', None)
        if callback is None:
            content = func(*args, **kwargs)
        else:
            content = "%s(%s)" % (callback, func(*args, **kwargs))
        return current_app.response_class(content, mimetype=mimetype)
    return _
python
def _prepair(self):
    '''Try to connect to the given dbus services. If successful it will
    return a callable dbus proxy and its arguments.
    '''
    try:
        sessionbus = dbus.SessionBus()
        systembus = dbus.SystemBus()
    except Exception:
        return (None, None)
    for dbus_props in self.DBUS_SHUTDOWN.values():
        try:
            if dbus_props['bus'] == SESSION_BUS:
                bus = sessionbus
            else:
                bus = systembus
            interface = bus.get_object(dbus_props['service'],
                                       dbus_props['objectPath'])
            proxy = interface.get_dbus_method(dbus_props['method'],
                                              dbus_props['interface'])
            return (proxy, dbus_props['arguments'])
        except dbus.exceptions.DBusException:
            continue
    return (None, None)
java
public static Matcher<JsonElement> areItemsValid(final Validator validator) {
    return new TypeSafeDiagnosingMatcher<JsonElement>() {
        @Override
        protected boolean matchesSafely(JsonElement item, Description mismatchDescription) {
            // we do not care for the properties if parent item is not JsonArray
            if (!item.isJsonArray()) return true;
            for (int i = 0; i < item.asJsonArray().length(); i++) {
                StringBuilder sb = new StringBuilder();
                if (!validator.validate(item.asJsonArray().opt(i), sb)) {
                    mismatchDescription
                            .appendText("item at pos: " + i
                                    + ", does not validate by validator " + validator.getTitle())
                            .appendText("\nDetails: ")
                            .appendText(sb.toString());
                    return false;
                }
            }
            return true;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("are array items valid");
        }
    };
}
python
def register_by_twine(self):
    """
    register via the twine method
    :return:
    """
    check_call_no_output([
        '{}'.format(self.python),
        'setup.py',
        'bdist_wheel',
    ])
    # at this point there should be only one file in the 'dist' folder
    filename = self.get_package_filename()
    check_call_no_output([
        'twine',
        'register',
        filename,
    ])
python
def decode_bytes(byt, enc='utf-8'):
    """Given a string or bytes input, return a string.

    Args:
        byt - bytes or string
        enc - encoding to use for decoding the byte string.
    """
    try:
        strg = byt.decode(enc)
    except UnicodeDecodeError as err:
        strg = "Unable to decode message:\n{}\n{}".format(str(byt), err)
    except (AttributeError, UnicodeEncodeError):
        # If byt is already a string, just return it
        return byt
    return strg
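A quick usage sketch of the three paths through decode_bytes as defined above:

# Bytes are decoded, strings pass through, undecodable input yields a message.
print(decode_bytes(b'caf\xc3\xa9'))        # 'café'
print(decode_bytes('already text'))        # 'already text' (AttributeError path)
print(decode_bytes(b'\xff\xfe'))           # "Unable to decode message: ..." fallback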
java
public void setDeploymentInfoCustomizers(
        Collection<? extends UndertowDeploymentInfoCustomizer> customizers) {
    Assert.notNull(customizers, "Customizers must not be null");
    this.deploymentInfoCustomizers = new ArrayList<>(customizers);
}
java
protected void push(String location, FragmentBuilder builder, Node node) {
    // Check if any in content should be processed for the current node
    processInContent(location, builder, -1);
    builder.pushNode(node);
}
java
public static String formatBytes (byte[] data, int start, int count) {
    return formatBytes(data, start, count, true);
}
python
def accumulate_impl(func, sequence):
    # pylint: disable=no-name-in-module
    """
    Implementation for accumulate
    :param sequence: sequence to accumulate
    :param func: accumulate function
    """
    if six.PY3:
        from itertools import accumulate
        return accumulate(sequence, func)
    else:
        return _accumulate(sequence, func)
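A usage sketch, assuming accumulate_impl is importable; note the (func, sequence) argument order, which is the reverse of itertools.accumulate.

import operator

# Running sum and running product over the same sequence.
print(list(accumulate_impl(operator.add, [1, 2, 3, 4])))  # [1, 3, 6, 10]
print(list(accumulate_impl(operator.mul, [1, 2, 3, 4])))  # [1, 2, 6, 24]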
java
private static String loggerRequest(HttpUriRequest request) {
    String id = UUID.randomUUID().toString();
    if (logger.isInfoEnabled() || logger.isDebugEnabled()) {
        if (request instanceof HttpEntityEnclosingRequestBase) {
            HttpEntityEnclosingRequestBase request_base = (HttpEntityEnclosingRequestBase) request;
            HttpEntity entity = request_base.getEntity();
            String content = null;
            // Check whether the request type is MULTIPART_FORM_DATA
            if (entity.getContentType().toString().indexOf(ContentType.MULTIPART_FORM_DATA.getMimeType()) == -1) {
                try {
                    content = EntityUtils.toString(entity);
                } catch (Exception e) {
                    logger.error("logger content data get error", e);
                }
            }
            logger.info("URI[{}] {} {} ContentLength:{} Content:{}",
                    id,
                    request.getURI().toString(),
                    entity.getContentType(),
                    entity.getContentLength(),
                    content == null ? "multipart_form_data" : content);
        } else {
            logger.info("URI[{}] {}", id, request.getURI().toString());
        }
    }
    return id;
}
java
public ServiceFuture<RouteTableInner> updateTagsAsync(String resourceGroupName, String routeTableName,
        Map<String, String> tags, final ServiceCallback<RouteTableInner> serviceCallback) {
    return ServiceFuture.fromResponse(
            updateTagsWithServiceResponseAsync(resourceGroupName, routeTableName, tags),
            serviceCallback);
}
java
protected void processClass (File source) {
    CtClass clazz;
    InputStream in = null;
    try {
        clazz = _pool.makeClass(in = new BufferedInputStream(new FileInputStream(source)));
    } catch (IOException ioe) {
        System.err.println("Failed to load " + source + ": " + ioe);
        return;
    } finally {
        StreamUtil.close(in);
    }
    try {
        if (clazz.subtypeOf(_streamable)) {
            processStreamable(source, clazz);
        }
    } catch (NotFoundException nfe) {
        System.err.println("Error processing class [class=" + clazz.getName() +
                           ", error=" + nfe + "].");
    }
}
python
def parse_packet(packet):
    """Parse a beacon advertisement packet."""
    frame = parse_ltv_packet(packet)
    if frame is None:
        frame = parse_ibeacon_packet(packet)
    return frame
java
public HttpClientResponseBuilder doReturnJSON(String response, Charset charset) {
    return doReturn(response, charset).withHeader("Content-type", APPLICATION_JSON.toString());
}
python
def _convert_todo(self, p_todo):
    """ Converts a Todo instance (Topydo) to an icalendar Todo instance. """

    def _get_uid(p_todo):
        """
        Gets a unique ID from a todo item, stored by the ical tag. If the
        tag is not present, a random value is assigned to it and returned.
        """
        def generate_uid(p_length=4):
            """
            Generates a random string of the given length, used as
            identifier.
            """
            return ''.join(
                random.choice(string.ascii_letters + string.digits)
                for i in range(p_length))

        uid = p_todo.tag_value('ical')
        if not uid:
            uid = generate_uid()
            p_todo.set_tag('ical', uid)
            self.todolist.dirty = True
        return uid

    result = self.icalendar.Todo()

    # this should be called first, it may set the ical: tag and therefore
    # change the source() output.
    result['uid'] = _get_uid(p_todo)
    result['summary'] = self.icalendar.vText(p_todo.text())
    result['description'] = self.icalendar.vText(p_todo.source())
    result.add('priority', _convert_priority(p_todo.priority()))

    start = p_todo.start_date()
    if start:
        result.add('dtstart', start)

    due = p_todo.due_date()
    if due:
        result.add('due', due)

    created = p_todo.creation_date()
    if created:
        result.add('created', created)

    completed = p_todo.completion_date()
    if completed:
        completed = datetime.combine(completed, time(0, 0))
        result.add('completed', completed)

    return result
java
public V findChachedValueOf(K key) {
    // check selection
    for (V value : selection) {
        if (getKeyOf(value).equals(key)) {
            return value;
        }
    }
    // check data cache
    for (V value : getData()) {
        if (getKeyOf(value).equals(key)) {
            return value;
        }
    }
    return null;
}
python
def export(context, keywords, module, update):
    """Operate on libraries and exported functions.

    Query the module name containing the function by default.
    Windows database must be prepared before using this.
    """
    logging.info(_('Export Mode'))
    database = context.obj['sense']
    none = True
    if update:
        exports = OrderedDict()
        from .executables.pe import PE
        for filename in keywords:
            module = split_ext(filename, basename=True)[0]
            with open(filename, 'rb') as stream:
                exports.update(
                    {module: PE(stream).get_export_table()})
        database.make_export(exports)
        none = False
    elif module:
        for module_name in keywords:
            funcs = database.query_module_funcs(module_name)
            if funcs:
                none = False
                print(', '.join(map(str, funcs)))
            else:
                logging.warning(_('No function for module: %s'), module_name)
    else:
        for func_name in keywords:
            module_name = database.query_func_module(func_name)
            if module_name:
                none = False
                print(repr(module_name))
            else:
                logging.warning(_('No module for function: %s'), func_name)
    sys.exit(1 if none else 0)
java
public ArrayList returnErrors() {
    if (_containerErrors != null)
        return _containerErrors.returnErrors();
    ArrayList e = _errors;
    _errors = null;
    return e;
}
python
def topology_mdtraj(traj):
    '''Generate topology spec for the MolecularViewer from mdtraj.

    :param mdtraj.Trajectory traj: the trajectory
    :return: A chemview-compatible dictionary corresponding to the topology
        defined in mdtraj.
    '''
    import mdtraj as md

    top = {}
    top['atom_types'] = [a.element.symbol for a in traj.topology.atoms]
    top['atom_names'] = [a.name for a in traj.topology.atoms]
    top['bonds'] = [(a.index, b.index) for a, b in traj.topology.bonds]
    top['secondary_structure'] = md.compute_dssp(traj[0])[0]
    top['residue_types'] = [r.name for r in traj.topology.residues]
    top['residue_indices'] = [[a.index for a in r.atoms]
                              for r in traj.topology.residues]
    return top
python
def safe(self, x):
    """removes nans and infs from outputs."""
    x[np.isinf(x)] = 1
    x[np.isnan(x)] = 1
    return x
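A sketch of what safe does to an array: it mutates in place and returns the same array. Illustrated here standalone with the same two masking assignments the method uses.

import numpy as np

x = np.array([1.0, np.inf, np.nan, -np.inf])
# Replace every inf/nan with 1, exactly as safe() does.
x[np.isinf(x)] = 1
x[np.isnan(x)] = 1
print(x)  # [1. 1. 1. 1.]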
python
def _save_archive(self):
    """Saves the JSON archive of processed pull requests.
    """
    import json
    from utility import json_serial
    with open(self.archpath, 'w') as f:
        json.dump(self.archive, f, default=json_serial)
java
public ContainerDefinition withVolumesFrom(VolumeFrom... volumesFrom) {
    if (this.volumesFrom == null) {
        setVolumesFrom(new com.amazonaws.internal.SdkInternalList<VolumeFrom>(volumesFrom.length));
    }
    for (VolumeFrom ele : volumesFrom) {
        this.volumesFrom.add(ele);
    }
    return this;
}
python
def face_encodings(face_image, known_face_locations=None, num_jitters=1):
    """
    Given an image, return the 128-dimension face encoding for each face in the image.

    :param face_image: The image that contains one or more faces
    :param known_face_locations: Optional - the bounding boxes of each face if you already know them.
    :param num_jitters: How many times to re-sample the face when calculating encoding. Higher is more accurate, but slower (i.e. 100 is 100x slower)
    :return: A list of 128-dimensional face encodings (one for each face in the image)
    """
    raw_landmarks = _raw_face_landmarks(face_image, known_face_locations, model="small")
    return [np.array(face_encoder.compute_face_descriptor(face_image, raw_landmark_set, num_jitters))
            for raw_landmark_set in raw_landmarks]
python
def is_feature_enabled(self, feature_key, user_id, attributes=None):
    """ Returns true if the feature is enabled for the given user.

    Args:
      feature_key: The key of the feature for which we are determining if
        it is enabled or not for the given user.
      user_id: ID for user.
      attributes: Dict representing user attributes.

    Returns:
      True if the feature is enabled for the user. False otherwise.
    """
    if not self.is_valid:
        self.logger.error(enums.Errors.INVALID_DATAFILE.format('is_feature_enabled'))
        return False

    if not validator.is_non_empty_string(feature_key):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('feature_key'))
        return False

    if not isinstance(user_id, string_types):
        self.logger.error(enums.Errors.INVALID_INPUT_ERROR.format('user_id'))
        return False

    if not self._validate_user_inputs(attributes):
        return False

    feature = self.config.get_feature_from_key(feature_key)
    if not feature:
        return False

    feature_enabled = False
    source_info = {}
    decision = self.decision_service.get_variation_for_feature(feature, user_id, attributes)
    is_source_experiment = decision.source == enums.DecisionSources.FEATURE_TEST

    if decision.variation:
        if decision.variation.featureEnabled is True:
            feature_enabled = True
        # Send event if Decision came from an experiment.
        if is_source_experiment:
            source_info = {
                'experiment_key': decision.experiment.key,
                'variation_key': decision.variation.key
            }
            self._send_impression_event(decision.experiment,
                                        decision.variation,
                                        user_id,
                                        attributes)

    if feature_enabled:
        self.logger.info('Feature "%s" is enabled for user "%s".' % (feature_key, user_id))
    else:
        self.logger.info('Feature "%s" is not enabled for user "%s".' % (feature_key, user_id))

    self.notification_center.send_notifications(
        enums.NotificationTypes.DECISION,
        enums.DecisionNotificationTypes.FEATURE,
        user_id,
        attributes or {},
        {
            'feature_key': feature_key,
            'feature_enabled': feature_enabled,
            'source': decision.source,
            'source_info': source_info
        }
    )

    return feature_enabled
java
public void help() {
    p("");
    p("Command                 Description");
    p("=======                 ===========");
    p("help()                  Display usage and help messages. ");
    p("defineClass(className)  Define an extension using the Java class");
    p("                        named with the string argument. ");
    p("                        Uses ScriptableObject.defineClass(). ");
    p("load(['foo.js', ...])   Load JavaScript source files named by ");
    p("                        string arguments. ");
    p("loadClass(className)    Load a class named by a string argument.");
    p("                        The class must be a script compiled to a");
    p("                        class file. ");
    p("print([expr ...])       Evaluate and print expressions. ");
    p("quit()                  Quit the shell. ");
    p("version([number])       Get or set the JavaScript version number.");
    p("");
}
java
public List<String> getExtensionPoints(String prefix, String suffix) {
    return extensionPointsCache.get(prefix + "@" + suffix);
}
java
public Iterator<PlaceObject> enumeratePlaces () {
    final Iterator<PlaceManager> itr = _pmgrs.values().iterator();
    return new Iterator<PlaceObject>() {
        public boolean hasNext () {
            return itr.hasNext();
        }
        public PlaceObject next () {
            PlaceManager plmgr = itr.next();
            return (plmgr == null) ? null : plmgr.getPlaceObject();
        }
        public void remove () {
            throw new UnsupportedOperationException();
        }
    };
}
java
public static boolean isOverlappingAfter(final Interval timeRange, final Interval timeRangeToCheck) {
    return ((timeRange.getStart() != null
                && timeRange.getStart().isBefore(timeRangeToCheck.getStart()))
            && (timeRange.getEnd() != null
                && timeRange.getEnd().isBefore(timeRangeToCheck.getEnd())));
}
java
public void marshall(GitConfig gitConfig, ProtocolMarshaller protocolMarshaller) {
    if (gitConfig == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(gitConfig.getRepositoryUrl(), REPOSITORYURL_BINDING);
        protocolMarshaller.marshall(gitConfig.getBranch(), BRANCH_BINDING);
        protocolMarshaller.marshall(gitConfig.getSecretArn(), SECRETARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public void removeEntity(Entity entity) {
    boolean delayed = updating || familyManager.notifying();
    entityManager.removeEntity(entity, delayed);
}
java
public static void main(String[] argv) {
    SimpleSourcedTokenizer tokenizer = DEFAULT_SOURCED_TOKENIZER;
    int n = 0;
    for (int i = 0; i < argv.length; i++) {
        System.out.println("argument " + i + ": '" + argv[i] + "'");
        SourcedToken[] tokens = tokenizer.sourcedTokenize(argv[i], Integer.toString(i));
        for (int j = 0; j < tokens.length; j++) {
            System.out.println("token " + (++n) + ":"
                               + " id=" + tokens[j].getIndex()
                               + " value: '" + tokens[j].getValue()
                               + "' source: '" + tokens[j].getSource() + "'");
        }
    }
}
python
def receive_message(self, header, message):
    """Receive a message"""
    print("=== Receive ===")
    print(header)
    print(message)
    print("MsgID: {0}".format(header["message-id"]))
    assert header["message-id"]

    txn = self._transport.transaction_begin()
    print(" 1. Txn: {0}".format(str(txn)))
    if self.crashpoint():
        self._transport.transaction_abort(txn)
        print("--- Abort ---")
        return

    self._transport.ack(header["message-id"], self.subid, transaction=txn)
    print(" 2. Ack")
    if self.crashpoint():
        self._transport.transaction_abort(txn)
        print("--- Abort ---")
        return

    self._transport.send("transient.destination", message, transaction=txn)
    print(" 3. Send")
    if self.crashpoint():
        self._transport.transaction_abort(txn)
        print("--- Abort ---")
        return

    self._transport.transaction_commit(txn)
    print(" 4. Commit")
    print("=== Done ===")
java
private void readWaypoint(final Element waypoint, final List<GraphObjectMap> resultList) {
    final GraphObjectMap item = readPoint(waypoint);
    if (item != null) {
        resultList.add(item);
    }
}
python
def hex_hash160(s, hex_format=False):
    """ s is in hex or binary format """
    if hex_format and is_hex(s):
        s = unhexlify(s)
    return hexlify(bin_hash160(s))
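An equivalent standalone computation, assuming bin_hash160 is the usual Bitcoin-style hash160, i.e. RIPEMD160(SHA256(s)) -- that definition is an assumption here, as is OpenSSL support for ripemd160 in hashlib.

import hashlib
from binascii import hexlify, unhexlify

def hash160_hex(data):
    # hash160 = RIPEMD160(SHA256(data)); requires ripemd160 in OpenSSL.
    sha = hashlib.sha256(data).digest()
    return hexlify(hashlib.new('ripemd160', sha).digest())

print(hash160_hex(unhexlify('00' * 4)))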
python
def get_all_package_releases(self, package_name: str) -> Iterable[Tuple[str, str]]:
    """
    Returns a tuple of release data (version, manifest_uri) for every
    release of the given package name available on the current registry.
    """
    validate_package_name(package_name)
    self._validate_set_registry()
    release_ids = self.registry._get_all_release_ids(package_name)
    for release_id in release_ids:
        _, version, manifest_uri = self.registry._get_release_data(release_id)
        yield (version, manifest_uri)
java
protected boolean completeAppend(boolean succeeded, long lastLogIndex, CompletableFuture<AppendResponse> future) {
    future.complete(logResponse(AppendResponse.builder()
        .withStatus(RaftResponse.Status.OK)
        .withTerm(raft.getTerm())
        .withSucceeded(succeeded)
        .withLastLogIndex(lastLogIndex)
        .build()));
    return succeeded;
}
python
def is_value_in(constants_group, value):
    """
    Checks whether value can be found in the given constants group, which
    in turn, should be a Django-like choices tuple.
    """
    for const_value, label in constants_group:
        if const_value == value:
            return True
    return False
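A quick sketch with a Django-style choices tuple, using the function defined above:

# Django-style choices: (stored value, human-readable label) pairs.
STATUS_CHOICES = (
    ('draft', 'Draft'),
    ('published', 'Published'),
)

print(is_value_in(STATUS_CHOICES, 'draft'))     # True
print(is_value_in(STATUS_CHOICES, 'archived'))  # False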
python
def widen(self):
    """Increase the interval size."""
    t, h = self.time, self.half_duration
    h *= self.scaling_coeff_x
    self.set_interval((t - h, t + h))
python
def random_str_uuid(string_length):
    """Returns a random string of length string_length"""
    if not isinstance(string_length, int) or not 1 <= string_length <= 32:
        msg = "string_length must be type int where 1 <= string_length <= 32"
        raise ValueError(msg)
    random = str(uuid.uuid4()).upper().replace('-', '')
    return random[0:string_length]
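A usage sketch; the upper bound exists because a UUID4 only yields 32 hex characters.

import uuid

print(random_str_uuid(8))    # e.g. '1F3A9C2B' (8 uppercase hex characters)
try:
    random_str_uuid(40)      # out of range: a UUID only yields 32 hex chars
except ValueError as exc:
    print(exc)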
python
def _check_not_empty(string):
    """
    Checks that the string is not empty.

    If it is empty an exception is raised, stopping the validation.
    This is used for compulsory alphanumeric fields.

    :param string: the field value
    """
    string = string.strip()
    if len(string) == 0:
        message = 'The string should not be empty'
        raise pp.ParseException(message)
java
private static Weekmodel getDefaultWeekmodel() {
    return Weekmodel.of(Weekday.SATURDAY, 1, Weekday.SATURDAY, Weekday.SUNDAY);
}
python
def all_terms(self):
    """Iterate over all of the terms. The self.terms property has only
    root level terms. This iterator iterates over all terms"""
    for s_name, s in self.sections.items():
        # Yield the section header
        if s.name != 'Root':
            yield s
        # Yield all of the rows for terms in the section
        for rterm in s:
            yield rterm
            for d in rterm.descendents:
                yield d
python
def main():
    '''main routine'''
    # validate command line arguments
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--vmssname', '-n', required=True, action='store',
                            help='VMSS Name')
    arg_parser.add_argument('--rgname', '-g', required=True, action='store',
                            help='Resource Group Name')
    arg_parser.add_argument('--details', '-a', required=False, action='store',
                            help='Print all details')
    args = arg_parser.parse_args()

    name = args.vmssname
    rgname = args.rgname
    details = args.details

    # Load Azure app defaults
    try:
        with open('azurermconfig.json') as config_file:
            config_data = json.load(config_file)
    except FileNotFoundError:
        print("Error: Expecting azurermconfig.json in current folder")
        sys.exit()

    tenant_id = config_data['tenantId']
    app_id = config_data['appId']
    app_secret = config_data['appSecret']
    subscription_id = config_data['subscriptionId']

    # authenticate
    access_token = azurerm.get_access_token(tenant_id, app_id, app_secret)

    # get public IPs
    public_ips = azurerm.get_vmss_public_ips(access_token, subscription_id,
                                             rgname, name)

    # print details
    if details is True:
        print(json.dumps(public_ips, sort_keys=False, indent=2,
                         separators=(',', ': ')))
    else:
        for pip in public_ips['value']:
            vm_id = re.search('Machines/(.*)/networkInt', pip['id']).group(1)
            ipaddr = pip['properties']['ipAddress']
            print('VM id: ' + vm_id + ', IP: ' + ipaddr)
java
public ItBitOrder[] getItBitOrders(String status, String instrument) throws IOException {
    ItBitOrder[] orders = itBitAuthenticated.getOrders(
            signatureCreator, new Date().getTime(), exchange.getNonceFactory(),
            instrument, "1", "1000", status, walletId);
    return orders;
}
python
def get_cmd_tuple_list(self):
    """
    Return a list of tuples containing the command line arguments
    """
    # pattern to find DAGman macros
    pat = re.compile(r'\$\((.+)\)')
    argpat = re.compile(r'\d+')

    # first parse the options and replace macros with values
    options = self.job().get_opts()
    macros = self.get_opts()

    cmd_list = []

    for k in options:
        val = options[k]
        m = pat.match(val)
        if m:
            key = m.group(1)
            value = macros[key]
            cmd_list.append(("--%s" % k, str(value)))
        else:
            cmd_list.append(("--%s" % k, str(val)))

    # second parse the short options and replace macros with values
    options = self.job().get_short_opts()

    for k in options:
        val = options[k]
        m = pat.match(val)
        if m:
            key = m.group(1)
            value = macros[key]
            cmd_list.append(("-%s" % k, str(value)))
        else:
            cmd_list.append(("-%s" % k, str(val)))

    # lastly parse the arguments and replace macros with values
    args = self.job().get_args()
    macros = self.get_args()

    for a in args:
        m = pat.match(a)
        if m:
            arg_index = int(argpat.findall(a)[0])
            try:
                cmd_list.append(("%s" % macros[arg_index], ""))
            except IndexError:
                cmd_list.append("")
        else:
            cmd_list.append(("%s" % a, ""))

    return cmd_list
java
void setWorkbenchLayout(Layout layout) {
    validateNotNull(layout, "layout");
    if (this.layout == layout) {
        return;
    }

    requestTabIndex = getCurrentRequestTabIndex();

    Layout previousLayout = this.layout;
    this.layout = layout;

    componentMaximiser.unmaximiseComponent();
    removeAll();

    List<AbstractPanel> visiblePanels;
    switch (layout) {
    case FULL:
        visiblePanels = getTabbedStatus().getVisiblePanels();
        getTabbedStatus().hideAllTabs();
        visiblePanels.addAll(getTabbedWork().getVisiblePanels());
        getTabbedWork().hideAllTabs();
        visiblePanels.addAll(getTabbedSelect().getVisiblePanels());
        getTabbedSelect().hideAllTabs();
        getTabbedFull().setVisiblePanels(visiblePanels);

        updateFullLayout();
        this.add(getFullLayoutPanel());
        break;
    case EXPAND_SELECT:
    case EXPAND_STATUS:
    default:
        this.add(layout == Layout.EXPAND_STATUS
                ? createStatusPanelsSplit()
                : createSelectPanelsSplit());

        if (previousLayout == Layout.FULL) {
            visiblePanels = getTabbedFull().getVisiblePanels();
            getTabbedFull().hideAllTabs();

            getTabbedStatus().setVisiblePanels(visiblePanels);
            getTabbedWork().setVisiblePanels(visiblePanels);
            getTabbedSelect().setVisiblePanels(visiblePanels);

            setResponsePanelPosition(responsePanelPosition);
        }
        break;
    }

    this.validate();
    this.repaint();
}
java
public MoneyParseContext parse(CharSequence text, int startIndex) {
    checkNotNull(text, "Text must not be null");
    if (startIndex < 0 || startIndex > text.length()) {
        throw new StringIndexOutOfBoundsException("Invalid start index: " + startIndex);
    }
    if (isParser() == false) {
        throw new UnsupportedOperationException("MoneyFormatter has not been configured to be able to parse");
    }
    MoneyParseContext context = new MoneyParseContext(locale, text, startIndex);
    printerParser.parse(context);
    return context;
}
java
public static void main(String[] args)
        throws IOException, NoSuchAlgorithmException, InvalidKeyException, XmlPullParserException {
    try {
        /* play.min.io for test and development. */
        MinioClient minioClient = new MinioClient("https://play.min.io:9000",
                                                  "Q3AM3UQ867SPQQA43P2F",
                                                  "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG");

        /* Amazon S3: */
        // MinioClient minioClient = new MinioClient("https://s3.amazonaws.com",
        //                                           "YOUR-ACCESSKEYID", "YOUR-SECRETACCESSKEY");

        // Get presigned URL string to download 'my-objectname' in 'my-bucketname'
        // and its life time is one day.
        String url = minioClient.presignedGetObject("my-bucketname", "my-objectname", 60 * 60 * 24);
        System.out.println(url);
    } catch (MinioException e) {
        System.out.println("Error occurred: " + e);
    }
}
java
@Override
public String readString() throws JMSException {
    backupState();
    try {
        return MessageConvertTools.asString(internalReadObject());
    } catch (JMSException e) {
        restoreState();
        throw e;
    } catch (RuntimeException e) {
        restoreState();
        throw e;
    }
}
java
public ApiResponse<DeviceTypesEnvelope> getDeviceTypesByApplicationWithHttpInfo(String appId,
        Boolean productInfo, Integer count, Integer offset) throws ApiException {
    com.squareup.okhttp.Call call = getDeviceTypesByApplicationValidateBeforeCall(appId, productInfo,
            count, offset, null, null);
    Type localVarReturnType = new TypeToken<DeviceTypesEnvelope>(){}.getType();
    return apiClient.execute(call, localVarReturnType);
}
java
private Bitmap getBitmapOfView(final View view) {
    view.destroyDrawingCache();
    view.buildDrawingCache(false);
    Bitmap orig = view.getDrawingCache();
    Bitmap.Config config = null;

    if (orig == null) {
        return null;
    }

    config = orig.getConfig();

    if (config == null) {
        config = Bitmap.Config.ARGB_8888;
    }
    Bitmap b = orig.copy(config, false);
    orig.recycle();
    view.destroyDrawingCache();
    return b;
}
python
def hatchery():
    """
    Main entry point for the hatchery program
    """
    args = docopt.docopt(__doc__)
    task_list = args['<task>']

    if not task_list or 'help' in task_list or args['--help']:
        print(__doc__.format(version=_version.__version__,
                             config_files=config.CONFIG_LOCATIONS))
        return 0

    level_str = args['--log-level']
    try:
        level_const = getattr(logging, level_str.upper())
        logging.basicConfig(level=level_const)
        if level_const == logging.DEBUG:
            workdir.options.debug = True
    except LookupError:
        logging.basicConfig()
        logger.error('received invalid log level: ' + level_str)
        return 1

    for task in task_list:
        if task not in ORDERED_TASKS:
            logger.error('received invalid task: ' + task)
            return 1

    for task in CHECK_TASKS:
        if task in task_list:
            logger.info('starting task: check')
            task_check(args)
            break

    if 'package' in task_list and not args['--release-version']:
        logger.error('--release-version is required for the package task')
        return 1

    config_dict = _get_config_or_die(
        calling_task='hatchery',
        required_params=['auto_push_tag']
    )
    if config_dict['auto_push_tag'] and 'upload' in task_list:
        logger.info('adding task: tag (auto_push_tag==True)')
        task_list.append('tag')

    # all commands will raise a SystemExit if they fail
    # check will have already been run
    for task in ORDERED_TASKS:
        if task in task_list and task != 'check':
            logger.info('starting task: ' + task)
            globals()['task_' + task](args)

    logger.info("all's well that ends well...hatchery out")
    return 0
java
@Override
boolean hasContent(LessExtendMap lessExtends) {
    if (output.length() == 0) {
        return false;
    }
    if (!isConcatExtents) {
        isConcatExtents = true;
        selectors = lessExtends.concatenateExtends(selectors, isReference);
    }
    return selectors.length > 0;
}
python
def insertPreviousCommand(self):
    """
    Inserts the previous command from history into the line.
    """
    self._currentHistoryIndex -= 1
    if 0 <= self._currentHistoryIndex < len(self._history):
        cmd = self._history[self._currentHistoryIndex]
    else:
        cmd = '>>> '
        self._currentHistoryIndex = len(self._history)
    self.replaceCommand(cmd)
python
def encode_dense(input_sentences, nbest_size=0, alpha=1.0,
                 model_file=None, model_proto=None,
                 reverse=False, add_bos=False, add_eos=False,
                 out_type=tf.int32, name=None):
    """Encodes sentences into pieces in dense tensor format.

    Args:
      input_sentences: A 1D string tensor of arbitrary size holding the raw
        text of input sentences.
      nbest_size: A scalar or 1D tensor for sampling.
        nbest_size = {0,1}: No sampling is performed.
        nbest_size > 1: samples from the nbest_size results.
        nbest_size < 0: assuming that nbest_size is infinite and samples
          from all hypotheses (lattice) using the
          forward-filtering-and-backward-sampling algorithm.
      alpha: A scalar or 1D tensor for a smoothing parameter. Inverse
        temperature for probability rescaling.
      model_file: The sentencepiece model file path.
      model_proto: The sentencepiece model serialized proto.
        Either `model_file` or `model_proto` must be set.
      reverse: Reverses the tokenized sequence (Default = false)
      add_bos: Add <s> to the result (Default = false)
      add_eos: Add </s> to the result (Default = false)
        <s>/</s> is added after reversing (if enabled).
      out_type: output type. tf.int32 or tf.string (Default = tf.int32)
        Setting tf.int32 directly encodes the string into an id sequence.
      name: The name argument that is passed to the op function.

    Returns:
      pieces: A dense 2D tensor representing the tokenized sentences.
      sequence_length: A 1D tensor representing the length of pieces.
    """
    return _gen_sentencepiece_processor_op.sentencepiece_encode_dense(
        input_sentences, nbest_size=nbest_size, alpha=alpha,
        model_file=model_file, model_proto=model_proto,
        reverse=reverse, add_bos=add_bos, add_eos=add_eos,
        out_type=out_type, name=name)
python
def _normalize(self, string):
    ''' Returns a sanitized string. '''
    string = string.replace(u'\xb7', '')
    string = string.replace(u'\u0331', '')
    string = string.replace(u'\u0323', '')
    string = string.strip(' \n\rI.')
    return string
python
def route_filter_rules(self):
    """Instance depends on the API version:

    * 2016-12-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2016_12_01.operations.RouteFilterRulesOperations>`
    * 2017-03-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_03_01.operations.RouteFilterRulesOperations>`
    * 2017-06-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_06_01.operations.RouteFilterRulesOperations>`
    * 2017-08-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_08_01.operations.RouteFilterRulesOperations>`
    * 2017-09-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_09_01.operations.RouteFilterRulesOperations>`
    * 2017-10-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_10_01.operations.RouteFilterRulesOperations>`
    * 2017-11-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2017_11_01.operations.RouteFilterRulesOperations>`
    * 2018-01-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_01_01.operations.RouteFilterRulesOperations>`
    * 2018-02-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_02_01.operations.RouteFilterRulesOperations>`
    * 2018-04-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_04_01.operations.RouteFilterRulesOperations>`
    * 2018-06-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_06_01.operations.RouteFilterRulesOperations>`
    * 2018-07-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_07_01.operations.RouteFilterRulesOperations>`
    * 2018-08-01: :class:`RouteFilterRulesOperations<azure.mgmt.network.v2018_08_01.operations.RouteFilterRulesOperations>`
    """
    api_version = self._get_api_version('route_filter_rules')
    if api_version == '2016-12-01':
        from .v2016_12_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-03-01':
        from .v2017_03_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-06-01':
        from .v2017_06_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-08-01':
        from .v2017_08_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-09-01':
        from .v2017_09_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-10-01':
        from .v2017_10_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2017-11-01':
        from .v2017_11_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-01-01':
        from .v2018_01_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-02-01':
        from .v2018_02_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-04-01':
        from .v2018_04_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-06-01':
        from .v2018_06_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-07-01':
        from .v2018_07_01.operations import RouteFilterRulesOperations as OperationClass
    elif api_version == '2018-08-01':
        from .v2018_08_01.operations import RouteFilterRulesOperations as OperationClass
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return OperationClass(self._client, self.config,
                          Serializer(self._models_dict(api_version)),
                          Deserializer(self._models_dict(api_version)))
python
def start_auth(self, context, internal_req):
    """
    See super class method satosa.backends.base.BackendModule#start_auth

    :type context: satosa.context.Context
    :type internal_req: satosa.internal.InternalData
    :rtype: satosa.response.Response
    """
    target_entity_id = context.get_decoration(Context.KEY_TARGET_ENTITYID)
    if target_entity_id:
        entity_id = target_entity_id
        return self.authn_request(context, entity_id)

    # if there is only one IdP in the metadata, bypass the discovery service
    idps = self.sp.metadata.identity_providers()
    if len(idps) == 1 and "mdq" not in self.config["sp_config"]["metadata"]:
        entity_id = idps[0]
        return self.authn_request(context, entity_id)

    return self.disco_query()
python
def allocate(self):
    """Initializes libvirt resources."""
    disk_path = self.provider_image

    self._hypervisor = libvirt.open(
        self.configuration.get('hypervisor', 'vbox:///session'))

    self._domain = domain_create(self._hypervisor, self.identifier,
                                 self.configuration['domain'], disk_path)
java
private void validateWorkflow(WorkflowDef workflowDef, Map<String, Object> workflowInput, String externalStoragePath) {
    try {
        // Check if the input to the workflow is not null
        if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) {
            LOGGER.error("The input for the workflow '{}' cannot be NULL", workflowDef.getName());
            throw new ApplicationException(INVALID_INPUT, "NULL input passed when starting workflow");
        }
    } catch (Exception e) {
        Monitors.recordWorkflowStartError(workflowDef.getName(), WorkflowContext.get().getClientApp());
        throw e;
    }
}
java
public Module build() {
    SagaLibModule module = new SagaLibModule();
    module.setStateStorage(stateStorage);
    module.setTimeoutManager(timeoutMgr);
    module.setScanner(scanner);
    module.setProviderFactory(providerFactory);
    module.setExecutionOrder(preferredOrder);
    module.setExecutionContext(executionContext);
    module.setModuleTypes(moduleTypes);
    module.setExecutor(executor);
    module.setInterceptorTypes(interceptorTypes);
    module.setStrategyFinder(strategyFinder);
    module.setInvoker(invoker);
    module.setCoordinatorFactory(coordinatorFactory);
    return module;
}
python
def details_dict(obj, existing, ignore_missing, opt):
    """Output the changes, if any, for a dict"""
    existing = dict_unicodeize(existing)
    obj = dict_unicodeize(obj)
    for ex_k, ex_v in iteritems(existing):
        new_value = normalize_val(obj.get(ex_k))
        og_value = normalize_val(ex_v)
        if ex_k in obj and og_value != new_value:
            print(maybe_colored("-- %s: %s" % (ex_k, og_value), 'red', opt))
            print(maybe_colored("++ %s: %s" % (ex_k, new_value), 'green', opt))
        if (not ignore_missing) and (ex_k not in obj):
            print(maybe_colored("-- %s: %s" % (ex_k, og_value), 'red', opt))
    for ob_k, ob_v in iteritems(obj):
        val = normalize_val(ob_v)
        if ob_k not in existing:
            print(maybe_colored("++ %s: %s" % (ob_k, val), 'green', opt))
    return
java
public static File touch(File file) throws IORuntimeException {
    if (null == file) {
        return null;
    }
    if (false == file.exists()) {
        mkParentDirs(file);
        try {
            file.createNewFile();
        } catch (Exception e) {
            throw new IORuntimeException(e);
        }
    }
    return file;
}
java
private Set<String> initExcludedPathList(Set<String> paths) {
    Set<String> toExclude = new HashSet<>();
    if (null == paths)
        return toExclude;
    for (String path : paths) {
        path = PathNormalizer.asPath(path);
        toExclude.add(path);
    }
    return toExclude;
}
java
public static boolean isBenchmarkable(final Method meth) {
    boolean returnVal = true;

    // Check if bench-anno is given. For testing purposes against
    // before/after annos
    final Bench benchAnno = meth.getAnnotation(Bench.class);

    // if method is annotated with SkipBench, the method is never
    // benchmarkable.
    final SkipBench skipBenchAnno = meth.getAnnotation(SkipBench.class);
    if (skipBenchAnno != null) {
        returnVal = false;
    }

    // Check if method is defined as beforeClass, beforeFirstRun,
    // beforeEachRun, afterEachRun, afterLastRun, afterClass. A method can
    // either be a before/after class or afterwards be benchmarkable through
    // the BenchClass annotation.
    final BeforeBenchClass beforeClass = meth
        .getAnnotation(BeforeBenchClass.class);
    if (beforeClass != null && benchAnno == null) {
        returnVal = false;
    }

    final BeforeFirstRun beforeFirstRun = meth
        .getAnnotation(BeforeFirstRun.class);
    if (beforeFirstRun != null && benchAnno == null) {
        returnVal = false;
    }

    final BeforeEachRun beforeEachRun = meth
        .getAnnotation(BeforeEachRun.class);
    if (beforeEachRun != null && benchAnno == null) {
        returnVal = false;
    }

    final AfterEachRun afterEachRun = meth
        .getAnnotation(AfterEachRun.class);
    if (afterEachRun != null && benchAnno == null) {
        returnVal = false;
    }

    final AfterLastRun afterLastRun = meth
        .getAnnotation(AfterLastRun.class);
    if (afterLastRun != null && benchAnno == null) {
        returnVal = false;
    }

    final AfterBenchClass afterClass = meth
        .getAnnotation(AfterBenchClass.class);
    if (afterClass != null && benchAnno == null) {
        returnVal = false;
    }

    // if method is not annotated with Bench and class is not annotated with
    // BenchClass, the method is never benchmarkable.
    final BenchClass classBenchAnno = meth.getDeclaringClass()
        .getAnnotation(BenchClass.class);
    if (benchAnno == null && classBenchAnno == null) {
        returnVal = false;
    }

    // check if method is executable for perfidix purposes.
    if (!isReflectedExecutable(meth, Bench.class)) {
        returnVal = false;
    }
    return returnVal;
}
java
public URL copyWith(String serviceUUID, String characteristicUUID, String fieldName) {
    return new URL(this.protocol, this.adapterAddress, this.deviceAddress,
            this.deviceAttributes, serviceUUID, characteristicUUID, fieldName);
}
java
public Observable<ServiceResponse<Integer>> getUntaggedImageCountWithServiceResponseAsync(UUID projectId, UUID iterationId) {
    if (projectId == null) {
        throw new IllegalArgumentException("Parameter projectId is required and cannot be null.");
    }
    if (this.client.apiKey() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiKey() is required and cannot be null.");
    }
    return service.getUntaggedImageCount(projectId, iterationId, this.client.apiKey(),
            this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Integer>>>() {
            @Override
            public Observable<ServiceResponse<Integer>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<Integer> clientResponse = getUntaggedImageCountDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
python
def run_sparql_on(q, ontology):
    """
    Run a SPARQL query (q) on a given Ontology (Enum EOntology)
    """
    logging.info("Connecting to " + ontology.value + " SPARQL endpoint...")
    sparql = SPARQLWrapper(ontology.value)
    logging.info("Made wrapper: {}".format(sparql))
    sparql.setQuery(q)
    sparql.setReturnFormat(JSON)
    logging.info("Query: {}".format(q))
    results = sparql.query().convert()
    bindings = results['results']['bindings']
    logging.info("Rows: {}".format(len(bindings)))
    for r in bindings:
        curiefy(r)
    return bindings
python
def __ensure_suffix_stem(t, suffix):
    """ Ensure that the target t has the given suffix, and return the
    file's stem. """
    tpath = str(t)
    if not tpath.endswith(suffix):
        stem = tpath
        tpath += suffix
        return tpath, stem
    else:
        stem, ext = os.path.splitext(tpath)
    return t, stem
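A standalone sketch of the suffix/stem behavior above, taking a plain string instead of a build target for illustration:

import os

def ensure_suffix_stem(t, suffix):
    # Same logic as above: append the suffix if missing, else split it off.
    tpath = str(t)
    if not tpath.endswith(suffix):
        return tpath + suffix, tpath
    return t, os.path.splitext(tpath)[0]

print(ensure_suffix_stem('report', '.pdf'))      # ('report.pdf', 'report')
print(ensure_suffix_stem('report.pdf', '.pdf'))  # ('report.pdf', 'report')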
java
public void persistAttributes() {
    String attrName = getScopedName( STORED_ATTRS_ATTR );
    getSession().setAttribute( attrName, _scopedContainer.getSerializableAttrs() );
}
java
public void marshall(UpdateRoleAliasRequest updateRoleAliasRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateRoleAliasRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(updateRoleAliasRequest.getRoleAlias(), ROLEALIAS_BINDING);
        protocolMarshaller.marshall(updateRoleAliasRequest.getRoleArn(), ROLEARN_BINDING);
        protocolMarshaller.marshall(updateRoleAliasRequest.getCredentialDurationSeconds(), CREDENTIALDURATIONSECONDS_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
protected void renderColumnBody(
        FacesContext facesContext,
        ResponseWriter writer,
        UIData uiData,
        UIComponent component,
        Styles styles,
        int columnStyleIndex) throws IOException {
    // Get the rowHeader attribute from the attribute map, because of MYFACES-1790
    Object rowHeaderAttr = component.getAttributes().get(JSFAttr.ROW_HEADER_ATTR);
    boolean rowHeader = rowHeaderAttr != null && ((Boolean) rowHeaderAttr);

    if (rowHeader) {
        writer.startElement(HTML.TH_ELEM, null); // uiData);
        writer.writeAttribute(HTML.SCOPE_ATTR, HTML.SCOPE_ROW_VALUE, null);
    } else {
        writer.startElement(HTML.TD_ELEM, null); // uiData);
    }

    if (styles.hasColumnStyle()) {
        writer.writeAttribute(HTML.CLASS_ATTR, styles.getColumnStyle(columnStyleIndex), null);
    }

    //RendererUtils.renderChild(facesContext, component);
    component.encodeAll(facesContext);

    if (rowHeader) {
        writer.endElement(HTML.TH_ELEM);
    } else {
        writer.endElement(HTML.TD_ELEM);
    }
}
python
def getGeneralInfo(rh):
    """
    Obtain general information about the host.

    Input:
       Request Handle with the following properties:
          function    - 'GETHOST'
          subfunction - 'GENERAL'

    Output:
       Request Handle updated with the results.
       Return code - 0: ok
       Return code - 4: problem getting some info
    """
    rh.printSysLog("Enter getHost.getGeneralInfo")

    # Get host using VMCP
    rh.results['overallRC'] = 0
    cmd = ["sudo", "/sbin/vmcp", "query userid"]
    strCmd = ' '.join(cmd)
    rh.printSysLog("Invoking: " + strCmd)
    try:
        host = subprocess.check_output(
            cmd,
            close_fds=True,
            stderr=subprocess.STDOUT)
        host = bytes.decode(host)
        userid = host.split()[0]
        host = host.split()[2]
    except subprocess.CalledProcessError as e:
        msg = msgs.msg['0405'][1] % (modId, "Hypervisor Name",
                                     strCmd, e.output)
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
        host = "no info"
    except Exception as e:
        # All other exceptions.
        rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                                                type(e).__name__, str(e)))
        rh.updateResults(msgs.msg['0421'][0])
        host = "no info"

    # Get a bunch of info from /proc/sysinfo
    lparCpuTotal = "no info"
    lparCpuUsed = "no info"
    cecModel = "no info"
    cecVendor = "no info"
    hvInfo = "no info"
    with open('/proc/sysinfo', 'r') as myFile:
        for num, line in enumerate(myFile, 1):
            # Get total physical CPU in this LPAR
            if "LPAR CPUs Total" in line:
                lparCpuTotal = line.split()[3]
            # Get used physical CPU in this LPAR
            if "LPAR CPUs Configured" in line:
                lparCpuUsed = line.split()[3]
            # Get CEC model
            if "Type:" in line:
                cecModel = line.split()[1]
            # Get vendor of CEC
            if "Manufacturer:" in line:
                cecVendor = line.split()[1]
            # Get hypervisor type and version
            if "VM00 Control Program" in line:
                hvInfo = line.split()[3] + " " + line.split()[4]
    if lparCpuTotal == "no info":
        msg = msgs.msg['0405'][1] % (modId, "LPAR CPUs Total",
                                     "cat /proc/sysinfo", "not found")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
    if lparCpuUsed == "no info":
        msg = msgs.msg['0405'][1] % (modId, "LPAR CPUs Configured",
                                     "cat /proc/sysinfo", "not found")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
    if cecModel == "no info":
        msg = msgs.msg['0405'][1] % (modId, "Type:",
                                     "cat /proc/sysinfo", "not found")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
    if cecVendor == "no info":
        msg = msgs.msg['0405'][1] % (modId, "Manufacturer:",
                                     "cat /proc/sysinfo", "not found")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
    if hvInfo == "no info":
        msg = msgs.msg['0405'][1] % (modId, "VM00 Control Program",
                                     "cat /proc/sysinfo", "not found")
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])

    # Get processor architecture
    arch = str(os.uname()[4])

    # Get LPAR memory total & offline
    parm = ["-T", "dummy", "-k", "STORAGE="]
    lparMemTotal = "no info"
    lparMemStandby = "no info"
    results = invokeSMCLI(rh, "System_Information_Query", parm)
    if results['overallRC'] == 0:
        for line in results['response'].splitlines():
            if "STORAGE=" in line:
                lparMemOnline = line.split()[0]
                lparMemStandby = line.split()[4]
                lparMemTotal = lparMemOnline.split("=")[2]
                lparMemStandby = lparMemStandby.split("=")[1]
    else:
        # SMAPI API failed, so we put out messages
        # 300 and 405 for consistency
        rh.printLn("ES", results['response'])
        rh.updateResults(results)    # Use results from invokeSMCLI
        msg = msgs.msg['0405'][1] % (modId, "LPAR memory",
                                     "(see message 300)",
                                     results['response'])
        rh.printLn("ES", msg)

    # Get LPAR memory in use
    parm = ["-T", "dummy", "-k", "detailed_cpu=show=no"]
    lparMemUsed = "no info"
    results = invokeSMCLI(rh, "System_Performance_Information_Query", parm)
    if results['overallRC'] == 0:
        for line in results['response'].splitlines():
            if "MEMORY_IN_USE=" in line:
                lparMemUsed = line.split("=")[1]
                lparMemUsed = generalUtils.getSizeFromPage(rh, lparMemUsed)
    else:
        # SMAPI API failed, so we put out messages
        # 300 and 405 for consistency
        rh.printLn("ES", results['response'])
        rh.updateResults(results)    # Use results from invokeSMCLI
        msg = msgs.msg['0405'][1] % (modId, "LPAR memory in use",
                                     "(see message 300)",
                                     results['response'])
        rh.printLn("ES", msg)

    # Get IPL Time
    ipl = ""
    cmd = ["sudo", "/sbin/vmcp", "query cplevel"]
    strCmd = ' '.join(cmd)
    rh.printSysLog("Invoking: " + strCmd)
    try:
        ipl = subprocess.check_output(
            cmd,
            close_fds=True,
            stderr=subprocess.STDOUT)
        ipl = bytes.decode(ipl).split("\n")[2]
    except subprocess.CalledProcessError as e:
        msg = msgs.msg['0405'][1] % (modId, "IPL Time", strCmd, e.output)
        rh.printLn("ES", msg)
        rh.updateResults(msgs.msg['0405'][0])
    except Exception as e:
        # All other exceptions.
        rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd,
                                                type(e).__name__, str(e)))
        rh.updateResults(msgs.msg['0421'][0])

    # Create output string
    outstr = "ZCC USERID: " + userid
    outstr += "\nz/VM Host: " + host
    outstr += "\nArchitecture: " + arch
    outstr += "\nCEC Vendor: " + cecVendor
    outstr += "\nCEC Model: " + cecModel
    outstr += "\nHypervisor OS: " + hvInfo
    outstr += "\nHypervisor Name: " + host
    outstr += "\nLPAR CPU Total: " + lparCpuTotal
    outstr += "\nLPAR CPU Used: " + lparCpuUsed
    outstr += "\nLPAR Memory Total: " + lparMemTotal
    outstr += "\nLPAR Memory Offline: " + lparMemStandby
    outstr += "\nLPAR Memory Used: " + lparMemUsed
    outstr += "\nIPL Time: " + ipl

    rh.printLn("N", outstr)
    rh.printSysLog("Exit getHost.getGeneralInfo, rc: " +
                   str(rh.results['overallRC']))
    return rh.results['overallRC']
java
public static <P> Setting of(
        String description, ListProperty<P> items, ObjectProperty<P> selection) {
    return new Setting<>(
        description,
        Field.ofSingleSelectionType(items, selection)
            .label(description)
            .render(new SimpleComboBoxControl<>()),
        selection);
}
python
def community_post_subscription_show(self, post_id, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/help_center/subscriptions#show-post-subscription"
    api_path = "/api/v2/community/posts/{post_id}/subscriptions/{id}.json"
    api_path = api_path.format(post_id=post_id, id=id)
    return self.call(api_path, **kwargs)
java
Table SYSTEM_SESSIONS() {

    Table t = sysTables[SYSTEM_SESSIONS];

    if (t == null) {
        t = createBlankTable(sysTableHsqlNames[SYSTEM_SESSIONS]);

        addColumn(t, "SESSION_ID", CARDINAL_NUMBER);
        addColumn(t, "CONNECTED", TIME_STAMP);
        addColumn(t, "USER_NAME", SQL_IDENTIFIER);
        addColumn(t, "IS_ADMIN", Type.SQL_BOOLEAN);
        addColumn(t, "AUTOCOMMIT", Type.SQL_BOOLEAN);
        addColumn(t, "READONLY", Type.SQL_BOOLEAN);
        addColumn(t, "MAXROWS", CARDINAL_NUMBER);

        // Note: some sessions may have a NULL LAST_IDENTITY value
        addColumn(t, "LAST_IDENTITY", CARDINAL_NUMBER);
        addColumn(t, "TRANSACTION_SIZE", CARDINAL_NUMBER);
        addColumn(t, "SCHEMA", SQL_IDENTIFIER);

        // order: SESSION_ID
        // true primary key
        HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
            sysTableHsqlNames[SYSTEM_SESSIONS].name, false,
            SchemaObject.INDEX);

        t.createPrimaryKey(name, new int[]{ 0 }, true);

        return t;
    }

    // column number mappings
    final int isid      = 0;
    final int ict       = 1;
    final int iuname    = 2;
    final int iis_admin = 3;
    final int iautocmt  = 4;
    final int ireadonly = 5;
    final int imaxrows  = 6;
    final int ilast_id  = 7;
    final int it_size   = 8;
    final int it_schema = 9;

    //
    PersistentStore store = database.persistentStoreCollection.getStore(t);

    // intermediate holders
    Session[] sessions;
    Session   s;
    Object[]  row;

    // Initialisation
    sessions = ns.listVisibleSessions(session);

    // Do it.
    for (int i = 0; i < sessions.length; i++) {
        s   = sessions[i];
        row = t.getEmptyRowData();

        row[isid]      = ValuePool.getLong(s.getId());
        row[ict]       = new TimestampData(s.getConnectTime() / 1000);
        row[iuname]    = s.getUsername();
        row[iis_admin] = ValuePool.getBoolean(s.isAdmin());
        row[iautocmt]  = ValuePool.getBoolean(s.isAutoCommit());
        row[ireadonly] = ValuePool.getBoolean(s.isReadOnlyDefault());
        row[imaxrows]  = ValuePool.getInt(s.getSQLMaxRows());
        row[ilast_id] =
            ValuePool.getLong(((Number) s.getLastIdentity()).longValue());
        row[it_size]   = ValuePool.getInt(s.getTransactionSize());
        row[it_schema] = s.getCurrentSchemaHsqlName().name;

        t.insertSys(store, row);
    }

    return t;
}
python
def libs(args):
    """
    %prog libs libfile

    Get list of lib_ids to be run by pull(). The SQL commands:

    select library.lib_id, library.name from library join bac on
        library.bac_id=bac.id where bac.lib_name="Medicago";
    select seq_name from sequence where seq_name like 'MBE%'
        and trash is null;
    """
    p = OptionParser(libs.__doc__)
    p.set_db_opts(dbname="track", credentials=None)
    opts, args = p.parse_args(args)
    if len(args) != 1:
        sys.exit(not p.print_help())

    libfile, = args

    sqlcmd = "select library.lib_id, library.name, bac.gb# from library join bac on " + \
             "library.bac_id=bac.id where bac.lib_name='Medicago'"
    cur = connect(opts.dbname)
    results = fetchall(cur, sqlcmd)

    fw = open(libfile, "w")
    for lib_id, name, gb in results:
        name = name.translate(None, "\n")
        if not gb:
            gb = "None"
        print("|".join((lib_id, name, gb)), file=fw)
    fw.close()