Dataset columns:
  language — stringclasses, 2 values (java, python)
  func_code_string — stringlengths, 63 to 466k characters
java
private ThreadContextDescriptor captureThreadContext(Executor executor) {
    WSManagedExecutorService managedExecutor = defaultExecutor instanceof WSManagedExecutorService //
                    ? (WSManagedExecutorService) defaultExecutor //
                    : executor != defaultExecutor && executor instanceof WSManagedExecutorService //
                                    ? (WSManagedExecutorService) executor //
                                    : null;
    if (managedExecutor == null)
        return null;

    @SuppressWarnings("unchecked")
    ThreadContextDescriptor contextDescriptor = managedExecutor.getContextService()
                    .captureThreadContext(XPROPS_SUSPEND_TRAN);
    return contextDescriptor;
}
python
def put_stream(self, rel_path, metadata=None, cb=None):
    """Return a file object to write into the cache. The caller is
    responsible for closing the stream. Bad things happen if you don't
    close the stream.
    """

    class flo:
        def __init__(self, this, sink, upstream, repo_path):
            self.this = this
            self.sink = sink
            self.upstream = upstream
            # Stored privately; a same-named instance attribute would clash
            # with (and recurse through) the read-only property below.
            self._repo_path = repo_path

        @property
        def repo_path(self):
            return self._repo_path

        def write(self, d):
            self.sink.write(d)
            if self.upstream:
                self.upstream.write(d)

        def writelines(self, lines):
            raise NotImplementedError()

        def close(self):
            self.sink.close()
            size = os.path.getsize(self._repo_path)
            self.this.add_record(rel_path, size)
            self.this._free_up_space(size, this_rel_path=rel_path)
            if self.upstream:
                self.upstream.close()

        def __enter__(self):  # Can be used as a context manager
            return self

        def __exit__(self, type_, value, traceback):
            if type_:
                return False
            # Close on clean exit so the cache bookkeeping in close() runs.
            self.close()

    if not isinstance(rel_path, basestring):
        rel_path = rel_path.cache_key

    repo_path = os.path.join(self.cache_dir, rel_path.strip('/'))

    if not os.path.isdir(os.path.dirname(repo_path)):
        os.makedirs(os.path.dirname(repo_path))

    self.put_metadata(rel_path, metadata=metadata)

    sink = open(repo_path, 'w+')
    upstream = self.upstream.put_stream(rel_path, metadata=metadata) if self.upstream else None

    return flo(self, sink, upstream, repo_path)
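A minimal usage sketch, assuming a cache object `cache` exposing this method (the instance name and path are illustrative only):

    # Write through the cache; on close, the cache records the file size
    # and frees space as needed.
    with cache.put_stream('dataset/table.csv') as f:
        f.write('id,value\n')
        f.write('1,42\n')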
java
public static CurrencyPair fromLunoPair(String lunoPair) {
    return new CurrencyPair(
            fromLunoCurrency(lunoPair.substring(0, 3)),
            fromLunoCurrency(lunoPair.substring(3)));
}
java
@Override
public void eUnset(int featureID) {
    switch (featureID) {
        case XtextPackage.PARAMETER__NAME:
            setName(NAME_EDEFAULT);
            return;
    }
    super.eUnset(featureID);
}
python
def libvlc_video_set_marquee_int(p_mi, option, i_val):
    '''Enable, disable or set an integer marquee option.
    Setting libvlc_marquee_Enable has the side effect of enabling (arg !0)
    or disabling (arg 0) the marq filter.
    @param p_mi: libvlc media player.
    @param option: marq option to set. See libvlc_video_marquee_int_option_t.
    @param i_val: marq option value.
    '''
    f = _Cfunctions.get('libvlc_video_set_marquee_int', None) or \
        _Cfunction('libvlc_video_set_marquee_int', ((1,), (1,), (1,),), None,
                   None, MediaPlayer, ctypes.c_uint, ctypes.c_int)
    return f(p_mi, option, i_val)
python
def sql_get_oids(self, where=None):
    '''
    Query source database for a distinct list of oids.
    '''
    table = self.lconfig.get('table')
    db = self.lconfig.get('db_schema_name') or self.lconfig.get('db')
    _oid = self.lconfig.get('_oid')
    if is_array(_oid):
        _oid = _oid[0]  # get the db column, not the field alias
    sql = 'SELECT DISTINCT %s.%s FROM %s.%s' % (table, _oid, db, table)
    if where:
        where = [where] if isinstance(where, basestring) else list(where)
        sql += ' WHERE %s' % ' OR '.join(where)
    result = sorted([r[_oid] for r in self._load_sql(sql)])
    return result
java
void sendDeltaWorkFailed() {
    failed.incrementAndGet();

    if (trace)
        log.tracef("sendDeltaWorkFailed: %s", workManagers);

    if (own != null && transport != null && transport.isInitialized()) {
        for (Address address : workManagers) {
            if (!own.equals(address))
                transport.deltaWorkFailed(address);
        }
    }
}
java
protected boolean isNotSameAsOwner(MultistepExprHolder head, ElemTemplateElement ete) {
    MultistepExprHolder next = head;
    while (null != next) {
        ElemTemplateElement elemOwner = getElemFromExpression(next.m_exprOwner.getExpression());
        if (elemOwner == ete)
            return false;
        next = next.m_next;
    }
    return true;
}
python
def delete_assets(self, service_id, url=None, all=False):
    """Delete CDN assets

    Arguments:
    service_id: The ID of the service to delete from.
    url: The URL at which to delete assets.
    all: When True, delete all assets associated with the service_id.
         You cannot specify both url and all.
    """
    self._services_manager.delete_assets(service_id, url, all)
python
def polish(commit_indexes=None, urls=None):
    '''
    Apply certain behaviors to commits or URLs that need polishing before
    they are ready for screenshots.

    For example, if you have 10 commits in a row where static file links
    were broken, you could re-write the html in memory as it is interpreted.
    See the sketch after this function for a usage example.

    Keyword arguments:
    commit_indexes -- A list of indexes to apply the wrapped function to
    urls -- A list of URLs to apply the wrapped function to
    '''
    def decorator(f):
        if commit_indexes:
            f.polish_commit_indexes = commit_indexes
        if urls:
            f.polish_urls = urls

        @wraps(f)
        def wrappee(*args, **kwargs):
            return f(*args, **kwargs)
        return wrappee
    return decorator
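A short usage sketch; the hook body and its html argument are hypothetical, since the decorator only annotates the function with the indexes/URLs it applies to:

    # Hypothetical polishing hook: rewrite broken static links on two commits.
    @polish(commit_indexes=[3, 4])
    def fix_static_links(html):
        return html.replace('/static-old/', '/static/')

    # A screenshot driver would later read fix_static_links.polish_commit_indexes
    # to decide when to call the hook.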
python
def _set_env(self, env):
    """Set environment for each callback in callbackList."""
    for callback in self.callbacks:
        if callable(getattr(callback, '_set_env', None)):
            callback._set_env(env)
java
protected void setTargetClass(Class targetClass) {
    this.targetClass = targetClass;
    this.readAccessors.clear();
    this.writeAccessors.clear();
    introspectMethods(targetClass, new HashSet());
    if (isFieldAccessEnabled()) {
        introspectFields(targetClass, new HashSet());
    }
}
python
def construct_format(f, type_map=CONSTRUCT_CODE):
    """Formats for Construct."""
    if type_map.get(f.type_id, None):
        return "'{identifier}' / {type_id}".format(type_id=type_map.get(f.type_id),
                                                   identifier=f.identifier)
    elif f.type_id == 'string' and f.options.get('size', None):
        return "'{id}' / construct.Bytes({size})".format(id=f.identifier,
                                                         size=f.options['size'].value)
    elif f.type_id == 'string':
        return "'{id}' / construct.GreedyBytes".format(id=f.identifier)
    elif f.type_id == 'array' and f.options.get('size', None):
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        s = f.options.get('size', None).value
        return "'{id}' / construct.Array({size}, {type})".format(
            id=f.identifier, size=s,
            type=type_map.get(f_.type_id, 'construct.Byte'))
    elif f.type_id == 'array':
        fill = f.options['fill'].value
        f_ = copy.copy(f)
        f_.type_id = fill
        return "construct.GreedyRange(%s)" % construct_format(f_)
    else:
        return "'%s' / construct.Struct(%s._parser)" % (f.identifier, f.type_id)
java
private boolean matchString(final ByteBuffer bbuf) throws IOException {
    if (this.isBetween) {
        final String buffer = new String(bbuf.array());
        return buffer.contains(getContent());
    }
    final int read = getContent().length();
    for (int j = 0; j < read; j++) {
        if ((bbuf.get(j) & 0xFF) != getContent().charAt(j)) {
            return false;
        }
    }
    return true;
}
python
def _load_info(self):
    """Loads metadata about this table."""
    if self._info is None:
        try:
            self._info = self._api.tables_get(self._name_parts)
        except Exception as e:
            raise e
java
public double[] get(double[] target) {
    if ((target == null) || (target.length != 6)) {
        target = new double[6];
    }
    target[0] = m00;
    target[1] = m01;
    target[2] = m02;
    target[3] = m10;
    target[4] = m11;
    target[5] = m12;
    return target;
}
python
def _do_taxons(self, taxon_str):
    """Taxon"""
    taxons = self._get_list(taxon_str)
    taxons_str = [v.split(':')[1] for v in taxons]  # strip "taxon:"
    taxons_int = [int(s) for s in taxons_str if s]
    return taxons_int
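A quick illustration of the transformation this performs, assuming `_get_list` splits a delimited taxon string into items (the input format here is an example):

    # ['taxon:9606', 'taxon:10090'] -> [9606, 10090]
    items = ['taxon:9606', 'taxon:10090']
    print([int(v.split(':')[1]) for v in items if v.split(':')[1]])  # [9606, 10090]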
python
def set_user_permission(rid, uid, action='full'):
    """
    Sets users permission on a given resource. The resource will be created
    if it doesn't exist. Actions are: read, write, update, delete, full.

    :param uid: user id
    :type uid: str
    :param rid: resource ID
    :type rid: str
    :param action: read, write, update, delete or full
    :type action: str
    """
    rid = rid.replace('/', '%252F')
    # Create ACL if it does not yet exist.
    ensure_resource(rid)
    # Set the permission triplet.
    try:
        acl_url = urljoin(_acl_url(), 'acls/{}/users/{}/{}'.format(rid, uid, action))
        r = http.put(acl_url)
        assert r.status_code == 204
    except DCOSHTTPException as e:
        if e.response.status_code != 409:
            raise
python
def check_valid_solution(solution, graph):
    """Check that the solution is valid: every path is visited exactly once."""
    expected = Counter(
        i for (i, _) in graph.iter_starts_with_index()
        if i < graph.get_disjoint(i)
    )
    actual = Counter(
        min(i, graph.get_disjoint(i))
        for i in solution
    )

    difference = Counter(expected)
    difference.subtract(actual)
    difference = {k: v for k, v in difference.items() if v != 0}
    if difference:
        print('Solution is not valid! '
              'Difference in node counts (expected - actual): {}'.format(difference))
        return False
    return True
python
def zip(self, destination_path, files):
    """
    Takes array of files and downloads a compressed ZIP archive
    to provided path

    *returns* [requests.response]

    ```python
    from filestack import Client

    client = Client("<API_KEY>")
    client.zip('/path/to/file/destination', ['files'])
    ```
    """
    zip_url = "{}/{}/zip/[{}]".format(CDN_URL, self.apikey, ','.join(files))
    with open(destination_path, 'wb') as new_file:
        response = utils.make_call(zip_url, 'get')
        if response.ok:
            for chunk in response.iter_content(1024):
                if not chunk:
                    break
                new_file.write(chunk)
            return response
        return response.text
python
def setup(self):
    """Setup."""
    self.normalize = self.config['normalize'].upper()
    self.convert_encoding = self.config['convert_encoding'].lower()
    self.errors = self.config['errors'].lower()
    if self.convert_encoding:
        self.convert_encoding = codecs.lookup(
            filters.PYTHON_ENCODING_NAMES.get(self.default_encoding, self.default_encoding).lower()
        ).name

        # Don't generate content with BOMs
        if (
            self.convert_encoding.startswith(('utf-32', 'utf-16')) and
            not self.convert_encoding.endswith(('le', 'be'))
        ):
            self.convert_encoding += '-le'

        if self.convert_encoding == 'utf-8-sig':
            self.convert_encoding = 'utf-8'
python
def input(self, prompt, default=None, show_default=True):
    """Provide a command prompt."""
    return click.prompt(prompt, default=default, show_default=show_default)
java
public long getEstimateSplitSize(String[] blocks) {
    String[] parts = null, lastParts = null;
    long totalSize = 0;
    for (String block : blocks) {
        lastParts = parts;
        parts = block.split("\t");
        if ((lastParts != null) && (parts.length >= 3) && (lastParts.length >= 3)) {
            long newOffset = Long.parseLong(parts[2]);
            if (parts[1].equals(lastParts[1])) {
                // If same shard, simply subtract
                long lastOffset = Long.parseLong(lastParts[2]);
                totalSize += (newOffset - lastOffset);
            } else {
                totalSize += newOffset;
                // TODO: Compute size of all in-between shards
                // computeBlockSizeDiff();
            }
        }
    }
    return totalSize;
}
java
protected final void beanUpdate(Object bean, Object identifier, Object value) {
    LOGGER.trace("Update bean \"" + bean + "\" property \"" + identifier + "\"");

    String propertyName = identifier.toString();
    Class beanType = bean.getClass();
    Class propType = PROPERTY_CACHE.getPropertyType(beanType, propertyName);

    // Get the type of the JavaBean property given reflected information from the JavaBean's type
    if (propType != null) {
        try {
            // The type of the JavaBean property is a List. To update it, get the List and
            // append the value to the end of the List.
            if (List.class.isAssignableFrom(propType)) {
                Method listGetter = PROPERTY_CACHE.getPropertyGetter(beanType, propertyName);
                if (listGetter != null) {
                    List list = (List) listGetter.invoke(bean, (Object[]) null);
                    applyValuesToList(value, list);
                    return;
                }
            }
            // The JavaBean is an Object, so set the Bean's property with the given value
            else {
                Method setter = PROPERTY_CACHE.getPropertySetter(beanType, propertyName);
                if (setter != null) {
                    LOGGER.trace("Set property via setter method: [" + setter + "]");
                    Class targetType = setter.getParameterTypes()[0];
                    Object converted = ParseUtils.convertType(value, targetType);
                    setter.invoke(bean, new Object[]{converted});
                    return;
                }
            }
        } catch (Exception e) {
            String msg = "Could not update property named \"" + propertyName + "\" on bean of type \""
                    + beanType + "\". Cause: " + e;
            LOGGER.error(msg, e);
            throw new RuntimeException(msg, e);
        }
    }

    String msg = "Could not update expression because a public JavaBean setter for the property \""
            + identifier + "\" could not be found.";
    LOGGER.error(msg);
    throw new RuntimeException(msg);
}
java
public void setStandardsSubscriptionRequests(java.util.Collection<StandardsSubscriptionRequest> standardsSubscriptionRequests) {
    if (standardsSubscriptionRequests == null) {
        this.standardsSubscriptionRequests = null;
        return;
    }
    this.standardsSubscriptionRequests = new java.util.ArrayList<StandardsSubscriptionRequest>(standardsSubscriptionRequests);
}
python
def _indices(self, indices):
    """Turn all string indices into int indices, preserving ellipsis."""
    if isinstance(indices, tuple):
        out = []
        dim = 0
        for i, index in enumerate(indices):
            if index is Ellipsis:
                out.append(index)
                dim = len(self.shape) - len(indices) + i
            else:
                out.append(self._get_index(dim, index))
            dim += 1
        return tuple(out)
    else:
        return self._get_index(0, indices)
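A standalone sketch of the ellipsis bookkeeping above; the shape and index labels are illustrative, and `_get_index` is stood in for by a print:

    # Which dimension does each index resolve against when Ellipsis is present?
    shape = (2, 3, 4, 5)
    indices = ('a', Ellipsis, 'z')
    dim = 0
    for i, index in enumerate(indices):
        if index is Ellipsis:
            dim = len(shape) - len(indices) + i  # jump over the swallowed axes
            print('Ellipsis advances dim counter to', dim)
        else:
            print(repr(index), 'resolves against dim', dim)
        dim += 1
    # 'a' -> dim 0; Ellipsis covers the middle axes; 'z' -> dim 3 (the last axis)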
java
@SuppressWarnings("deprecation")
public CmsResource createFolder(String targetFolder, String folderName) throws Exception {
    if (m_cms.existsResource(targetFolder + folderName)) {
        m_shell.getOut().println(
            getMessages().key(Messages.GUI_SHELL_FOLDER_ALREADY_EXISTS_1, targetFolder + folderName));
        return null;
    }
    return m_cms.createResource(targetFolder + folderName, CmsResourceTypeFolder.getStaticTypeId());
}
java
public static float calculate(Map<String, Integer> features1, Map<String, Integer> features2) {
    TreeSet<String> keys = new TreeSet<String>(features1.keySet());
    keys.addAll(features2.keySet());
    float sum = 0.0f;
    for (String key : keys) {
        Integer c1 = features1.get(key);
        Integer c2 = features2.get(key);
        c1 = c1 == null ? 0 : c1;
        c2 = c2 == null ? 0 : c2;
        // Cast before dividing: plain int division truncates the ratio to 0 or 1,
        // collapsing the graded similarity to a binary overlap check.
        sum += 1.0 - (float) Math.abs(c1 - c2) / (c1 + c2);
    }
    return sum / keys.size();
}
java
public static ConfigurationException invalidConfiguration(Throwable cause, String message, Object... args) {
    throw new ConfigurationException(cause, message, args);
}
python
def require_attribute(self, attribute: str, typ: Type = _Any) -> None:
    """Require an attribute on the node to exist.

    If `typ` is given, the attribute must have this type.

    Args:
        attribute: The name of the attribute / mapping key.
        typ: The type the attribute must have.
    """
    attr_nodes = [
        value_node for key_node, value_node in self.yaml_node.value
        if key_node.value == attribute
    ]
    if len(attr_nodes) == 0:
        raise RecognitionError(
            ('{}{}Missing required attribute {}').format(
                self.yaml_node.start_mark, os.linesep, attribute))
    attr_node = attr_nodes[0]

    if typ != _Any:
        recognized_types, message = self.__recognizer.recognize(
            attr_node, cast(Type, typ))
        if len(recognized_types) == 0:
            raise RecognitionError(message)
python
def set_salt_view():
    '''
    Helper function that sets the salt design document.
    Uses get_valid_salt_views and some hardcoded values.
    '''
    options = _get_options(ret=None)

    # Create the new object that we will shove in as the design doc.
    new_doc = {}
    new_doc['views'] = get_valid_salt_views()
    new_doc['language'] = "javascript"

    # Make the request to update the design doc.
    _response = _request("PUT",
                         options['url'] + options['db'] + "/_design/salt",
                         "application/json",
                         salt.utils.json.dumps(new_doc))
    if 'error' in _response:
        log.warning('Unable to set the salt design document: %s', _response['error'])
        return False
    return True
python
def coordinate_reproject(x, y, s_crs, t_crs):
    """
    reproject a coordinate from one CRS to another

    Parameters
    ----------
    x: int or float
        the X coordinate component
    y: int or float
        the Y coordinate component
    s_crs: int, str or :osgeo:class:`osr.SpatialReference`
        the source CRS. See :func:`~spatialist.auxil.crsConvert` for options.
    t_crs: int, str or :osgeo:class:`osr.SpatialReference`
        the target CRS. See :func:`~spatialist.auxil.crsConvert` for options.

    Returns
    -------
    tuple
        the (x, y) coordinate in the target CRS
    """
    source = crsConvert(s_crs, 'osr')
    target = crsConvert(t_crs, 'osr')
    transform = osr.CoordinateTransformation(source, target)
    point = transform.TransformPoint(x, y)[:2]
    return point
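A usage sketch, reprojecting a WGS84 longitude/latitude pair into UTM zone 33N; this assumes crsConvert accepts EPSG integer codes, as the docstring indicates:

    # lon/lat in EPSG:4326 -> easting/northing in EPSG:32633
    x_utm, y_utm = coordinate_reproject(13.4, 52.5, 4326, 32633)
    print(x_utm, y_utm)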
python
def __set_missense_status(self, hgvs_string):
    """Sets the self.is_missense flag."""
    # set missense status
    if re.search(r'^[A-Z?]\d+[A-Z?]$', hgvs_string):
        self.is_missense = True
        self.is_non_silent = True
    else:
        self.is_missense = False
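A quick check of what the pattern accepts (protein-level HGVS shorthand such as A123T; the sample strings are illustrative):

    import re
    for s in ['A123T', 'R97?', 'A123*', 'T125']:
        print(s, bool(re.search(r'^[A-Z?]\d+[A-Z?]$', s)))
    # A123T True, R97? True, A123* False (stop gain), T125 False (no alt residue)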
java
public void setOverlayPainter(Painter<? super JXMapViewer> overlay) {
    Painter<? super JXMapViewer> old = getOverlayPainter();
    this.overlay = overlay;

    PropertyChangeListener listener = new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            if (evt.getNewValue().equals(Boolean.TRUE)) {
                repaint();
            }
        }
    };

    if (old instanceof AbstractPainter) {
        AbstractPainter<?> ap = (AbstractPainter<?>) old;
        ap.removePropertyChangeListener("dirty", listener);
    }
    if (overlay instanceof AbstractPainter) {
        AbstractPainter<?> ap = (AbstractPainter<?>) overlay;
        ap.addPropertyChangeListener("dirty", listener);
    }

    firePropertyChange("mapOverlay", old, getOverlayPainter());
    repaint();
}
python
def isInstalledBuild(self):
    """Determines if the Engine is an Installed Build."""
    sentinelFile = os.path.join(self.getEngineRoot(), 'Engine', 'Build', 'InstalledBuild.txt')
    return os.path.exists(sentinelFile)
python
def plot_posterior(
    data,
    var_names=None,
    coords=None,
    figsize=None,
    textsize=None,
    credible_interval=0.94,
    round_to=1,
    point_estimate="mean",
    rope=None,
    ref_val=None,
    kind="kde",
    bw=4.5,
    bins=None,
    ax=None,
    **kwargs
):
    """Plot Posterior densities in the style of John K. Kruschke's book.

    Parameters
    ----------
    data : obj
        Any object that can be converted to an az.InferenceData object.
        Refer to documentation of az.convert_to_dataset for details.
    var_names : list of variable names
        Variables to be plotted. If None, all variables are plotted.
    coords : mapping, optional
        Coordinates of var_names to be plotted. Passed to `Dataset.sel`.
    figsize : tuple
        Figure size. If None it will be defined automatically.
    textsize : float
        Text size scaling factor for labels, titles and lines. If None it will
        be autoscaled based on figsize.
    credible_interval : float, optional
        Credible intervals. Defaults to 0.94.
    round_to : int
        Controls formatting for floating point numbers.
    point_estimate : str
        Must be in ('mode', 'mean', 'median').
    rope : tuple or dictionary of tuples
        Lower and upper values of the Region Of Practical Equivalence. If a
        list is provided, its length should match the number of variables.
    ref_val : float or dictionary of floats
        Display the percentage below and above the values in ref_val. If a
        list is provided, its length should match the number of variables.
    kind : str
        Type of plot to display (kde or hist). For discrete variables this
        argument is ignored and a histogram is always used.
    bw : float
        Bandwidth scaling factor for the KDE. Should be larger than 0. The
        higher this number the smoother the KDE will be. Defaults to 4.5
        which is essentially the same as the Scott's rule of thumb (the
        default rule used by SciPy). Only works if `kind == kde`.
    bins : integer or sequence or 'auto', optional
        Controls the number of bins, accepts the same keywords
        `matplotlib.hist()` does. Only works if `kind == hist`. If None
        (default) it will use `auto` for continuous variables and
        `range(xmin, xmax + 1)` for discrete variables.
    ax : axes
        Matplotlib axes. Defaults to None.
    **kwargs
        Passed as-is to plt.hist() or plt.plot() function depending on the
        value of `kind`.

    Returns
    -------
    ax : matplotlib axes

    Examples
    --------
    Show a default kernel density plot following style of John Kruschke

    .. plot::
        :context: close-figs

        >>> import arviz as az
        >>> data = az.load_arviz_data('centered_eight')
        >>> az.plot_posterior(data)

    Plot subset variables by specifying variable name exactly

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu'])

    Plot Region of Practical Equivalence (rope) for all distributions

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu', 'theta'], rope=(-1, 1))

    Plot Region of Practical Equivalence for selected distributions

    .. plot::
        :context: close-figs

        >>> rope = {'mu': [{'rope': (-2, 2)}], 'theta': [{'school': 'Choate', 'rope': (2, 4)}]}
        >>> az.plot_posterior(data, var_names=['mu', 'theta'], rope=rope)

    Add reference lines

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu', 'theta_tilde'], ref_val=0)

    Show point estimate of distribution

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu', 'theta_tilde'], point_estimate='mode')

    Plot posterior as a histogram

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu'], kind='hist')

    Change size of credible interval

    .. plot::
        :context: close-figs

        >>> az.plot_posterior(data, var_names=['mu'], credible_interval=.75)
    """
    data = convert_to_dataset(data, group="posterior")
    var_names = _var_names(var_names, data)

    if coords is None:
        coords = {}

    plotters = list(xarray_var_iter(get_coords(data, coords), var_names=var_names, combined=True))
    length_plotters = len(plotters)
    rows, cols = default_grid(length_plotters)

    (figsize, ax_labelsize, titlesize, xt_labelsize, _linewidth, _) = _scale_fig_size(
        figsize, textsize, rows, cols
    )
    kwargs.setdefault("linewidth", _linewidth)

    if ax is None:
        _, ax = _create_axes_grid(
            length_plotters, rows, cols, figsize=figsize, squeeze=False, constrained_layout=True
        )

    for (var_name, selection, x), ax_ in zip(plotters, np.ravel(ax)):
        _plot_posterior_op(
            x.flatten(),
            var_name,
            selection,
            ax=ax_,
            bw=bw,
            bins=bins,
            kind=kind,
            point_estimate=point_estimate,
            round_to=round_to,
            credible_interval=credible_interval,
            ref_val=ref_val,
            rope=rope,
            ax_labelsize=ax_labelsize,
            xt_labelsize=xt_labelsize,
            **kwargs
        )
        ax_.set_title(make_label(var_name, selection), fontsize=titlesize, wrap=True)

    return ax
python
def create_cache_database(self):
    """
    Create a new SQLite3 database for use with Cache objects

    :raises: IOError if there is a problem creating the database file
    """
    conn = sqlite3.connect(self.database)
    conn.text_factory = str
    c = conn.cursor()
    c.execute("""CREATE TABLE items
                 (url text, metadata text, datetime text)""")
    c.execute("""CREATE TABLE documents
                 (url text, path text, datetime text)""")
    c.execute("""CREATE TABLE primary_texts
                 (item_url text, primary_text text, datetime text)""")
    conn.commit()
    conn.close()
java
public void initializeDefaultPreferences() {
    IPreferenceStore store = Activator.getDefault().getPreferenceStore();
    store.setDefault(PreferenceConstants.P_DEFAULT_ROOT_FOLDER, PreferencesUtil.getDefaultRootFolder());
    store.setDefault(PreferenceConstants.P_MAVEN_HOME, PreferencesUtil.getMavenHome());
    store.setDefault(PreferenceConstants.P_GROOVY_MODEL, PreferencesUtil.getCustomGroovyModelImpl());
    store.setDefault(PreferenceConstants.P_ECLIPSE_GOAL, "eclipse:eclipse");
    store.setDefault(PreferenceConstants.P_SFTP_ROOT_FOLDER, PreferencesUtil.getDefaultSftpRootFolder());
    /*
     * Helpers used above:
     *   static public String getDefaultRootFolder()
     *   static public String getMavenHome()
     *   static public String getCustomGroovyModelImpl()
     *   static public String getDefaultSftpRootFolder()
     */
}
java
public void marshall(DescribeUsersRequest describeUsersRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeUsersRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(describeUsersRequest.getAuthenticationType(), AUTHENTICATIONTYPE_BINDING);
        protocolMarshaller.marshall(describeUsersRequest.getMaxResults(), MAXRESULTS_BINDING);
        protocolMarshaller.marshall(describeUsersRequest.getNextToken(), NEXTTOKEN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def load_file(self, sequence):
    """Load data from an "external" data file and pass it to
    the given |IOSequence|."""
    try:
        if sequence.filetype_ext == 'npy':
            sequence.series = sequence.adjust_series(
                *self._load_npy(sequence))
        elif sequence.filetype_ext == 'asc':
            sequence.series = sequence.adjust_series(
                *self._load_asc(sequence))
        elif sequence.filetype_ext == 'nc':
            self._load_nc(sequence)
    except BaseException:
        objecttools.augment_excmessage(
            'While trying to load the external data of sequence %s'
            % objecttools.devicephrase(sequence))
java
public static String formatDate(java.util.Date pDate, String format) {
    if (pDate == null) {
        pDate = new java.util.Date();
    }
    SimpleDateFormat sdf = new SimpleDateFormat(format);
    return sdf.format(pDate);
}
python
def ask(question, default=None):
    """
    @question: str
    @default: Any value which can be converted to string.

    Asks a user for input. If the default parameter is passed, it will be
    appended to the end of the message in square brackets.
    """
    question = str(question)
    if default:
        question += ' [' + str(default) + ']'
    question += ': '
    reply = raw_input(question)
    return reply if reply else default
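A usage sketch (note that raw_input makes this Python 2 code; on Python 3 the same function would call input instead):

    # Prompts "Port [8080]: " and falls back to the default on empty input.
    port = ask('Port', default=8080)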
java
public void layoutContainer (Container parent) {
    Insets insets = parent.getInsets();
    int pcount = parent.getComponentCount();
    for (int ii = 0; ii < pcount; ii++) {
        Component comp = parent.getComponent(ii);
        if (!comp.isVisible()) {
            continue;
        }
        Object constr = _constraints.get(comp);
        if (constr == null) {
            log.warning("No constraints for child!?", "cont", parent, "comp", comp);
            continue;
        }
        if (constr instanceof Rectangle) {
            Rectangle r = (Rectangle) constr;
            comp.setBounds(insets.left + r.x, insets.top + r.y, r.width, r.height);
        } else {
            Point p = (Point) constr;
            Dimension d = comp.getPreferredSize();
            comp.setBounds(insets.left + p.x, insets.top + p.y, d.width, d.height);
        }
    }
}
python
def check_requires_python(requires_python):
    # type: (Optional[str]) -> bool
    """
    Check if the python version in use matches the `requires_python`
    specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not match the
    requirement.

    Raises an InvalidSpecifier if `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)

    # We only use major.minor.micro
    python_version = version.parse('{0}.{1}.{2}'.format(*sys.version_info[:3]))
    return python_version in requires_python_specifier
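A usage sketch with PEP 440 specifiers; the printed results assume a Python 3.8 interpreter:

    print(check_requires_python('>=3.6'))        # True
    print(check_requires_python('>=2.7,<3.0'))   # False
    print(check_requires_python(None))           # True (no constraint given)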
python
def lenet5(images, labels):
    """Creates a multi layer convolutional network.

    The architecture is similar to that defined in LeNet 5.
    Please change this to experiment with architectures.

    Args:
        images: The input images.
        labels: The labels as dense one-hot vectors.
    Returns:
        A softmax result.
    """
    images = pt.wrap(images)
    with pt.defaults_scope(activation_fn=tf.nn.relu, l2loss=0.00001):
        return (images.conv2d(5, 20)
                .max_pool(2, 2)
                .conv2d(5, 50)
                .max_pool(2, 2)
                .flatten()
                .fully_connected(500)
                .softmax_classifier(10, labels))
python
def concat(*cols):
    """
    Concatenates multiple input columns together into a single column.
    The function works with strings, binary and compatible array columns.

    >>> df = spark.createDataFrame([('abcd', '123')], ['s', 'd'])
    >>> df.select(concat(df.s, df.d).alias('s')).collect()
    [Row(s=u'abcd123')]

    >>> df = spark.createDataFrame([([1, 2], [3, 4], [5]), ([1, 2], None, [3])], ['a', 'b', 'c'])
    >>> df.select(concat(df.a, df.b, df.c).alias("arr")).collect()
    [Row(arr=[1, 2, 3, 4, 5]), Row(arr=None)]
    """
    sc = SparkContext._active_spark_context
    return Column(sc._jvm.functions.concat(_to_seq(sc, cols, _to_java_column)))
java
public Quaternionf set(AxisAngle4f axisAngle) {
    return setAngleAxis(axisAngle.angle, axisAngle.x, axisAngle.y, axisAngle.z);
}
python
def _patch_argument_parser(self):
    """
    Since argparse doesn't support much introspection, we monkey-patch it to
    replace the parse_known_args method and all actions with hooks that tell
    us which action was last taken or about to be taken, and let us have the
    parser figure out which subparsers need to be activated (then recursively
    monkey-patch those). We save all active ArgumentParsers to extract all
    their possible option names later.
    """
    self.active_parsers = []
    self.visited_positionals = []

    completer = self

    def patch(parser):
        completer.visited_positionals.append(parser)
        completer.active_parsers.append(parser)

        if isinstance(parser, IntrospectiveArgumentParser):
            return

        classname = "MonkeyPatchedIntrospectiveArgumentParser"
        if USING_PYTHON2:
            classname = bytes(classname)
        parser.__class__ = type(classname, (IntrospectiveArgumentParser, parser.__class__), {})

        for action in parser._actions:
            if hasattr(action, "_orig_class"):
                continue

            # TODO: accomplish this with super
            class IntrospectAction(action.__class__):
                def __call__(self, parser, namespace, values, option_string=None):
                    debug("Action stub called on", self)
                    debug("\targs:", parser, namespace, values, option_string)
                    debug("\torig class:", self._orig_class)
                    debug("\torig callable:", self._orig_callable)

                    if not completer.completing:
                        self._orig_callable(parser, namespace, values, option_string=option_string)
                    elif issubclass(self._orig_class, argparse._SubParsersAction):
                        debug("orig class is a subparsers action: patching and running it")
                        patch(self._name_parser_map[values[0]])
                        self._orig_callable(parser, namespace, values, option_string=option_string)
                    elif self._orig_class in safe_actions:
                        if not self.option_strings:
                            completer.visited_positionals.append(self)
                        self._orig_callable(parser, namespace, values, option_string=option_string)

            action._orig_class = action.__class__
            action._orig_callable = action.__call__
            action.__class__ = IntrospectAction

    patch(self._parser)

    debug("Active parsers:", self.active_parsers)
    debug("Visited positionals:", self.visited_positionals)

    return self.active_parsers
java
private static String getActionString(int action) {
    switch (action) {
        case Constraint.RESTRICT :
            return Tokens.T_RESTRICT;
        case Constraint.CASCADE :
            return Tokens.T_CASCADE;
        case Constraint.SET_DEFAULT :
            return Tokens.T_SET + ' ' + Tokens.T_DEFAULT;
        case Constraint.SET_NULL :
            return Tokens.T_SET + ' ' + Tokens.T_NULL;
        default :
            return Tokens.T_NO + ' ' + Tokens.T_ACTION;
    }
}
python
def append_tag(self, field_number, wire_type):
    """Appends a tag containing field number and wire type information."""
    self._stream.append_var_uint32(wire_format.pack_tag(field_number, wire_type))
python
def color_(self, i=None):
    """Get a color from the palette."""
    global palette, color_num
    if i is not None:
        color_num = i
    if color_num == len(palette) - 1:
        color_num = 0
    res = palette[color_num]
    color_num += 1
    return res
java
public static String toStringType(int type, String defaultValue) {
    switch (type) {
        case Types.ARRAY: return "CF_SQL_ARRAY";
        case Types.BIGINT: return "CF_SQL_BIGINT";
        case Types.BINARY: return "CF_SQL_BINARY";
        case Types.BIT: return "CF_SQL_BIT";
        case Types.BOOLEAN: return "CF_SQL_BOOLEAN";
        case Types.BLOB: return "CF_SQL_BLOB";
        case Types.CHAR: return "CF_SQL_CHAR";
        case Types.CLOB: return "CF_SQL_CLOB";
        case Types.DATALINK: return "CF_SQL_DATALINK";
        case Types.DATE: return "CF_SQL_DATE";
        case Types.DISTINCT: return "CF_SQL_DISTINCT";
        case Types.NUMERIC: return "CF_SQL_NUMERIC";
        case Types.DECIMAL: return "CF_SQL_DECIMAL";
        case Types.DOUBLE: return "CF_SQL_DOUBLE";
        case Types.REAL: return "CF_SQL_REAL";
        case Types.FLOAT: return "CF_SQL_FLOAT";
        case Types.TINYINT: return "CF_SQL_TINYINT";
        case Types.SMALLINT: return "CF_SQL_SMALLINT";
        case Types.STRUCT: return "CF_SQL_STRUCT";
        case Types.INTEGER: return "CF_SQL_INTEGER";
        case Types.VARCHAR: return "CF_SQL_VARCHAR";
        case Types.NVARCHAR: return "CF_SQL_NVARCHAR";
        case CFTypes.VARCHAR2: return "CF_SQL_VARCHAR2";
        case Types.LONGVARBINARY: return "CF_SQL_LONGVARBINARY";
        case Types.VARBINARY: return "CF_SQL_VARBINARY";
        case Types.LONGVARCHAR: return "CF_SQL_LONGVARCHAR";
        case Types.TIME: return "CF_SQL_TIME";
        case Types.TIMESTAMP: return "CF_SQL_TIMESTAMP";
        case Types.REF: return "CF_SQL_REF";
        case CFTypes.CURSOR: return "CF_SQL_REFCURSOR";
        case Types.OTHER: return "CF_SQL_OTHER";
        case Types.NULL: return "CF_SQL_NULL";
        default: return null;
    }
}
python
def sort_link(context, text, sort_field, visible_name=None):
    """Usage: {% sort_link "text" "field_name" %}
    Usage: {% sort_link "text" "field_name" "Visible name" %}
    """
    sorted_fields = False
    ascending = None
    class_attrib = 'sortable'

    orig_sort_field = sort_field
    if context.get('current_sort_field') == sort_field:
        sort_field = '-%s' % sort_field
        visible_name = '-%s' % (visible_name or orig_sort_field)
        sorted_fields = True
        ascending = False
        class_attrib += ' sorted descending'
    elif context.get('current_sort_field') == '-' + sort_field:
        visible_name = '%s' % (visible_name or orig_sort_field)
        sorted_fields = True
        ascending = True
        class_attrib += ' sorted ascending'

    if visible_name:
        if 'request' in context:
            request = context['request']
            request.session[visible_name] = sort_field

    # builds url
    if 'request' in context:
        url = context['request'].path
    else:
        url = "./"
    url += "?sort_by="
    if visible_name is None:
        url += sort_field
    else:
        url += visible_name

    if 'getsortvars' in context:
        extra_vars = context['getsortvars']
    else:
        if 'request' in context:
            request = context['request']
            getvars = request.GET.copy()
            if 'sort_by' in getvars:
                del getvars['sort_by']
            if len(getvars.keys()) > 0:
                context['getsortvars'] = "&%s" % getvars.urlencode()
            else:
                context['getsortvars'] = ''
        extra_vars = context['getsortvars']

    # append other vars to url
    url += extra_vars

    return {
        'text': text,
        'url': url,
        'ascending': ascending,
        'sorted_fields': sorted_fields,
        'class_attrib': class_attrib,
    }
java
public static MachineTime<TimeUnit> ofPosixSeconds(BigDecimal seconds) {
    BigDecimal secs = seconds.setScale(0, RoundingMode.FLOOR);
    int fraction = seconds.subtract(secs)
        .multiply(BigDecimal.valueOf(MRD))
        .setScale(0, RoundingMode.DOWN)
        .intValueExact();
    return ofPosixUnits(secs.longValueExact(), fraction);
}
java
public List<JAXBElement<Object>> get_GenericApplicationPropertyOfPlantCover() {
    if (_GenericApplicationPropertyOfPlantCover == null) {
        _GenericApplicationPropertyOfPlantCover = new ArrayList<JAXBElement<Object>>();
    }
    return this._GenericApplicationPropertyOfPlantCover;
}
python
def register_templatetags():
    """Register templatetags defined in settings as basic templatetags."""
    from turboengine.conf import settings
    from google.appengine.ext.webapp import template
    for python_file in settings.TEMPLATE_PATH:
        template.register_template_library(python_file)
python
def three_dim_props(event):
    """
    Get information for a pick event on a 3D artist.

    Parameters
    ----------
    event : PickEvent
        The pick event to process

    Returns
    -------
    A dict with keys:
        `x`: The estimated x-value of the click on the artist
        `y`: The estimated y-value of the click on the artist
        `z`: The estimated z-value of the click on the artist

    Notes
    -----
    Based on mpl_toolkits.axes3d.Axes3D.format_coord
    Many thanks to Ben Root for pointing this out!
    """
    ax = event.artist.axes
    if ax.M is None:
        return {}

    xd, yd = event.mouseevent.xdata, event.mouseevent.ydata
    p = (xd, yd)
    edges = ax.tunit_edges()
    ldists = [(mplot3d.proj3d.line2d_seg_dist(p0, p1, p), i)
              for i, (p0, p1) in enumerate(edges)]
    ldists.sort()

    # nearest edge
    edgei = ldists[0][1]
    p0, p1 = edges[edgei]

    # scale the z value to match
    x0, y0, z0 = p0
    x1, y1, z1 = p1
    d0 = np.hypot(x0 - xd, y0 - yd)
    d1 = np.hypot(x1 - xd, y1 - yd)
    dt = d0 + d1
    z = d1 / dt * z0 + d0 / dt * z1
    x, y, z = mplot3d.proj3d.inv_transform(xd, yd, z, ax.M)

    return dict(x=x, y=y, z=z)
python
def run():
    print("Environment", os.environ)
    try:
        os.environ["SELENIUM"]
    except KeyError:
        print("Please set the environment variable SELENIUM to Selenium URL")
        sys.exit(1)

    driver = WhatsAPIDriver(client='remote', command_executor=os.environ["SELENIUM"])
    print("Waiting for QR")
    driver.wait_for_login()
    print("Bot started")

    driver.subscribe_new_messages(NewMessageObserver())
    print("Waiting for new messages...")

    # Locks the main thread while the subscription is running
    while True:
        time.sleep(60)
python
def search(self, filterstr, attrlist):
    """Query the configured LDAP server."""
    return self._paged_search_ext_s(self.settings.BASE,
                                    ldap.SCOPE_SUBTREE,
                                    filterstr=filterstr,
                                    attrlist=attrlist,
                                    page_size=self.settings.PAGE_SIZE)
java
public static boolean applyFilters(Object event, Set<EventFilter> filters,
        StatsTimer filterStats, String invokerDesc, Logger logger) {
    if (filters.isEmpty()) {
        return true;
    }

    Stopwatch filterStart = filterStats.start();
    try {
        for (EventFilter filter : filters) {
            if (!filter.apply(event)) {
                logger.debug("Event: " + event + " filtered out for : " + invokerDesc
                        + " due to the filter: " + filter);
                return false;
            }
        }
        return true;
    } finally {
        filterStart.stop();
    }
}
java
public static MutableFst importFst(File fileToFst, Semiring semiring) {
    Preconditions.checkArgument(fileToFst.exists(), "File to the fst.txt openfst output doesn't exist", fileToFst);
    Preconditions.checkArgument(fileToFst.getName().endsWith(FST_TXT), "fst.txt path must end in .fst.txt", fileToFst);
    String basepath = fileToFst.getAbsolutePath();
    basepath = StringUtils.removeEnd(basepath, FST_TXT);
    CharSource cs = Files.asCharSource(fileToFst, Charsets.UTF_8);
    Optional<MutableSymbolTable> maybeInputs = importSymbols(new File(basepath + INPUT_SYMS));
    Optional<MutableSymbolTable> maybeOutputs = importSymbols(new File(basepath + OUTPUT_SYMS));
    Optional<MutableSymbolTable> maybeStates = importSymbols(new File(basepath + STATES_SYMS));
    return convertFrom(cs, maybeInputs, maybeOutputs, maybeStates, semiring);
}
java
public void getMFInstruments(KiteConnect kiteConnect) throws KiteException, IOException {
    List<MFInstrument> mfList = kiteConnect.getMFInstruments();
    System.out.println("size of mf instrument list: " + mfList.size());
}
java
public void printHead(final OutputStream output) throws IOException {
    final String eol = "\r\n";
    final Writer writer = new Utf8OutputStreamContent(output);
    for (final String line : this.head()) {
        writer.append(line);
        writer.append(eol);
    }
    writer.append(eol);
    writer.flush();
}
python
def _getAnnotationAnalysis(self, varFile):
    """
    Assembles metadata within the VCF header into a GA4GH Analysis object.

    :return: protocol.Analysis
    """
    header = varFile.header
    analysis = protocol.Analysis()
    formats = header.formats.items()
    infos = header.info.items()
    filters = header.filters.items()
    for prefix, content in [("FORMAT", formats), ("INFO", infos),
                            ("FILTER", filters)]:
        for contentKey, value in content:
            key = "{0}.{1}".format(prefix, value.name)
            if key not in analysis.attributes.attr:
                analysis.attributes.attr[key].Clear()
            if value.description is not None:
                analysis.attributes.attr[
                    key].values.add().string_value = value.description
    analysis.created = self._creationTime
    analysis.updated = self._updatedTime
    for r in header.records:
        # Don't add a key to info if there's nothing in the value
        if r.value is not None:
            if r.key not in analysis.attributes.attr:
                analysis.attributes.attr[r.key].Clear()
            analysis.attributes.attr[r.key] \
                .values.add().string_value = str(r.value)
        if r.key == "created" or r.key == "fileDate":
            # TODO handle more date formats
            try:
                if '-' in r.value:
                    fmtStr = "%Y-%m-%d"
                else:
                    fmtStr = "%Y%m%d"
                analysis.created = datetime.datetime.strptime(
                    r.value, fmtStr).isoformat() + "Z"
            except ValueError:
                # is there a logger we should tell?
                # print("INFO: Could not parse variant annotation time")
                pass  # analysis.create_date_time remains datetime.now()
        if r.key == "software":
            analysis.software.append(r.value)
        if r.key == "name":
            analysis.name = r.value
        if r.key == "description":
            analysis.description = r.value
    analysis.id = str(datamodel.VariantAnnotationSetAnalysisCompoundId(
        self._compoundId, "analysis"))
    return analysis
java
static String requestURI(HttpServletRequest request) {
    String uri = request.getRequestURI();
    if (uri != null) {
        String contextPath = request.getContextPath();
        int length = contextPath == null ? 0 : contextPath.length();
        if (length > 0) {
            uri = uri.substring(length);
        }
        if (uri.equals("/")) {
            uri = "";
        }
    } else {
        uri = "";
    }
    // according to the JACC specification, all colons within the request URI must be escaped.
    if (uri.indexOf(':') > 0)
        uri = uri.replaceAll(":", ENCODED_COLON);
    return uri;
}
java
public void configure() throws IOException {
    // Read config file
    sConfig = new Properties();
    String configFile = DEFAULT_CONFIG_FILE;
    // (sic) the property name typo "selenuim_config" is kept for compatibility
    if (System.getProperty("selenuim_config") != null) {
        configFile = System.getProperty("selenuim_config");
    }
    sConfig.load(new FileReader(configFile));

    // Prepare capabilities
    sCaps = new DesiredCapabilities();
    sCaps.setJavascriptEnabled(true);
    sCaps.setCapability("takesScreenshot", false);

    String driver = sConfig.getProperty("driver", DRIVER_PHANTOMJS);

    // Fetch PhantomJS-specific configuration parameters
    if (driver.equals(DRIVER_PHANTOMJS)) {
        // "phantomjs_exec_path"
        if (sConfig.getProperty("phantomjs_exec_path") != null) {
            sCaps.setCapability(
                    PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY,
                    sConfig.getProperty("phantomjs_exec_path"));
        } else {
            throw new IOException(
                    String.format(
                            "Property '%s' not set!",
                            PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY));
        }
        // "phantomjs_driver_path"
        if (sConfig.getProperty("phantomjs_driver_path") != null) {
            System.out.println("Test will use an external GhostDriver");
            sCaps.setCapability(
                    PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_PATH_PROPERTY,
                    sConfig.getProperty("phantomjs_driver_path"));
        } else {
            System.out.println("Test will use PhantomJS internal GhostDriver");
        }
    }

    // Disable "web-security", enable all possible "ssl-protocols" and
    // "ignore-ssl-errors" for PhantomJSDriver
    ArrayList<String> cliArgsCap = new ArrayList<String>();
    cliArgsCap.add("--web-security=false");
    cliArgsCap.add("--ssl-protocol=any");
    cliArgsCap.add("--ignore-ssl-errors=true");
    sCaps.setCapability(PhantomJSDriverService.PHANTOMJS_CLI_ARGS, cliArgsCap);

    // Control LogLevel for GhostDriver, via CLI arguments
    sCaps.setCapability(
            PhantomJSDriverService.PHANTOMJS_GHOSTDRIVER_CLI_ARGS,
            new String[] { "--logLevel="
                    + (sConfig.getProperty("phantomjs_driver_loglevel") != null
                            ? sConfig.getProperty("phantomjs_driver_loglevel")
                            : "INFO") });

    // Start appropriate Driver
    if (isUrl(driver)) {
        sCaps.setBrowserName("phantomjs");
        mDriver = new RemoteWebDriver(new URL(driver), sCaps);
    } else if (driver.equals(DRIVER_FIREFOX)) {
        mDriver = new FirefoxDriver(sCaps);
    } else if (driver.equals(DRIVER_CHROME)) {
        mDriver = new ChromeDriver(sCaps);
    } else if (driver.equals(DRIVER_PHANTOMJS)) {
        mDriver = new PhantomJSDriver(sCaps);
    }
}
java
public void add (T element) {
    DependencyNode<T> node = new DependencyNode<T>(element);
    _nodes.put(element, node);
    _orphans.add(element);
}
java
public static <T extends ISized & IChild<U>, U extends ISized> IntSupplier middleAligned(T owner, int offset) {
    return () -> {
        U parent = owner.getParent();
        if (owner.getParent() == null)
            return 0;
        return (int) (Math.ceil(((float) parent.size().height() - Padding.of(parent).vertical()
                - owner.size().height()) / 2) + offset);
    };
}
python
def new_action(project_id):
    """Add action."""
    project = get_data_or_404('project', project_id)
    if project['owner_id'] != get_current_user_id():
        return jsonify(message='forbidden'), 403
    form = NewActionForm()
    if not form.validate_on_submit():
        return jsonify(errors=form.errors), 400
    data = form.data
    data['project_id'] = project_id
    id = add_instance('action', **data)
    if not id:
        return jsonify(errors={'name': ['duplicated slug.']}), 400
    action = get_data_or_404('action', id)
    return jsonify(**action)
python
def format_file_url_params(file_urls):
    '''
    Utility method for formatting file URL parameters for transmission
    '''
    file_urls_payload = {}
    if file_urls:
        for idx, fileurl in enumerate(file_urls):
            file_urls_payload["file_url[" + str(idx) + "]"] = fileurl
    return file_urls_payload
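A quick illustration of the indexed-parameter encoding this produces (the URLs are examples):

    payload = format_file_url_params(['https://a.example/1.png', 'https://a.example/2.png'])
    print(payload)
    # {'file_url[0]': 'https://a.example/1.png', 'file_url[1]': 'https://a.example/2.png'}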
python
def _StopProfiling(self):
    """Stops profiling."""
    if self._guppy_memory_profiler:
        self._guppy_memory_profiler.Sample()
        self._guppy_memory_profiler.Stop()
        self._guppy_memory_profiler = None

    if self._memory_profiler:
        self._memory_profiler.Stop()
        self._memory_profiler = None

    if self._processing_profiler:
        self._processing_profiler.Stop()
        self._processing_profiler = None

    if self._serializers_profiler:
        self._serializers_profiler.Stop()
        self._serializers_profiler = None

    if self._storage_profiler:
        self._storage_profiler.Stop()
        self._storage_profiler = None

    if self._tasks_profiler:
        self._tasks_profiler.Stop()
        self._tasks_profiler = None
java
public void serialize(final DataOutput output) throws TTIOException {
    try {
        output.writeLong(dataKey);
        output.writeBoolean(eof);
        output.writeInt(val.length);
        output.write(val);
    } catch (final IOException exc) {
        throw new TTIOException(exc);
    }
}
java
protected SortedMap<String, String> getProperties(ITextNode[] nodes, String language, Status[] status) {
    TreeMap<String, String> map = new TreeMap<>();
    for (ITextNode node : nodes) {
        IValueNode valueNode = node.getValueNode(language);
        if (valueNode != null) {
            if (status == null || TremaCoreUtil.containsStatus(valueNode.getStatus(), status)) {
                IKeyValuePair keyValuePair = new KeyValuePair(node.getKey(), valueNode.getValue());
                if (iExportFilters != null) {
                    for (IExportFilter filter : iExportFilters) {
                        filter.filter(keyValuePair);
                    }
                }
                map.put(keyValuePair.getKey(), keyValuePair.getValue());
            }
        }
    }
    return map;
}
java
public HostVsanInternalSystemVsanObjectOperationResult[] upgradeVsanObjects(String[] uuids, int newVersion)
        throws RuntimeFault, VsanFault, RemoteException {
    return getVimService().upgradeVsanObjects(getMOR(), uuids, newVersion);
}
java
public String deleteAbucoinsOrder(String orderID) throws IOException {
    String resp = abucoinsAuthenticated.deleteOrder(
            orderID,
            exchange.getExchangeSpecification().getApiKey(),
            signatureCreator,
            exchange.getExchangeSpecification().getPassword(),
            timestamp());
    String[] ids = AbucoinsAdapters.adaptToSetOfIDs(resp);
    return ids[0];
}
python
def partial_transform(self, traj):
    """Featurize an MD trajectory into a vector space via calculation
    of dihedral (torsion) angles of alpha carbon backbone

    Parameters
    ----------
    traj : mdtraj.Trajectory
        A molecular dynamics trajectory to featurize.

    Returns
    -------
    features : np.ndarray, dtype=float, shape=(n_samples, n_features)
        A featurized trajectory is a 2D array of shape
        `(length_of_trajectory x n_features)` where each `features[i]`
        vector is computed by applying the featurization function
        to the `i`th snapshot of the input trajectory.
    """
    ca = [a.index for a in traj.top.atoms if a.name == 'CA']
    if len(ca) < 4:
        return np.zeros((len(traj), 0), dtype=np.float32)

    alpha_indices = np.array(
        [(ca[i - 1], ca[i], ca[i + 1], ca[i + 2]) for i in range(1, len(ca) - 2)])
    result = md.compute_dihedrals(traj, alpha_indices)

    x = []
    if self.atom_indices is None:
        self.atom_indices = np.vstack(alpha_indices)
    if self.sincos:
        x.extend([np.cos(result), np.sin(result)])
    else:
        x.append(result)
    return np.hstack(x)
java
@Override
public EObject create(EClass eClass) {
    switch (eClass.getClassifierID()) {
        case LogPackage.LOG_ACTION: return (EObject) createLogAction();
        case LogPackage.SERVER_LOG: return (EObject) createServerLog();
        case LogPackage.PROJECT_RELATED: return (EObject) createProjectRelated();
        case LogPackage.CHECKOUT_RELATED: return (EObject) createCheckoutRelated();
        case LogPackage.REVISION_RELATED: return (EObject) createRevisionRelated();
        case LogPackage.USER_RELATED: return (EObject) createUserRelated();
        case LogPackage.NEW_USER_ADDED: return (EObject) createNewUserAdded();
        case LogPackage.NEW_PROJECT_ADDED: return (EObject) createNewProjectAdded();
        case LogPackage.REVISION_BRANCHED: return (EObject) createRevisionBranched();
        case LogPackage.NEW_REVISION_ADDED: return (EObject) createNewRevisionAdded();
        case LogPackage.NEW_CHECKOUT_ADDED: return (EObject) createNewCheckoutAdded();
        case LogPackage.SETTINGS_SAVED: return (EObject) createSettingsSaved();
        case LogPackage.USER_ADDED_TO_PROJECT: return (EObject) createUserAddedToProject();
        case LogPackage.NEW_OBJECT_IDM_UPLOADED: return (EObject) createNewObjectIDMUploaded();
        case LogPackage.DOWNLOAD: return (EObject) createDownload();
        case LogPackage.USER_REMOVED_FROM_PROJECT: return (EObject) createUserRemovedFromProject();
        case LogPackage.PROJECT_DELETED: return (EObject) createProjectDeleted();
        case LogPackage.USER_DELETED: return (EObject) createUserDeleted();
        case LogPackage.PASSWORD_RESET: return (EObject) createPasswordReset();
        case LogPackage.DATABASE_CREATED: return (EObject) createDatabaseCreated();
        case LogPackage.SERVER_STARTED: return (EObject) createServerStarted();
        case LogPackage.PROJECT_UPDATED: return (EObject) createProjectUpdated();
        case LogPackage.USER_UNDELETED: return (EObject) createUserUndeleted();
        case LogPackage.PROJECT_UNDELETED: return (EObject) createProjectUndeleted();
        case LogPackage.REVISION_UPDATED: return (EObject) createRevisionUpdated();
        case LogPackage.GEO_TAG_UPDATED: return (EObject) createGeoTagUpdated();
        case LogPackage.PASSWORD_CHANGED: return (EObject) createPasswordChanged();
        case LogPackage.USER_CHANGED: return (EObject) createUserChanged();
        case LogPackage.EXTENDED_DATA_ADDED_TO_REVISION: return (EObject) createExtendedDataAddedToRevision();
        case LogPackage.EXTENDED_DATA_ADDED_TO_PROJECT: return (EObject) createExtendedDataAddedToProject();
        case LogPackage.REMOTE_SERVICE_CALLED: return (EObject) createRemoteServiceCalled();
        default:
            throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
    }
}
python
def forward(self, tokens, mask=None):
    """
    Args:
        tokens (:class:`torch.FloatTensor` [batch_size, num_tokens, input_dim]):
            Sequence matrix to encode.
        mask (:class:`torch.FloatTensor`): Broadcastable matrix to `tokens` used as a mask.

    Returns:
        (:class:`torch.FloatTensor` [batch_size, output_dim]): Encoding of sequence.
    """
    if mask is not None:
        tokens = tokens * mask.unsqueeze(-1).float()

    # Our input is expected to have shape `(batch_size, num_tokens, embedding_dim)`. The
    # convolution layers expect input of shape `(batch_size, in_channels, sequence_length)`,
    # where the conv layer `in_channels` is our `embedding_dim`. We thus need to transpose
    # the tensor first.
    tokens = torch.transpose(tokens, 1, 2)

    # Each convolution layer returns output of size `(batch_size, num_filters, pool_length)`,
    # where `pool_length = num_tokens - ngram_size + 1`. We then do an activation function,
    # then do max pooling over each filter for the whole input sequence. Because our max
    # pooling is simple, we just use `torch.max`. The resultant tensor has shape
    # `(batch_size, num_conv_layers * num_filters)`, which then gets projected using the
    # projection layer, if requested.
    filter_outputs = []
    for i in range(len(self._convolution_layers)):
        convolution_layer = getattr(self, 'conv_layer_{}'.format(i))
        filter_outputs.append(self._activation(convolution_layer(tokens)).max(dim=2)[0])

    # Now we have a list of `num_conv_layers` tensors of shape `(batch_size, num_filters)`.
    # Concatenating them gives us a tensor of shape
    # `(batch_size, num_filters * num_conv_layers)`.
    maxpool_output = torch.cat(filter_outputs, dim=1) if len(filter_outputs) > 1 else filter_outputs[0]

    if self.projection_layer:
        result = self.projection_layer(maxpool_output)
    else:
        result = maxpool_output
    return result
java
@Override
public void configure() throws Exception {
    LOG.debug("Started REST data stream source at port {}.", port);
    from("netty4-http:http://0.0.0.0:" + port + "/?matchOnUriPrefix=true&httpMethodRestrict=OPTIONS,GET,POST,PUT,DELETE").
            choice().
            when(header(HTTP_METHOD).isEqualTo("OPTIONS")).setBody().constant("").endChoice().
            otherwise().
            setHeader(CONTENT_TYPE).constant(contentType).
            process(exc -> {
                String requestUri = exc.getIn().getHeader(HTTP_URI, String.class);
                LOG.debug("Processing request URI: {}", requestUri);
                String trimmedUri = removeEnd(requestUri, "/");
                LOG.debug("Trimmed request URI: {}", trimmedUri);
                String busChannel = trimmedUri.substring(1).replaceAll("\\/", ".");
                exc.setProperty("target", "amqp:" + busChannel);
            }).toD("${property.target}").endChoice().
            end().
            process(exc -> {
                exc.getIn().setHeader("Access-Control-Allow-Origin", "*");
                exc.getIn().setHeader("Access-Control-Allow-Headers",
                        "Origin, X-Requested-With, Content-Type, Accept, Authorization");
            });
}
java
public void updateChannelConfiguration(UpdateChannelConfiguration updateChannelConfiguration,
        Orderer orderer, byte[]... signers) throws TransactionException, InvalidArgumentException {

    checkChannelState();
    checkOrderer(orderer);

    try {
        final long startLastConfigIndex = getLastConfigIndex(orderer);
        logger.trace(format("startLastConfigIndex: %d. Channel config wait time is: %d",
                startLastConfigIndex, CHANNEL_CONFIG_WAIT_TIME));

        sendUpdateChannel(updateChannelConfiguration.getUpdateChannelConfigurationAsBytes(), signers, orderer);

        long currentLastConfigIndex = -1;
        final long nanoTimeStart = System.nanoTime();

        // Try to wait to see the channel got updated but don't fail if we don't see it.
        do {
            currentLastConfigIndex = getLastConfigIndex(orderer);
            if (currentLastConfigIndex == startLastConfigIndex) {
                final long duration = TimeUnit.MILLISECONDS.convert(System.nanoTime() - nanoTimeStart,
                        TimeUnit.NANOSECONDS);

                if (duration > CHANNEL_CONFIG_WAIT_TIME) {
                    logger.warn(format(
                            "Channel %s did not get updated last config after %d ms, Config wait time: %d ms. "
                                    + "startLastConfigIndex: %d, currentLastConfigIndex: %d ",
                            name, duration, CHANNEL_CONFIG_WAIT_TIME, startLastConfigIndex,
                            currentLastConfigIndex));
                    // waited long enough ..
                    currentLastConfigIndex = startLastConfigIndex - 1L; // just bail, don't throw exception.
                } else {
                    try {
                        Thread.sleep(ORDERER_RETRY_WAIT_TIME); // try again sleep
                    } catch (InterruptedException e) {
                        TransactionException te = new TransactionException("update channel thread Sleep", e);
                        logger.warn(te.getMessage(), te);
                    }
                }
            }
            logger.trace(format("currentLastConfigIndex: %d", currentLastConfigIndex));
        } while (currentLastConfigIndex == startLastConfigIndex);

    } catch (TransactionException e) {
        logger.error(format("Channel %s error: %s", name, e.getMessage()), e);
        throw e;
    } catch (Exception e) {
        String msg = format("Channel %s error: %s", name, e.getMessage());
        logger.error(msg, e);
        throw new TransactionException(msg, e);
    }
}
python
def process(self, input_data, ratio, end_of_input=False, verbose=False):
    """Resample the signal in `input_data`.

    Parameters
    ----------
    input_data : ndarray
        Input data. A single channel is provided as a 1D array of
        `num_frames` length. Input data with several channels is
        represented as a 2D array of shape (`num_frames`,
        `num_channels`). For use with `libsamplerate`, `input_data`
        is converted to 32-bit float and C (row-major) memory order.
    ratio : float
        Conversion ratio = output sample rate / input sample rate.
    end_of_input : int
        Set to `True` if no more data is available, or to `False`
        otherwise.
    verbose : bool
        If `True`, print additional information about the conversion.

    Returns
    -------
    output_data : ndarray
        Resampled input data.
    """
    from samplerate.lowlevel import src_process
    from samplerate.exceptions import ResamplingError

    input_data = np.require(input_data, requirements='C', dtype=np.float32)
    if input_data.ndim == 2:
        num_frames, channels = input_data.shape
        output_shape = (int(num_frames * ratio), channels)
    elif input_data.ndim == 1:
        num_frames, channels = input_data.size, 1
        output_shape = (int(num_frames * ratio), )
    else:
        raise ValueError('rank > 2 not supported')

    if channels != self._channels:
        raise ValueError('Invalid number of channels in input data.')

    output_data = np.empty(output_shape, dtype=np.float32)

    (error, input_frames_used, output_frames_gen) = src_process(
        self._state, input_data, output_data, ratio, end_of_input)

    if error != 0:
        raise ResamplingError(error)

    if verbose:
        info = ('samplerate info:\n'
                '{} input frames used\n'
                '{} output frames generated\n'
                .format(input_frames_used, output_frames_gen))
        print(info)

    return (output_data[:output_frames_gen, :]
            if channels > 1 else output_data[:output_frames_gen])
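A usage sketch with the samplerate package's object API, to which this method belongs; the converter name and sample rates are examples:

    import numpy as np
    import samplerate

    resampler = samplerate.Resampler('sinc_best', channels=1)
    signal = np.random.randn(44100).astype(np.float32)  # 1 s at 44.1 kHz
    output = resampler.process(signal, ratio=48000 / 44100, end_of_input=True)
    print(len(output))  # roughly 48000 frames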
python
def _tls_hmac_verify(self, hdr, msg, mac):
    """
    Provided with the record header, the TLSCompressed.fragment and the
    HMAC, return True if the HMAC is correct. If we could not compute the
    HMAC because the key was missing, there is no sense in verifying
    anything, thus we also return True.

    Meant to be used with a block cipher or a stream cipher. It would fail
    with an AEAD cipher, because rcs.hmac would be None. See RFC 5246,
    section 6.2.3.
    """
    read_seq_num = struct.pack("!Q", self.tls_session.rcs.seq_num)
    self.tls_session.rcs.seq_num += 1

    mac_len = self.tls_session.rcs.mac_len
    if mac_len == 0:  # should be TLS_NULL_WITH_NULL_NULL
        return True
    if len(mac) != mac_len:
        return False

    alg = self.tls_session.rcs.hmac
    version = struct.unpack("!H", hdr[1:3])[0]
    try:
        if version > 0x300:
            h = alg.digest(read_seq_num + hdr + msg)
        elif version == 0x300:
            h = alg.digest_sslv3(read_seq_num + hdr[:1] + hdr[3:5] + msg)
        else:
            raise Exception("Unrecognized version.")
    except HMACError:
        h = mac
    return h == mac
python
def create_volume(name, bricks, stripe=False, replica=False, device_vg=False, transport='tcp', start=False, force=False, arbiter=False): ''' Create a glusterfs volume name Name of the gluster volume bricks Bricks to create volume from, in <peer>:<brick path> format. For \ multiple bricks use list format: '["<peer1>:<brick1>", \ "<peer2>:<brick2>"]' stripe Stripe count, the number of bricks should be a multiple of the stripe \ count for a distributed striped volume replica Replica count, the number of bricks should be a multiple of the \ replica count for a distributed replicated volume arbiter If true, specifies volume should use arbiter brick(s). \ Valid configuration limited to "replica 3 arbiter 1" per \ Gluster documentation. Every third brick in the brick list \ is used as an arbiter brick. .. versionadded:: 2019.2.0 device_vg If true, specifies volume should use block backend instead of regular \ posix backend. Block device backend volume does not support multiple \ bricks transport Transport protocol to use, can be 'tcp', 'rdma' or 'tcp,rdma' start Start the volume after creation force Force volume creation, this works even if creating in root FS CLI Examples: .. code-block:: bash salt host1 glusterfs.create newvolume host1:/brick salt gluster1 glusterfs.create vol2 '["gluster1:/export/vol2/brick", \ "gluster2:/export/vol2/brick"]' replica=2 start=True ''' # If single brick given as a string, accept it if isinstance(bricks, six.string_types): bricks = [bricks] # Error for block devices with multiple bricks if device_vg and len(bricks) > 1: raise SaltInvocationError('Block device backend volume does not ' + 'support multiple bricks') # Validate bricks syntax for brick in bricks: try: peer_name, path = brick.split(':') if not path.startswith('/'): raise SaltInvocationError( 'Brick paths must start with / in {0}'.format(brick)) except ValueError: raise SaltInvocationError( 'Brick syntax is <peer>:<path> got {0}'.format(brick)) # Validate arbiter config if arbiter and replica != 3: raise SaltInvocationError('Arbiter configuration only valid ' + 'in replica 3 volume') # Format creation call cmd = 'volume create {0} '.format(name) if stripe: cmd += 'stripe {0} '.format(stripe) if replica: cmd += 'replica {0} '.format(replica) if arbiter: cmd += 'arbiter 1 ' if device_vg: cmd += 'device vg ' if transport != 'tcp': cmd += 'transport {0} '.format(transport) cmd += ' '.join(bricks) if force: cmd += ' force' if not _gluster(cmd): return False if start: return start_volume(name) return True
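A hedged sketch of driving the module above from Salt's Python client; the minion id and brick paths are placeholders, and the module path `glusterfs.create_volume` is assumed from the function name (the docstring's CLI examples use the older `glusterfs.create` alias):

import salt.client

local = salt.client.LocalClient()
# Mirrors the CLI example: a two-brick replica 2 volume, started on creation.
result = local.cmd(
    'gluster1',
    'glusterfs.create_volume',
    ['vol2', ['gluster1:/export/vol2/brick', 'gluster2:/export/vol2/brick']],
    kwarg={'replica': 2, 'start': True},
)
print(result)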
java
public final T fromJsonTree(JsonElement jsonTree) { try { JsonReader jsonReader = new JsonTreeReader(jsonTree); return read(jsonReader); } catch (IOException e) { throw new JsonIOException(e); } }
python
def get_repo_revision():
    '''
    Returns a git revision string, something like `git rev-parse --short HEAD`
    does. Returns an empty string if anything goes wrong, such as missing or
    unexpectedly formatted files under .git, or no git repository being found.
    '''
    repopath = _findrepo()
    if not repopath:
        return ''
    try:
        head = open(os.path.join(repopath, 'HEAD'), 'r').read()
        for l in head.splitlines():
            l = l.split()
            if l and l[0] == 'ref:':
                ref = l[1]
                break
        else:
            ref = None
        if ref:
            rev = open(os.path.join(repopath, ref), 'r').read()
            rev = rev[:7]
            if rev:
                return rev
    except IOError:
        pass
    try:
        rev = compat.exec_command('git', 'rev-parse', '--short', 'HEAD').strip()
        if rev:
            return rev
    except Exception:
        pass
    return ''
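Trivial usage of the helper above; it degrades to an empty string outside a checkout:

rev = get_repo_revision()
print(rev if rev else 'not in a git checkout')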
java
public void addSummary(Record recSummary, BaseField[][] mxKeyFields, BaseField[][] mxDataFields) { try { recSummary.addNew(); // First move the key to see if a record exists this.setupSummaryKey(mxKeyFields); boolean bSuccess = recSummary.seek("="); if (bSuccess) recSummary.edit(); else { recSummary.addNew(); this.setupSummaryKey(mxKeyFields); } this.addSummaryData(mxDataFields); if (bSuccess) recSummary.set(); else recSummary.add(); } catch (DBException ex) { ex.printStackTrace(); } }
java
public static <T> Iterable<T> cycle(T... elements) { return cycle(Lists.newArrayList(elements)); }
java
public void loadModel(String modelfile) throws IOException, ClassNotFoundException { ObjectInputStream instream = new ObjectInputStream(new GZIPInputStream( new FileInputStream(modelfile))); factory = (AlphabetFactory) instream.readObject(); models = (Linear[]) instream.readObject(); instream.close(); IFeatureAlphabet features = factory.DefaultFeatureAlphabet(); features.setStopIncrement(true); }
python
def consolidateBy(requestContext, seriesList, consolidationFunc):
    """
    Takes one metric or a wildcard seriesList and a consolidation function
    name.

    Valid function names are 'sum', 'average', 'min', and 'max'.

    When a graph is drawn where the width of the graph in pixels is smaller
    than the number of datapoints to be graphed, Graphite consolidates the
    values to prevent line overlap. The consolidateBy() function changes the
    consolidation function from the default of 'average' to one of 'sum',
    'max', or 'min'. This is especially useful in sales graphs, where
    fractional values make no sense and a 'sum' of consolidated values is
    appropriate.

    Example::

        &target=consolidateBy(Sales.widgets.largeBlue, 'sum')
        &target=consolidateBy(Servers.web01.sda1.free_space, 'max')

    """
    for series in seriesList:
        # datalib will throw an exception, so it's not necessary to validate
        # here
        series.consolidationFunc = consolidationFunc
        series.name = 'consolidateBy(%s,"%s")' % (series.name,
                                                  series.consolidationFunc)
        series.pathExpression = series.name
    return seriesList
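For context, a hedged sketch of requesting such a consolidated series over Graphite's HTTP render API; the host and metric name are placeholders:

import requests

resp = requests.get(
    'http://graphite.example.com/render',
    params={
        'target': "consolidateBy(Sales.widgets.largeBlue, 'sum')",
        'format': 'json',
    },
)
print(resp.json())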
java
public void writeUTCDate(long time) throws IOException { if (SIZE < _offset + 32) flushBuffer(); int offset = _offset; byte[] buffer = _buffer; if (time % 60000L == 0) { // compact date ::= x65 b3 b2 b1 b0 long minutes = time / 60000L; if ((minutes >> 31) == 0 || (minutes >> 31) == -1) { buffer[offset++] = (byte) BC_DATE_MINUTE; buffer[offset++] = ((byte) (minutes >> 24)); buffer[offset++] = ((byte) (minutes >> 16)); buffer[offset++] = ((byte) (minutes >> 8)); buffer[offset++] = ((byte) (minutes >> 0)); _offset = offset; return; } } buffer[offset++] = (byte) BC_DATE; buffer[offset++] = ((byte) (time >> 56)); buffer[offset++] = ((byte) (time >> 48)); buffer[offset++] = ((byte) (time >> 40)); buffer[offset++] = ((byte) (time >> 32)); buffer[offset++] = ((byte) (time >> 24)); buffer[offset++] = ((byte) (time >> 16)); buffer[offset++] = ((byte) (time >> 8)); buffer[offset++] = ((byte) (time)); _offset = offset; }
java
static Command parse(String s) { @Cleanup Scanner scanner = new Scanner(s); String component = scanner.findInLine(SCANNER_PATTERN); String command = scanner.findInLine(SCANNER_PATTERN); ArrayList<String> args = new ArrayList<>(); String arg; while ((arg = scanner.findInLine(SCANNER_PATTERN)) != null) { args.add(arg); } return new Command(component, command, Collections.unmodifiableList(args)); }
python
def bulk_query(self, query, *multiparams): """Bulk insert or update.""" with self.get_connection() as conn: conn.bulk_query(query, *multiparams)
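A minimal sketch of the method above as exposed by a records-style Database wrapper (API assumed; the parameters follow SQLAlchemy's named-bind convention):

import records

db = records.Database('sqlite:///:memory:')
db.query('CREATE TABLE users (name TEXT, age INTEGER)')
# One executemany-style round trip instead of a query per row.
db.bulk_query(
    'INSERT INTO users (name, age) VALUES (:name, :age)',
    [{'name': 'ada', 'age': 36}, {'name': 'alan', 'age': 41}],
)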
python
def discard_event(event: events.Event, bot_id: str = None) -> bool: """ Check if the incoming event needs to be discarded Args: event: Incoming :class:`slack.events.Event` bot_id: Id of connected bot Returns: boolean """ if event["type"] in SKIP_EVENTS: return True elif bot_id and isinstance(event, events.Message): if event.get("bot_id") == bot_id: LOG.debug("Ignoring event: %s", event) return True elif "message" in event and event["message"].get("bot_id") == bot_id: LOG.debug("Ignoring event: %s", event) return True return False
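A hedged check of the fast path above; SKIP_EVENTS comes from the surrounding module, so the exact outcome depends on its contents:

hello_event = {"type": "hello"}
# True when "hello" is listed in SKIP_EVENTS, False otherwise.
print(discard_event(hello_event))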
python
def build_url(self): """Build the url path based on the filter options.""" if not self.api_filters: return self.url # Reduce complex objects to simpler strings for k, v in self.api_filters.items(): if isinstance(v, datetime): # datetime > UNIX timestamp self.api_filters[k] = int(calendar.timegm(v.timetuple())) if isinstance(v, (tuple, list)): # tuples & lists > x,y,z self.api_filters[k] = ",".join([str(_) for _ in v]) if ( self.id_filter in self.api_filters and len(str(self.api_filters[self.id_filter])) > self.URL_LENGTH_LIMIT ): self.build_url_chunks() return self.split_urls.pop(0) filters = '&'.join("%s=%s" % (k, v) for (k, v) in self.api_filters.items()) return "%s?%s" % (self.url, filters)
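A hedged, standalone illustration of the flattening the method applies before joining filters; the filter names here are made up, while the real method reads them from self.api_filters:

import calendar
from datetime import datetime

api_filters = {'since': datetime(2019, 1, 1), 'ids': (1, 2, 3)}
flat = {}
for k, v in api_filters.items():
    if isinstance(v, datetime):
        flat[k] = int(calendar.timegm(v.timetuple()))   # datetime -> UNIX timestamp
    elif isinstance(v, (tuple, list)):
        flat[k] = ','.join(str(x) for x in v)           # sequence -> "x,y,z"
    else:
        flat[k] = v
print('&'.join('%s=%s' % kv for kv in flat.items()))
# since=1546300800&ids=1,2,3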
java
private static Key keyForCode(int keyCode) { switch (keyCode) { case KeyEvent.KEYCODE_0: return Key.K0; case KeyEvent.KEYCODE_1: return Key.K1; case KeyEvent.KEYCODE_2: return Key.K2; case KeyEvent.KEYCODE_3: return Key.K3; case KeyEvent.KEYCODE_4: return Key.K4; case KeyEvent.KEYCODE_5: return Key.K5; case KeyEvent.KEYCODE_6: return Key.K6; case KeyEvent.KEYCODE_7: return Key.K7; case KeyEvent.KEYCODE_8: return Key.K8; case KeyEvent.KEYCODE_9: return Key.K9; case KeyEvent.KEYCODE_A: return Key.A; case KeyEvent.KEYCODE_ALT_LEFT: return Key.ALT; case KeyEvent.KEYCODE_ALT_RIGHT: return Key.ALT; case KeyEvent.KEYCODE_APOSTROPHE: return Key.QUOTE; // case KeyEvent.KEYCODE_APP_SWITCH: return Key.APP_SWITCH; case KeyEvent.KEYCODE_AT: return Key.AT; // case KeyEvent.KEYCODE_AVR_INPUT: return Key.AVR_INPUT; // case KeyEvent.KEYCODE_AVR_POWER: return Key.AVR_POWER; case KeyEvent.KEYCODE_B: return Key.B; case KeyEvent.KEYCODE_BACK: return Key.BACK; case KeyEvent.KEYCODE_BACKSLASH: return Key.BACKSLASH; // case KeyEvent.KEYCODE_BOOKMARK: return Key.BOOKMARK; // case KeyEvent.KEYCODE_BREAK: return Key.BREAK; // case KeyEvent.KEYCODE_BUTTON_1: return Key.BUTTON_1; // case KeyEvent.KEYCODE_BUTTON_2: return Key.BUTTON_2; // case KeyEvent.KEYCODE_BUTTON_3: return Key.BUTTON_3; // case KeyEvent.KEYCODE_BUTTON_4: return Key.BUTTON_4; // case KeyEvent.KEYCODE_BUTTON_5: return Key.BUTTON_5; // case KeyEvent.KEYCODE_BUTTON_6: return Key.BUTTON_6; // case KeyEvent.KEYCODE_BUTTON_7: return Key.BUTTON_7; // case KeyEvent.KEYCODE_BUTTON_8: return Key.BUTTON_8; // case KeyEvent.KEYCODE_BUTTON_9: return Key.BUTTON_9; // case KeyEvent.KEYCODE_BUTTON_10: return Key.BUTTON_10; // case KeyEvent.KEYCODE_BUTTON_11: return Key.BUTTON_11; // case KeyEvent.KEYCODE_BUTTON_12: return Key.BUTTON_12; // case KeyEvent.KEYCODE_BUTTON_13: return Key.BUTTON_13; // case KeyEvent.KEYCODE_BUTTON_14: return Key.BUTTON_14; // case KeyEvent.KEYCODE_BUTTON_15: return Key.BUTTON_15; // case KeyEvent.KEYCODE_BUTTON_16: return Key.BUTTON_16; // case KeyEvent.KEYCODE_BUTTON_A: return Key.BUTTON_A; // case KeyEvent.KEYCODE_BUTTON_B: return Key.BUTTON_B; // case KeyEvent.KEYCODE_BUTTON_C: return Key.BUTTON_C; // case KeyEvent.KEYCODE_BUTTON_L1: return Key.BUTTON_L1; // case KeyEvent.KEYCODE_BUTTON_L2: return Key.BUTTON_L2; // case KeyEvent.KEYCODE_BUTTON_MODE: return Key.BUTTON_MODE; // case KeyEvent.KEYCODE_BUTTON_R1: return Key.BUTTON_R1; // case KeyEvent.KEYCODE_BUTTON_R2: return Key.BUTTON_R2; // case KeyEvent.KEYCODE_BUTTON_SELECT: return Key.BUTTON_SELECT; // case KeyEvent.KEYCODE_BUTTON_START: return Key.BUTTON_START; // case KeyEvent.KEYCODE_BUTTON_THUMBL: return Key.BUTTON_THUMBL; // case KeyEvent.KEYCODE_BUTTON_THUMBR: return Key.BUTTON_THUMBR; // case KeyEvent.KEYCODE_BUTTON_X: return Key.BUTTON_X; // case KeyEvent.KEYCODE_BUTTON_Y: return Key.BUTTON_Y; // case KeyEvent.KEYCODE_BUTTON_Z: return Key.BUTTON_Z; case KeyEvent.KEYCODE_C: return Key.C; case KeyEvent.KEYCODE_CALL: return Key.CALL; case KeyEvent.KEYCODE_CAMERA: return Key.CAMERA; // case KeyEvent.KEYCODE_CAPS_LOCK: return Key.CAPS_LOCK; // case KeyEvent.KEYCODE_CAPTIONS: return Key.CAPTIONS; // case KeyEvent.KEYCODE_CHANNEL_DOWN: return Key.CHANNEL_DOWN; // case KeyEvent.KEYCODE_CHANNEL_UP: return Key.CHANNEL_UP; case KeyEvent.KEYCODE_CLEAR: return Key.CLEAR; case KeyEvent.KEYCODE_COMMA: return Key.COMMA; // case KeyEvent.KEYCODE_CTRL_LEFT: return Key.CTRL; // case KeyEvent.KEYCODE_CTRL_RIGHT: return Key.CTRL; case KeyEvent.KEYCODE_D: return Key.D; case KeyEvent.KEYCODE_DEL: 
return Key.DELETE; case KeyEvent.KEYCODE_DPAD_CENTER: return Key.DPAD_CENTER; case KeyEvent.KEYCODE_DPAD_DOWN: return Key.DPAD_DOWN; case KeyEvent.KEYCODE_DPAD_LEFT: return Key.DPAD_LEFT; case KeyEvent.KEYCODE_DPAD_RIGHT: return Key.DPAD_RIGHT; case KeyEvent.KEYCODE_DPAD_UP: return Key.DPAD_UP; // case KeyEvent.KEYCODE_DVR: return Key.DVR; case KeyEvent.KEYCODE_E: return Key.E; case KeyEvent.KEYCODE_ENDCALL: return Key.ENDCALL; case KeyEvent.KEYCODE_ENTER: return Key.ENTER; case KeyEvent.KEYCODE_ENVELOPE: return Key.ENVELOPE; case KeyEvent.KEYCODE_EQUALS: return Key.EQUALS; // case KeyEvent.KEYCODE_ESCAPE: return Key.ESCAPE; case KeyEvent.KEYCODE_EXPLORER: return Key.EXPLORER; case KeyEvent.KEYCODE_F: return Key.F; // case KeyEvent.KEYCODE_F1: return Key.F1; // case KeyEvent.KEYCODE_F2: return Key.F2; // case KeyEvent.KEYCODE_F3: return Key.F3; // case KeyEvent.KEYCODE_F4: return Key.F4; // case KeyEvent.KEYCODE_F5: return Key.F5; // case KeyEvent.KEYCODE_F6: return Key.F6; // case KeyEvent.KEYCODE_F7: return Key.F7; // case KeyEvent.KEYCODE_F8: return Key.F8; // case KeyEvent.KEYCODE_F9: return Key.F9; // case KeyEvent.KEYCODE_F10: return Key.F10; // case KeyEvent.KEYCODE_F11: return Key.F11; // case KeyEvent.KEYCODE_F12: return Key.F12; case KeyEvent.KEYCODE_FOCUS: return Key.FOCUS; // case KeyEvent.KEYCODE_FORWARD: return Key.FORWARD; // case KeyEvent.KEYCODE_FORWARD_DEL: return Key.FORWARD_DEL; // case KeyEvent.KEYCODE_FUNCTION: return Key.FUNCTION; case KeyEvent.KEYCODE_G: return Key.G; case KeyEvent.KEYCODE_GRAVE: return Key.BACKQUOTE; // case KeyEvent.KEYCODE_GUIDE: return Key.GUIDE; case KeyEvent.KEYCODE_H: return Key.H; case KeyEvent.KEYCODE_HEADSETHOOK: return Key.HEADSETHOOK; case KeyEvent.KEYCODE_HOME: return Key.HOME; case KeyEvent.KEYCODE_I: return Key.I; // case KeyEvent.KEYCODE_INFO: return Key.INFO; // case KeyEvent.KEYCODE_INSERT: return Key.INSERT; case KeyEvent.KEYCODE_J: return Key.J; case KeyEvent.KEYCODE_K: return Key.K; case KeyEvent.KEYCODE_L: return Key.L; case KeyEvent.KEYCODE_LEFT_BRACKET: return Key.LEFT_BRACKET; case KeyEvent.KEYCODE_M: return Key.M; // case KeyEvent.KEYCODE_MEDIA_CLOSE: return Key.MEDIA_CLOSE; // case KeyEvent.KEYCODE_MEDIA_EJECT: return Key.MEDIA_EJECT; // case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD: return Key.MEDIA_FAST_FORWARD; // case KeyEvent.KEYCODE_MEDIA_NEXT: return Key.MEDIA_NEXT; // case KeyEvent.KEYCODE_MEDIA_PAUSE: return Key.MEDIA_PAUSE; // case KeyEvent.KEYCODE_MEDIA_PLAY: return Key.MEDIA_PLAY; // case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE: return Key.MEDIA_PLAY_PAUSE; // case KeyEvent.KEYCODE_MEDIA_PREVIOUS: return Key.MEDIA_PREVIOUS; // case KeyEvent.KEYCODE_MEDIA_RECORD: return Key.MEDIA_RECORD; // case KeyEvent.KEYCODE_MEDIA_REWIND: return Key.MEDIA_REWIND; // case KeyEvent.KEYCODE_MEDIA_STOP: return Key.MEDIA_STOP; case KeyEvent.KEYCODE_MENU: return Key.MENU; // case KeyEvent.KEYCODE_META_LEFT: return Key.META; // case KeyEvent.KEYCODE_META_RIGHT: return Key.META; case KeyEvent.KEYCODE_MINUS: return Key.MINUS; // case KeyEvent.KEYCODE_MOVE_END: return Key.END; // case KeyEvent.KEYCODE_MOVE_HOME: return Key.HOME; case KeyEvent.KEYCODE_MUTE: return Key.MUTE; case KeyEvent.KEYCODE_N: return Key.N; case KeyEvent.KEYCODE_NOTIFICATION: return Key.NOTIFICATION; case KeyEvent.KEYCODE_NUM: return Key.NUM; // case KeyEvent.KEYCODE_NUMPAD_0: return Key.NP0; // case KeyEvent.KEYCODE_NUMPAD_1: return Key.NP1; // case KeyEvent.KEYCODE_NUMPAD_2: return Key.NP2; // case KeyEvent.KEYCODE_NUMPAD_3: return Key.NP3; // case 
KeyEvent.KEYCODE_NUMPAD_4: return Key.NP4; // case KeyEvent.KEYCODE_NUMPAD_5: return Key.NP5; // case KeyEvent.KEYCODE_NUMPAD_6: return Key.NP6; // case KeyEvent.KEYCODE_NUMPAD_7: return Key.NP7; // case KeyEvent.KEYCODE_NUMPAD_8: return Key.NP8; // case KeyEvent.KEYCODE_NUMPAD_9: return Key.NP9; // case KeyEvent.KEYCODE_NUMPAD_ADD: return Key.NP_ADD; // case KeyEvent.KEYCODE_NUMPAD_COMMA: return Key.COMMA; // case KeyEvent.KEYCODE_NUMPAD_DIVIDE: return Key.NP_DIVIDE; // case KeyEvent.KEYCODE_NUMPAD_DOT: return Key.NP_DECIMAL; // case KeyEvent.KEYCODE_NUMPAD_ENTER: return Key.NP_ENTER; // case KeyEvent.KEYCODE_NUMPAD_EQUALS: return Key.EQUALS; // case KeyEvent.KEYCODE_NUMPAD_LEFT_PAREN: return Key.LEFT_PAREN; // case KeyEvent.KEYCODE_NUMPAD_MULTIPLY: return Key.NP_MULTIPLY; // case KeyEvent.KEYCODE_NUMPAD_RIGHT_PAREN: return Key.RIGHT_PAREN; // case KeyEvent.KEYCODE_NUMPAD_SUBTRACT: return Key.NP_SUBTRACT; // case KeyEvent.KEYCODE_NUM_LOCK: return Key.NP_NUM_LOCK; case KeyEvent.KEYCODE_O: return Key.O; case KeyEvent.KEYCODE_P: return Key.P; case KeyEvent.KEYCODE_PAGE_DOWN: return Key.PAGE_DOWN; case KeyEvent.KEYCODE_PAGE_UP: return Key.PAGE_UP; case KeyEvent.KEYCODE_PERIOD: return Key.PERIOD; case KeyEvent.KEYCODE_PICTSYMBOLS: return Key.PICTSYMBOLS; case KeyEvent.KEYCODE_PLUS: return Key.PLUS; case KeyEvent.KEYCODE_POUND: return Key.HASH; case KeyEvent.KEYCODE_POWER: return Key.POWER; // case KeyEvent.KEYCODE_PROG_BLUE: return Key.BLUE; // case KeyEvent.KEYCODE_PROG_GREEN: return Key.GREEN; // case KeyEvent.KEYCODE_PROG_RED: return Key.RED; // case KeyEvent.KEYCODE_PROG_YELLOW: return Key.YELLOW; case KeyEvent.KEYCODE_Q: return Key.Q; case KeyEvent.KEYCODE_R: return Key.R; case KeyEvent.KEYCODE_RIGHT_BRACKET: return Key.RIGHT_BRACKET; case KeyEvent.KEYCODE_S: return Key.S; // case KeyEvent.KEYCODE_SCROLL_LOCK: return Key.SCROLL_LOCK; case KeyEvent.KEYCODE_SEARCH: return Key.SEARCH; case KeyEvent.KEYCODE_SEMICOLON: return Key.SEMICOLON; // case KeyEvent.KEYCODE_SETTINGS: return Key.SETTINGS; case KeyEvent.KEYCODE_SHIFT_LEFT: return Key.SHIFT; case KeyEvent.KEYCODE_SHIFT_RIGHT: return Key.SHIFT; case KeyEvent.KEYCODE_SLASH: return Key.SLASH; case KeyEvent.KEYCODE_SOFT_LEFT: return Key.SOFT_LEFT; case KeyEvent.KEYCODE_SOFT_RIGHT: return Key.SOFT_RIGHT; case KeyEvent.KEYCODE_SPACE: return Key.SPACE; case KeyEvent.KEYCODE_STAR: return Key.STAR; // case KeyEvent.KEYCODE_STB_INPUT: return Key.STB_INPUT; // case KeyEvent.KEYCODE_STB_POWER: return Key.STB_POWER; case KeyEvent.KEYCODE_SWITCH_CHARSET: return Key.SWITCH_CHARSET; case KeyEvent.KEYCODE_SYM: return Key.SYM; // case KeyEvent.KEYCODE_SYSRQ: return Key.SYSRQ; case KeyEvent.KEYCODE_T: return Key.T; case KeyEvent.KEYCODE_TAB: return Key.TAB; // case KeyEvent.KEYCODE_TV: return Key.TV; // case KeyEvent.KEYCODE_TV_INPUT: return Key.TV_INPUT; // case KeyEvent.KEYCODE_TV_POWER: return Key.TV_POWER; case KeyEvent.KEYCODE_U: return Key.U; case KeyEvent.KEYCODE_UNKNOWN: return Key.UNKNOWN; case KeyEvent.KEYCODE_V: return Key.V; case KeyEvent.KEYCODE_VOLUME_DOWN: return Key.VOLUME_DOWN; // case KeyEvent.KEYCODE_VOLUME_MUTE: return Key.VOLUME_MUTE; case KeyEvent.KEYCODE_VOLUME_UP: return Key.VOLUME_UP; case KeyEvent.KEYCODE_W: return Key.W; // case KeyEvent.KEYCODE_WINDOW: return Key.WINDOW; case KeyEvent.KEYCODE_X: return Key.X; case KeyEvent.KEYCODE_Y: return Key.Y; case KeyEvent.KEYCODE_Z: return Key.Z; // case KeyEvent.KEYCODE_ZOOM_IN: return Key.ZOOM_IN; // case KeyEvent.KEYCODE_ZOOM_OUT: return Key.ZOOM_OUT; default: return Key.UNKNOWN; } }
python
def format_field_names(obj, format_type=None): """ Takes a dict and returns it with formatted keys as set in `format_type` or `JSON_API_FORMAT_FIELD_NAMES` :format_type: Either 'dasherize', 'camelize', 'capitalize' or 'underscore' """ if format_type is None: format_type = json_api_settings.FORMAT_FIELD_NAMES if isinstance(obj, dict): formatted = OrderedDict() for key, value in obj.items(): key = format_value(key, format_type) formatted[key] = value return formatted return obj
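A quick check of the helper above, assuming the djangorestframework-jsonapi settings machinery (`format_value`, `json_api_settings`) is importable:

payload = {'first_name': 'Ada', 'last_name': 'Lovelace'}
print(format_field_names(payload, format_type='camelize'))
# OrderedDict([('firstName', 'Ada'), ('lastName', 'Lovelace')])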
java
public IfcColumnTypeEnum createIfcColumnTypeEnumFromString(EDataType eDataType, String initialValue) { IfcColumnTypeEnum result = IfcColumnTypeEnum.get(initialValue); if (result == null) throw new IllegalArgumentException( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'"); return result; }
java
public MessageDataDesc getMessageDataDesc(String strKey) { if (m_messageDataDesc == null) return null; return m_messageDataDesc.getMessageDataDesc(strKey); }
python
def css(self, path): """ Link/embed CSS file. """ if self.settings.embed_content: content = codecs.open(path, 'r', encoding='utf8').read() tag = Style(content, type="text/css") else: tag = Link(href=path, rel="stylesheet", type_="text/css") self.head.append(tag)
python
def do_ams_put(endpoint, path, body, access_token, rformat="json", ds_min_version="3.0;NetFx"):
    '''Do an AMS HTTP PUT request and return the JSON response.

    Args:
        endpoint (str): Azure Media Services Initial Endpoint.
        path (str): Azure Media Services Endpoint Path.
        body (str): Azure Media Services Content Body.
        access_token (str): A valid Azure authentication token.
        rformat (str): A required JSON Accept Format.
        ds_min_version (str): A required DS MIN Version.

    Returns:
        HTTP response. JSON body.
    '''
    min_ds = dsversion_min
    content_acceptformat = json_acceptformat
    if rformat == "json_only":
        min_ds = ds_min_version
        content_acceptformat = json_only_acceptformat
    headers = {"Content-Type": content_acceptformat,
               "DataServiceVersion": min_ds,
               "MaxDataServiceVersion": dsversion_max,
               "Accept": json_acceptformat,
               "Accept-Charset" : charset,
               "Authorization": "Bearer " + access_token,
               "x-ms-version" : xmsversion}
    response = requests.put(endpoint, data=body, headers=headers, allow_redirects=False)

    # AMS response to the first call can be a redirect,
    # so we handle it here to make it transparent for the caller...
    if response.status_code == 301:
        redirected_url = ''.join([response.headers['location'], path])
        response = requests.put(redirected_url, data=body, headers=headers)
    return response
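A hedged call sketch; the endpoint, path, and token below are placeholders, and the module-level constants (json_acceptformat, dsversion_min, etc.) are assumed in scope:

endpoint = 'https://media.windows.net/API/Assets'
body = '{"Name": "renamed-asset"}'
response = do_ams_put(endpoint, '/API/Assets', body, '<access-token>')
print(response.status_code)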