# subsampling_step.py

# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

import tensorflow as tf

from tensorforce import util, TensorForceError
from tensorforce.core.optimizers import MetaOptimizer


class SubsamplingStep(MetaOptimizer):
    """
    The subsampling-step meta optimizer randomly samples a subset of batch
    instances to calculate the optimization step of another optimizer.
    """

    def __init__(self, optimizer, fraction=0.1, scope='subsampling-step', summary_labels=()):
        """
        Creates a new subsampling-step meta optimizer instance.

        Args:
            optimizer: The optimizer which is modified by this meta optimizer.
            fraction: The fraction of instances of the batch to subsample.
        """
        assert isinstance(fraction, float) and fraction > 0.0
        self.fraction = fraction

        super(SubsamplingStep, self).__init__(optimizer=optimizer, scope=scope, summary_labels=summary_labels)

    def tf_step(
        self,
        time,
        variables,
        arguments,
        **kwargs
    ):
        """
        Creates the TensorFlow operations for performing an optimization step.

        Args:
            time: Time tensor.
            variables: List of variables to optimize.
            arguments: Dict of arguments for callables, like fn_loss.
            **kwargs: Additional arguments passed on to the internal optimizer.

        Returns:
            List of delta tensors corresponding to the updates for each optimized variable.
        """
        # Get some (batched) argument to determine batch size.
        arguments_iter = iter(arguments.values())
        some_argument = next(arguments_iter)

        try:
            while not isinstance(some_argument, tf.Tensor) or util.rank(some_argument) == 0:
                if isinstance(some_argument, dict):
                    if some_argument:
                        arguments_iter = iter(some_argument.values())
                    some_argument = next(arguments_iter)
                elif isinstance(some_argument, list):
                    if some_argument:
                        arguments_iter = iter(some_argument)
                    some_argument = next(arguments_iter)
                elif some_argument is None or util.rank(some_argument) == 0:
                    # Non-batched argument
                    some_argument = next(arguments_iter)
                else:
                    raise TensorForceError("Invalid argument type.")
        except StopIteration:
            raise TensorForceError("Invalid argument type.")

        batch_size = tf.shape(input=some_argument)[0]
        num_samples = tf.cast(
            x=(self.fraction * tf.cast(x=batch_size, dtype=util.tf_dtype('float'))),
            dtype=util.tf_dtype('int')
        )
        num_samples = tf.maximum(x=num_samples, y=1)
        indices = tf.random_uniform(shape=(num_samples,), maxval=batch_size, dtype=tf.int32)

        subsampled_arguments = util.map_tensors(
            fn=(lambda arg: arg if util.rank(arg) == 0 else tf.gather(params=arg, indices=indices)),
            tensors=arguments
        )

        return self.optimizer.step(
            time=time,
            variables=variables,
            arguments=subsampled_arguments,
            **kwargs
        )
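The `tf.maximum(x=num_samples, y=1)` clamp is what keeps the subsample non-empty for small batches or tiny fractions. Below is a minimal NumPy sketch of the same subsampling idea, written purely for illustration and independent of TensorForce; every name in it is my own, not part of the file above.

# --- illustrative sketch (not part of subsampling_step.py) ---
import numpy as np

def subsample_batch(arguments, fraction=0.1, rng=None):
    """Randomly subsample batched (rank >= 1) arrays along axis 0.

    Scalars pass through unchanged, mirroring the rank-0 check in
    SubsamplingStep.tf_step above.
    """
    rng = rng or np.random.default_rng()
    batched = {k: v for k, v in arguments.items() if np.ndim(v) > 0}
    if not batched:
        raise ValueError("No batched argument to determine batch size.")
    batch_size = len(next(iter(batched.values())))
    # Same clamp as tf.maximum(..., 1): never sample zero instances.
    num_samples = max(1, int(fraction * batch_size))
    # Sampling with replacement, as tf.random_uniform over indices does.
    indices = rng.integers(0, batch_size, size=num_samples)
    return {
        k: (v[indices] if np.ndim(v) > 0 else v)
        for k, v in arguments.items()
    }

# Example: 10% of a 100-instance batch, but never fewer than one instance.
args = {'states': np.zeros((100, 4)), 'reward': np.ones(100), 'discount': 0.99}
sub = subsample_batch(args, fraction=0.1)
assert sub['states'].shape[0] == 10 and sub['discount'] == 0.99

The fancy indexing `v[indices]` plays the role of `tf.gather` in the graph version.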
// test_support.ts

/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as ts from 'typescript';

import * as ng from '../index';

// TEST_TMPDIR is set by bazel.
const tmpdir = process.env.TEST_TMPDIR || os.tmpdir();

function getNgRootDir() {
  const moduleFilename = module.filename.replace(/\\/g, '/');
  const distIndex = moduleFilename.indexOf('/dist/all');
  return moduleFilename.substr(0, distIndex);
}

export function writeTempFile(name: string, contents: string): string {
  const id = (Math.random() * 1000000).toFixed(0);
  const fn = path.join(tmpdir, `tmp.${id}.${name}`);
  fs.writeFileSync(fn, contents);
  return fn;
}

export function makeTempDir(): string {
  let dir: string;
  while (true) {
    const id = (Math.random() * 1000000).toFixed(0);
    dir = path.join(tmpdir, `tmp.${id}`);
    if (!fs.existsSync(dir)) break;
  }
  fs.mkdirSync(dir);
  return dir;
}

export interface TestSupport {
  basePath: string;
  write(fileName: string, content: string): void;
  writeFiles(...mockDirs: {[fileName: string]: string}[]): void;
  createCompilerOptions(overrideOptions?: ng.CompilerOptions): ng.CompilerOptions;
  shouldExist(fileName: string): void;
  shouldNotExist(fileName: string): void;
}

export function setup(): TestSupport {
  const basePath = makeTempDir();

  const ngRootDir = getNgRootDir();
  const nodeModulesPath = path.resolve(basePath, 'node_modules');
  fs.mkdirSync(nodeModulesPath);
  fs.symlinkSync(
      path.resolve(ngRootDir, 'dist', 'all', '@angular'),
      path.resolve(nodeModulesPath, '@angular'));
  fs.symlinkSync(
      path.resolve(ngRootDir, 'node_modules', 'rxjs'), path.resolve(nodeModulesPath, 'rxjs'));
  fs.symlinkSync(
      path.resolve(ngRootDir, 'node_modules', 'typescript'),
      path.resolve(nodeModulesPath, 'typescript'));

  return {basePath, write, writeFiles, createCompilerOptions, shouldExist, shouldNotExist};

  function write(fileName: string, content: string) {
    const dir = path.dirname(fileName);
    if (dir != '.') {
      const newDir = path.resolve(basePath, dir);
      if (!fs.existsSync(newDir)) fs.mkdirSync(newDir);
    }
    fs.writeFileSync(path.resolve(basePath, fileName), content, {encoding: 'utf-8'});
  }

  function writeFiles(...mockDirs: {[fileName: string]: string}[]) {
    mockDirs.forEach((dir) => {
      Object.keys(dir).forEach((fileName) => { write(fileName, dir[fileName]); });
    });
  }

  function createCompilerOptions(overrideOptions: ng.CompilerOptions = {}): ng.CompilerOptions {
    return {
      basePath,
      'experimentalDecorators': true,
      'skipLibCheck': true,
      'strict': true,
      'types': [],
      'outDir': path.resolve(basePath, 'built'),
      'rootDir': basePath,
      'baseUrl': basePath,
      'declaration': true,
      'target': ts.ScriptTarget.ES5,
      'module': ts.ModuleKind.ES2015,
      'moduleResolution': ts.ModuleResolutionKind.NodeJs,
      'lib': [
        path.resolve(basePath, 'node_modules/typescript/lib/lib.es6.d.ts'),
      ],
      ...overrideOptions,
    };
  }

  function shouldExist(fileName: string) {
    if (!fs.existsSync(path.resolve(basePath, fileName))) {
      throw new Error(`Expected ${fileName} to be emitted (basePath: ${basePath})`);
    }
  }

  function shouldNotExist(fileName: string) {
    if (fs.existsSync(path.resolve(basePath, fileName))) {
      throw new Error(`Did not expect ${fileName} to be emitted (basePath: ${basePath})`);
    }
  }
}

export function expectNoDiagnostics(options: ng.CompilerOptions, diags: ng.Diagnostics) {
  const errorDiags = diags.filter(d => d.category !== ts.DiagnosticCategory.Message);
  if (errorDiags.length) {
    throw new Error(`Expected no diagnostics: ${ng.formatDiagnostics(errorDiags)}`);
  }
}

export function expectNoDiagnosticsInProgram(options: ng.CompilerOptions, p: ng.Program) {
  expectNoDiagnostics(options, [
    ...p.getNgStructuralDiagnostics(), ...p.getTsSemanticDiagnostics(),
    ...p.getNgSemanticDiagnostics()
  ]);
}
# zpm.py

# Copyright 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fnmatch
import glob
import gzip
import json
import os
import shlex
import sys
import tarfile

try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse

try:
    from cStringIO import StringIO as BytesIO
except ImportError:
    from io import BytesIO

import jinja2
import prettytable
import six
import swiftclient
import yaml

import zpmlib
from zpmlib import util
from zpmlib import zappbundler
from zpmlib import zapptemplate

_DEFAULT_UI_TEMPLATES = ['index.html.tmpl', 'style.css', 'zerocloud.js']
_ZAPP_YAML = 'python-zapp.yaml'
_ZAPP_WITH_UI_YAML = 'python-zapp-with-ui.yaml'

LOG = zpmlib.get_logger(__name__)
BUFFER_SIZE = 65536

#: path/filename of the system.map (job description) in every zapp
SYSTEM_MAP_ZAPP_PATH = 'boot/system.map'

#: Message displayed if insufficient auth settings are specified, either on
#: the command line or in environment variables. Shamelessly copied from
#: ``python-swiftclient``.
NO_AUTH_MSG = """\
Auth version 1.0 requires ST_AUTH, ST_USER, and ST_KEY environment variables
to be set or overridden with -A, -U, or -K.

Auth version 2.0 requires OS_AUTH_URL, OS_USERNAME, OS_PASSWORD, and
OS_TENANT_NAME OS_TENANT_ID to be set or overridden with --os-auth-url,
--os-username, --os-password, --os-tenant-name or os-tenant-id. Note:
adding "-V 2" is necessary for this."""

#: Column labels for the execution summary table
EXEC_TABLE_HEADER = [
    'Node',
    'Status',
    'Retcode',
    'NodeT',
    'SysT',
    'UserT',
    'DiskReads',
    'DiskBytesR',
    'DiskWrites',
    'DiskBytesW',
    'NetworkReads',
    'NetworkBytesR',
    'NetworkWrites',
    'NetworkBytesW',
]


def create_project(location, with_ui=False, template=None):
    """
    Create a ZeroVM application project by writing a default `zapp.yaml` in
    the specified directory `location`.

    :param location:
        Directory location to place project files.
    :param with_ui:
        Defaults to `False`. If `True`, add basic UI template files as well
        to ``location``.
    :param template:
        Default: ``None``. If no template is specified, use the default
        project template. (See `zpmlib.zapptemplate`.)
    :returns:
        List of created project files.
    """
    if os.path.exists(location):
        if not os.path.isdir(location):
            # target must be an empty directory
            raise RuntimeError("Target `location` must be a directory")
    else:
        os.makedirs(location)

    # Run the template builder, and create additional files for the project
    # by the type. If ``template`` is None, this is essentially a no-op.
    # TODO: just use the afc._created_files
    created_files = []
    with util.AtomicFileCreator() as afc:
        for file_type, path, contents in zapptemplate.template(
                location, template, with_ui=with_ui):
            afc.create_file(file_type, path, contents)
            created_files.append(path)

    return created_files


def find_project_root():
    """
    Starting from the `cwd`, search up the file system hierarchy until a
    ``zapp.yaml`` file is found. Once the file is found, return the directory
    containing it. If no file is found, raise a `RuntimeError`.
    """
    root = os.getcwd()
    while not os.path.isfile(os.path.join(root, 'zapp.yaml')):
        oldroot, root = root, os.path.dirname(root)
        if root == oldroot:
            raise RuntimeError("no zapp.yaml file found")
    return root


def _generate_job_desc(zapp):
    """
    Generate the boot/system.map file contents from the zapp config file.

    :param zapp:
        `dict` of the contents of a ``zapp.yaml`` file.
    :returns:
        `list` of group `dict`s comprising the job description
    """
    job = []

    # TODO(mg): we should eventually reuse zvsh._nvram_escape
    def escape(value):
        for c in '\\", \n':
            value = value.replace(c, '\\x%02x' % ord(c))
        return value

    def translate_args(cmdline):
        # On Python 2, the yaml module loads non-ASCII strings as
        # unicode objects. In Python 2.7.2 and earlier, we must give
        # shlex.split a str -- but it is an error to give shlex.split
        # a bytes object in Python 3.
        need_decode = not isinstance(cmdline, str)
        if need_decode:
            cmdline = cmdline.encode('utf8')
        args = shlex.split(cmdline)
        if need_decode:
            args = [arg.decode('utf8') for arg in args]
        return ' '.join(escape(arg) for arg in args)

    for zgroup in zapp['execution']['groups']:
        # Copy everything, but handle 'env', 'path', and 'args' specially:
        jgroup = dict(zgroup)
        path = zgroup['path']
        # if path is `file://image:exe`, exec->name is "exe"
        # if path is `swift://~/container/obj`, exec->name is "obj"
        exec_name = None
        if path.startswith('file://'):
            exec_name = path.split(':')[-1]
        elif path.startswith('swift://'):
            # If obj is a pseudo path, like foo/bar/obj, we need to
            # handle this as well with a careful split.
            # If the object path is something like `swift://~/container/obj`,
            # then exec_name will be `obj`.
            # If the object path is something like
            # `swift://./container/foo/bar/obj`, then the exec_name will be
            # `foo/bar/obj`.
            exec_name = path.split('/', 4)[-1]

        jgroup['exec'] = {
            'path': zgroup['path'],
            'args': translate_args(zgroup['args']),
        }
        if exec_name is not None:
            jgroup['exec']['name'] = exec_name

        del jgroup['path'], jgroup['args']

        if 'env' in zgroup:
            jgroup['exec']['env'] = zgroup['env']
            del jgroup['env']
        job.append(jgroup)
    return job


def _get_swift_zapp_url(swift_service_url, zapp_path):
    """
    :param str swift_service_url:
        The Swift service URL returned from a Keystone service catalog.
        Example: http://localhost:8080/v1/AUTH_469a9cd20b5a4fc5be9438f66bb5ee04
    :param str zapp_path:
        <container>/<zapp-file-name>. Example: test_container/myapp.zapp

    Here's a typical usage example, with typical input and output:

    >>> swift_service_url = ('http://localhost:8080/v1/'
    ...                      'AUTH_469a9cd20b5a4fc5be9438f66bb5ee04')
    >>> zapp_path = 'test_container/myapp.zapp'
    >>> _get_swift_zapp_url(swift_service_url, zapp_path)
    'swift://AUTH_469a9cd20b5a4fc5be9438f66bb5ee04/test_container/myapp.zapp'
    """
    swift_path = urlparse.urlparse(swift_service_url).path
    # TODO(larsbutler): Why do we need to check if the path contains '/v1/'?
    # This is here due to legacy reasons, but it's not clear to me why this
    # is needed.
    if swift_path.startswith('/v1/'):
        swift_path = swift_path[4:]

    return 'swift://%s/%s' % (swift_path, zapp_path)


def _prepare_job(tar, zapp, zapp_swift_url):
    """
    :param tar:
        The application .zapp file, as a :class:`tarfile.TarFile` object.
    :param dict zapp:
        Parsed contents of the application `zapp.yaml` specification, as a
        `dict`.
    :param str zapp_swift_url:
        Path of the .zapp in Swift, which looks like this::

            'swift://AUTH_abcdef123/test_container/hello.zapp'

        See :func:`_get_swift_zapp_url`.
    :returns:
        Extracted contents of the boot/system.map with the swift path to the
        .zapp added to the `devices` for each `group`.

        So if the job looks like this::

            [{'exec': {'args': 'hello.py', 'path': 'file://python2.7:python'},
              'devices': [{'name': 'python2.7'}, {'name': 'stdout'}],
              'name': 'hello'}]

        the output will look something like this::

            [{'exec': {u'args': 'hello.py', 'path': 'file://python2.7:python'},
              'devices': [
                {'name': 'python2.7'},
                {'name': 'stdout'},
                {'name': 'image',
                 'path': 'swift://AUTH_abcdef123/test_container/hello.zapp'},
              ],
              'name': 'hello'}]
    """
    fp = tar.extractfile(SYSTEM_MAP_ZAPP_PATH)
    # NOTE(larsbutler): the `decode` is needed for python3
    # compatibility
    job = json.loads(fp.read().decode('utf-8'))
    device = {'name': 'image', 'path': zapp_swift_url}
    for group in job:
        group['devices'].append(device)
    return job


def bundle_project(root, refresh_deps=False):
    """
    Bundle the project under root.
    """
    zapp_yaml = os.path.join(root, 'zapp.yaml')
    zapp = yaml.safe_load(open(zapp_yaml))

    zapp_name = zapp['meta']['name'] + '.zapp'
    zapp_tar_path = os.path.join(root, zapp_name)
    tar = tarfile.open(zapp_tar_path, 'w:gz')

    job = _generate_job_desc(zapp)
    job_json = json.dumps(job)
    info = tarfile.TarInfo(name='boot/system.map')
    # This size is only correct because json.dumps uses
    # ensure_ascii=True by default and we thus have a 1-1
    # correspondence between Unicode characters and bytes.
    info.size = len(job_json)
    LOG.info('adding %s' % info.name)
    # In Python 3, we cannot use a str or bytes object with addfile,
    # we need a BytesIO object. In Python 2, BytesIO is just StringIO.
    # Since json.dumps produces an ASCII-only Unicode string in Python
    # 3, it is safe to encode it to ASCII.
    tar.addfile(info, BytesIO(job_json.encode('ascii')))

    _add_file_to_tar(root, 'zapp.yaml', tar)

    sections = ('bundling', 'ui')
    # Keep track of the files we add, given the configuration in the
    # zapp.yaml.
    file_add_count = 0
    for section in sections:
        for pattern in zapp.get(section, []):
            paths = glob.glob(os.path.join(root, pattern))
            if len(paths) == 0:
                LOG.warning(
                    "pattern '%(pat)s' in section '%(sec)s' matched no files",
                    dict(pat=pattern, sec=section)
                )
            else:
                for path in paths:
                    _add_file_to_tar(root, path, tar)
                file_add_count += len(paths)

    if file_add_count == 0:
        # None of the files specified in the "bundling" or "ui" sections were
        # found. Something is wrong.
        raise zpmlib.ZPMException(
            "None of the files specified in the 'bundling' or 'ui' sections of"
            " the zapp.yaml matched anything."
        )

    # Do template-specific bundling
    zappbundler.bundle(root, zapp, tar, refresh_deps=refresh_deps)

    tar.close()
    print('created %s' % zapp_name)


def _add_file_to_tar(root, path, tar, arcname=None):
    """
    :param root:
        Root working directory.
    :param path:
        File path.
    :param tar:
        Open :class:`tarfile.TarFile` object to add the ``files`` to.
    """
    # TODO(larsbutler): document ``arcname``
    LOG.info('adding %s' % path)
    path = os.path.join(root, path)
    relpath = os.path.relpath(path, root)
    if arcname is None:
        # In the archive, give the file the same name and path.
        arcname = relpath
    tar.add(path, arcname=arcname)


def _find_ui_uploads(zapp, tar):
    matches = set()
    names = tar.getnames()
    for pattern in zapp.get('ui', []):
        matches.update(fnmatch.filter(names, pattern))
    return sorted(matches)


def _post_job(url, token, data, http_conn=None, response_dict=None,
              content_type='application/json', content_length=None,
              response_body_buffer=None):
    # Modelled after swiftclient.client.post_account.
    headers = {'X-Auth-Token': token,
               'X-Zerovm-Execute': '1.0',
               'Content-Type': content_type}
    if content_length:
        headers['Content-Length'] = str(content_length)

    if http_conn:
        parsed, conn = http_conn
    else:
        parsed, conn = swiftclient.http_connection(url)
    conn.request('POST', parsed.path, data, headers)
    resp = conn.getresponse()
    body = resp.read()
    swiftclient.http_log((url, 'POST'), {'headers': headers}, resp, body)
    swiftclient.store_response(resp, response_dict)

    if response_body_buffer is not None:
        response_body_buffer.write(body)


class ZeroCloudConnection(swiftclient.Connection):
    """
    An extension of the `swiftclient.Connection` which has the capability of
    posting ZeroVM jobs to an instance of ZeroCloud (running on Swift).
    """

    def authenticate(self):
        """
        Authenticate with the provided credentials and cache the storage URL
        and auth token as `self.url` and `self.token`, respectively.
        """
        self.url, self.token = self.get_auth()

    def post_job(self, job, response_dict=None, response_body_buffer=None):
        """Start a ZeroVM job, using a pre-uploaded zapp

        :param object job:
            Job description. This will be encoded as JSON and sent to
            ZeroCloud.
        """
        json_data = json.dumps(job)
        LOG.debug('JOB: %s' % json_data)
        return self._retry(None, _post_job, json_data,
                           response_dict=response_dict,
                           response_body_buffer=response_body_buffer)

    def post_zapp(self, data, response_dict=None, content_length=None,
                  response_body_buffer=None):
        return self._retry(None, _post_job, data,
                           response_dict=response_dict,
                           content_type='application/x-gzip',
                           content_length=content_length,
                           response_body_buffer=response_body_buffer)


def _get_zerocloud_conn(args):
    version = args.auth_version
    # no version was explicitly requested; try to guess it:
    if version is None:
        version = _guess_auth_version(args)

    if version == '1.0':
        if any([arg is None for arg in (args.auth, args.user, args.key)]):
            raise zpmlib.ZPMException(
                "Version 1 auth requires `--auth`, `--user`, and `--key`."
                "\nSee `zpm deploy --help` for more information."
            )
        conn = ZeroCloudConnection(args.auth, args.user, args.key)
    elif version == '2.0':
        if any([arg is None for arg in (args.os_auth_url, args.os_username,
                                        args.os_tenant_name,
                                        args.os_password)]):
            raise zpmlib.ZPMException(
                "Version 2 auth requires `--os-auth-url`, `--os-username`, "
                "`--os-password`, and `--os-tenant-name`."
                "\nSee `zpm deploy --help` for more information."
            )
        conn = ZeroCloudConnection(args.os_auth_url, args.os_username,
                                   args.os_password,
                                   tenant_name=args.os_tenant_name,
                                   auth_version='2.0')
    else:
        raise zpmlib.ZPMException(NO_AUTH_MSG)

    return conn


def _deploy_zapp(conn, target, zapp_path, auth_opts, force=False):
    """Upload all of the necessary files for a zapp.

    Returns the name of an uploaded index file, or the target if no
    index.html file was uploaded.

    :param bool force:
        Force deployment, even if the target container is not empty. This
        means that files could be overwritten and could cause consistency
        problems with these objects in Swift.
    """
    base_container = target.split('/')[0]
    try:
        _, objects = conn.get_container(base_container)
        if not len(objects) == 0:
            if not force:
                raise zpmlib.ZPMException(
                    "Target container ('%s') is not empty.\nDeploying to a "
                    "non-empty container can cause consistency problems with "
                    "overwritten objects.\nSpecify the flag `--force/-f` to "
                    "overwrite anyway." % base_container
                )
    except swiftclient.exceptions.ClientException:
        # container doesn't exist; create it
        LOG.info("Container '%s' not found. Creating it...", base_container)
        conn.put_container(base_container)

    # If we get here, everything with the container is fine.
    index = target + '/'
    uploads = _generate_uploads(conn, target, zapp_path, auth_opts)
    for path, data, content_type in uploads:
        if path.endswith('/index.html'):
            index = path
        container, obj = path.split('/', 1)
        conn.put_object(container, obj, data, content_type=content_type)
    return index


def _generate_uploads(conn, target, zapp_path, auth_opts):
    """Generate a sequence of (container-and-file-path, data, content-type)
    tuples.
    """
    tar = tarfile.open(zapp_path, 'r:gz')
    zapp_config = yaml.safe_load(tar.extractfile('zapp.yaml'))

    remote_zapp_path = '%s/%s' % (target, os.path.basename(zapp_path))
    swift_url = _get_swift_zapp_url(conn.url, remote_zapp_path)
    job = _prepare_job(tar, zapp_config, swift_url)

    yield (remote_zapp_path, gzip.open(zapp_path).read(), 'application/x-tar')
    yield ('%s/%s' % (target, SYSTEM_MAP_ZAPP_PATH), json.dumps(job),
           'application/json')

    for path in _find_ui_uploads(zapp_config, tar):
        output = tar.extractfile(path).read()
        if path.endswith('.tmpl'):
            tmpl = jinja2.Template(output.decode('utf-8'))
            output = tmpl.render(auth_opts=auth_opts, zapp=zapp_config)
            # drop the .tmpl extension
            path = os.path.splitext(path)[0]
        ui_path = '%s/%s' % (target, path)
        yield (ui_path, output, None)


def _prepare_auth(version, args, conn):
    """
    :param str version:
        Auth version: "0.0", "1.0", or "2.0". "0.0" indicates "no auth".
    :param args:
        :class:`argparse.Namespace` instance, with attributes representing
        the various authentication parameters
    :param conn:
        :class:`ZeroCloudConnection` instance.
    """
    version = str(float(version))
    auth = {'version': version}
    if version == '0.0':
        auth['swiftUrl'] = conn.url
    elif version == '1.0':
        auth['authUrl'] = args.auth
        auth['username'] = args.user
        auth['password'] = args.key
    else:
        # TODO(mg): inserting the username and password in the
        # uploaded file makes testing easy, but should not be done in
        # production. See issue #46.
        auth['authUrl'] = args.os_auth_url
        auth['tenant'] = args.os_tenant_name
        auth['username'] = args.os_username
        auth['password'] = args.os_password
    return auth


def _guess_auth_version(args):
    """Guess the auth version, first from the command line args and then from
    envvars.

    Command line arguments override environment variables, so we check those
    first.

    Auth v1 arguments:

        * ``--auth``
        * ``--user``
        * ``--key``

    Auth v2 arguments:

        * ``--os-auth-url``
        * ``--os-username``
        * ``--os-password``
        * ``--os-tenant-name``

    If all of the v1 and v2 arguments are specified, default to 1.0 (this is
    how ``python-swiftclient`` behaves).

    If no auth version can be determined from the command line args, we check
    environment variables.

    Auth v1 vars:

        * ``ST_AUTH``
        * ``ST_USER``
        * ``ST_KEY``

    Auth v2 vars:

        * ``OS_AUTH_URL``
        * ``OS_USERNAME``
        * ``OS_PASSWORD``
        * ``OS_TENANT_NAME``

    The same rule above applies; if both sets of variables are specified,
    default to 1.0.

    If no auth version can be determined, return `None`.

    :param args:
        :class:`argparse.Namespace`, representing the args specified on the
        command line.
    :returns: '1.0', '2.0', or ``None``
    """
    v1 = (args.auth, args.user, args.key)
    v2 = (args.os_auth_url, args.os_username, args.os_password,
          args.os_tenant_name)

    if all(v1) and not all(v2):
        return '1.0'
    elif all(v2) and not all(v1):
        return '2.0'
    elif all(v1) and all(v2):
        # All vars for v1 and v2 auth are set, so we follow the
        # `python-swiftclient` behavior and default to 1.0.
        return '1.0'
    else:
        # deduce from envvars
        env = os.environ
        v1_env = (env.get('ST_AUTH'), env.get('ST_USER'), env.get('ST_KEY'))
        v2_env = (env.get('OS_AUTH_URL'), env.get('OS_USERNAME'),
                  env.get('OS_PASSWORD'), env.get('OS_TENANT_NAME'))
        if all(v1_env) and not all(v2_env):
            return '1.0'
        if all(v2_env) and not all(v1_env):
            return '2.0'
        elif all(v1_env) and all(v2_env):
            # Same as above, if all v1 and v2 vars are set, default to 1.0.
            return '1.0'
        else:
            # Insufficient auth details have been specified.
            return None


def deploy_project(args):
    conn = _get_zerocloud_conn(args)
    conn.authenticate()
    ui_auth_version = conn.auth_version

    # We can now reset the auth for the web UI, if needed
    if args.no_ui_auth:
        ui_auth_version = '0.0'

    auth = _prepare_auth(ui_auth_version, args, conn)
    auth_opts = jinja2.Markup(json.dumps(auth))

    deploy_index = _deploy_zapp(conn, args.target, args.zapp, auth_opts,
                                force=args.force)

    print('app deployed to\n %s/%s' % (conn.url, deploy_index))

    if args.execute:
        # for compatibility with the option name in 'zpm execute'
        args.container = args.target
        resp_body_buffer = BytesIO()
        resp = execute(args, response_body_buffer=resp_body_buffer)
        resp_body_buffer.seek(0)

        if resp['status'] < 200 or resp['status'] >= 300:
            raise zpmlib.ZPMException(resp_body_buffer.read())

        if args.summary:
            total_time, exec_table = _get_exec_table(resp)
            print('Execution summary:')
            print(exec_table)
            print('Total time: %s' % total_time)

        sys.stdout.write(resp_body_buffer.read())


def _get_exec_table(resp):
    """Build an execution summary table from a job execution response.

    :param dict resp:
        Response dictionary from job execution. Must contain a ``headers``
        key at least (and will typically contain ``status`` and ``reason`` as
        well).
    :returns:
        Tuple of total execution time (`str`), ``prettytable.PrettyTable``
        containing the summary of all node executions in the job.
    """
    headers = resp['headers']
    total_time, table_data = _get_exec_table_data(headers)

    table = prettytable.PrettyTable(EXEC_TABLE_HEADER)
    for row in table_data:
        table.add_row(row)
    return total_time, table


def _get_exec_table_data(headers):
    """Extract a stats table from execution HTTP response headers.

    Stats include things like node name, execution time, number of
    reads/writes, bytes read/written, etc.

    :param dict headers:
        `dict` of response headers from a job execution request. It must
        contain at least ``x-nexe-system``, ``x-nexe-status``,
        ``x-nexe-retcode``, ``x-nexe-cdr-line``.
    :returns:
        Tuple of two items. The first is the total time for the executed job
        (as a `str`). The second is a table (2d `list`) of execution data
        extracted from ``X-Nexe-System`` and ``X-Nexe-Cdr-Line`` headers.

        Each row in the table consists of the following data:

            * node name
            * node time
            * system time
            * user time
            * number of disk reads
            * number of bytes read from disk
            * number of disk writes
            * number of bytes written to disk
            * number of network reads
            * number of bytes read from network
            * number of network writes
            * number of bytes written to network
    """
    node_names = iter(headers['x-nexe-system'].split(','))
    statuses = iter(headers['x-nexe-status'].split(','))
    retcodes = iter(headers['x-nexe-retcode'].split(','))

    cdr = headers['x-nexe-cdr-line']
    cdr_data = [x.strip() for x in cdr.split(',')]
    total_time = cdr_data.pop(0)
    cdr_data = iter(cdr_data)

    def adviter(x):
        return six.advance_iterator(x)

    table_data = []
    while True:
        try:
            node_name = adviter(node_names)
            status = adviter(statuses)
            retcode = adviter(retcodes)
            node_time = adviter(cdr_data)
            cdr = adviter(cdr_data).split()
            row = [node_name, status, retcode, node_time] + cdr
            table_data.append(row)
        except StopIteration:
            break
    return total_time, table_data


def execute(args, response_body_buffer=None):
    """Execute a zapp remotely on a ZeroCloud deployment.

    :returns:
        A `dict` with response data, including the keys 'status', 'reason',
        and 'headers'.
    """
    conn = _get_zerocloud_conn(args)
    resp = dict()
    if args.container:
        job_filename = SYSTEM_MAP_ZAPP_PATH
        try:
            headers, content = conn.get_object(args.container, job_filename)
        except swiftclient.ClientException as exc:
            if exc.http_status == 404:
                raise zpmlib.ZPMException("Could not find %s" % exc.http_path)
            else:
                raise zpmlib.ZPMException(str(exc))
        job = json.loads(content)
        conn.post_job(job, response_dict=resp,
                      response_body_buffer=response_body_buffer)
        LOG.debug('RESP STATUS: %s %s', resp['status'], resp['reason'])
        LOG.debug('RESP HEADERS: %s', resp['headers'])
    else:
        size = os.path.getsize(args.zapp)
        zapp_file = open(args.zapp, 'rb')
        data_reader = iter(lambda: zapp_file.read(BUFFER_SIZE), b'')
        conn.post_zapp(data_reader, response_dict=resp, content_length=size,
                       response_body_buffer=response_body_buffer)
        zapp_file.close()
    return resp


def auth(args):
    conn = _get_zerocloud_conn(args)
    conn.authenticate()
    print('Auth token: %s' % conn.token)
    print('Storage URL: %s' % conn.url)
# schema.py

# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
from __future__ import absolute_import, division, unicode_literals

from jx_base.queries import get_property_name
from jx_sqlite.utils import GUID, untyped_column
from mo_dots import concat_field, relative_field, set_default, startswith_field
from mo_json import EXISTS, OBJECT, STRUCT
from mo_logs import Log


class Schema(object):
    """
    A Schema MAPS ALL COLUMNS IN SNOWFLAKE FROM THE PERSPECTIVE OF A SINGLE TABLE (a nested_path)
    """

    def __init__(self, nested_path, snowflake):
        if nested_path[-1] != '.':
            Log.error("Expecting full nested path")
        self.path = concat_field(snowflake.fact_name, nested_path[0])
        self.nested_path = nested_path
        self.snowflake = snowflake

    # def add(self, column_name, column):
    #     if column_name != column.names[self.nested_path[0]]:
    #         Log.error("Logic error")
    #
    #     self.columns.append(column)
    #
    #     for np in self.nested_path:
    #         rel_name = column.names[np]
    #         container = self.namespace.setdefault(rel_name, set())
    #         hidden = [
    #             c
    #             for c in container
    #             if len(c.nested_path[0]) < len(np)
    #         ]
    #         for h in hidden:
    #             container.remove(h)
    #
    #         container.add(column)
    #
    #     container = self.namespace.setdefault(column.es_column, set())
    #     container.add(column)

    # def remove(self, column_name, column):
    #     if column_name != column.names[self.nested_path[0]]:
    #         Log.error("Logic error")
    #
    #     self.namespace[column_name] = [c for c in self.namespace[column_name] if c != column]

    def __getitem__(self, item):
        output = self.snowflake.namespace.columns.find(self.path, item)
        return output

    # def __copy__(self):
    #     output = Schema(self.nested_path)
    #     for k, v in self.namespace.items():
    #         output.namespace[k] = copy(v)
    #     return output

    def get_column_name(self, column):
        """
        RETURN THE COLUMN NAME, FROM THE PERSPECTIVE OF THIS SCHEMA
        :param column:
        :return: NAME OF column
        """
        relative_name = relative_field(column.name, self.nested_path[0])
        return get_property_name(relative_name)

    @property
    def namespace(self):
        return self.snowflake.namespace

    def keys(self):
        """
        :return: ALL COLUMN NAMES
        """
        return set(c.name for c in self.columns)

    @property
    def columns(self):
        return self.snowflake.namespace.columns.find(self.snowflake.fact_name)

    def column(self, prefix):
        full_name = untyped_column(concat_field(self.nested_path, prefix))
        return set(
            c
            for c in self.snowflake.namespace.columns.find(self.snowflake.fact_name)
            for k, t in [untyped_column(c.name)]
            if k == full_name and k != GUID
            if c.jx_type not in [OBJECT, EXISTS]
        )

    def leaves(self, prefix):
        full_name = concat_field(self.nested_path, prefix)
        return set(
            c
            for c in self.snowflake.namespace.columns.find(self.snowflake.fact_name)
            for k in [c.name]
            if startswith_field(k, full_name) and k != GUID or k == full_name
            if c.jx_type not in [OBJECT, EXISTS]
        )

    def map_to_sql(self, var=""):
        """
        RETURN A MAP FROM THE RELATIVE AND ABSOLUTE NAME SPACE TO COLUMNS
        """
        origin = self.nested_path[0]
        if startswith_field(var, origin) and origin != var:
            var = relative_field(var, origin)
        fact_dict = {}
        origin_dict = {}
        for k, cs in self.namespace.items():
            for c in cs:
                if c.jx_type in STRUCT:
                    continue

                if startswith_field(get_property_name(k), var):
                    origin_dict.setdefault(c.names[origin], []).append(c)

                    if origin != c.nested_path[0]:
                        fact_dict.setdefault(c.name, []).append(c)

                elif origin == var:
                    origin_dict.setdefault(concat_field(var, c.names[origin]), []).append(c)

                    if origin != c.nested_path[0]:
                        fact_dict.setdefault(concat_field(var, c.name), []).append(c)

        return set_default(origin_dict, fact_dict)
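`column`, `leaves`, and `map_to_sql` above all lean on the `mo_dots` dotted-path helpers. As a rough illustration of the semantics this code appears to assume of those helpers — this is my own minimal re-implementation written for readability, not the actual `mo_dots` source:

# --- illustrative sketch (not the mo_dots implementation) ---
def concat_field(*fields):
    # Join dotted path segments, treating '.' as the empty path.
    return '.'.join(f for f in fields if f and f != '.') or '.'

def startswith_field(field, prefix):
    # 'a.b.c' starts with 'a' and 'a.b', but not with 'a.bx'.
    if prefix in ('.', ''):
        return True
    return field == prefix or field.startswith(prefix + '.')

def relative_field(field, parent):
    # Path of `field` relative to `parent`: relative_field('a.b.c', 'a') == 'b.c'
    if parent in ('.', ''):
        return field
    if startswith_field(field, parent) and field != parent:
        return field[len(parent) + 1:]
    return field

assert concat_field('fact', 'a.b') == 'fact.a.b'
assert startswith_field('fact.a.b', 'fact.a')
assert relative_field('fact.a.b', 'fact') == 'a.b'

Under these semantics, `leaves(prefix)` selects every column whose full dotted name sits at or below `concat_field(nested_path, prefix)`, which matches how the set comprehension above filters on `startswith_field(k, full_name)`.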
// dmltestgenerated_easyjson.go
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. package dmltestgenerated import ( json "encoding/json" easyjson "github.com/mailru/easyjson" jlexer "github.com/mailru/easyjson/jlexer" jwriter "github.com/mailru/easyjson/jwriter" ) // suppress unused package warning var ( _ *json.RawMessage _ *jlexer.Lexer _ *jwriter.Writer _ easyjson.Marshaler ) func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(in *jlexer.Lexer, out *ViewCustomerNoAutoIncrements) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*ViewCustomerNoAutoIncrement, 0, 8) } else { out.Data = []*ViewCustomerNoAutoIncrement{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v1 *ViewCustomerNoAutoIncrement if in.IsNull() { in.Skip() v1 = nil } else { if v1 == nil { v1 = new(ViewCustomerNoAutoIncrement) } (*v1).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v1) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(out *jwriter.Writer, in ViewCustomerNoAutoIncrements) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v2, v3 := range in.Data { if v2 > 0 { out.RawByte(',') } if v3 == nil { out.RawString("null") } else { (*v3).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v ViewCustomerNoAutoIncrements) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v ViewCustomerNoAutoIncrements) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *ViewCustomerNoAutoIncrements) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *ViewCustomerNoAutoIncrements) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(in *jlexer.Lexer, out *ViewCustomerNoAutoIncrement) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "email": if data := in.Raw(); in.Ok() { in.AddError((out.Email).UnmarshalJSON(data)) } case "firstname": out.Firstname = string(in.String()) case "lastname": out.Lastname = string(in.String()) case "city": out.City = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed()
} } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(out *jwriter.Writer, in ViewCustomerNoAutoIncrement) { out.RawByte('{') first := true _ = first if true { const prefix string = ",\"email\":" first = false out.RawString(prefix[1:]) out.Raw((in.Email).MarshalJSON()) } if in.Firstname != "" { const prefix string = ",\"firstname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Firstname)) } if in.Lastname != "" { const prefix string = ",\"lastname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Lastname)) } if in.City != "" { const prefix string = ",\"city\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.City)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v ViewCustomerNoAutoIncrement) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v ViewCustomerNoAutoIncrement) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *ViewCustomerNoAutoIncrement) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *ViewCustomerNoAutoIncrement) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated1(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(in *jlexer.Lexer, out *ViewCustomerAutoIncrements) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*ViewCustomerAutoIncrement, 0, 8) } else { out.Data = []*ViewCustomerAutoIncrement{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v4 *ViewCustomerAutoIncrement if in.IsNull() { in.Skip() v4 = nil } else { if v4 == nil { v4 = new(ViewCustomerAutoIncrement) } (*v4).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v4) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(out *jwriter.Writer, in ViewCustomerAutoIncrements) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v5, v6 := range in.Data { if v5 > 0 { out.RawByte(',') } if v6 == nil { out.RawString("null") } else { (*v6).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v ViewCustomerAutoIncrements) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface 
func (v ViewCustomerAutoIncrements) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *ViewCustomerAutoIncrements) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *ViewCustomerAutoIncrements) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated2(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(in *jlexer.Lexer, out *ViewCustomerAutoIncrement) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "ceEntityID": out.CeEntityID = uint32(in.Uint32()) case "email": if data := in.Raw(); in.Ok() { in.AddError((out.Email).UnmarshalJSON(data)) } case "firstname": out.Firstname = string(in.String()) case "lastname": out.Lastname = string(in.String()) case "city": out.City = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(out *jwriter.Writer, in ViewCustomerAutoIncrement) { out.RawByte('{') first := true _ = first if in.CeEntityID != 0 { const prefix string = ",\"ceEntityID\":" first = false out.RawString(prefix[1:]) out.Uint32(uint32(in.CeEntityID)) } if true { const prefix string = ",\"email\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Email).MarshalJSON()) } if in.Firstname != "" { const prefix string = ",\"firstname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Firstname)) } if in.Lastname != "" { const prefix string = ",\"lastname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Lastname)) } if in.City != "" { const prefix string = ",\"city\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.City)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v ViewCustomerAutoIncrement) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v ViewCustomerAutoIncrement) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *ViewCustomerAutoIncrement) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *ViewCustomerAutoIncrement) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated3(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(in *jlexer.Lexer, out *SalesOrderStatusStates) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } 
in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*SalesOrderStatusState, 0, 8) } else { out.Data = []*SalesOrderStatusState{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v7 *SalesOrderStatusState if in.IsNull() { in.Skip() v7 = nil } else { if v7 == nil { v7 = new(SalesOrderStatusState) } (*v7).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v7) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(out *jwriter.Writer, in SalesOrderStatusStates) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v8, v9 := range in.Data { if v8 > 0 { out.RawByte(',') } if v9 == nil { out.RawString("null") } else { (*v9).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v SalesOrderStatusStates) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SalesOrderStatusStates) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SalesOrderStatusStates) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SalesOrderStatusStates) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated4(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(in *jlexer.Lexer, out *SalesOrderStatusState) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "status": out.Status = string(in.String()) case "state": out.State = string(in.String()) case "isDefault": out.IsDefault = bool(in.Bool()) case "visibleOnFront": out.VisibleOnFront = uint32(in.Uint32()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(out *jwriter.Writer, in SalesOrderStatusState) { out.RawByte('{') first := true _ = first if in.Status != "" { const prefix string = ",\"status\":" first = false out.RawString(prefix[1:]) out.String(string(in.Status)) } if in.State != "" { const prefix string = ",\"state\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.State)) } if in.IsDefault { const prefix string = ",\"isDefault\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Bool(bool(in.IsDefault)) } if in.VisibleOnFront != 0 { const prefix string = ",\"visibleOnFront\":" if first { first = 
false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Uint32(uint32(in.VisibleOnFront)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v SalesOrderStatusState) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v SalesOrderStatusState) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *SalesOrderStatusState) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *SalesOrderStatusState) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated5(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(in *jlexer.Lexer, out *DmlgenTypesCollection) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*DmlgenTypes, 0, 8) } else { out.Data = []*DmlgenTypes{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v10 *DmlgenTypes if in.IsNull() { in.Skip() v10 = nil } else { if v10 == nil { v10 = new(DmlgenTypes) } (*v10).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v10) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(out *jwriter.Writer, in DmlgenTypesCollection) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v11, v12 := range in.Data { if v11 > 0 { out.RawByte(',') } if v12 == nil { out.RawString("null") } else { (*v12).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v DmlgenTypesCollection) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DmlgenTypesCollection) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DmlgenTypesCollection) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DmlgenTypesCollection) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated6(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(in *jlexer.Lexer, out *DmlgenTypes) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() 
return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "id": out.ID = int32(in.Int32()) case "col_bigint_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColBigint1).UnmarshalJSON(data)) } case "col_bigint_2": out.ColBigint2 = int64(in.Int64()) case "col_bigint_3": if data := in.Raw(); in.Ok() { in.AddError((out.ColBigint3).UnmarshalJSON(data)) } case "col_bigint_4": out.ColBigint4 = uint64(in.Uint64()) case "col_blob": if in.IsNull() { in.Skip() out.ColBlob = nil } else { out.ColBlob = in.Bytes() } case "col_date_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColDate1).UnmarshalJSON(data)) } case "col_date_2": if data := in.Raw(); in.Ok() { in.AddError((out.ColDate2).UnmarshalJSON(data)) } case "col_datetime_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColDatetime1).UnmarshalJSON(data)) } case "col_datetime_2": if data := in.Raw(); in.Ok() { in.AddError((out.ColDatetime2).UnmarshalJSON(data)) } case "col_decimal_10_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColDecimal101).UnmarshalJSON(data)) } case "col_decimal_12_4": if data := in.Raw(); in.Ok() { in.AddError((out.ColDecimal124).UnmarshalJSON(data)) } case "price_a_12_4": if data := in.Raw(); in.Ok() { in.AddError((out.PriceA124).UnmarshalJSON(data)) } case "price_b_12_4": if data := in.Raw(); in.Ok() { in.AddError((out.PriceB124).UnmarshalJSON(data)) } case "col_decimal_12_3": if data := in.Raw(); in.Ok() { in.AddError((out.ColDecimal123).UnmarshalJSON(data)) } case "col_decimal_20_6": if data := in.Raw(); in.Ok() { in.AddError((out.ColDecimal206).UnmarshalJSON(data)) } case "col_decimal_24_12": if data := in.Raw(); in.Ok() { in.AddError((out.ColDecimal2412).UnmarshalJSON(data)) } case "col_int_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColInt1).UnmarshalJSON(data)) } case "col_int_2": out.ColInt2 = int32(in.Int32()) case "col_int_3": if data := in.Raw(); in.Ok() { in.AddError((out.ColInt3).UnmarshalJSON(data)) } case "col_int_4": out.ColInt4 = uint32(in.Uint32()) case "col_longtext_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColLongtext1).UnmarshalJSON(data)) } case "col_longtext_2": out.ColLongtext2 = string(in.String()) case "col_mediumblob": if in.IsNull() { in.Skip() out.ColMediumblob = nil } else { out.ColMediumblob = in.Bytes() } case "col_mediumtext_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColMediumtext1).UnmarshalJSON(data)) } case "col_mediumtext_2": out.ColMediumtext2 = string(in.String()) case "col_smallint_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColSmallint1).UnmarshalJSON(data)) } case "col_smallint_2": out.ColSmallint2 = int32(in.Int32()) case "col_smallint_3": if data := in.Raw(); in.Ok() { in.AddError((out.ColSmallint3).UnmarshalJSON(data)) } case "col_smallint_4": out.ColSmallint4 = uint32(in.Uint32()) case "has_smallint_5": out.HasSmallint5 = bool(in.Bool()) case "is_smallint_5": if data := in.Raw(); in.Ok() { in.AddError((out.IsSmallint5).UnmarshalJSON(data)) } case "col_text": if data := in.Raw(); in.Ok() { in.AddError((out.ColText).UnmarshalJSON(data)) } case "col_timestamp_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColTimestamp1).UnmarshalJSON(data)) } case "col_timestamp_2": if data := in.Raw(); in.Ok() { in.AddError((out.ColTimestamp2).UnmarshalJSON(data)) } case "col_tinyint_1": out.ColTinyint1 = int32(in.Int32()) case "col_varchar_1": out.ColVarchar1 = string(in.String()) case "col_varchar_100": if data := in.Raw(); in.Ok() { 
in.AddError((out.ColVarchar100).UnmarshalJSON(data)) } case "col_varchar_16": out.ColVarchar16 = string(in.String()) case "col_char_1": if data := in.Raw(); in.Ok() { in.AddError((out.ColChar1).UnmarshalJSON(data)) } case "col_char_2": out.ColChar2 = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(out *jwriter.Writer, in DmlgenTypes) { out.RawByte('{') first := true _ = first if in.ID != 0 { const prefix string = ",\"id\":" first = false out.RawString(prefix[1:]) out.Int32(int32(in.ID)) } if true { const prefix string = ",\"col_bigint_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColBigint1).MarshalJSON()) } if in.ColBigint2 != 0 { const prefix string = ",\"col_bigint_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int64(int64(in.ColBigint2)) } if true { const prefix string = ",\"col_bigint_3\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColBigint3).MarshalJSON()) } if in.ColBigint4 != 0 { const prefix string = ",\"col_bigint_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Uint64(uint64(in.ColBigint4)) } if len(in.ColBlob) != 0 { const prefix string = ",\"col_blob\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Base64Bytes(in.ColBlob) } if true { const prefix string = ",\"col_date_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDate1).MarshalJSON()) } if true { const prefix string = ",\"col_date_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDate2).MarshalJSON()) } if true { const prefix string = ",\"col_datetime_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDatetime1).MarshalJSON()) } if true { const prefix string = ",\"col_datetime_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDatetime2).MarshalJSON()) } if true { const prefix string = ",\"col_decimal_10_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDecimal101).MarshalJSON()) } if true { const prefix string = ",\"col_decimal_12_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDecimal124).MarshalJSON()) } if true { const prefix string = ",\"price_a_12_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.PriceA124).MarshalJSON()) } if true { const prefix string = ",\"price_b_12_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.PriceB124).MarshalJSON()) } if true { const prefix string = ",\"col_decimal_12_3\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDecimal123).MarshalJSON()) } if true { const prefix string = ",\"col_decimal_20_6\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDecimal206).MarshalJSON()) } if true { const prefix string = ",\"col_decimal_24_12\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColDecimal2412).MarshalJSON()) } if true { const 
prefix string = ",\"col_int_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColInt1).MarshalJSON()) } if in.ColInt2 != 0 { const prefix string = ",\"col_int_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int32(int32(in.ColInt2)) } if true { const prefix string = ",\"col_int_3\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColInt3).MarshalJSON()) } if in.ColInt4 != 0 { const prefix string = ",\"col_int_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Uint32(uint32(in.ColInt4)) } if true { const prefix string = ",\"col_longtext_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColLongtext1).MarshalJSON()) } if in.ColLongtext2 != "" { const prefix string = ",\"col_longtext_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.ColLongtext2)) } if len(in.ColMediumblob) != 0 { const prefix string = ",\"col_mediumblob\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Base64Bytes(in.ColMediumblob) } if true { const prefix string = ",\"col_mediumtext_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColMediumtext1).MarshalJSON()) } if in.ColMediumtext2 != "" { const prefix string = ",\"col_mediumtext_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.ColMediumtext2)) } if true { const prefix string = ",\"col_smallint_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColSmallint1).MarshalJSON()) } if in.ColSmallint2 != 0 { const prefix string = ",\"col_smallint_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int32(int32(in.ColSmallint2)) } if true { const prefix string = ",\"col_smallint_3\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColSmallint3).MarshalJSON()) } if in.ColSmallint4 != 0 { const prefix string = ",\"col_smallint_4\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Uint32(uint32(in.ColSmallint4)) } if in.HasSmallint5 { const prefix string = ",\"has_smallint_5\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Bool(bool(in.HasSmallint5)) } if true { const prefix string = ",\"is_smallint_5\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.IsSmallint5).MarshalJSON()) } if true { const prefix string = ",\"col_text\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColText).MarshalJSON()) } if true { const prefix string = ",\"col_timestamp_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColTimestamp1).MarshalJSON()) } if true { const prefix string = ",\"col_timestamp_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColTimestamp2).MarshalJSON()) } if in.ColTinyint1 != 0 { const prefix string = ",\"col_tinyint_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int32(int32(in.ColTinyint1)) }<|fim▁hole|> out.RawString(prefix[1:]) } else { out.RawString(prefix) } 
out.String(string(in.ColVarchar1)) } if true { const prefix string = ",\"col_varchar_100\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColVarchar100).MarshalJSON()) } if in.ColVarchar16 != "" { const prefix string = ",\"col_varchar_16\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.ColVarchar16)) } if true { const prefix string = ",\"col_char_1\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ColChar1).MarshalJSON()) } if in.ColChar2 != "" { const prefix string = ",\"col_char_2\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.ColChar2)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v DmlgenTypes) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v DmlgenTypes) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *DmlgenTypes) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *DmlgenTypes) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated7(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(in *jlexer.Lexer, out *CustomerEntity) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "entityID": out.EntityID = uint32(in.Uint32()) case "websiteID": if data := in.Raw(); in.Ok() { in.AddError((out.WebsiteID).UnmarshalJSON(data)) } case "email": if data := in.Raw(); in.Ok() { in.AddError((out.Email).UnmarshalJSON(data)) } case "groupID": out.GroupID = uint32(in.Uint32()) case "storeID": if data := in.Raw(); in.Ok() { in.AddError((out.StoreID).UnmarshalJSON(data)) } case "createdAt": if data := in.Raw(); in.Ok() { in.AddError((out.CreatedAt).UnmarshalJSON(data)) } case "updatedAt": if data := in.Raw(); in.Ok() { in.AddError((out.UpdatedAt).UnmarshalJSON(data)) } case "isActive": out.IsActive = bool(in.Bool()) case "createdIn": if data := in.Raw(); in.Ok() { in.AddError((out.CreatedIn).UnmarshalJSON(data)) } case "firstname": if data := in.Raw(); in.Ok() { in.AddError((out.Firstname).UnmarshalJSON(data)) } case "lastname": if data := in.Raw(); in.Ok() { in.AddError((out.Lastname).UnmarshalJSON(data)) } case "dob": if data := in.Raw(); in.Ok() { in.AddError((out.Dob).UnmarshalJSON(data)) } case "rpToken": if data := in.Raw(); in.Ok() { in.AddError((out.RpToken).UnmarshalJSON(data)) } case "rpTokenCreatedAt": if data := in.Raw(); in.Ok() { in.AddError((out.RpTokenCreatedAt).UnmarshalJSON(data)) } case "defaultBilling": if data := in.Raw(); in.Ok() { in.AddError((out.DefaultBilling).UnmarshalJSON(data)) } case "defaultShipping": if data := in.Raw(); in.Ok() { in.AddError((out.DefaultShipping).UnmarshalJSON(data)) } case "gender": if data := in.Raw(); in.Ok() { 
in.AddError((out.Gender).UnmarshalJSON(data)) } case "relations": if in.IsNull() { in.Skip() out.Relations = nil } else { if out.Relations == nil { out.Relations = new(customerEntityRelations) } easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated9(in, out.Relations) } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(out *jwriter.Writer, in CustomerEntity) { out.RawByte('{') first := true _ = first if in.EntityID != 0 { const prefix string = ",\"entityID\":" first = false out.RawString(prefix[1:]) out.Uint32(uint32(in.EntityID)) } if true { const prefix string = ",\"websiteID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.WebsiteID).MarshalJSON()) } if true { const prefix string = ",\"email\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Email).MarshalJSON()) } if in.GroupID != 0 { const prefix string = ",\"groupID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Uint32(uint32(in.GroupID)) } if true { const prefix string = ",\"storeID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.StoreID).MarshalJSON()) } if true { const prefix string = ",\"createdAt\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.CreatedAt).MarshalJSON()) } if true { const prefix string = ",\"updatedAt\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.UpdatedAt).MarshalJSON()) } if in.IsActive { const prefix string = ",\"isActive\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Bool(bool(in.IsActive)) } if true { const prefix string = ",\"createdIn\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.CreatedIn).MarshalJSON()) } if true { const prefix string = ",\"firstname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Firstname).MarshalJSON()) } if true { const prefix string = ",\"lastname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Lastname).MarshalJSON()) } if true { const prefix string = ",\"dob\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Dob).MarshalJSON()) } if true { const prefix string = ",\"rpToken\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.RpToken).MarshalJSON()) } if true { const prefix string = ",\"rpTokenCreatedAt\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.RpTokenCreatedAt).MarshalJSON()) } if true { const prefix string = ",\"defaultBilling\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.DefaultBilling).MarshalJSON()) } if true { const prefix string = ",\"defaultShipping\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.DefaultShipping).MarshalJSON()) } if true { const prefix string = ",\"gender\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Gender).MarshalJSON()) } if in.Relations != nil { const prefix string = ",\"relations\":" if first { first = false 
out.RawString(prefix[1:]) } else { out.RawString(prefix) } easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated9(out, *in.Relations) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CustomerEntity) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CustomerEntity) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CustomerEntity) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CustomerEntity) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated8(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated9(in *jlexer.Lexer, out *customerEntityRelations) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "customerAddressEntities": if in.IsNull() { in.Skip() out.CustomerAddressEntities = nil } else { if out.CustomerAddressEntities == nil { out.CustomerAddressEntities = new(CustomerAddressEntities) } (*out.CustomerAddressEntities).UnmarshalEasyJSON(in) } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated9(out *jwriter.Writer, in customerEntityRelations) { out.RawByte('{') first := true _ = first if in.CustomerAddressEntities != nil { const prefix string = ",\"customerAddressEntities\":" first = false out.RawString(prefix[1:]) (*in.CustomerAddressEntities).MarshalEasyJSON(out) } out.RawByte('}') } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(in *jlexer.Lexer, out *CustomerEntities) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*CustomerEntity, 0, 8) } else { out.Data = []*CustomerEntity{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v19 *CustomerEntity if in.IsNull() { in.Skip() v19 = nil } else { if v19 == nil { v19 = new(CustomerEntity) } (*v19).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v19) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(out *jwriter.Writer, in CustomerEntities) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v20, v21 := range in.Data { if v20 > 0 { out.RawByte(',') } if v21 == nil { out.RawString("null") } else { (*v21).MarshalEasyJSON(out) } } out.RawByte(']') } } 
out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CustomerEntities) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CustomerEntities) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CustomerEntities) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CustomerEntities) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated10(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(in *jlexer.Lexer, out *CustomerAddressEntity) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "entityID": out.EntityID = uint32(in.Uint32()) case "incrementID": if data := in.Raw(); in.Ok() { in.AddError((out.IncrementID).UnmarshalJSON(data)) } case "parentID": if data := in.Raw(); in.Ok() { in.AddError((out.ParentID).UnmarshalJSON(data)) } case "createdAt": if data := in.Raw(); in.Ok() { in.AddError((out.CreatedAt).UnmarshalJSON(data)) } case "updatedAt": if data := in.Raw(); in.Ok() { in.AddError((out.UpdatedAt).UnmarshalJSON(data)) } case "isActive": out.IsActive = bool(in.Bool()) case "city": out.City = string(in.String()) case "company": if data := in.Raw(); in.Ok() { in.AddError((out.Company).UnmarshalJSON(data)) } case "countryID": out.CountryID = string(in.String()) case "firstname": out.Firstname = string(in.String()) case "lastname": out.Lastname = string(in.String()) case "postcode": if data := in.Raw(); in.Ok() { in.AddError((out.Postcode).UnmarshalJSON(data)) } case "region": if data := in.Raw(); in.Ok() { in.AddError((out.Region).UnmarshalJSON(data)) } case "street": out.Street = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(out *jwriter.Writer, in CustomerAddressEntity) { out.RawByte('{') first := true _ = first if in.EntityID != 0 { const prefix string = ",\"entityID\":" first = false out.RawString(prefix[1:]) out.Uint32(uint32(in.EntityID)) } if true { const prefix string = ",\"incrementID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.IncrementID).MarshalJSON()) } if true { const prefix string = ",\"parentID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.ParentID).MarshalJSON()) } if true { const prefix string = ",\"createdAt\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.CreatedAt).MarshalJSON()) } if true { const prefix string = ",\"updatedAt\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.UpdatedAt).MarshalJSON()) } if in.IsActive { const prefix string = ",\"isActive\":" if first { first = false out.RawString(prefix[1:]) } else { 
out.RawString(prefix) } out.Bool(bool(in.IsActive)) } if in.City != "" { const prefix string = ",\"city\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.City)) } if true { const prefix string = ",\"company\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Company).MarshalJSON()) } if in.CountryID != "" { const prefix string = ",\"countryID\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.CountryID)) } if in.Firstname != "" { const prefix string = ",\"firstname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Firstname)) } if in.Lastname != "" { const prefix string = ",\"lastname\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Lastname)) } if true { const prefix string = ",\"postcode\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Postcode).MarshalJSON()) } if true { const prefix string = ",\"region\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Region).MarshalJSON()) } if in.Street != "" { const prefix string = ",\"street\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Street)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CustomerAddressEntity) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CustomerAddressEntity) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CustomerAddressEntity) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CustomerAddressEntity) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated11(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(in *jlexer.Lexer, out *CustomerAddressEntities) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*CustomerAddressEntity, 0, 8) } else { out.Data = []*CustomerAddressEntity{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v22 *CustomerAddressEntity if in.IsNull() { in.Skip() v22 = nil } else { if v22 == nil { v22 = new(CustomerAddressEntity) } (*v22).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v22) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(out *jwriter.Writer, in CustomerAddressEntities) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 
{ const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v23, v24 := range in.Data { if v23 > 0 { out.RawByte(',') } if v24 == nil { out.RawString("null") } else { (*v24).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CustomerAddressEntities) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CustomerAddressEntities) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CustomerAddressEntities) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CustomerAddressEntities) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated12(l, v) } func easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(in *jlexer.Lexer, out *CoreConfigurations) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "data": if in.IsNull() { in.Skip() out.Data = nil } else { in.Delim('[') if out.Data == nil { if !in.IsDelim(']') { out.Data = make([]*CoreConfiguration, 0, 8) } else { out.Data = []*CoreConfiguration{} } } else { out.Data = (out.Data)[:0] } for !in.IsDelim(']') { var v25 *CoreConfiguration if in.IsNull() { in.Skip() v25 = nil } else { if v25 == nil { v25 = new(CoreConfiguration) } (*v25).UnmarshalEasyJSON(in) } out.Data = append(out.Data, v25) in.WantComma() } in.Delim(']') } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(out *jwriter.Writer, in CoreConfigurations) { out.RawByte('{') first := true _ = first if len(in.Data) != 0 { const prefix string = ",\"data\":" first = false out.RawString(prefix[1:]) { out.RawByte('[') for v26, v27 := range in.Data { if v26 > 0 { out.RawByte(',') } if v27 == nil { out.RawString("null") } else { (*v27).MarshalEasyJSON(out) } } out.RawByte(']') } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CoreConfigurations) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CoreConfigurations) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CoreConfigurations) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CoreConfigurations) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated13(l, v) } func 
easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(in *jlexer.Lexer, out *CoreConfiguration) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "config_id": out.ConfigID = uint32(in.Uint32()) case "scope": out.Scope = string(in.String()) case "scope_id": out.ScopeID = int32(in.Int32()) case "expires": if data := in.Raw(); in.Ok() { in.AddError((out.Expires).UnmarshalJSON(data)) } case "x_path": out.Path = string(in.String()) case "value": if data := in.Raw(); in.Ok() { in.AddError((out.Value).UnmarshalJSON(data)) } case "version_ts": if data := in.Raw(); in.Ok() { in.AddError((out.VersionTs).UnmarshalJSON(data)) } case "version_te": if data := in.Raw(); in.Ok() { in.AddError((out.VersionTe).UnmarshalJSON(data)) } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(out *jwriter.Writer, in CoreConfiguration) { out.RawByte('{') first := true _ = first if in.ConfigID != 0 { const prefix string = ",\"config_id\":" first = false out.RawString(prefix[1:]) out.Uint32(uint32(in.ConfigID)) } if in.Scope != "" { const prefix string = ",\"scope\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Scope)) } if in.ScopeID != 0 { const prefix string = ",\"scope_id\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Int32(int32(in.ScopeID)) } if true { const prefix string = ",\"expires\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Expires).MarshalJSON()) } if in.Path != "" { const prefix string = ",\"x_path\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.String(string(in.Path)) } if true { const prefix string = ",\"value\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.Value).MarshalJSON()) } if true { const prefix string = ",\"version_ts\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.VersionTs).MarshalJSON()) } if true { const prefix string = ",\"version_te\":" if first { first = false out.RawString(prefix[1:]) } else { out.RawString(prefix) } out.Raw((in.VersionTe).MarshalJSON()) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v CoreConfiguration) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v CoreConfiguration) MarshalEasyJSON(w *jwriter.Writer) { easyjson4b0a353eEncodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *CoreConfiguration) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *CoreConfiguration) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjson4b0a353eDecodeGithubComCorestoreioPkgSqlDmlgenDmltestgenerated14(l, v) }<|fim▁end|>
if in.ColVarchar1 != "" { const prefix string = ",\"col_varchar_1\":" if first { first = false
<|file_name|>custom_entities.rs<|end_file_name|><|fim▁begin|>//! This example demonstrates how custom entities can be extracted from the DOCTYPE, //! and later used to decode text and attribute values. //!<|fim▁hole|>
//! * it only handles internal entities; //! * the regex in this example is simple but brittle; //! * it does not support the use of entities in entity declaration. extern crate quick_xml; extern crate regex; use quick_xml::events::Event; use quick_xml::Reader; use regex::bytes::Regex; use std::collections::HashMap; const DATA: &str = r#" <?xml version="1.0"?> <!DOCTYPE test [ <!ENTITY msg "hello world" > ]> <test label="&msg;">&msg;</test> "#; fn main() -> Result<(), Box<dyn std::error::Error>> { let mut reader = Reader::from_str(DATA); reader.trim_text(true); let mut buf = Vec::new(); let mut custom_entities = HashMap::new(); let entity_re = Regex::new(r#"<!ENTITY\s+([^ \t\r\n]+)\s+"([^"]*)"\s*>"#)?; loop { match reader.read_event(&mut buf) { Ok(Event::DocType(ref e)) => { for cap in entity_re.captures_iter(&e) { custom_entities.insert(cap[1].to_vec(), cap[2].to_vec()); } } Ok(Event::Start(ref e)) => match e.name() { b"test" => println!( "attributes values: {:?}", e.attributes() .map(|a| a .unwrap() .unescape_and_decode_value_with_custom_entities( &reader, &custom_entities ) .unwrap()) .collect::<Vec<_>>() ), _ => (), }, Ok(Event::Text(ref e)) => { println!( "text value: {}", e.unescape_and_decode_with_custom_entities(&reader, &custom_entities) .unwrap() ); } Ok(Event::Eof) => break, Err(e) => panic!("Error at position {}: {:?}", reader.buffer_position(), e), _ => (), } } Ok(()) }<|fim▁end|>
//! NB: this example is deliberately kept simple: //! * it assumes that the XML file is UTF-8 encoded (custom_entities must only contain UTF-8 data)
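// A self-contained sketch of just the DOCTYPE entity-extraction step used
// above; it assumes the same `regex` crate and the same (brittle) pattern,
// and can be run on its own to see what the regex actually captures.
use regex::bytes::Regex;

fn main() {
    let entity_re = Regex::new(r#"<!ENTITY\s+([^ \t\r\n]+)\s+"([^"]*)"\s*>"#).unwrap();
    let doctype: &[u8] = br#"test [ <!ENTITY msg "hello world" > ]"#;
    for cap in entity_re.captures_iter(doctype) {
        // Prints: msg => hello world
        println!(
            "{} => {}",
            String::from_utf8_lossy(&cap[1]),
            String::from_utf8_lossy(&cap[2])
        );
    }
}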
<|file_name|>foreach.tsx<|end_file_name|><|fim▁begin|>/* * Copyright (c) "Neo4j" * Neo4j Sweden AB [http://neo4j.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ import React from 'react' import ManualLink from 'browser-components/ManualLink' const title = 'FOREACH' const subtitle = 'Operate on a collection' const category = 'cypherHelp' const content = ( <> <p> The <code>FOREACH</code> clause is used to update data within a collection, whether its elements are components of a path or the result of an aggregation. </p> <div className="links"> <div className="link"> <p className="title">Reference</p> <p className="content"> <ManualLink chapter="cypher-manual" page="/clauses/foreach/"> FOREACH </ManualLink>{' '} manual page </p> </div> <div className="link"> <p className="title">Related</p><|fim▁hole|>
 <a help-topic="set">:help SET</a> <a help-topic="cypher">:help Cypher</a> </p> </div> </div> <section className="example"> <figure className="runnable"> <pre> {`MATCH p = (ups)<-[:DEPENDS_ON]-(device) WHERE ups.id='EPS-7001' FOREACH (n IN nodes(p) | SET n.available = FALSE )`} </pre> <figcaption> Mark all devices plugged into a failed UPS as unavailable. </figcaption> </figure> </section> </> ) export default { title, subtitle, category, content }<|fim▁end|>
<p className="content"> <a help-topic="create">:help CREATE</a> <a help-topic="delete">:help DELETE</a>
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># ETConf -- web-based user-friendly computer hardware configurator # Copyright (C) 2010-2011 ETegro Technologies, PLC <http://etegro.com/><|fim▁hole|># it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.conf.urls.defaults import * urlpatterns = patterns( "configurator.giver.views", ( r"^perform/(?P<computermodel_alias>.+)/$", "perform" ), ( r"^configurator/(?P<computermodel_alias>.+)/$", "configurator" ), ( r"^computermodel/request/(?P<computermodel_alias>.+)$", "computermodel_request" ), )<|fim▁end|>
# Sergey Matveev <[email protected]> # # This program is free software: you can redistribute it and/or modify
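# A hedged illustration of how the patterns above resolve a request path;
# it assumes this module is installed as ROOT_URLCONF in a configured,
# pre-1.6-style Django project (matching the `conf.urls.defaults` import
# above), and that the named view functions exist in configurator.giver.views.
from django.core.urlresolvers import resolve

match = resolve("/configurator/ET123/")
# `(?P<computermodel_alias>.+)` captures everything between the literal
# prefix and the trailing slash, so the view is invoked with it as a
# keyword argument:
assert match.kwargs == {"computermodel_alias": "ET123"}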
<|file_name|>passes.rs<|end_file_name|><|fim▁begin|>use crate::context::{EarlyContext, LateContext}; use rustc_ast as ast; use rustc_data_structures::sync; use rustc_hir as hir; use rustc_session::lint::builtin::HardwiredLints; use rustc_session::lint::LintPass; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::Span; #[macro_export] macro_rules! late_lint_methods { ($macro:path, $args:tt, [$hir:tt]) => ( $macro!($args, [$hir], [ fn check_param(a: &$hir hir::Param<$hir>); fn check_body(a: &$hir hir::Body<$hir>); fn check_body_post(a: &$hir hir::Body<$hir>); fn check_name(a: Span, b: Symbol); fn check_crate(); fn check_crate_post(); fn check_mod(a: &$hir hir::Mod<$hir>, b: Span, c: hir::HirId); fn check_mod_post(a: &$hir hir::Mod<$hir>, b: Span, c: hir::HirId); fn check_foreign_item(a: &$hir hir::ForeignItem<$hir>); fn check_foreign_item_post(a: &$hir hir::ForeignItem<$hir>); fn check_item(a: &$hir hir::Item<$hir>); fn check_item_post(a: &$hir hir::Item<$hir>); fn check_local(a: &$hir hir::Local<$hir>); fn check_block(a: &$hir hir::Block<$hir>); fn check_block_post(a: &$hir hir::Block<$hir>); fn check_stmt(a: &$hir hir::Stmt<$hir>); fn check_arm(a: &$hir hir::Arm<$hir>); fn check_pat(a: &$hir hir::Pat<$hir>); fn check_expr(a: &$hir hir::Expr<$hir>); fn check_expr_post(a: &$hir hir::Expr<$hir>); fn check_ty(a: &$hir hir::Ty<$hir>); fn check_infer(a: &$hir hir::InferArg); fn check_generic_arg(a: &$hir hir::GenericArg<$hir>); fn check_generic_param(a: &$hir hir::GenericParam<$hir>); fn check_generics(a: &$hir hir::Generics<$hir>); fn check_where_predicate(a: &$hir hir::WherePredicate<$hir>); fn check_poly_trait_ref(a: &$hir hir::PolyTraitRef<$hir>, b: hir::TraitBoundModifier); fn check_fn( a: rustc_hir::intravisit::FnKind<$hir>, b: &$hir hir::FnDecl<$hir>, c: &$hir hir::Body<$hir>, d: Span, e: hir::HirId); fn check_fn_post( a: rustc_hir::intravisit::FnKind<$hir>, b: &$hir hir::FnDecl<$hir>, c: &$hir hir::Body<$hir>, d: Span, e: hir::HirId ); fn check_trait_item(a: &$hir hir::TraitItem<$hir>); fn check_trait_item_post(a: &$hir hir::TraitItem<$hir>); fn check_impl_item(a: &$hir hir::ImplItem<$hir>); fn check_impl_item_post(a: &$hir hir::ImplItem<$hir>); fn check_struct_def(a: &$hir hir::VariantData<$hir>); fn check_struct_def_post(a: &$hir hir::VariantData<$hir>); fn check_field_def(a: &$hir hir::FieldDef<$hir>); fn check_variant(a: &$hir hir::Variant<$hir>); fn check_variant_post(a: &$hir hir::Variant<$hir>); fn check_lifetime(a: &$hir hir::Lifetime); fn check_path(a: &$hir hir::Path<$hir>, b: hir::HirId); fn check_attribute(a: &$hir ast::Attribute); /// Called when entering a syntax node that can have lint attributes such /// as `#[allow(...)]`. Called with *all* the attributes of that node. fn enter_lint_attrs(a: &$hir [ast::Attribute]); /// Counterpart to `enter_lint_attrs`. fn exit_lint_attrs(a: &$hir [ast::Attribute]); ]); ) } /// Trait for types providing lint checks. /// /// Each `check` method checks a single syntax node, and should not /// invoke methods recursively (unlike `Visitor`). By default they /// do nothing. // // FIXME: eliminate the duplication with `Visitor`. But this also // contains a few lint-specific methods with no equivalent in `Visitor`. macro_rules! expand_lint_pass_methods { ($context:ty, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => ( $(#[inline(always)] fn $name(&mut self, _: $context, $(_: $arg),*) {})* ) } macro_rules! 
declare_late_lint_pass { ([], [$hir:tt], [$($methods:tt)*]) => ( pub trait LateLintPass<$hir>: LintPass { expand_lint_pass_methods!(&LateContext<$hir>, [$($methods)*]); } ) } late_lint_methods!(declare_late_lint_pass, [], ['tcx]); impl LateLintPass<'_> for HardwiredLints {} #[macro_export] macro_rules! expand_combined_late_lint_pass_method { ([$($passes:ident),*], $self: ident, $name: ident, $params:tt) => ({ $($self.$passes.$name $params;)* }) } #[macro_export] macro_rules! expand_combined_late_lint_pass_methods { ($passes:tt, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => ( $(fn $name(&mut self, context: &LateContext<'tcx>, $($param: $arg),*) { expand_combined_late_lint_pass_method!($passes, self, $name, (context, $($param),*)); })* ) } #[macro_export] macro_rules! declare_combined_late_lint_pass { ([$v:vis $name:ident, [$($passes:ident: $constructor:expr,)*]], [$hir:tt], $methods:tt) => ( #[allow(non_snake_case)] $v struct $name { $($passes: $passes,)* } impl $name { $v fn new() -> Self { Self { $($passes: $constructor,)* } } $v fn get_lints() -> LintArray { let mut lints = Vec::new(); $(lints.extend_from_slice(&$passes::get_lints());)* lints } } impl<'tcx> LateLintPass<'tcx> for $name { expand_combined_late_lint_pass_methods!([$($passes),*], $methods); } #[allow(rustc::lint_pass_impl_without_macro)] impl LintPass for $name { fn name(&self) -> &'static str { panic!() } } ) } #[macro_export] macro_rules! early_lint_methods { ($macro:path, $args:tt) => ( $macro!($args, [ fn check_param(a: &ast::Param); fn check_ident(a: Ident); fn check_crate(a: &ast::Crate); fn check_crate_post(a: &ast::Crate); fn check_foreign_item(a: &ast::ForeignItem); fn check_foreign_item_post(a: &ast::ForeignItem); fn check_item(a: &ast::Item); fn check_item_post(a: &ast::Item); fn check_local(a: &ast::Local); fn check_block(a: &ast::Block); fn check_block_post(a: &ast::Block); fn check_stmt(a: &ast::Stmt); fn check_arm(a: &ast::Arm); fn check_pat(a: &ast::Pat); fn check_anon_const(a: &ast::AnonConst); fn check_pat_post(a: &ast::Pat); fn check_expr(a: &ast::Expr); fn check_expr_post(a: &ast::Expr); fn check_ty(a: &ast::Ty); fn check_generic_arg(a: &ast::GenericArg); fn check_generic_param(a: &ast::GenericParam); fn check_generics(a: &ast::Generics); fn check_where_predicate(a: &ast::WherePredicate); fn check_poly_trait_ref(a: &ast::PolyTraitRef, b: &ast::TraitBoundModifier); fn check_fn(a: rustc_ast::visit::FnKind<'_>, c: Span, d_: ast::NodeId); fn check_fn_post( a: rustc_ast::visit::FnKind<'_>, c: Span, d: ast::NodeId ); fn check_trait_item(a: &ast::AssocItem); fn check_trait_item_post(a: &ast::AssocItem); fn check_impl_item(a: &ast::AssocItem); fn check_impl_item_post(a: &ast::AssocItem); fn check_struct_def(a: &ast::VariantData); fn check_struct_def_post(a: &ast::VariantData); fn check_field_def(a: &ast::FieldDef); fn check_variant(a: &ast::Variant); fn check_variant_post(a: &ast::Variant); fn check_lifetime(a: &ast::Lifetime); fn check_path(a: &ast::Path, b: ast::NodeId); fn check_attribute(a: &ast::Attribute); fn check_mac_def(a: &ast::MacroDef, b: ast::NodeId); fn check_mac(a: &ast::MacCall); /// Called when entering a syntax node that can have lint attributes such<|fim▁hole|> fn enter_lint_attrs(a: &[ast::Attribute]); /// Counterpart to `enter_lint_attrs`. fn exit_lint_attrs(a: &[ast::Attribute]); ]); ) } macro_rules! 
expand_early_lint_pass_methods { ($context:ty, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => ( $(#[inline(always)] fn $name(&mut self, _: $context, $(_: $arg),*) {})* ) } macro_rules! declare_early_lint_pass { ([], [$($methods:tt)*]) => ( pub trait EarlyLintPass: LintPass { expand_early_lint_pass_methods!(&EarlyContext<'_>, [$($methods)*]); } ) } early_lint_methods!(declare_early_lint_pass, []); #[macro_export] macro_rules! expand_combined_early_lint_pass_method { ([$($passes:ident),*], $self: ident, $name: ident, $params:tt) => ({ $($self.$passes.$name $params;)* }) } #[macro_export] macro_rules! expand_combined_early_lint_pass_methods { ($passes:tt, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => ( $(fn $name(&mut self, context: &EarlyContext<'_>, $($param: $arg),*) { expand_combined_early_lint_pass_method!($passes, self, $name, (context, $($param),*)); })* ) } #[macro_export] macro_rules! declare_combined_early_lint_pass { ([$v:vis $name:ident, [$($passes:ident: $constructor:expr,)*]], $methods:tt) => ( #[allow(non_snake_case)] $v struct $name { $($passes: $passes,)* } impl $name { $v fn new() -> Self { Self { $($passes: $constructor,)* } } $v fn get_lints() -> LintArray { let mut lints = Vec::new(); $(lints.extend_from_slice(&$passes::get_lints());)* lints } } impl EarlyLintPass for $name { expand_combined_early_lint_pass_methods!([$($passes),*], $methods); } #[allow(rustc::lint_pass_impl_without_macro)] impl LintPass for $name { fn name(&self) -> &'static str { panic!() } } ) } /// A lint pass boxed up as a trait object. pub type EarlyLintPassObject = Box<dyn EarlyLintPass + sync::Send + sync::Sync + 'static>; pub type LateLintPassObject = Box<dyn for<'tcx> LateLintPass<'tcx> + sync::Send + sync::Sync + 'static>;<|fim▁end|>
/// as `#[allow(...)]`. Called with *all* the attributes of that node.
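// A hedged sketch of how a pass implements the trait declared above; the
// `declare_lint!`/`declare_lint_pass!` macros come from rustc_session, and
// `struct_span_lint` is assumed from the lint context of roughly this
// compiler era (exact signatures may differ between rustc versions).
declare_lint! {
    pub FORBIDDEN_IDENT,
    Warn,
    "identifiers literally named `forbidden`"
}

declare_lint_pass!(ForbiddenIdent => [FORBIDDEN_IDENT]);

impl EarlyLintPass for ForbiddenIdent {
    // Matches the `check_ident` entry generated by `early_lint_methods!`:
    // every method defaults to a no-op, so a pass only overrides the nodes
    // it cares about.
    fn check_ident(&mut self, cx: &EarlyContext<'_>, ident: Ident) {
        if ident.name == Symbol::intern("forbidden") {
            cx.struct_span_lint(FORBIDDEN_IDENT, ident.span, |lint| {
                lint.build("identifier `forbidden` is discouraged").emit()
            });
        }
    }
}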
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @flow */ 'use strict'; const AssetServer = require('../AssetServer'); const getPlatformExtension = require('../node-haste').getPlatformExtension; const Bundler = require('../Bundler'); const MultipartResponse = require('./MultipartResponse'); const declareOpts = require('../lib/declareOpts'); const defaults = require('../../defaults'); const mime = require('mime-types'); const path = require('path'); const symbolicate = require('./symbolicate'); const terminal = require('../lib/terminal'); const url = require('url'); const debug = require('debug')('RNP:Server'); import type Module, {HasteImpl} from '../node-haste/Module'; import type {Stats} from 'fs'; import type {IncomingMessage, ServerResponse} from 'http'; import type ResolutionResponse from '../node-haste/DependencyGraph/ResolutionResponse'; import type Bundle from '../Bundler/Bundle'; import type HMRBundle from '../Bundler/HMRBundle'; import type {Reporter} from '../lib/reporting'; import type {GetTransformOptions} from '../Bundler'; import type {GlobalTransformCache} from '../lib/GlobalTransformCache'; import type {SourceMap, Symbolicate} from './symbolicate'; const { createActionStartEntry, createActionEndEntry, log, } = require('../Logger'); function debounceAndBatch(fn, delay) { let args = []; let timeout; return value => { args.push(value); clearTimeout(timeout); timeout = setTimeout(() => { const a = args; args = []; fn(a); }, delay); }; } type Options = { assetExts?: Array<string>, blacklistRE?: RegExp, cacheVersion?: string, extraNodeModules?: {}, getTransformOptions?: GetTransformOptions, globalTransformCache: ?GlobalTransformCache, hasteImpl?: HasteImpl, moduleFormat?: string, platforms?: Array<string>, polyfillModuleNames?: Array<string>, projectRoots: Array<string>, providesModuleNodeModules?: Array<string>, reporter: Reporter, resetCache?: boolean, silent?: boolean, transformModulePath?: string, transformTimeoutInterval?: number, watch?: boolean, }; export type BundleOptions = { +assetPlugins: Array<string>, dev: boolean, entryFile: string, +entryModuleOnly: boolean, +generateSourceMaps: boolean, +hot: boolean, +inlineSourceMap: boolean, +isolateModuleIDs: boolean, minify: boolean, onProgress: ?(doneCont: number, totalCount: number) => mixed, +platform: ?string, +resolutionResponse: ?{}, +runBeforeMainModule: Array<string>, +runModule: boolean, sourceMapUrl: ?string, unbundle: boolean, }; const dependencyOpts = declareOpts({ platform: { type: 'string', required: true, }, dev: { type: 'boolean', default: true, }, entryFile: { type: 'string', required: true, }, recursive: { type: 'boolean', default: true, }, hot: { type: 'boolean', default: false, }, minify: { type: 'boolean', default: undefined, }, }); const bundleDeps = new WeakMap(); const NODE_MODULES = `${path.sep}node_modules${path.sep}`; class Server { _opts: { assetExts: Array<string>, blacklistRE: void | RegExp, cacheVersion: string, extraNodeModules: {}, getTransformOptions?: GetTransformOptions, hasteImpl?: HasteImpl, moduleFormat: string, platforms: Array<string>, polyfillModuleNames: Array<string>, projectRoots: Array<string>, providesModuleNodeModules?: Array<string>, reporter: Reporter, resetCache: boolean, 
silent: boolean, transformModulePath: void | string, transformTimeoutInterval: ?number, watch: boolean, }; _projectRoots: Array<string>; _bundles: {}; _changeWatchers: Array<{ req: IncomingMessage, res: ServerResponse, }>; _fileChangeListeners: Array<(filePath: string) => mixed>; _assetServer: AssetServer; _bundler: Bundler; _debouncedFileChangeHandler: (filePath: string) => mixed; _hmrFileChangeListener: ?(type: string, filePath: string) => mixed; _reporter: Reporter; _symbolicateInWorker: Symbolicate; constructor(options: Options) { this._opts = { assetExts: options.assetExts || defaults.assetExts, blacklistRE: options.blacklistRE, cacheVersion: options.cacheVersion || '1.0', extraNodeModules: options.extraNodeModules || {}, getTransformOptions: options.getTransformOptions, globalTransformCache: options.globalTransformCache, hasteImpl: options.hasteImpl, moduleFormat: options.moduleFormat != null ? options.moduleFormat : 'haste', platforms: options.platforms || defaults.platforms, polyfillModuleNames: options.polyfillModuleNames || [], projectRoots: options.projectRoots, providesModuleNodeModules: options.providesModuleNodeModules, reporter: options.reporter, resetCache: options.resetCache || false, silent: options.silent || false, transformModulePath: options.transformModulePath, transformTimeoutInterval: options.transformTimeoutInterval, watch: options.watch || false, }; const processFileChange = ({type, filePath, stat}) => this.onFileChange(type, filePath, stat); this._reporter = options.reporter; this._projectRoots = this._opts.projectRoots; this._bundles = Object.create(null); this._changeWatchers = []; this._fileChangeListeners = []; this._assetServer = new AssetServer({ assetExts: this._opts.assetExts, projectRoots: this._opts.projectRoots, }); const bundlerOpts = Object.create(this._opts); bundlerOpts.assetServer = this._assetServer; bundlerOpts.allowBundleUpdates = this._opts.watch; bundlerOpts.globalTransformCache = options.globalTransformCache; bundlerOpts.watch = this._opts.watch; bundlerOpts.reporter = options.reporter; this._bundler = new Bundler(bundlerOpts); // changes to the haste map can affect resolution of files in the bundle this._bundler.getResolver().then(resolver => { resolver.getDependencyGraph().getWatcher().on( 'change', ({eventsQueue}) => eventsQueue.forEach(processFileChange), ); }); this._debouncedFileChangeHandler = debounceAndBatch(filePaths => { // only clear bundles for non-JS changes if (filePaths.every(RegExp.prototype.test, /\.js(?:on)?$/i)) { for (const key in this._bundles) { this._bundles[key].then(bundle => { const deps = bundleDeps.get(bundle); filePaths.forEach(filePath => { // $FlowFixMe(>=0.37.0) if (deps.files.has(filePath)) { // $FlowFixMe(>=0.37.0) deps.outdated.add(filePath); } }); }).catch(e => { debug(`Could not update bundle: ${e}, evicting from cache`); delete this._bundles[key]; }); } } else { debug('Clearing bundles due to non-JS change'); this._clearBundles(); } this._informChangeWatchers(); }, 50); this._symbolicateInWorker = symbolicate.createWorker(); } end(): mixed { return this._bundler.end(); } setHMRFileChangeListener(listener: ?(type: string, filePath: string) => mixed) { this._hmrFileChangeListener = listener; } addFileChangeListener(listener: (filePath: string) => mixed) { if (this._fileChangeListeners.indexOf(listener) === -1) { this._fileChangeListeners.push(listener); } } async buildBundle(options: BundleOptions): Promise<Bundle> { const bundle = await this._bundler.bundle(options); const modules = bundle.getModules(); 
const nonVirtual = modules.filter(m => !m.virtual); bundleDeps.set(bundle, { files: new Map(nonVirtual.map(({sourcePath, meta}) => [sourcePath, meta != null ? meta.dependencies : []], )), idToIndex: new Map(modules.map(({id}, i) => [id, i])), dependencyPairs: new Map( nonVirtual .filter(({meta}) => meta && meta.dependencyPairs) /* $FlowFixMe: the filter above ensures `dependencyPairs` is not null. */ .map(m => [m.sourcePath, m.meta.dependencyPairs]) ), outdated: new Set(), }); return bundle; } buildBundleFromUrl(reqUrl: string): Promise<Bundle> { const options = this._getOptionsFromUrl(reqUrl); return this.buildBundle(options); } buildBundleForHMR( options: {platform: ?string}, host: string, port: number, ): Promise<HMRBundle> { return this._bundler.hmrBundle(options, host, port); } getShallowDependencies(options: { entryFile: string, platform?: string, }): Promise<Array<Module>> { return Promise.resolve().then(() => { if (!options.platform) { options.platform = getPlatformExtension(options.entryFile); } const opts = dependencyOpts(options); return this._bundler.getShallowDependencies(opts); }); } getModuleForPath(entryFile: string): Promise<Module> { return this._bundler.getModuleForPath(entryFile); } getDependencies(options: { entryFile: string, platform: ?string, }): Promise<ResolutionResponse<Module>> { return Promise.resolve().then(() => { if (!options.platform) { options.platform = getPlatformExtension(options.entryFile); } const opts = dependencyOpts(options); return this._bundler.getDependencies(opts); }); } getOrderedDependencyPaths(options: {}): Promise<mixed> { return Promise.resolve().then(() => { const opts = dependencyOpts(options); return this._bundler.getOrderedDependencyPaths(opts); }); } onFileChange(type: string, filePath: string, stat: Stats) { this._assetServer.onFileChange(type, filePath, stat); // If Hot Loading is enabled avoid rebuilding bundles and sending live // updates. Instead, send the HMR updates right away and clear the bundles // cache so that if the user reloads we send them a fresh bundle const {_hmrFileChangeListener} = this; if (_hmrFileChangeListener) { // Clear cached bundles in case user reloads this._clearBundles(); _hmrFileChangeListener(type, filePath); return; } else if (type !== 'change' && filePath.indexOf(NODE_MODULES) !== -1) { // node module resolution can be affected by added or removed files debug('Clearing bundles due to potential node_modules resolution change'); this._clearBundles(); } Promise.all( this._fileChangeListeners.map(listener => listener(filePath)) ).then( () => this._onFileChangeComplete(filePath), () => this._onFileChangeComplete(filePath) ); } _onFileChangeComplete(filePath: string) { // Make sure the file watcher event runs through the system before // we rebuild the bundles. 
this._debouncedFileChangeHandler(filePath); } _clearBundles() { this._bundles = Object.create(null); } _informChangeWatchers() { const watchers = this._changeWatchers; const headers = { 'Content-Type': 'application/json; charset=UTF-8', }; watchers.forEach(function(w) { w.res.writeHead(205, headers); w.res.end(JSON.stringify({changed: true})); }); this._changeWatchers = []; } _processDebugRequest(reqUrl: string, res: ServerResponse) { let ret = '<!doctype html>'; const pathname = url.parse(reqUrl).pathname; /* $FlowFixMe: pathname would be null for an invalid URL */ const parts = pathname.split('/').filter(Boolean); if (parts.length === 1) { ret += '<div><a href="/debug/bundles">Cached Bundles</a></div>'; res.end(ret); } else if (parts[1] === 'bundles') { ret += '<h1> Cached Bundles </h1>'; Promise.all(Object.keys(this._bundles).map(optionsJson => this._bundles[optionsJson].then(p => { ret += '<div><h2>' + optionsJson + '</h2>'; ret += p.getDebugInfo(); }) )).then( () => res.end(ret), e => { res.writeHead(500); res.end('Internal Error'); terminal.log(e.stack); // eslint-disable-line no-console-disallow } ); } else { res.writeHead(404); res.end('Invalid debug request'); return; } } _processOnChangeRequest(req: IncomingMessage, res: ServerResponse) { const watchers = this._changeWatchers; watchers.push({ req, res, }); req.on('close', () => { for (let i = 0; i < watchers.length; i++) { if (watchers[i] && watchers[i].req === req) { watchers.splice(i, 1); break; } } }); } _rangeRequestMiddleware( req: IncomingMessage, res: ServerResponse, data: string, assetPath: Array<string>, ) { if (req.headers && req.headers.range) { const [rangeStart, rangeEnd] = req.headers.range.replace(/bytes=/, '').split('-'); const dataStart = parseInt(rangeStart, 10); const dataEnd = rangeEnd ? parseInt(rangeEnd, 10) : data.length - 1; const chunksize = (dataEnd - dataStart) + 1; res.writeHead(206, { 'Accept-Ranges': 'bytes', 'Content-Length': chunksize.toString(), 'Content-Range': `bytes ${dataStart}-${dataEnd}/${data.length}`, 'Content-Type': mime.lookup(path.basename(assetPath[1])), }); return data.slice(dataStart, dataEnd + 1); } return data; } _processAssetsRequest(req: IncomingMessage, res: ServerResponse) { const urlObj = url.parse(decodeURI(req.url), true); /* $FlowFixMe: could be empty if the url is invalid */ const assetPath: Array<string> = urlObj.pathname.match(/^\/assets\/(.+)$/); const processingAssetRequestLogEntry = log(createActionStartEntry({ action_name: 'Processing asset request', asset: assetPath[1], })); /* $FlowFixMe: query may be empty for invalid URLs */ this._assetServer.get(assetPath[1], urlObj.query.platform) .then( data => { // Tell clients to cache this for 1 year. // This is safe as the asset url contains a hash of the asset. // Note: env values are always strings, so compare against 'true'. if (process.env.REACT_NATIVE_ENABLE_ASSET_CACHING === 'true') { res.setHeader('Cache-Control', 'max-age=31536000'); } res.end(this._rangeRequestMiddleware(req, res, data, assetPath)); process.nextTick(() => { log(createActionEndEntry(processingAssetRequestLogEntry)); }); }, error => { console.error(error.stack); res.writeHead(404); res.end('Asset not found'); } ); } optionsHash(options: {}) { // onProgress is a function, can't be serialized return JSON.stringify(Object.assign({}, options, {onProgress: null})); } /** * Ensure we properly report the promise of a build that's happening, * including failed builds. We use that separately for when we update a bundle * and for when we build from scratch. 
*/ _reportBundlePromise( options: {entryFile: string}, bundlePromise: Promise<Bundle>, ): Promise<Bundle> { this._reporter.update({ entryFilePath: options.entryFile, type: 'bundle_build_started', }); return bundlePromise.then(bundle => { this._reporter.update({ entryFilePath: options.entryFile, type: 'bundle_build_done', }); return bundle; }, error => { this._reporter.update({ entryFilePath: options.entryFile, error, type: 'bundle_build_failed', }); return Promise.reject(error); }); } useCachedOrUpdateOrCreateBundle(options: BundleOptions): Promise<Bundle> { const optionsJson = this.optionsHash(options); const bundleFromScratch = () => { const building = this.buildBundle(options); this._bundles[optionsJson] = building; return building; }; if (optionsJson in this._bundles) { return this._bundles[optionsJson].then(bundle => { const deps = bundleDeps.get(bundle); // $FlowFixMe(>=0.37.0) const {dependencyPairs, files, idToIndex, outdated} = deps; if (outdated.size) { const updatingExistingBundleLogEntry = log(createActionStartEntry({ action_name: 'Updating existing bundle', outdated_modules: outdated.size, })); debug('Attempt to update existing bundle'); // $FlowFixMe(>=0.37.0) deps.outdated = new Set(); const {platform, dev, minify, hot} = options; // Need to create a resolution response to pass to the bundler // to process requires after transform. By providing a // specific response we can compute a non recursive one which // is the least we need and improve performance. const bundlePromise = this._bundles[optionsJson] = Promise.all([ this.getDependencies({ platform, dev, hot, minify, entryFile: options.entryFile, recursive: false, }), Promise.all(Array.from(outdated, this.getModuleForPath, this)), ]).then(([response, changedModules]) => { debug('Update bundle: rebuild shallow bundle'); changedModules.forEach(m => { response.setResolvedDependencyPairs( m, dependencyPairs.get(m.path), {ignoreFinalized: true}, );<|fim▁hole|> }); return this.buildBundle({ ...options, resolutionResponse: response.copy({ dependencies: changedModules, }), }).then(updateBundle => { const oldModules = bundle.getModules(); const newModules = updateBundle.getModules(); for (let i = 0, n = newModules.length; i < n; i++) { const moduleTransport = newModules[i]; const {meta, sourcePath} = moduleTransport; if (outdated.has(sourcePath)) { /* $FlowFixMe: `meta` could be empty */ if (!contentsEqual(meta.dependencies, new Set(files.get(sourcePath)))) { // bail out if any dependencies changed return Promise.reject(Error( `Dependencies of ${sourcePath} changed from [${ /* $FlowFixMe: `get` can return empty */ files.get(sourcePath).join(', ') }] to [${ /* $FlowFixMe: `meta` could be empty */ meta.dependencies.join(', ') }]` )); } oldModules[idToIndex.get(moduleTransport.id)] = moduleTransport; } } bundle.invalidateSource(); log(createActionEndEntry(updatingExistingBundleLogEntry)); debug('Successfully updated existing bundle'); return bundle; }); }).catch(e => { debug('Failed to update existing bundle, rebuilding...', e.stack || e.message); return bundleFromScratch(); }); return this._reportBundlePromise(options, bundlePromise); } else { debug('Using cached bundle'); return bundle; } }); } return this._reportBundlePromise(options, bundleFromScratch()); } processRequest( req: IncomingMessage, res: ServerResponse, next: () => mixed, ) { const urlObj = url.parse(req.url, true); const {host} = req.headers; debug(`Handling request: ${host ? 'http://' + host : ''}${req.url}`); /* $FlowFixMe: Could be empty if the URL is invalid. 
*/ const pathname: string = urlObj.pathname; let requestType; if (pathname.match(/\.bundle$/)) { requestType = 'bundle'; } else if (pathname.match(/\.map$/)) { requestType = 'map'; } else if (pathname.match(/\.assets$/)) { requestType = 'assets'; } else if (pathname.match(/^\/debug/)) { this._processDebugRequest(req.url, res); return; } else if (pathname.match(/^\/onchange\/?$/)) { this._processOnChangeRequest(req, res); return; } else if (pathname.match(/^\/assets\//)) { this._processAssetsRequest(req, res); return; } else if (pathname === '/symbolicate') { this._symbolicate(req, res); return; } else { next(); return; } const options = this._getOptionsFromUrl(req.url); const requestingBundleLogEntry = log(createActionStartEntry({ action_name: 'Requesting bundle', bundle_url: req.url, entry_point: options.entryFile, })); let reportProgress = () => {}; if (!this._opts.silent) { reportProgress = (transformedFileCount, totalFileCount) => { this._reporter.update({ type: 'bundle_transform_progressed', entryFilePath: options.entryFile, transformedFileCount, totalFileCount, }); }; } const mres = MultipartResponse.wrap(req, res); options.onProgress = (done, total) => { reportProgress(done, total); mres.writeChunk({'Content-Type': 'application/json'}, JSON.stringify({done, total})); }; debug('Getting bundle for request'); const building = this.useCachedOrUpdateOrCreateBundle(options); building.then( p => { if (requestType === 'bundle') { debug('Generating source code'); const bundleSource = p.getSource({ inlineSourceMap: options.inlineSourceMap, minify: options.minify, dev: options.dev, }); debug('Writing response headers'); const etag = p.getEtag(); mres.setHeader('Content-Type', 'application/javascript'); mres.setHeader('ETag', etag); if (req.headers['if-none-match'] === etag) { debug('Responding with 304'); mres.writeHead(304); mres.end(); } else { mres.end(bundleSource); } debug('Finished response'); log(createActionEndEntry(requestingBundleLogEntry)); } else if (requestType === 'map') { const sourceMap = p.getSourceMapString({ minify: options.minify, dev: options.dev, }); mres.setHeader('Content-Type', 'application/json'); mres.end(sourceMap); log(createActionEndEntry(requestingBundleLogEntry)); } else if (requestType === 'assets') { const assetsList = JSON.stringify(p.getAssets()); mres.setHeader('Content-Type', 'application/json'); mres.end(assetsList); log(createActionEndEntry(requestingBundleLogEntry)); } }, error => this._handleError(mres, this.optionsHash(options), error) ).catch(error => { process.nextTick(() => { throw error; }); }); } _symbolicate(req: IncomingMessage, res: ServerResponse) { const symbolicatingLogEntry = log(createActionStartEntry('Symbolicating')); debug('Start symbolication'); /* $FlowFixMe: where is `rowBody` defined? Is it added by * the `connect` framework? */ Promise.resolve(req.rawBody).then(body => { const stack = JSON.parse(body).stack; // In case of multiple bundles / HMR, some stack frames can have // different URLs from others const urls = new Set(); stack.forEach(frame => { const sourceUrl = frame.file; // Skip `/debuggerWorker.js` which drives remote debugging because it // does not need to symbolication. 
// Skip anything except http(s), because there is no support for that yet if (!urls.has(sourceUrl) && !sourceUrl.endsWith('/debuggerWorker.js') && sourceUrl.startsWith('http')) { urls.add(sourceUrl); } }); const mapPromises = Array.from(urls.values()).map(this._sourceMapForURL, this); debug('Getting source maps for symbolication'); return Promise.all(mapPromises).then(maps => { debug('Sending stacks and maps to symbolication worker'); const urlsToMaps = zip(urls.values(), maps); return this._symbolicateInWorker(stack, urlsToMaps); }); }).then( stack => { debug('Symbolication done'); res.end(JSON.stringify({stack})); process.nextTick(() => { log(createActionEndEntry(symbolicatingLogEntry)); }); }, error => { console.error(error.stack || error); res.statusCode = 500; res.end(JSON.stringify({error: error.message})); } ); } _sourceMapForURL(reqUrl: string): Promise<SourceMap> { const options = this._getOptionsFromUrl(reqUrl); const building = this.useCachedOrUpdateOrCreateBundle(options); return building.then(p => p.getSourceMap({ minify: options.minify, dev: options.dev, })); } _handleError(res: ServerResponse, bundleID: string, error: { status: number, type: string, description: string, filename: string, lineNumber: number, errors: Array<{description: string, filename: string, lineNumber: number}>, }) { res.writeHead(error.status || 500, { 'Content-Type': 'application/json; charset=UTF-8', }); if (error.type === 'TransformError' || error.type === 'NotFoundError' || error.type === 'UnableToResolveError') { error.errors = [{ description: error.description, filename: error.filename, lineNumber: error.lineNumber, }]; res.end(JSON.stringify(error)); if (error.type === 'NotFoundError') { delete this._bundles[bundleID]; } } else { console.error(error.stack || error); res.end(JSON.stringify({ type: 'InternalError', message: 'react-packager has encountered an internal error, ' + 'please check your terminal error output for more details', })); } } _getOptionsFromUrl(reqUrl: string): BundleOptions { // `true` to parse the query param as an object. const urlObj = url.parse(reqUrl, true); /* $FlowFixMe: `pathname` could be empty for an invalid URL */ const pathname = decodeURIComponent(urlObj.pathname); // Backwards compatibility. Options used to be as added as '.' to the // entry module name. We can safely remove these options. const entryFile = pathname.replace(/^\//, '').split('.').filter(part => { if (part === 'includeRequire' || part === 'runModule' || part === 'bundle' || part === 'map' || part === 'assets') { return false; } return true; }).join('.') + '.js'; // try to get the platform from the url /* $FlowFixMe: `query` could be empty for an invalid URL */ const platform = urlObj.query.platform || getPlatformExtension(pathname); /* $FlowFixMe: `query` could be empty for an invalid URL */ const assetPlugin = urlObj.query.assetPlugin; const assetPlugins = Array.isArray(assetPlugin) ? assetPlugin : (typeof assetPlugin === 'string') ? 
[assetPlugin] : []; const dev = this._getBoolOptionFromQuery(urlObj.query, 'dev', true); const minify = this._getBoolOptionFromQuery(urlObj.query, 'minify', false); return { sourceMapUrl: url.format({ hash: urlObj.hash, pathname: pathname.replace(/\.bundle$/, '.map'), query: urlObj.query, search: urlObj.search, }), entryFile, dev, minify, hot: this._getBoolOptionFromQuery(urlObj.query, 'hot', false), runBeforeMainModule: defaults.runBeforeMainModule, runModule: this._getBoolOptionFromQuery(urlObj.query, 'runModule', true), inlineSourceMap: this._getBoolOptionFromQuery( urlObj.query, 'inlineSourceMap', false ), isolateModuleIDs: false, platform, resolutionResponse: null, entryModuleOnly: this._getBoolOptionFromQuery( urlObj.query, 'entryModuleOnly', false, ), generateSourceMaps: minify || !dev || this._getBoolOptionFromQuery(urlObj.query, 'babelSourcemap', false), assetPlugins, onProgress: null, unbundle: false, }; } _getBoolOptionFromQuery(query: ?{}, opt: string, defaultVal: boolean): boolean { /* $FlowFixMe: `query` could be empty when it comes from an invalid URL */ if (query[opt] == null) { return defaultVal; } return query[opt] === 'true' || query[opt] === '1'; } static DEFAULT_BUNDLE_OPTIONS; } Server.DEFAULT_BUNDLE_OPTIONS = { assetPlugins: [], dev: true, entryModuleOnly: false, generateSourceMaps: false, hot: false, inlineSourceMap: false, isolateModuleIDs: false, minify: false, onProgress: null, resolutionResponse: null, runBeforeMainModule: defaults.runBeforeMainModule, runModule: true, sourceMapUrl: null, unbundle: false, }; function contentsEqual<T>(array: Array<T>, set: Set<T>): boolean { return array.length === set.size && array.every(set.has, set); } function* zip<X, Y>(xs: Iterable<X>, ys: Iterable<Y>): Iterable<[X, Y]> { //$FlowIssue #9324959 const ysIter: Iterator<Y> = ys[Symbol.iterator](); for (const x of xs) { const y = ysIter.next(); if (y.done) { return; } yield [x, y.value]; } } module.exports = Server;<|fim▁end|>
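A standalone sketch of the boolean query-flag parsing that _getOptionsFromUrl relies on above; the bundle URL below is a made-up example, not one taken from the source:

// Sketch: how dev/minify/hot flags on a bundle URL resolve to options.
const url = require('url');
function getBoolOptionFromQuery(query, opt, defaultVal) {
  if (query[opt] == null) {
    return defaultVal;
  }
  return query[opt] === 'true' || query[opt] === '1';
}
const {query} = url.parse('/index.ios.bundle?platform=ios&dev=1&minify=false', true);
getBoolOptionFromQuery(query, 'dev', true);     // true: '1' counts as true
getBoolOptionFromQuery(query, 'minify', false); // false: only 'true' and '1' are truthy
getBoolOptionFromQuery(query, 'hot', false);    // false: flag absent, default wins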
<|file_name|>reconnect_test.go<|end_file_name|><|fim▁begin|>// Copyright 2013-2019 The NATS Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package test import ( "fmt" "net" "net/url" "sync" "sync/atomic" "testing" "time" "github.com/nats-io/nats-server/v2/server" "github.com/nats-io/nats.go" ) func startReconnectServer(t *testing.T) *server.Server { return RunServerOnPort(22222) } func TestReconnectTotalTime(t *testing.T) { opts := nats.GetDefaultOptions() totalReconnectTime := time.Duration(opts.MaxReconnect) * opts.ReconnectWait if totalReconnectTime < (2 * time.Minute) { t.Fatalf("Total reconnect time should be at least 2 mins: Currently %v\n", totalReconnectTime) } } func TestDefaultReconnectJitter(t *testing.T) { opts := nats.GetDefaultOptions() if opts.ReconnectJitter != nats.DefaultReconnectJitter { t.Fatalf("Expected default jitter for non TLS to be %v, got %v", nats.DefaultReconnectJitter, opts.ReconnectJitter) } if opts.ReconnectJitterTLS != nats.DefaultReconnectJitterTLS { t.Fatalf("Expected default jitter for TLS to be %v, got %v", nats.DefaultReconnectJitterTLS, opts.ReconnectJitterTLS) } } func TestReconnectDisallowedFlags(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() ch := make(chan bool) opts := nats.GetDefaultOptions() opts.Url = "nats://127.0.0.1:22222" opts.AllowReconnect = false opts.ClosedCB = func(_ *nats.Conn) { ch <- true } nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() ts.Shutdown() if e := Wait(ch); e != nil { t.Fatal("Did not trigger ClosedCB correctly") } } func TestReconnectAllowedFlags(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() ch := make(chan bool) dch := make(chan bool) opts := nats.GetDefaultOptions() opts.Url = "nats://127.0.0.1:22222" opts.AllowReconnect = true opts.MaxReconnect = 2 opts.ReconnectWait = 1 * time.Second nats.ReconnectJitter(0, 0)(&opts) opts.ClosedCB = func(_ *nats.Conn) { ch <- true } opts.DisconnectedErrCB = func(_ *nats.Conn, _ error) { dch <- true } nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() ts.Shutdown() // We want wait to timeout here, and the connection // should not trigger the Close CB. if e := WaitTime(ch, 500*time.Millisecond); e == nil { t.Fatal("Triggered ClosedCB incorrectly") } // We should wait to get the disconnected callback to ensure // that we are in the process of reconnecting. 
if e := Wait(dch); e != nil { t.Fatal("DisconnectedErrCB should have been triggered") } if !nc.IsReconnecting() { t.Fatal("Expected to be in a reconnecting state") } // clear the CloseCB since ch will block nc.Opts.ClosedCB = nil } var reconnectOpts = nats.Options{ Url: "nats://127.0.0.1:22222", AllowReconnect: true, MaxReconnect: 10, ReconnectWait: 100 * time.Millisecond, Timeout: nats.DefaultTimeout, } func TestConnCloseBreaksReconnectLoop(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() cch := make(chan bool) opts := reconnectOpts // Bump the max reconnect attempts opts.MaxReconnect = 100 opts.ClosedCB = func(_ *nats.Conn) { cch <- true } nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() nc.Flush() // Shutdown the server ts.Shutdown() // Wait a second, then close the connection time.Sleep(time.Second) // Close the connection, this should break the reconnect loop. // Do this in a go routine since the issue was that Close() // would block until the reconnect loop is done. go nc.Close() // Even on Windows (where a createConn takes more than a second) // we should be able to break the reconnect loop with the following // timeout. if err := WaitTime(cch, 3*time.Second); err != nil { t.Fatal("Did not get a closed callback") } } func TestBasicReconnectFunctionality(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() ch := make(chan bool) dch := make(chan bool, 2) opts := reconnectOpts opts.DisconnectedErrCB = func(_ *nats.Conn, _ error) { dch <- true } nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v\n", err) } defer nc.Close() ec, err := nats.NewEncodedConn(nc, nats.DEFAULT_ENCODER) if err != nil { t.Fatalf("Failed to create an encoded connection: %v\n", err) } testString := "bar" ec.Subscribe("foo", func(s string) { if s != testString { t.Fatal("String doesn't match") } ch <- true }) ec.Flush() ts.Shutdown() // server is stopped here... if err := Wait(dch); err != nil { t.Fatalf("Did not get the disconnected callback on time\n") } if err := ec.Publish("foo", testString); err != nil { t.Fatalf("Failed to publish message: %v\n", err) } ts = startReconnectServer(t) defer ts.Shutdown() if err := ec.FlushTimeout(5 * time.Second); err != nil { t.Fatalf("Error on Flush: %v", err) } if e := Wait(ch); e != nil { t.Fatal("Did not receive our message") } expectedReconnectCount := uint64(1) reconnectCount := ec.Conn.Stats().Reconnects if reconnectCount != expectedReconnectCount { t.Fatalf("Reconnect count incorrect: %d vs %d\n", reconnectCount, expectedReconnectCount) } } func TestExtendedReconnectFunctionality(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() opts := reconnectOpts dch := make(chan bool, 2) opts.DisconnectedErrCB = func(_ *nats.Conn, _ error) { dch <- true } rch := make(chan bool, 1) opts.ReconnectedCB = func(_ *nats.Conn) { rch <- true } nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() ec, err := nats.NewEncodedConn(nc, nats.DEFAULT_ENCODER) if err != nil { t.Fatalf("Failed to create an encoded connection: %v\n", err) } testString := "bar" received := int32(0) ec.Subscribe("foo", func(s string) { atomic.AddInt32(&received, 1) }) sub, _ := ec.Subscribe("foobar", func(s string) { atomic.AddInt32(&received, 1) }) ec.Publish("foo", testString) ec.Flush() ts.Shutdown() // server is stopped here.. 
// wait for disconnect if e := WaitTime(dch, 2*time.Second); e != nil { t.Fatal("Did not receive a disconnect callback message") } // Sub while disconnected ec.Subscribe("bar", func(s string) { atomic.AddInt32(&received, 1) }) // Unsub foobar while disconnected sub.Unsubscribe() if err = ec.Publish("foo", testString); err != nil { t.Fatalf("Received an error after disconnect: %v\n", err) } if err = ec.Publish("bar", testString); err != nil { t.Fatalf("Received an error after disconnect: %v\n", err) } ts = startReconnectServer(t) defer ts.Shutdown() // server is restarted here.. // wait for reconnect if e := WaitTime(rch, 2*time.Second); e != nil { t.Fatal("Did not receive a reconnect callback message") } if err = ec.Publish("foobar", testString); err != nil { t.Fatalf("Received an error after server restarted: %v\n", err) } if err = ec.Publish("foo", testString); err != nil { t.Fatalf("Received an error after server restarted: %v\n", err) } ch := make(chan bool) ec.Subscribe("done", func(b bool) { ch <- true }) ec.Publish("done", true) if e := Wait(ch); e != nil { t.Fatal("Did not receive our message") } // Sleep a bit to guarantee scheduler runs and process all subs. time.Sleep(50 * time.Millisecond) if atomic.LoadInt32(&received) != 4 { t.Fatalf("Received != %d, equals %d\n", 4, received) } } func TestQueueSubsOnReconnect(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() opts := reconnectOpts // Allow us to block on reconnect complete. reconnectsDone := make(chan bool) opts.ReconnectedCB = func(nc *nats.Conn) { reconnectsDone <- true } // Create connection nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v\n", err) } defer nc.Close() ec, err := nats.NewEncodedConn(nc, nats.JSON_ENCODER) if err != nil { t.Fatalf("Failed to create an encoded connection: %v\n", err) } // To hold results. results := make(map[int]int) var mu sync.Mutex // Make sure we got what we needed, 1 msg only and all seqnos accounted for.. checkResults := func(numSent int) { mu.Lock() defer mu.Unlock() for i := 0; i < numSent; i++ { if results[i] != 1 { t.Fatalf("Received incorrect number of messages, [%d] for seq: %d\n", results[i], i) } } // Auto reset results map results = make(map[int]int) } subj := "foo.bar" qgroup := "workers" cb := func(seqno int) { mu.Lock() defer mu.Unlock() results[seqno] = results[seqno] + 1 } // Create Queue Subscribers ec.QueueSubscribe(subj, qgroup, cb) ec.QueueSubscribe(subj, qgroup, cb) ec.Flush() // Helper function to send messages and check results. sendAndCheckMsgs := func(numToSend int) { for i := 0; i < numToSend; i++ { ec.Publish(subj, i) } // Wait for processing. 
ec.Flush() time.Sleep(50 * time.Millisecond) // Check Results checkResults(numToSend) } // Base Test sendAndCheckMsgs(10) // Stop and restart server ts.Shutdown() ts = startReconnectServer(t) defer ts.Shutdown() if err := Wait(reconnectsDone); err != nil { t.Fatal("Did not get the ReconnectedCB!") } // Reconnect Base Test sendAndCheckMsgs(10) } func TestIsClosed(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() nc := NewConnection(t, 22222) defer nc.Close() if nc.IsClosed() { t.Fatalf("IsClosed returned true when the connection is still open.") } ts.Shutdown() if nc.IsClosed() { t.Fatalf("IsClosed returned true when the connection is still open.") } ts = startReconnectServer(t) defer ts.Shutdown() if nc.IsClosed() { t.Fatalf("IsClosed returned true when the connection is still open.") } nc.Close() if !nc.IsClosed() { t.Fatalf("IsClosed returned false after Close() was called.") } } func TestIsReconnectingAndStatus(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() disconnectedch := make(chan bool, 3) reconnectch := make(chan bool, 2) opts := nats.GetDefaultOptions() opts.Url = "nats://127.0.0.1:22222" opts.AllowReconnect = true opts.MaxReconnect = 10000 opts.ReconnectWait = 100 * time.Millisecond nats.ReconnectJitter(0, 0)(&opts) opts.DisconnectedErrCB = func(_ *nats.Conn, _ error) { disconnectedch <- true } opts.ReconnectedCB = func(_ *nats.Conn) { reconnectch <- true } // Connect, verify initial reconnecting state check, then stop the server nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() if nc.IsReconnecting() { t.Fatalf("IsReconnecting returned true when the connection is still open.") } if status := nc.Status(); status != nats.CONNECTED { t.Fatalf("Status returned %d when connected instead of CONNECTED", status) } ts.Shutdown() // Wait until we get the disconnected callback if e := Wait(disconnectedch); e != nil { t.Fatalf("Disconnect callback wasn't triggered: %v", e) } if !nc.IsReconnecting() { t.Fatalf("IsReconnecting returned false when the client is reconnecting.") } if status := nc.Status(); status != nats.RECONNECTING { t.Fatalf("Status returned %d when reconnecting instead of CONNECTED", status) } ts = startReconnectServer(t) defer ts.Shutdown() // Wait until we get the reconnect callback if e := Wait(reconnectch); e != nil { t.Fatalf("Reconnect callback wasn't triggered: %v", e) } if nc.IsReconnecting() { t.Fatalf("IsReconnecting returned true after the connection was reconnected.") } if status := nc.Status(); status != nats.CONNECTED { t.Fatalf("Status returned %d when reconnected instead of CONNECTED", status) } // Close the connection, reconnecting should still be false nc.Close() if nc.IsReconnecting() { t.Fatalf("IsReconnecting returned true after Close() was called.")<|fim▁hole|> } if status := nc.Status(); status != nats.CLOSED { t.Fatalf("Status returned %d after Close() was called instead of CLOSED", status) } } func TestFullFlushChanDuringReconnect(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() reconnectch := make(chan bool, 2) opts := nats.GetDefaultOptions() opts.Url = "nats://127.0.0.1:22222" opts.AllowReconnect = true opts.MaxReconnect = 10000 opts.ReconnectWait = 100 * time.Millisecond nats.ReconnectJitter(0, 0)(&opts) opts.ReconnectedCB = func(_ *nats.Conn) { reconnectch <- true } // Connect nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() // Channel used to make the go routine sending messages 
to stop. stop := make(chan bool) // While connected, publish as fast as we can go func() { for i := 0; ; i++ { _ = nc.Publish("foo", []byte("hello")) // Make sure we are sending at least flushChanSize (1024) messages // before potentially pausing. if i%2000 == 0 { select { case <-stop: return default: time.Sleep(100 * time.Millisecond) } } } }() // Send a bit... time.Sleep(500 * time.Millisecond) // Shut down the server ts.Shutdown() // Continue sending while we are disconnected time.Sleep(time.Second) // Restart the server ts = startReconnectServer(t) defer ts.Shutdown() // Wait for the reconnect CB to be invoked (but not for too long) if e := WaitTime(reconnectch, 5*time.Second); e != nil { t.Fatalf("Reconnect callback wasn't triggered: %v", e) } close(stop) } func TestReconnectVerbose(t *testing.T) { s := RunDefaultServer() defer s.Shutdown() o := nats.GetDefaultOptions() o.Verbose = true rch := make(chan bool) o.ReconnectedCB = func(_ *nats.Conn) { rch <- true } nc, err := o.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() err = nc.Flush() if err != nil { t.Fatalf("Error during flush: %v", err) } s.Shutdown() s = RunDefaultServer() defer s.Shutdown() if e := Wait(rch); e != nil { t.Fatal("Should have reconnected ok") } err = nc.Flush() if err != nil { t.Fatalf("Error during flush: %v", err) } } func TestReconnectBufSizeOption(t *testing.T) { s := RunDefaultServer() defer s.Shutdown() nc, err := nats.Connect("nats://127.0.0.1:4222", nats.ReconnectBufSize(32)) if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() if nc.Opts.ReconnectBufSize != 32 { t.Fatalf("ReconnectBufSize should be 32 but it is %d", nc.Opts.ReconnectBufSize) } } func TestReconnectBufSize(t *testing.T) { s := RunDefaultServer() defer s.Shutdown() o := nats.GetDefaultOptions() o.ReconnectBufSize = 32 // 32 bytes dch := make(chan bool) o.DisconnectedErrCB = func(_ *nats.Conn, _ error) { dch <- true } nc, err := o.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } defer nc.Close() err = nc.Flush() if err != nil { t.Fatalf("Error during flush: %v", err) } // Force disconnected state. s.Shutdown() if e := Wait(dch); e != nil { t.Fatal("DisconnectedErrCB should have been triggered") } msg := []byte("food") // 4 bytes paylaod, total proto is 16 bytes // These should work, 2X16 = 32 if err := nc.Publish("foo", msg); err != nil { t.Fatalf("Failed to publish message: %v\n", err) } if err := nc.Publish("foo", msg); err != nil { t.Fatalf("Failed to publish message: %v\n", err) } // This should fail since we have exhausted the backing buffer. if err := nc.Publish("foo", msg); err == nil { t.Fatalf("Expected to fail to publish message: got no error\n") } nc.Buffered() } // When a cluster is fronted by a single DNS name (desired) but communicates IPs to clients (also desired), // and we use TLS, we want to make sure we do the right thing connecting to an IP directly for TLS to work. // The reason this may happen is that the cluster has a single DNS name and a single certificate, but the cluster // wants to vend out IPs and not wait on DNS for topology changes and failover. func TestReconnectTLSHostNoIP(t *testing.T) { sa, optsA := RunServerWithConfig("./configs/tls_noip_a.conf") defer sa.Shutdown() sb, optsB := RunServerWithConfig("./configs/tls_noip_b.conf") defer sb.Shutdown() // Wait for cluster to form. 
wait := time.Now().Add(2 * time.Second) for time.Now().Before(wait) { sanr := sa.NumRoutes() sbnr := sb.NumRoutes() if sanr == 1 && sbnr == 1 { break } time.Sleep(50 * time.Millisecond) } endpoint := fmt.Sprintf("%s:%d", optsA.Host, optsA.Port) secureURL := fmt.Sprintf("tls://%s:%s@%s/", optsA.Username, optsA.Password, endpoint) dch := make(chan bool, 2) dcb := func(_ *nats.Conn, _ error) { dch <- true } rch := make(chan bool) rcb := func(_ *nats.Conn) { rch <- true } nc, err := nats.Connect(secureURL, nats.RootCAs("./configs/certs/ca.pem"), nats.DisconnectErrHandler(dcb), nats.ReconnectHandler(rcb)) if err != nil { t.Fatalf("Failed to create secure (TLS) connection: %v", err) } defer nc.Close() // Wait for DiscoveredServers() to be 1. wait = time.Now().Add(2 * time.Second) for time.Now().Before(wait) { if len(nc.DiscoveredServers()) == 1 { break } } // Make sure this is the server B info, and that it is an IP. expectedDiscoverURL := fmt.Sprintf("tls://%s:%d", optsB.Host, optsB.Port) eurl, err := url.Parse(expectedDiscoverURL) if err != nil { t.Fatalf("Expected to parse discovered server URL: %v", err) } if addr := net.ParseIP(eurl.Hostname()); addr == nil { t.Fatalf("Expected the discovered server to be an IP, got %v", eurl.Hostname()) } ds := nc.DiscoveredServers() if ds[0] != expectedDiscoverURL { t.Fatalf("Expected %q, got %q", expectedDiscoverURL, ds[0]) } // Force us to switch servers. sa.Shutdown() if e := Wait(dch); e != nil { t.Fatal("DisconnectedErrCB should have been triggered") } if e := WaitTime(rch, time.Second); e != nil { t.Fatalf("ReconnectedCB should have been triggered: %v", nc.LastError()) } } func TestConnCloseNoCallback(t *testing.T) { ts := startReconnectServer(t) defer ts.Shutdown() // create a connection that manually sets the options var conns []*nats.Conn cch := make(chan string, 2) opts := reconnectOpts opts.ClosedCB = func(_ *nats.Conn) { cch <- "manual" } opts.NoCallbacksAfterClientClose = true nc, err := opts.Connect() if err != nil { t.Fatalf("Should have connected ok: %v", err) } conns = append(conns, nc) // and another connection that uses the option nc2, err := nats.Connect(reconnectOpts.Url, nats.NoCallbacksAfterClientClose(), nats.ClosedHandler(func(_ *nats.Conn) { cch <- "opts" })) if err != nil { t.Fatalf("Should have connected ok: %v", err) } conns = append(conns, nc2) // defer close() for safety, flush() and close() for _, c := range conns { defer c.Close() c.Flush() // Close the connection, we don't expect to get a notification c.Close() } // if the timeout happens we didn't get data from the channel // if we get a value from the channel that connection type failed. select { case <-time.After(500 * time.Millisecond): // test passed - we timed so no callback was called case what := <-cch: t.Fatalf("%s issued a callback and it shouldn't have", what) } }<|fim▁end|>
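The tests above drive reconnection through a raw Options struct; the same knobs are available as functional options in nats.go. A minimal sketch, assuming a locally running server on the default port:

// Sketch: reconnect configuration via nats.go functional options.
package main

import (
	"log"
	"time"

	"github.com/nats-io/nats.go"
)

func main() {
	nc, err := nats.Connect("nats://127.0.0.1:4222",
		nats.MaxReconnects(10),
		nats.ReconnectWait(100*time.Millisecond),
		nats.DisconnectErrHandler(func(_ *nats.Conn, err error) { log.Println("disconnected:", err) }),
		nats.ReconnectHandler(func(c *nats.Conn) { log.Println("reconnected to", c.ConnectedUrl()) }),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer nc.Close()
}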
<|file_name|>basename.rs<|end_file_name|><|fim▁begin|>#![crate_name = "uu_basename"] /* * This file is part of the uutils coreutils package. * * (c) Jimmy Lu <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ #[macro_use] extern crate uucore; use std::io::Write; use std::path::{is_separator, PathBuf}; static NAME: &'static str = "basename"; static SYNTAX: &'static str = "NAME [SUFFIX]"; static SUMMARY: &'static str = "Print NAME with any leading directory components removed If specified, also remove a trailing SUFFIX"; static LONG_HELP: &'static str = ""; pub fn uumain(args: Vec<String>) -> i32 { // // Argument parsing // let matches = new_coreopts!(SYNTAX, SUMMARY, LONG_HELP) .optflag("a", "multiple", "Support more than one argument. Treat every argument as a name.") .optopt("s", "suffix", "Remove a trailing suffix. This option implies the -a option.", "SUFFIX") .optflag("z", "zero", "Output a zero byte (ASCII NUL) at the end of each line, rather than a newline.") .parse(args); // too few arguments if matches.free.len() < 1 { crash!( 1, "{0}: {1}\nTry '{0} --help' for more information.", NAME, "missing operand" ); } let opt_s = matches.opt_present("s"); let opt_a = matches.opt_present("a"); let opt_z = matches.opt_present("z"); let multiple_paths = opt_s || opt_a; // too many arguments if !multiple_paths && matches.free.len() > 2 { crash!( 1,<|fim▁hole|> "{0}: extra operand '{1}'\nTry '{0} --help' for more information.", NAME, matches.free[2] ); } let suffix = if opt_s { matches.opt_str("s").unwrap() } else if !opt_a && matches.free.len() > 1 { matches.free[1].clone() } else { "".to_owned() }; // // Main Program Processing // let paths = if multiple_paths { &matches.free[..] } else { &matches.free[0..1] }; let line_ending = if opt_z { "\0" } else { "\n" }; for path in paths { print!("{}{}", basename(&path, &suffix), line_ending); } 0 } fn basename(fullname: &str, suffix: &str) -> String { // Remove all platform-specific path separators from the end let mut path: String = fullname.chars().rev().skip_while(|&ch| is_separator(ch)).collect(); // Undo reverse path = path.chars().rev().collect(); // Convert to path buffer and get last path component let pb = PathBuf::from(path); match pb.components().last() { Some(c) => strip_suffix(c.as_os_str().to_str().unwrap(), suffix), None => "".to_owned() } } fn strip_suffix(name: &str, suffix: &str) -> String { if name == suffix { return name.to_owned(); } if name.ends_with(suffix) { return name[..name.len() - suffix.len()].to_owned(); } name.to_owned() }<|fim▁end|>
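A quick sanity sketch of the two helpers above (not part of the crate's test suite): trailing separators are stripped first, then the last path component is taken, then the suffix is removed unless it equals the whole name.

fn demo() {
    assert_eq!(basename("/usr/lib/", ""), "lib");      // trailing '/' ignored
    assert_eq!(basename("/usr/bin/sort", ""), "sort"); // last component wins
    assert_eq!(basename("stdio.h", ".h"), "stdio");    // suffix stripped
    assert_eq!(strip_suffix(".h", ".h"), ".h");        // name == suffix: kept whole
}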
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- from setuptools import setup, find_packages from codecs import open from os import path import re import ast here = path.abspath(path.dirname(__file__)) _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('radmyarchive/__init__.py', 'rb') as vf: version = str(ast.literal_eval(_version_re.search( vf.read().decode('utf-8')).group(1))) with open(path.join(here, 'README.rst'), encoding='utf-8') as f: readme_file = f.read() setup( name="radmyarchive", version=version, author="Ömer Fadıl Usta", author_email="[email protected]", packages=find_packages(), scripts=["scripts/RADMYARCHIVE.py"], url="https://github.com/usta/radmyarchive-py", license="BSD", keywords="exif image photo rename metadata arrange rearrange catalogue", description="A simple photo rearranger with help of EXIF tags",<|fim▁hole|> long_description=readme_file, classifiers=( "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.4", "Topic :: Utilities", ), )<|fim▁end|>
install_requires=['exifread', 'termcolor', 'colorama'],
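The version-extraction idiom in this setup.py is worth spelling out: the regex captures the right-hand side of the __version__ assignment and ast.literal_eval turns the quoted literal into a plain string without importing the package. A self-contained sketch with a fabricated version string:

import ast
import re

source = b"__version__ = '1.2.3'\n"
match = re.compile(r'__version__\s+=\s+(.*)').search(source.decode('utf-8'))
version = str(ast.literal_eval(match.group(1)))
assert version == '1.2.3'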
<|file_name|>so-KE.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ // THIS CODE IS GENERATED - DO NOT MODIFY // See angular/tools/gulp-tasks/cldr/extract.js function plural(n: number): number { if (n === 1) return 1; return 5; } export default [ 'so-KE', [ ['sn.', 'gn.'], , ], , [ ['A', 'I', 'T', 'A', 'Kh', 'J', 'S'], ['Axd', 'Isn', 'Tal', 'Arb', 'Kha', 'Jim', 'Sab'], ['Axad', 'Isniin', 'Talaado', 'Arbaco', 'Khamiis', 'Jimco', 'Sabti'], ['Axd', 'Isn', 'Tal', 'Arb', 'Kha', 'Jim', 'Sab'] ], , [ ['K', 'L', 'S', 'A', 'S', 'L', 'T', 'S', 'S', 'T', 'K', 'L'], ['Kob', 'Lab', 'Sad', 'Afr', 'Sha', 'Lix', 'Tod', 'Sid', 'Sag', 'Tob', 'KIT', 'LIT'], [ 'Bisha Koobaad', 'Bisha Labaad', 'Bisha Saddexaad', 'Bisha Afraad', 'Bisha Shanaad', 'Bisha Lixaad', 'Bisha Todobaad', 'Bisha Sideedaad', 'Bisha Sagaalaad', 'Bisha Tobnaad', 'Bisha Kow iyo Tobnaad', 'Bisha Laba iyo Tobnaad' ] ], [<|fim▁hole|> [ 'Bisha Koobaad', 'Bisha Labaad', 'Bisha Saddexaad', 'Bisha Afraad', 'Bisha Shanaad', 'Bisha Lixaad', 'Bisha Todobaad', 'Bisha Sideedaad', 'Bisha Sagaalaad', 'Bisha Tobnaad', 'Bisha Kow iyo Tobnaad', 'Bisha Laba iyo Tobnaad' ] ], [ ['CK', 'CD'], , ], 0, [6, 0], ['dd/MM/yy', 'dd-MMM-y', 'dd MMMM y', 'EEEE, MMMM dd, y'], ['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'], [ '{1} {0}', , , ], ['.', ',', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'], ['#,##0.###', '#,##0%', '¤#,##0.00', '#E0'], 'Ksh', 'KES', plural ];<|fim▁end|>
['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12'], ['Kob', 'Lab', 'Sad', 'Afr', 'Sha', 'Lix', 'Tod', 'Sid', 'Sag', 'Tob', 'KIT', 'LIT'],
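Generated locale files like the one above are meant to be registered once at application startup; the plural function maps n === 1 to the 'one' category (index 1) and everything else to 'other' (index 5). A usage sketch, assuming a standard Angular project layout:

import { registerLocaleData } from '@angular/common';
import localeSoKE from './so-KE';

registerLocaleData(localeSoKE, 'so-KE');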
<|file_name|>massemailtxt.py<|end_file_name|><|fim▁begin|>import os.path import time from django.core.management.base import BaseCommand from django.conf import settings import mitxmako.middleware as middleware from django.core.mail import send_mass_mail import sys import datetime middleware.MakoMiddleware() def chunks(l, n): """ Yield successive n-sized chunks from l. """ for i in xrange(0, len(l), n): yield l[i:i + n] class Command(BaseCommand): help = \ '''Sends an e-mail to all users in a text file. E.g. manage.py userlist.txt message logfile.txt rate userlist.txt -- list of all users message -- prefix for template with message logfile.txt -- where to log progress rate -- messages per second ''' log_file = None def hard_log(self, text): self.log_file.write(datetime.datetime.utcnow().isoformat() + ' -- ' + text + '\n') def handle(self, *args, **options):<|fim▁hole|> users = [u.strip() for u in open(user_file).readlines()] message = middleware.lookup['main'].get_template('emails/' + message_base + "_body.txt").render() subject = middleware.lookup['main'].get_template('emails/' + message_base + "_subject.txt").render().strip() rate = int(ratestr) self.log_file = open(logfilename, "a+", buffering=0) i = 0 for users in chunks(users, rate): emails = [(subject, message, settings.DEFAULT_FROM_EMAIL, [u]) for u in users] self.hard_log(" ".join(users)) send_mass_mail(emails, fail_silently=False) time.sleep(1) print datetime.datetime.utcnow().isoformat(), i i = i + len(users) # Emergency interruptor if os.path.exists("/tmp/stopemails.txt"): self.log_file.close() sys.exit(-1) self.log_file.close()<|fim▁end|>
        (user_file, message_base, logfilename, ratestr) = args
        users = [u.strip() for u in open(user_file).readlines()]
        message = middleware.lookup['main'].get_template('emails/' + message_base + "_body.txt").render()
        subject = middleware.lookup['main'].get_template('emails/' + message_base + "_subject.txt").render().strip()
        rate = int(ratestr)
        self.log_file = open(logfilename, "a+", buffering=0)
        i = 0
        for users in chunks(users, rate):
            emails = [(subject, message, settings.DEFAULT_FROM_EMAIL, [u]) for u in users]
            self.hard_log(" ".join(users))
            send_mass_mail(emails, fail_silently=False)
            time.sleep(1)
            print datetime.datetime.utcnow().isoformat(), i
            i = i + len(users)
            # Emergency interrupter
            if os.path.exists("/tmp/stopemails.txt"):
                self.log_file.close()
                sys.exit(-1)
        self.log_file.close()<|fim▁end|>
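A small check of the chunks() helper this command is built on: it yields consecutive n-sized slices, with a shorter final slice when the length is not a multiple of n. The addresses are fabricated placeholders:

users = ['a@example.org', 'b@example.org', 'c@example.org', 'd@example.org', 'e@example.org']
assert list(chunks(users, 2)) == [['a@example.org', 'b@example.org'],
                                  ['c@example.org', 'd@example.org'],
                                  ['e@example.org']]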
<|file_name|>parsecmudict.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- '''CMU dict file parser Copyright (C) 2010 Yosuke Matsusaka Intelligent Systems Research Institute, National Institute of Advanced Industrial Science and Technology (AIST),<|fim▁hole|>''' class CMUDict: """ Utility class to parse CMU Pronunciation Dictionaly.""" def __init__(self, fname): self._fname = fname self._dict = {} self.parse(self._fname) def parse(self, fname): f = open(fname, 'r') f.readline() for l in f: t = l.strip().split(' ', 2) w = t[0].strip('()"') v = t[2].replace('(', '').replace(')', '').replace(' 0', '').replace(' 1', '') try: self._dict[w].append(v) except KeyError: self._dict[w] = [v,] def lookup(self, w): try: return self._dict[w] except KeyError: return [] if __name__ == '__main__': doc = CMUDict('/usr/share/festival/dicts/cmu/cmudict-0.4.out') print doc.lookup('hello')<|fim▁end|>
Japan
All rights reserved.
Licensed under the Eclipse Public License -v 1.0 (EPL)
http://www.opensource.org/licenses/eclipse-1.0.txt
'''

class CMUDict:
    """ Utility class to parse CMU Pronunciation Dictionary."""
    def __init__(self, fname):
        self._fname = fname
        self._dict = {}
        self.parse(self._fname)

    def parse(self, fname):
        f = open(fname, 'r')
        f.readline()
        for l in f:
            t = l.strip().split(' ', 2)
            w = t[0].strip('()"')
            v = t[2].replace('(', '').replace(')', '').replace(' 0', '').replace(' 1', '')
            try:
                self._dict[w].append(v)
            except KeyError:
                self._dict[w] = [v,]

    def lookup(self, w):
        try:
            return self._dict[w]
        except KeyError:
            return []

if __name__ == '__main__':
    doc = CMUDict('/usr/share/festival/dicts/cmu/cmudict-0.4.out')
    print doc.lookup('hello')<|fim▁end|>
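To make the parsing concrete, here is what parse() does to a single line; the line below is a made-up stand-in for the festival dictionary format, not a real entry from cmudict-0.4.out:

line = '("hello" nil (hh ax l ow))'
t = line.strip().split(' ', 2)   # ['("hello"', 'nil', '(hh ax l ow))']
w = t[0].strip('()"')            # 'hello'
v = t[2].replace('(', '').replace(')', '').replace(' 0', '').replace(' 1', '')
assert (w, v) == ('hello', 'hh ax l ow')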
<|file_name|>pipeaddflavor_string.go<|end_file_name|><|fim▁begin|>// Code generated by "stringer -type=pipeAddFlavor constants.go"; DO NOT EDIT. package bot import "strconv" func _() { // An "invalid array index" compiler error signifies that the constant values have changed. // Re-run the stringer command to generate them again. var x [1]struct{} _ = x[flavorSpawn-0] _ = x[flavorAdd-1] _ = x[flavorFinal-2] _ = x[flavorFail-3] } const _pipeAddFlavor_name = "flavorSpawnflavorAddflavorFinalflavorFail"<|fim▁hole|>func (i pipeAddFlavor) String() string { if i < 0 || i >= pipeAddFlavor(len(_pipeAddFlavor_index)-1) { return "pipeAddFlavor(" + strconv.FormatInt(int64(i), 10) + ")" } return _pipeAddFlavor_name[_pipeAddFlavor_index[i]:_pipeAddFlavor_index[i+1]] }<|fim▁end|>
var _pipeAddFlavor_index = [...]uint8{0, 11, 20, 31, 41}
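A worked example of the stringer technique above: the index array stores cumulative offsets into the concatenated name string, so each constant's name is a slice of it. Standalone sketch with the generated values copied in:

package main

import "fmt"

func main() {
	name := "flavorSpawnflavorAddflavorFinalflavorFail"
	index := [...]uint8{0, 11, 20, 31, 41}
	i := 1                                 // flavorAdd
	fmt.Println(name[index[i]:index[i+1]]) // prints "flavorAdd"
}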
<|file_name|>test_node_handler.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2013 Mirantis, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_serialization import jsonutils from nailgun import consts from nailgun import objects from nailgun.db.sqlalchemy.models import Node from nailgun.test.base import BaseIntegrationTest from nailgun.test.base import fake_tasks from nailgun.utils import reverse class TestHandlers(BaseIntegrationTest): def test_node_get(self): node = self.env.create_node(api=False) resp = self.app.get( reverse('NodeHandler', kwargs={'obj_id': node.id}), headers=self.default_headers) self.assertEqual(200, resp.status_code) self.assertEqual(node.id, resp.json_body['id']) self.assertEqual(node.name, resp.json_body['name']) self.assertEqual(node.mac, resp.json_body['mac']) self.assertEqual( node.pending_addition, resp.json_body['pending_addition']) self.assertEqual( node.pending_deletion, resp.json_body['pending_deletion']) self.assertEqual(node.status, resp.json_body['status']) self.assertEqual( node.meta['cpu']['total'], resp.json_body['meta']['cpu']['total'] ) self.assertEqual(node.meta['disks'], resp.json_body['meta']['disks']) self.assertEqual(node.meta['memory'], resp.json_body['meta']['memory']) def test_node_creation_fails_with_wrong_id(self): node_id = '080000000003' resp = self.app.post( reverse('NodeCollectionHandler'), jsonutils.dumps({'id': node_id, 'mac': self.env.generate_random_mac(), 'status': 'discover'}), headers=self.default_headers, expect_errors=True) self.assertEqual(400, resp.status_code) def test_node_deletion(self): node = self.env.create_node(api=False) resp = self.app.delete( reverse('NodeHandler', kwargs={'obj_id': node.id}), "", headers=self.default_headers, expect_errors=True ) self.assertEqual(resp.status_code, 200) def test_node_valid_metadata_gets_updated(self): new_metadata = self.env.default_metadata() node = self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'meta': new_metadata}), headers=self.default_headers) self.assertEqual(resp.status_code, 200) self.db.refresh(node) nodes = self.db.query(Node).filter( Node.id == node.id ).all() self.assertEqual(len(nodes), 1) self.assertEqual(nodes[0].meta, new_metadata) def test_node_hostname_gets_updated(self): node = self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': 'new-name'}), headers=self.default_headers) self.assertEqual(200, resp.status_code) self.db.refresh(node) # lets put the same hostname again resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': 'new-name'}), headers=self.default_headers) self.assertEqual(200, resp.status_code) self.db.refresh(node) nodes = self.db.query(Node).filter( Node.id == node.id ).all() self.assertEqual(len(nodes), 1) self.assertEqual(nodes[0].hostname, 'new-name') def test_node_hostname_gets_updated_invalid(self): node = 
self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': '!#invalid_%&name'}), headers=self.default_headers, expect_errors=True) self.assertEqual(400, resp.status_code) def test_node_hostname_gets_updated_ssl_conflict(self): cluster = self.env.create_cluster(api=False) node = self.env.create_node(cluster_id=cluster.id) cluster_attrs = objects.Cluster.get_attributes(cluster).editable test_hostname = 'test-hostname' cluster_attrs['public_ssl']['hostname']['value'] = test_hostname objects.Cluster.update_attributes( cluster, {'editable': cluster_attrs}) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': test_hostname}), headers=self.default_headers, expect_errors=True) self.assertEqual(400, resp.status_code) self.assertEqual( "New hostname '{0}' conflicts with public TLS endpoint" .format(test_hostname), resp.json_body['message']) def test_node_hostname_gets_updated_after_provisioning_starts(self): node = self.env.create_node(api=False, status=consts.NODE_STATUSES.provisioning) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': 'new-name'}), headers=self.default_headers, expect_errors=True) self.assertEqual(403, resp.status_code) self.assertEqual( 'Node hostname may be changed only before provisioning.', resp.json_body['message']) def test_node_hostname_gets_updated_duplicate(self): node = self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({'hostname': 'new-name'}), headers=self.default_headers) self.assertEqual(200, resp.status_code) self.db.refresh(node) node_2 = self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node_2.id}), jsonutils.dumps({'hostname': 'new-name'}), headers=self.default_headers, expect_errors=True) self.assertEqual(409, resp.status_code) def test_node_valid_status_gets_updated(self): node = self.env.create_node(api=False) params = {'status': 'error'} resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps(params), headers=self.default_headers) self.assertEqual(resp.status_code, 200) def test_node_action_flags_are_set(self): flags = ['pending_addition', 'pending_deletion'] node = self.env.create_node(api=False) for flag in flags: resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps({flag: True}), headers=self.default_headers ) self.assertEqual(resp.status_code, 200) self.db.refresh(node) node_from_db = self.db.query(Node).filter( Node.id == node.id ).first() for flag in flags: self.assertEqual(getattr(node_from_db, flag), True) def test_put_returns_400_if_no_body(self): node = self.env.create_node(api=False) resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), "", headers=self.default_headers, expect_errors=True) self.assertEqual(resp.status_code, 400) def test_put_returns_400_if_wrong_status(self): node = self.env.create_node(api=False) params = {'status': 'invalid_status'} resp = self.app.put( reverse('NodeHandler', kwargs={'obj_id': node.id}), jsonutils.dumps(params), headers=self.default_headers, expect_errors=True) self.assertEqual(resp.status_code, 400) def test_do_not_create_notification_if_disks_meta_is_empty(self): def get_notifications_count(**kwargs): return objects.NotificationCollection.count( objects.NotificationCollection.filter_by(None, **kwargs) ) self.env.create( nodes_kwargs=[ 
{'roles': ['controller'], 'pending_addition': True}, ] ) node = self.env.nodes[0] node.meta['disks'] = [] node = { 'id': node.id, 'meta': node.meta, 'mac': node.mac, 'status': node.status } before_count = get_notifications_count(node_id=node['id']) for i in range(5): response = self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps(node), headers=self.default_headers ) self.assertEqual(response.status_code, 200) # check there's no notification created after_count = get_notifications_count(node_id=node['id']) self.assertEqual(before_count, after_count) def test_no_volumes_changes_if_node_is_locked(self): self.env.create( nodes_kwargs=[ {'roles': ['controller'], 'pending_addition': True, 'status': consts.NODE_STATUSES.ready}, ] ) node = self.env.nodes[0] node_data = { 'id': node.id, 'meta': node.meta, 'mac': node.mac, 'status': node.status } node_data['meta']['disks'] = [] response = self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps(node_data), headers=self.default_headers ) self.assertEqual(response.status_code, 200) # check volumes data wasn't reset self.assertGreater(len(node.meta['disks']), 0) @fake_tasks() def test_interface_changes_for_new_node(self): # Creating cluster with node self.env.create( cluster_kwargs={ 'name': 'test_name' }, nodes_kwargs=[ {'roles': ['controller'], 'pending_addition': True} ] ) cluster = self.env.clusters[0] def filter_changes(chg_type, chg_list): return filter(lambda x: x.get('name') == chg_type, chg_list) changes = filter_changes( consts.CLUSTER_CHANGES.interfaces, cluster['changes'] ) # Checking interfaces change added after node creation self.assertEquals(1, len(changes)) deployment_task = self.env.launch_deployment() self.env.wait_ready(deployment_task) changes = filter_changes( consts.CLUSTER_CHANGES.interfaces, cluster['changes'] ) # Checking no interfaces change after deployment self.assertEquals(0, len(changes)) def test_update_node_with_wrong_ip(self): node = self.env.create_node( api=False, ip='10.20.0.2', status=consts.NODE_STATUSES.deploying) ipaddress = '192.168.0.10' self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps({'id': node.id, 'ip': ipaddress}), headers=self.default_headers) self.assertNotEqual(node.ip, ipaddress) <|fim▁hole|> self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps({'id': node.id, 'ip': ipaddress}), headers=self.default_headers) self.assertEqual(node.ip, ipaddress) def test_update_node_with_none_ip(self): node = self.env.create_node(api=False, ip='10.20.0.2') ipaddress = None resp = self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps({'id': node.id, 'ip': ipaddress}), headers=self.default_headers, expect_errors=True) self.assertEqual(resp.status_code, 400) ipaddress = '10.20.0.4' resp = self.app.put( reverse('NodeAgentHandler'), jsonutils.dumps({'id': node.id, 'ip': ipaddress}), headers=self.default_headers) self.assertEqual(resp.status_code, 200)<|fim▁end|>
ipaddress = '10.20.0.25'
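The handler tests above repeat one request recipe; a condensed sketch of it, where the helper name and payload are illustrative rather than part of the test suite:

def put_json(app, handler, obj_id, payload, headers, expect_errors=False):
    # reverse() resolves the handler route; the body is JSON-encoded.
    return app.put(
        reverse(handler, kwargs={'obj_id': obj_id}),
        jsonutils.dumps(payload),
        headers=headers,
        expect_errors=expect_errors)

# e.g. resp = put_json(self.app, 'NodeHandler', node.id,
#                      {'hostname': 'n1'}, self.default_headers)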
<|file_name|>control_overcloud.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 # Copyright (c) 2016-2021 Dell Inc. or its subsidiaries. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import json import os from ironicclient import client from subprocess import check_output from credential_helper import CredentialHelper def main(): parser = argparse.ArgumentParser() parser.add_argument("--power", required=True, default=None, choices=["on", "off", "reset", "cycle"], help="Control power state of all overcloud nodes") args = parser.parse_args() os_auth_url, os_tenant_name, os_username, os_password, \ os_user_domain_name, os_project_domain_name = \ CredentialHelper.get_undercloud_creds() kwargs = {'os_username': os_username, 'os_password': os_password, 'os_auth_url': os_auth_url, 'os_tenant_name': os_tenant_name, 'os_user_domain_name': os_user_domain_name, 'os_project_domain_name': os_project_domain_name} ironic = client.get_client(1, **kwargs) for node in ironic.node.list(detail=True): ip, username, password = \ CredentialHelper.get_drac_creds_from_node(node) cmd = "ipmitool -H {} -I lanplus -U {} -P '{}' chassis power {}". \ format(ip, username, password, args.power) print(cmd) os.system(cmd) if __name__ == "__main__":<|fim▁hole|><|fim▁end|>
main()
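For reference, the loop above shells out to a command of the following shape; the address and credentials here are fabricated placeholders:

# ipmitool -H 192.0.2.10 -I lanplus -U root -P 'calvin' chassis power off
cmd = "ipmitool -H {} -I lanplus -U {} -P '{}' chassis power {}".format(
    '192.0.2.10', 'root', 'calvin', 'off')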
<|file_name|>nutrition.ts<|end_file_name|><|fim▁begin|>import { NutritionInterface } from '../interfaces/nutrition';<|fim▁hole|>import { Injectable } from '@angular/core'; @Injectable() export class Nutrition implements NutritionInterface { public article: Array<Article>; public meal: Array<Meal>; constructor( article: Array<Article> = [], meal: Array<Meal> = [] ) { this.article = article; this.meal = meal; } }<|fim▁end|>
import { Article } from './article'; import { Meal } from './meal';
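Usage sketch for the class above: both constructor arguments default to empty arrays, so a bare construction yields an empty log (the Article and Meal shapes are assumed from their imports):

const empty = new Nutrition();          // { article: [], meal: [] }
const explicit = new Nutrition([], []); // equivalent explicit form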
<|file_name|>container.go<|end_file_name|><|fim▁begin|>package storage // Copyright 2017 Microsoft Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import ( "encoding/xml" "errors" "fmt" "io" "net/http" "net/url" "strconv" "strings" "time" ) // Container represents an Azure container. type Container struct { bsc *BlobStorageClient Name string `xml:"Name"` Properties ContainerProperties `xml:"Properties"` Metadata map[string]string sasuri url.URL } // Client returns the HTTP client used by the Container reference. func (c *Container) Client() *Client { return &c.bsc.client } func (c *Container) buildPath() string { return fmt.Sprintf("/%s", c.Name) } // GetURL gets the canonical URL to the container. // This method does not create a publicly accessible URL if the container // is private and this method does not check if the blob exists. func (c *Container) GetURL() string { container := c.Name if container == "" { container = "$root" } return c.bsc.client.getEndpoint(blobServiceName, pathForResource(container, ""), nil) } // ContainerSASOptions are options to construct a container SAS // URI. // See https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas type ContainerSASOptions struct { ContainerSASPermissions OverrideHeaders SASOptions } // ContainerSASPermissions includes the available permissions for // a container SAS URI. type ContainerSASPermissions struct { BlobServiceSASPermissions List bool } // GetSASURI creates an URL to the container which contains the Shared // Access Signature with the specified options. // // See https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas func (c *Container) GetSASURI(options ContainerSASOptions) (string, error) { uri := c.GetURL() signedResource := "c" canonicalizedResource, err := c.bsc.client.buildCanonicalizedResource(uri, c.bsc.auth, true) if err != nil { return "", err } // build permissions string permissions := options.BlobServiceSASPermissions.buildString() if options.List { permissions += "l" } return c.bsc.client.blobAndFileSASURI(options.SASOptions, uri, permissions, canonicalizedResource, signedResource, options.OverrideHeaders) }<|fim▁hole|>// various endpoints like ListContainers. type ContainerProperties struct { LastModified string `xml:"Last-Modified"` Etag string `xml:"Etag"` LeaseStatus string `xml:"LeaseStatus"` LeaseState string `xml:"LeaseState"` LeaseDuration string `xml:"LeaseDuration"` } // ContainerListResponse contains the response fields from // ListContainers call. // // See https://msdn.microsoft.com/en-us/library/azure/dd179352.aspx type ContainerListResponse struct { XMLName xml.Name `xml:"EnumerationResults"` Xmlns string `xml:"xmlns,attr"` Prefix string `xml:"Prefix"` Marker string `xml:"Marker"` NextMarker string `xml:"NextMarker"` MaxResults int64 `xml:"MaxResults"` Containers []Container `xml:"Containers>Container"` } // BlobListResponse contains the response fields from ListBlobs call. 
// // See https://msdn.microsoft.com/en-us/library/azure/dd135734.aspx type BlobListResponse struct { XMLName xml.Name `xml:"EnumerationResults"` Xmlns string `xml:"xmlns,attr"` Prefix string `xml:"Prefix"` Marker string `xml:"Marker"` NextMarker string `xml:"NextMarker"` MaxResults int64 `xml:"MaxResults"` Blobs []Blob `xml:"Blobs>Blob"` // BlobPrefix is used to traverse blobs as if it were a file system. // It is returned if ListBlobsParameters.Delimiter is specified. // The list here can be thought of as "folders" that may contain // other folders or blobs. BlobPrefixes []string `xml:"Blobs>BlobPrefix>Name"` // Delimiter is used to traverse blobs as if it were a file system. // It is returned if ListBlobsParameters.Delimiter is specified. Delimiter string `xml:"Delimiter"` } // IncludeBlobDataset has options to include in a list blobs operation type IncludeBlobDataset struct { Snapshots bool Metadata bool UncommittedBlobs bool Copy bool } // ListBlobsParameters defines the set of customizable // parameters to make a List Blobs call. // // See https://msdn.microsoft.com/en-us/library/azure/dd135734.aspx type ListBlobsParameters struct { Prefix string Delimiter string Marker string Include *IncludeBlobDataset MaxResults uint Timeout uint RequestID string } func (p ListBlobsParameters) getParameters() url.Values { out := url.Values{} if p.Prefix != "" { out.Set("prefix", p.Prefix) } if p.Delimiter != "" { out.Set("delimiter", p.Delimiter) } if p.Marker != "" { out.Set("marker", p.Marker) } if p.Include != nil { include := []string{} include = addString(include, p.Include.Snapshots, "snapshots") include = addString(include, p.Include.Metadata, "metadata") include = addString(include, p.Include.UncommittedBlobs, "uncommittedblobs") include = addString(include, p.Include.Copy, "copy") fullInclude := strings.Join(include, ",") out.Set("include", fullInclude) } if p.MaxResults != 0 { out.Set("maxresults", strconv.FormatUint(uint64(p.MaxResults), 10)) } if p.Timeout != 0 { out.Set("timeout", strconv.FormatUint(uint64(p.Timeout), 10)) } return out } func addString(datasets []string, include bool, text string) []string { if include { datasets = append(datasets, text) } return datasets } // ContainerAccessType defines the access level to the container from a public // request. // // See https://msdn.microsoft.com/en-us/library/azure/dd179468.aspx and "x-ms- // blob-public-access" header. type ContainerAccessType string // Access options for containers const ( ContainerAccessTypePrivate ContainerAccessType = "" ContainerAccessTypeBlob ContainerAccessType = "blob" ContainerAccessTypeContainer ContainerAccessType = "container" ) // ContainerAccessPolicy represents each access policy in the container ACL. type ContainerAccessPolicy struct { ID string StartTime time.Time ExpiryTime time.Time CanRead bool CanWrite bool CanDelete bool } // ContainerPermissions represents the container ACLs. type ContainerPermissions struct { AccessType ContainerAccessType AccessPolicies []ContainerAccessPolicy } // ContainerAccessHeader references header used when setting/getting container ACL const ( ContainerAccessHeader string = "x-ms-blob-public-access" ) // GetBlobReference returns a Blob object for the specified blob name. 
func (c *Container) GetBlobReference(name string) *Blob { return &Blob{ Container: c, Name: name, } } // CreateContainerOptions includes the options for a create container operation type CreateContainerOptions struct { Timeout uint Access ContainerAccessType `header:"x-ms-blob-public-access"` RequestID string `header:"x-ms-client-request-id"` } // Create creates a blob container within the storage account // with given name and access level. Returns error if container already exists. // // See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/Create-Container func (c *Container) Create(options *CreateContainerOptions) error { resp, err := c.create(options) if err != nil { return err } readAndCloseBody(resp.body) return checkRespCode(resp.statusCode, []int{http.StatusCreated}) } // CreateIfNotExists creates a blob container if it does not exist. Returns // true if container is newly created or false if container already exists. func (c *Container) CreateIfNotExists(options *CreateContainerOptions) (bool, error) { resp, err := c.create(options) if resp != nil { defer readAndCloseBody(resp.body) if resp.statusCode == http.StatusCreated || resp.statusCode == http.StatusConflict { return resp.statusCode == http.StatusCreated, nil } } return false, err } func (c *Container) create(options *CreateContainerOptions) (*storageResponse, error) { query := url.Values{"restype": {"container"}} headers := c.bsc.client.getStandardHeaders() headers = c.bsc.client.addMetadataToHeaders(headers, c.Metadata) if options != nil { query = addTimeout(query, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), query) return c.bsc.client.exec(http.MethodPut, uri, headers, nil, c.bsc.auth) } // Exists returns true if a container with given name exists // on the storage account, otherwise returns false. 
func (c *Container) Exists() (bool, error) { q := url.Values{"restype": {"container"}} var uri string if c.bsc.client.isServiceSASClient() { q = mergeParams(q, c.sasuri.Query()) newURI := c.sasuri newURI.RawQuery = q.Encode() uri = newURI.String() } else { if c.bsc.client.isAccountSASClient() { q = mergeParams(q, c.bsc.client.accountSASToken) } uri = c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), q) } headers := c.bsc.client.getStandardHeaders() resp, err := c.bsc.client.exec(http.MethodHead, uri, headers, nil, c.bsc.auth) if resp != nil { defer readAndCloseBody(resp.body) if resp.statusCode == http.StatusOK || resp.statusCode == http.StatusNotFound { return resp.statusCode == http.StatusOK, nil } } return false, err } // SetContainerPermissionOptions includes options for a set container permissions operation type SetContainerPermissionOptions struct { Timeout uint LeaseID string `header:"x-ms-lease-id"` IfModifiedSince *time.Time `header:"If-Modified-Since"` IfUnmodifiedSince *time.Time `header:"If-Unmodified-Since"` RequestID string `header:"x-ms-client-request-id"` } // SetPermissions sets up container permissions // See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/Set-Container-ACL func (c *Container) SetPermissions(permissions ContainerPermissions, options *SetContainerPermissionOptions) error { body, length, err := generateContainerACLpayload(permissions.AccessPolicies) if err != nil { return err } params := url.Values{ "restype": {"container"}, "comp": {"acl"}, } headers := c.bsc.client.getStandardHeaders() headers = addToHeaders(headers, ContainerAccessHeader, string(permissions.AccessType)) headers["Content-Length"] = strconv.Itoa(length) if options != nil { params = addTimeout(params, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), params) resp, err := c.bsc.client.exec(http.MethodPut, uri, headers, body, c.bsc.auth) if err != nil { return err } defer readAndCloseBody(resp.body) if err := checkRespCode(resp.statusCode, []int{http.StatusOK}); err != nil { return errors.New("Unable to set permissions") } return nil } // GetContainerPermissionOptions includes options for a get container permissions operation type GetContainerPermissionOptions struct { Timeout uint LeaseID string `header:"x-ms-lease-id"` RequestID string `header:"x-ms-client-request-id"` } // GetPermissions gets the container permissions as per https://msdn.microsoft.com/en-us/library/azure/dd179469.aspx // If timeout is 0 then it will not be passed to Azure // leaseID will only be passed to Azure if populated func (c *Container) GetPermissions(options *GetContainerPermissionOptions) (*ContainerPermissions, error) { params := url.Values{ "restype": {"container"}, "comp": {"acl"}, } headers := c.bsc.client.getStandardHeaders() if options != nil { params = addTimeout(params, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), params) resp, err := c.bsc.client.exec(http.MethodGet, uri, headers, nil, c.bsc.auth) if err != nil { return nil, err } defer resp.body.Close() var ap AccessPolicy err = xmlUnmarshal(resp.body, &ap.SignedIdentifiersList) if err != nil { return nil, err } return buildAccessPolicy(ap, &resp.headers), nil } func buildAccessPolicy(ap AccessPolicy, headers *http.Header) *ContainerPermissions { // containerAccess. 
Blob, Container, empty containerAccess := headers.Get(http.CanonicalHeaderKey(ContainerAccessHeader)) permissions := ContainerPermissions{ AccessType: ContainerAccessType(containerAccess), AccessPolicies: []ContainerAccessPolicy{}, } for _, policy := range ap.SignedIdentifiersList.SignedIdentifiers { capd := ContainerAccessPolicy{ ID: policy.ID, StartTime: policy.AccessPolicy.StartTime, ExpiryTime: policy.AccessPolicy.ExpiryTime, } capd.CanRead = updatePermissions(policy.AccessPolicy.Permission, "r") capd.CanWrite = updatePermissions(policy.AccessPolicy.Permission, "w") capd.CanDelete = updatePermissions(policy.AccessPolicy.Permission, "d") permissions.AccessPolicies = append(permissions.AccessPolicies, capd) } return &permissions } // DeleteContainerOptions includes options for a delete container operation type DeleteContainerOptions struct { Timeout uint LeaseID string `header:"x-ms-lease-id"` IfModifiedSince *time.Time `header:"If-Modified-Since"` IfUnmodifiedSince *time.Time `header:"If-Unmodified-Since"` RequestID string `header:"x-ms-client-request-id"` } // Delete deletes the container with given name on the storage // account. If the container does not exist returns error. // // See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/delete-container func (c *Container) Delete(options *DeleteContainerOptions) error { resp, err := c.delete(options) if err != nil { return err } readAndCloseBody(resp.body) return checkRespCode(resp.statusCode, []int{http.StatusAccepted}) } // DeleteIfExists deletes the container with given name on the storage // account if it exists. Returns true if container is deleted with this call, or // false if the container did not exist at the time of the Delete Container // operation. // // See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/delete-container func (c *Container) DeleteIfExists(options *DeleteContainerOptions) (bool, error) { resp, err := c.delete(options) if resp != nil { defer readAndCloseBody(resp.body) if resp.statusCode == http.StatusAccepted || resp.statusCode == http.StatusNotFound { return resp.statusCode == http.StatusAccepted, nil } } return false, err } func (c *Container) delete(options *DeleteContainerOptions) (*storageResponse, error) { query := url.Values{"restype": {"container"}} headers := c.bsc.client.getStandardHeaders() if options != nil { query = addTimeout(query, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), query) return c.bsc.client.exec(http.MethodDelete, uri, headers, nil, c.bsc.auth) } // ListBlobs returns an object that contains list of blobs in the container, // pagination token and other information in the response of List Blobs call. 
// // See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/List-Blobs func (c *Container) ListBlobs(params ListBlobsParameters) (BlobListResponse, error) { q := mergeParams(params.getParameters(), url.Values{ "restype": {"container"}, "comp": {"list"}, }) var uri string if c.bsc.client.isServiceSASClient() { q = mergeParams(q, c.sasuri.Query()) newURI := c.sasuri newURI.RawQuery = q.Encode() uri = newURI.String() } else { if c.bsc.client.isAccountSASClient() { q = mergeParams(q, c.bsc.client.accountSASToken) } uri = c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), q) } headers := c.bsc.client.getStandardHeaders() headers = addToHeaders(headers, "x-ms-client-request-id", params.RequestID) var out BlobListResponse resp, err := c.bsc.client.exec(http.MethodGet, uri, headers, nil, c.bsc.auth) if err != nil { return out, err } defer resp.body.Close() err = xmlUnmarshal(resp.body, &out) for i := range out.Blobs { out.Blobs[i].Container = c } return out, err } // ContainerMetadataOptions includes options for container metadata operations type ContainerMetadataOptions struct { Timeout uint LeaseID string `header:"x-ms-lease-id"` RequestID string `header:"x-ms-client-request-id"` } // SetMetadata replaces the metadata for the specified container. // // Some keys may be converted to Camel-Case before sending. All keys // are returned in lower case by GetBlobMetadata. HTTP header names // are case-insensitive so case munging should not matter to other // applications either. // // See https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata func (c *Container) SetMetadata(options *ContainerMetadataOptions) error { params := url.Values{ "comp": {"metadata"}, "restype": {"container"}, } headers := c.bsc.client.getStandardHeaders() headers = c.bsc.client.addMetadataToHeaders(headers, c.Metadata) if options != nil { params = addTimeout(params, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), params) resp, err := c.bsc.client.exec(http.MethodPut, uri, headers, nil, c.bsc.auth) if err != nil { return err } readAndCloseBody(resp.body) return checkRespCode(resp.statusCode, []int{http.StatusOK}) } // GetMetadata returns all user-defined metadata for the specified container. // // All metadata keys will be returned in lower case. (HTTP header // names are case-insensitive.) 
// // See https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-metadata func (c *Container) GetMetadata(options *ContainerMetadataOptions) error { params := url.Values{ "comp": {"metadata"}, "restype": {"container"}, } headers := c.bsc.client.getStandardHeaders() if options != nil { params = addTimeout(params, options.Timeout) headers = mergeHeaders(headers, headersFromStruct(*options)) } uri := c.bsc.client.getEndpoint(blobServiceName, c.buildPath(), params) resp, err := c.bsc.client.exec(http.MethodGet, uri, headers, nil, c.bsc.auth) if err != nil { return err } readAndCloseBody(resp.body) if err := checkRespCode(resp.statusCode, []int{http.StatusOK}); err != nil { return err } c.writeMetadata(resp.headers) return nil } func (c *Container) writeMetadata(h http.Header) { c.Metadata = writeMetadata(h) } func generateContainerACLpayload(policies []ContainerAccessPolicy) (io.Reader, int, error) { sil := SignedIdentifiers{ SignedIdentifiers: []SignedIdentifier{}, } for _, capd := range policies { permission := capd.generateContainerPermissions() signedIdentifier := convertAccessPolicyToXMLStructs(capd.ID, capd.StartTime, capd.ExpiryTime, permission) sil.SignedIdentifiers = append(sil.SignedIdentifiers, signedIdentifier) } return xmlMarshal(sil) } func (capd *ContainerAccessPolicy) generateContainerPermissions() (permissions string) { // generate the permissions string (rwd). // still want the end user API to have bool flags. permissions = "" if capd.CanRead { permissions += "r" } if capd.CanWrite { permissions += "w" } if capd.CanDelete { permissions += "d" } return permissions }<|fim▁end|>
// ContainerProperties contains various properties of a container returned from
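Each record in this dump has the same fill-in-the-middle shape: the prompt wraps a source file in <|fim▁begin|>...<|fim▁end|> with a single <|fim▁hole|> marking removed text, and the short completion that follows the prompt (here, the ContainerProperties comment line above) is exactly that removed text. A minimal Python sketch of how such a pair could be spliced back together; the marker strings are taken from the rows themselves, and the toy row is invented for illustration:

BEGIN, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    # Drop everything up to the begin marker, trim the end marker,
    # then splice the completion into the hole.
    body = prompt.split(BEGIN, 1)[-1].split(END, 1)[0]
    prefix, suffix = body.split(HOLE, 1)
    return prefix + completion + suffix

toy_prompt = BEGIN + "def add(a, b):\n" + HOLE + "\n" + END
print(reassemble(toy_prompt, "    return a + b"))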
<|file_name|>algorithm_is_sorted.py<|end_file_name|><|fim▁begin|><|fim▁hole|>>>> import random >>> random.sample(range(10), 5) [7, 6, 3, 5, 1] >>> all(a < b for a, b in zip(_,_[1:])) False >>><|fim▁end|>
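The doctest above relies on the interpreter-only `_` variable (the value of the previous REPL expression), so it cannot run as a plain script. A script-friendly sketch of the same strictly-increasing check; the helper name is my own:

import random

def is_sorted(seq):
    # Strictly increasing: every element is less than its successor.
    return all(a < b for a, b in zip(seq, seq[1:]))

sample = random.sample(range(10), 5)   # e.g. [7, 6, 3, 5, 1]
print(sample, is_sorted(sample))       # almost always False
print(is_sorted(sorted(sample)))       # True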
<|file_name|>routes-wallet.js<|end_file_name|><|fim▁begin|>import TheWalletView from '@/views/TheWalletView'; import Dashboard from '@/views/layouts-wallet/TheDashboardLayout'; import Send from '@/views/layouts-wallet/TheSendTransactionLayout'; import NftManager from '@/views/layouts-wallet/TheNFTManagerLayout'; import Swap from '@/views/layouts-wallet/TheSwapLayout'; import InteractContract from '@/views/layouts-wallet/TheInteractContractLayout'; import DeployContract from '@/views/layouts-wallet/TheDeployContractLayout'; import SignMessage from '@/views/layouts-wallet/TheSignMessageLayout'; import VerifyMessage from '@/views/layouts-wallet/TheVerifyMessageLayout'; import Dapps from '@/views/layouts-wallet/TheDappCenterLayout.vue'; import DappRoutes from '@/dapps/routes-dapps.js'; import Settings from '@/modules/settings/ModuleSettings'; import NftManagerSend from '@/modules/nft-manager/components/NftManagerSend'; // import Notifications from '@/modules/notifications/ModuleNotifications'; import Network from '@/modules/network/ModuleNetwork'; import { swapProps, swapRouterGuard } from './helpers'; import { ROUTES_WALLET } from '../configs/configRoutes'; export default { path: '/wallet', component: TheWalletView, props: true, children: [ { path: ROUTES_WALLET.WALLETS.PATH, name: ROUTES_WALLET.WALLETS.NAME, component: Dashboard, meta: { noAuth: false } }, { path: ROUTES_WALLET.DASHBOARD.PATH, name: ROUTES_WALLET.DASHBOARD.NAME, component: Dashboard, meta: { noAuth: false } }, { path: ROUTES_WALLET.SETTINGS.PATH, name: ROUTES_WALLET.SETTINGS.NAME, component: Settings, meta: { noAuth: false } }, { path: ROUTES_WALLET.SEND_TX.PATH, name: ROUTES_WALLET.SEND_TX.NAME, component: Send, props: true, meta: { noAuth: false } }, { path: ROUTES_WALLET.NFT_MANAGER.PATH, name: ROUTES_WALLET.NFT_MANAGER.NAME, component: NftManager, children: [ { path: ROUTES_WALLET.NFT_MANAGER_SEND.PATH, name: ROUTES_WALLET.NFT_MANAGER_SEND.NAME, component: NftManagerSend, meta: { noAuth: false } } ], meta: { noAuth: false } }, // { // path: ROUTES_WALLET.NOTIFICATIONS.PATH, // name: ROUTES_WALLET.NOTIFICATIONS.NAME, // component: Notifications, // meta: { // noAuth: false // } // }, { path: ROUTES_WALLET.NETWORK.PATH, name: ROUTES_WALLET.NETWORK.NAME, component: Network, meta: { noAuth: false } }, { path: ROUTES_WALLET.SWAP.PATH, name: ROUTES_WALLET.SWAP.NAME, component: Swap, props: swapProps, beforeEnter: swapRouterGuard, meta: { noAuth: false } }, { path: ROUTES_WALLET.DAPPS.PATH, component: Dapps, children: DappRoutes, meta: { noAuth: false } }, { path: ROUTES_WALLET.DEPLOY_CONTRACT.PATH, name: ROUTES_WALLET.DEPLOY_CONTRACT.NAME, component: DeployContract, meta: { noAuth: false } }, { path: ROUTES_WALLET.INTERACT_WITH_CONTRACT.PATH, name: ROUTES_WALLET.INTERACT_WITH_CONTRACT.NAME, component: InteractContract, meta: { noAuth: false } }, { path: ROUTES_WALLET.SIGN_MESSAGE.PATH, name: ROUTES_WALLET.SIGN_MESSAGE.NAME, component: SignMessage, meta: { noAuth: false } }, { path: ROUTES_WALLET.VERIFY_MESSAGE.PATH, name: ROUTES_WALLET.VERIFY_MESSAGE.NAME, component: VerifyMessage,<|fim▁hole|> noAuth: false } } ] };<|fim▁end|>
meta: {
<|file_name|>Log.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
"""
Provide log-related functions. You need to Initialize the logger and
use the logger to make logs.

Example:
    >>> logger = Initialize()
Use logger.level(\*msg) to log like:
    >>> logger.error("Pickle data writing Failed.")
    >>> logger.info("Pickle data of ", foo, " written successfully.")

The log will be stored into LogFile.log by default.
"""
__author__ = "Wang Hewen"

import sys
import logging
logging.currentframe = lambda: sys._getframe(5)

class Logger(logging.Logger):
    def debug(self, *args, **kwargs):
        super().debug("".join([str(arg) for arg in args]), **kwargs)

<|fim▁hole|>
        super().info("".join([str(arg) for arg in args]), **kwargs)

    def warning(self, *args, **kwargs):
        super().warning("".join([str(arg) for arg in args]), **kwargs)

    def warn(self, *args, **kwargs):
        super().warn("".join([str(arg) for arg in args]), **kwargs)

    def error(self, *args, **kwargs):
        super().error("".join([str(arg) for arg in args]), **kwargs)

    def exception(self, *args, exc_info=True, **kwargs):
        super().exception("".join([str(arg) for arg in args]), exc_info = exc_info, **kwargs)

    def critical(self, *args, **kwargs):
        super().critical("".join([str(arg) for arg in args]), **kwargs)

    def log(self, level, *args, **kwargs):
        super().log(level, "".join([str(arg) for arg in args]), **kwargs)

    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
        super()._log(level, msg, args, exc_info=exc_info, extra=extra, stack_info=stack_info)

def Initialize(FileName = "LogFile.log", LogLevel = "INFO", WriteToStream = False):
    '''
    Initialize loggers for logging. A logger will be returned.

    :param String FileName: Path of the log file
    :param String LogLevel: LogLevel of the logger, which can be "DEBUG", "INFO", "ERROR"
    :param Boolean WriteToStream: Whether to write to stdout
    :return: logger: The logger used for logging
    :rtype: logging.Logger
    '''
    if LogLevel not in ["DEBUG", "INFO", "ERROR"]:
        raise ValueError("LogLevel is not correctly set.")
    logging.Logger.manager.setLoggerClass(Logger)
    logger = logging.getLogger(__name__) #__name__ == CommonModules.Log
    handlers = logger.handlers[:]
    for handler in handlers:
        handler.close()
        logger.removeHandler(handler)
    fileHandler = logging.FileHandler(FileName)
    fileHandler.setFormatter(logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s: %(message)s', datefmt = '%Y/%m/%d %H:%M:%S'))
    if LogLevel == "DEBUG":
        streamHandler = logging.StreamHandler(stream = sys.stdout)
        streamHandler.setLevel(logging.DEBUG)
        fileHandler.setLevel(logging.DEBUG)
        logger.setLevel(logging.DEBUG)
    if LogLevel == "INFO":
        streamHandler = logging.StreamHandler(stream = sys.stdout)
        streamHandler.setLevel(logging.INFO)
        fileHandler.setLevel(logging.INFO)
        logger.setLevel(logging.INFO)
    if LogLevel == "ERROR":
        streamHandler = logging.StreamHandler(stream = sys.stderr)
        streamHandler.setLevel(logging.ERROR)
        fileHandler.setLevel(logging.ERROR)
        logger.setLevel(logging.ERROR)
    streamHandler.setFormatter(logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s: %(message)s', datefmt = '%Y/%m/%d %H:%M:%S'))
    if WriteToStream:
        logger.addHandler(streamHandler)
    logger.addHandler(fileHandler)
    return logger<|fim▁end|>
def info(self, *args, **kwargs):
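A short usage sketch for the module above, assuming it is saved as Log.py somewhere on the import path. It mirrors the examples in the module docstring; the Logger subclass joins all positional arguments into one message:

from Log import Initialize

logger = Initialize(FileName="LogFile.log", LogLevel="INFO", WriteToStream=True)
logger.info("Pickle data of ", "foo.pkl", " written successfully.")
logger.error("Pickle data writing Failed.")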
<|file_name|>Heavy Hitters.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from collections import defaultdict
import itertools
import operator
from operator import itemgetter

counters = defaultdict(int)
trueCounters = defaultdict(int)

<|fim▁hole|>
wc = 0

for line in fr:
    line = line.strip()
    words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
    for word in words:
        wc += 1

threshold = 0.01 * wc

# 1st Pass
fr.seek(0)
for line in fr:
    line = line.strip()
    words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
    for word in words:
        if word in counters:
            counters[word] += 1
        elif len(counters) < 99:
            counters[word] = 1
        else:
            delCounters = []
            for key in counters:
                counters[key] -= 1
                if counters[key] == 0:
                    delCounters.append(key)
            for word in delCounters:
                del counters[word]

# 2nd Pass: True count, Delete by threshold
fr.seek(0)
for line in fr:
    line = line.strip()
    words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
    for word in words:
        if word in counters:
            if word in trueCounters:
                trueCounters[word] += 1
            else:
                trueCounters[word] = 1

delCounters = []
for word in trueCounters:
    if trueCounters[word] < threshold:
        delCounters.append(word)

for word in delCounters:
    del trueCounters[word]

for key, value in sorted(trueCounters.iteritems(), key=operator.itemgetter(1), reverse=True):
    print key, value<|fim▁end|>
fr = open('allworks','r')
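The script above is a two-pass frequent-words counter in the Misra-Gries style: the first pass keeps at most 99 candidate counters, the second re-counts the candidates exactly and drops those below 1% of the total word count. A compact, self-contained sketch of the candidate-selection core; the function name and toy stream are mine:

from collections import defaultdict

def misra_gries(stream, k):
    # Any item occurring more than len(stream)/k times is guaranteed
    # to survive as a candidate counter.
    counters = defaultdict(int)
    for item in stream:
        if item in counters:
            counters[item] += 1
        elif len(counters) < k - 1:
            counters[item] = 1
        else:
            for key in list(counters):
                counters[key] -= 1
                if counters[key] == 0:
                    del counters[key]
    return dict(counters)

print(misra_gries("a a a b b c a b a d".split(), k=3))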
<|file_name|>jquery.fileupload-ui.js<|end_file_name|><|fim▁begin|>/* * jQuery File Upload User Interface Plugin * https://github.com/blueimp/jQuery-File-Upload * * Copyright 2010, Sebastian Tschan * https://blueimp.net * * Licensed under the MIT license: * http://www.opensource.org/licenses/MIT */ /* jshint nomen:false */ /* global define, require, window */ ;(function (factory) {<|fim▁hole|> 'jquery', 'tmpl', './jquery.fileupload-image', './jquery.fileupload-audio', './jquery.fileupload-video', './jquery.fileupload-validate' ], factory); } else if (typeof exports === 'object') { // Node/CommonJS: factory( require('jquery'), require('tmpl') ); } else { // Browser globals: factory( window.jQuery, window.tmpl ); } }(function ($, tmpl) { 'use strict'; jQuery.blueimp.fileupload.prototype._specialOptions.push( 'filesContainer', 'uploadTemplateId', 'downloadTemplateId' ); // The UI version extends the file upload widget // and adds complete user interface interaction: jQuery.widget('blueimp.fileupload', jQuery.blueimp.fileupload, { options: { // By default, files added to the widget are uploaded as soon // as the user clicks on the start buttons. To enable automatic // uploads, set the following option to true: autoUpload: false, // The ID of the upload template: uploadTemplateId: 'template-upload', // The ID of the download template: downloadTemplateId: 'template-download', // The container for the list of files. If undefined, it is set to // an element with class "files" inside of the widget element: filesContainer: undefined, // By default, files are appended to the files container. // Set the following option to true, to prepend files instead: prependFiles: false, // The expected data type of the upload response, sets the dataType // option of the jQuery.ajax upload requests: dataType: 'json', // Error and info messages: messages: { unknownError: 'Unknown error' }, // Function returning the current number of files, // used by the maxNumberOfFiles validation: getNumberOfFiles: function () { return this.filesContainer.children() .not('.processing').length; }, // Callback to retrieve the list of files from the server response: getFilesFromResponse: function (data) { if (data.result && jQuery.isArray(data.result.files)) { return data.result.files; } return []; }, // The add callback is invoked as soon as files are added to the fileupload // widget (via file input selection, drag & drop or add API call). // See the basic file upload widget for more information: add: function (e, data) { if (e.isDefaultPrevented()) { return false; } var $this = jQuery(this), that = $this.data('blueimp-fileupload') || $this.data('fileupload'), options = that.options; data.context = that._renderUpload(data.files) .data('data', data) .addClass('processing'); options.filesContainer[ options.prependFiles ? 
'prepend' : 'append' ](data.context); that._forceReflow(data.context); that._transition(data.context); data.process(function () { return $this.fileupload('process', data); }).always(function () { data.context.each(function (index) { jQuery(this).find('.size').text( that._formatFileSize(data.files[index].size) ); }).removeClass('processing'); that._renderPreviews(data); }).done(function () { data.context.find('.start').prop('disabled', false); if ((that._trigger('added', e, data) !== false) && (options.autoUpload || data.autoUpload) && data.autoUpload !== false) { data.submit(); } }).fail(function () { if (data.files.error) { data.context.each(function (index) { var error = data.files[index].error; if (error) { jQuery(this).find('.error').text(error); } }); } }); }, // Callback for the start of each file upload request: send: function (e, data) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'); if (data.context && data.dataType && data.dataType.substr(0, 6) === 'iframe') { // Iframe Transport does not support progress events. // In lack of an indeterminate progress bar, we set // the progress to 100%, showing the full animated bar: data.context .find('.progress').addClass( !jQuery.support.transition && 'progress-animated' ) .attr('aria-valuenow', 100) .children().first().css( 'width', '100%' ); } return that._trigger('sent', e, data); }, // Callback for successful uploads: done: function (e, data) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'), getFilesFromResponse = data.getFilesFromResponse || that.options.getFilesFromResponse, files = getFilesFromResponse(data), template, deferred; if (data.context) { data.context.each(function (index) { var file = files[index] || {error: 'Empty file upload result'}; deferred = that._addFinishedDeferreds(); that._transition(jQuery(this)).done( function () { var node = jQuery(this); template = that._renderDownload([file]) .replaceAll(node); that._forceReflow(template); that._transition(template).done( function () { data.context = jQuery(this); that._trigger('completed', e, data); that._trigger('finished', e, data); deferred.resolve(); } ); } ); }); } else { template = that._renderDownload(files)[ that.options.prependFiles ? 
'prependTo' : 'appendTo' ](that.options.filesContainer); that._forceReflow(template); deferred = that._addFinishedDeferreds(); that._transition(template).done( function () { data.context = jQuery(this); that._trigger('completed', e, data); that._trigger('finished', e, data); deferred.resolve(); } ); } }, // Callback for failed (abort or error) uploads: fail: function (e, data) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'), template, deferred; if (data.context) { data.context.each(function (index) { if (data.errorThrown !== 'abort') { var file = data.files[index]; file.error = file.error || data.errorThrown || data.i18n('unknownError'); deferred = that._addFinishedDeferreds(); that._transition(jQuery(this)).done( function () { var node = jQuery(this); template = that._renderDownload([file]) .replaceAll(node); that._forceReflow(template); that._transition(template).done( function () { data.context = jQuery(this); that._trigger('failed', e, data); that._trigger('finished', e, data); deferred.resolve(); } ); } ); } else { deferred = that._addFinishedDeferreds(); that._transition(jQuery(this)).done( function () { jQuery(this).remove(); that._trigger('failed', e, data); that._trigger('finished', e, data); deferred.resolve(); } ); } }); } else if (data.errorThrown !== 'abort') { data.context = that._renderUpload(data.files)[ that.options.prependFiles ? 'prependTo' : 'appendTo' ](that.options.filesContainer) .data('data', data); that._forceReflow(data.context); deferred = that._addFinishedDeferreds(); that._transition(data.context).done( function () { data.context = jQuery(this); that._trigger('failed', e, data); that._trigger('finished', e, data); deferred.resolve(); } ); } else { that._trigger('failed', e, data); that._trigger('finished', e, data); that._addFinishedDeferreds().resolve(); } }, // Callback for upload progress events: progress: function (e, data) { if (e.isDefaultPrevented()) { return false; } var progress = Math.floor(data.loaded / data.total * 100); if (data.context) { data.context.each(function () { jQuery(this).find('.progress') .attr('aria-valuenow', progress) .children().first().css( 'width', progress + '%' ); }); } }, // Callback for global upload progress events: progressall: function (e, data) { if (e.isDefaultPrevented()) { return false; } var $this = jQuery(this), progress = Math.floor(data.loaded / data.total * 100), globalProgressNode = $this.find('.fileupload-progress'), extendedProgressNode = globalProgressNode .find('.progress-extended'); if (extendedProgressNode.length) { extendedProgressNode.html( ($this.data('blueimp-fileupload') || $this.data('fileupload')) ._renderExtendedProgress(data) ); } globalProgressNode .find('.progress') .attr('aria-valuenow', progress) .children().first().css( 'width', progress + '%' ); }, // Callback for uploads start, equivalent to the global ajaxStart event: start: function (e) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'); that._resetFinishedDeferreds(); that._transition(jQuery(this).find('.fileupload-progress')).done( function () { that._trigger('started', e); } ); }, // Callback for uploads stop, equivalent to the global ajaxStop event: stop: function (e) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'), deferred = that._addFinishedDeferreds(); jQuery.when.apply($, 
that._getFinishedDeferreds()) .done(function () { that._trigger('stopped', e); }); that._transition(jQuery(this).find('.fileupload-progress')).done( function () { jQuery(this).find('.progress') .attr('aria-valuenow', '0') .children().first().css('width', '0%'); jQuery(this).find('.progress-extended').html('&nbsp;'); deferred.resolve(); } ); }, processstart: function (e) { if (e.isDefaultPrevented()) { return false; } jQuery(this).addClass('fileupload-processing'); }, processstop: function (e) { if (e.isDefaultPrevented()) { return false; } jQuery(this).removeClass('fileupload-processing'); }, // Callback for file deletion: destroy: function (e, data) { if (e.isDefaultPrevented()) { return false; } var that = jQuery(this).data('blueimp-fileupload') || jQuery(this).data('fileupload'), removeNode = function () { that._transition(data.context).done( function () { jQuery(this).remove(); that._trigger('destroyed', e, data); } ); }; if (data.url) { data.dataType = data.dataType || that.options.dataType; jQuery.ajax(data).done(removeNode).fail(function () { that._trigger('destroyfailed', e, data); }); } else { removeNode(); } } }, _resetFinishedDeferreds: function () { this._finishedUploads = []; }, _addFinishedDeferreds: function (deferred) { if (!deferred) { deferred = jQuery.Deferred(); } this._finishedUploads.push(deferred); return deferred; }, _getFinishedDeferreds: function () { return this._finishedUploads; }, // Link handler, that allows to download files // by drag & drop of the links to the desktop: _enableDragToDesktop: function () { var link = jQuery(this), url = link.prop('href'), name = link.prop('download'), type = 'application/octet-stream'; link.bind('dragstart', function (e) { try { e.originalEvent.dataTransfer.setData( 'DownloadURL', [type, name, url].join(':') ); } catch (ignore) {} }); }, _formatFileSize: function (bytes) { if (typeof bytes !== 'number') { return ''; } if (bytes >= 1000000000) { return (bytes / 1000000000).toFixed(2) + ' GB'; } if (bytes >= 1000000) { return (bytes / 1000000).toFixed(2) + ' MB'; } return (bytes / 1000).toFixed(2) + ' KB'; }, _formatBitrate: function (bits) { if (typeof bits !== 'number') { return ''; } if (bits >= 1000000000) { return (bits / 1000000000).toFixed(2) + ' Gbit/s'; } if (bits >= 1000000) { return (bits / 1000000).toFixed(2) + ' Mbit/s'; } if (bits >= 1000) { return (bits / 1000).toFixed(2) + ' kbit/s'; } return bits.toFixed(2) + ' bit/s'; }, _formatTime: function (seconds) { var date = new Date(seconds * 1000), days = Math.floor(seconds / 86400); days = days ? 
days + 'd ' : ''; return days + ('0' + date.getUTCHours()).slice(-2) + ':' + ('0' + date.getUTCMinutes()).slice(-2) + ':' + ('0' + date.getUTCSeconds()).slice(-2); }, _formatPercentage: function (floatValue) { return (floatValue * 100).toFixed(2) + ' %'; }, _renderExtendedProgress: function (data) { return this._formatBitrate(data.bitrate) + ' | ' + this._formatTime( (data.total - data.loaded) * 8 / data.bitrate ) + ' | ' + this._formatPercentage( data.loaded / data.total ) + ' | ' + this._formatFileSize(data.loaded) + ' / ' + this._formatFileSize(data.total); }, _renderTemplate: function (func, files) { if (!func) { return jQuery(); } var result = func({ files: files, formatFileSize: this._formatFileSize, options: this.options }); if (result instanceof $) { return result; } return jQuery(this.options.templatesContainer).html(result).children(); }, _renderPreviews: function (data) { data.context.find('.preview').each(function (index, elm) { jQuery(elm).append(data.files[index].preview); }); }, _renderUpload: function (files) { return this._renderTemplate( this.options.uploadTemplate, files ); }, _renderDownload: function (files) { return this._renderTemplate( this.options.downloadTemplate, files ).find('a[download]').each(this._enableDragToDesktop).end(); }, _startHandler: function (e) { e.preventDefault(); var button = jQuery(e.currentTarget), template = button.closest('.template-upload'), data = template.data('data'); button.prop('disabled', true); if (data && data.submit) { data.submit(); } }, _cancelHandler: function (e) { e.preventDefault(); var template = jQuery(e.currentTarget) .closest('.template-upload,.template-download'), data = template.data('data') || {}; data.context = data.context || template; if (data.abort) { data.abort(); } else { data.errorThrown = 'abort'; this._trigger('fail', e, data); } }, _deleteHandler: function (e) { e.preventDefault(); var button = jQuery(e.currentTarget); this._trigger('destroy', e, jQuery.extend({ context: button.closest('.template-download'), type: 'DELETE' }, button.data())); }, _forceReflow: function (node) { return jQuery.support.transition && node.length && node[0].offsetWidth; }, _transition: function (node) { var dfd = jQuery.Deferred(); if (jQuery.support.transition && node.hasClass('fade') && node.is(':visible')) { node.bind( jQuery.support.transition.end, function (e) { // Make sure we don't respond to other transitions events // in the container element, e.g. 
from button elements: if (e.target === node[0]) { node.unbind(jQuery.support.transition.end); dfd.resolveWith(node); } } ).toggleClass('in'); } else { node.toggleClass('in'); dfd.resolveWith(node); } return dfd; }, _initButtonBarEventHandlers: function () { var fileUploadButtonBar = this.element.find('.fileupload-buttonbar'), filesList = this.options.filesContainer; this._on(fileUploadButtonBar.find('.start'), { click: function (e) { e.preventDefault(); filesList.find('.start').click(); } }); this._on(fileUploadButtonBar.find('.cancel'), { click: function (e) { e.preventDefault(); filesList.find('.cancel').click(); } }); this._on(fileUploadButtonBar.find('.delete'), { click: function (e) { e.preventDefault(); filesList.find('.toggle:checked') .closest('.template-download') .find('.delete').click(); fileUploadButtonBar.find('.toggle') .prop('checked', false); } }); this._on(fileUploadButtonBar.find('.toggle'), { change: function (e) { filesList.find('.toggle').prop( 'checked', jQuery(e.currentTarget).is(':checked') ); } }); }, _destroyButtonBarEventHandlers: function () { this._off( this.element.find('.fileupload-buttonbar') .find('.start, .cancel, .delete'), 'click' ); this._off( this.element.find('.fileupload-buttonbar .toggle'), 'change.' ); }, _initEventHandlers: function () { this._super(); this._on(this.options.filesContainer, { 'click .start': this._startHandler, 'click .cancel': this._cancelHandler, 'click .delete': this._deleteHandler }); this._initButtonBarEventHandlers(); }, _destroyEventHandlers: function () { this._destroyButtonBarEventHandlers(); this._off(this.options.filesContainer, 'click'); this._super(); }, _enableFileInputButton: function () { this.element.find('.fileinput-button input') .prop('disabled', false) .parent().removeClass('disabled'); }, _disableFileInputButton: function () { this.element.find('.fileinput-button input') .prop('disabled', true) .parent().addClass('disabled'); }, _initTemplates: function () { var options = this.options; options.templatesContainer = this.document[0].createElement( options.filesContainer.prop('nodeName') ); if (tmpl) { if (options.uploadTemplateId) { options.uploadTemplate = tmpl(options.uploadTemplateId); } if (options.downloadTemplateId) { options.downloadTemplate = tmpl(options.downloadTemplateId); } } }, _initFilesContainer: function () { var options = this.options; if (options.filesContainer === undefined) { options.filesContainer = this.element.find('.files'); } else if (!(options.filesContainer instanceof $)) { options.filesContainer = jQuery(options.filesContainer); } }, _initSpecialOptions: function () { this._super(); this._initFilesContainer(); this._initTemplates(); }, _create: function () { this._super(); this._resetFinishedDeferreds(); if (!jQuery.support.fileInput) { this._disableFileInputButton(); } }, enable: function () { var wasDisabled = false; if (this.options.disabled) { wasDisabled = true; } this._super(); if (wasDisabled) { this.element.find('input, button').prop('disabled', false); this._enableFileInputButton(); } }, disable: function () { if (!this.options.disabled) { this.element.find('input, button').prop('disabled', true); this._disableFileInputButton(); } this._super(); } }); }));<|fim▁end|>
'use strict'; if (typeof define === 'function' && define.amd) { // Register as an anonymous AMD module: define([
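For reference, the _formatFileSize helper above uses decimal (SI) thresholds (1 KB is 1000 bytes, not 1024) with two decimal places. The same logic rendered in Python as a quick cross-check; this is an illustration, not part of the plugin:

def format_file_size(nbytes):
    # Mirrors the widget's decimal-unit formatting.
    if nbytes >= 1000000000:
        return '%.2f GB' % (nbytes / 1000000000)
    if nbytes >= 1000000:
        return '%.2f MB' % (nbytes / 1000000)
    return '%.2f KB' % (nbytes / 1000)

print(format_file_size(123456789))  # 123.46 MB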
<|file_name|>ColorDefinitionDlg.py<|end_file_name|><|fim▁begin|># # Copyright 2001 - 2016 Ludek Smid [http://www.ospace.net/] # # This file is part of Outer Space. # # Outer Space is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Outer Space is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Outer Space; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # import pygameui as ui from osci import client, gdata, res from ige import log class ColorDefinitionDlg: def __init__(self, app): self.app = app self.createUI() def display(self, color = None, confirmAction = None): self.confirmAction = confirmAction if color == None: self.color = (0xff,0xff,0xff) else: self.color = color self.show() def show(self): self.win.vR.text = hex(self.color[0]) self.win.vG.text = hex(self.color[1]) self.win.vB.text = hex(self.color[2]) self.win.vRS.slider.min = 0 self.win.vRS.slider.max = 265 self.win.vRS.slider.position = self.color[0] self.win.vGS.slider.min = 0 self.win.vGS.slider.max = 265 self.win.vGS.slider.position = self.color[1] self.win.vBS.slider.min = 0 self.win.vBS.slider.max = 265 self.win.vBS.slider.position = self.color[2] log.debug("ColorDefinitionDlg(%s,%s,%s)" % (self.win.vR.text,self.win.vG.text,self.win.vB.text)) self.win.show() # colorbox self.win.vColor.color = self.color # register for updates if self not in gdata.updateDlgs: gdata.updateDlgs.append(self) def hide(self): self.win.setStatus(_("Ready.")) self.win.hide() # unregister updates if self in gdata.updateDlgs: gdata.updateDlgs.remove(self) def update(self): self.show() def onChangeRed(self, widget, action, data): self.color = (int(self.win.vRS.slider.position), self.color[1], self.color[2]) self.win.vR.text = hex(self.color[0]) self.win.vColor.color = (int(self.win.vRS.slider.position), self.color[1], self.color[2]) def onChangeGreen(self, widget, action, data): self.color = (self.color[0], int(self.win.vGS.slider.position), self.color[2]) self.win.vG.text = hex(self.color[1]) self.win.vColor.color = (self.color[0], int(self.win.vGS.slider.position), self.color[2]) def onChangeBlue(self, widget, action, data): self.color = ( self.color[0], self.color[1], int(self.win.vBS.slider.position)) self.win.vB.text = hex(self.color[2]) self.win.vColor.color = ( self.color[0], self.color[1], int(self.win.vBS.slider.position)) def onOK(self, widget, action, data): try: r = int(self.win.vR.text,16) g = int(self.win.vG.text,16) b = int(self.win.vB.text,16) if not r in range(0,256): self.app.setFocus(self.win.vR) raise ValueError elif not g in range(0,256): self.app.setFocus(self.win.vG) raise ValueError elif not b in range(0,256): self.app.setFocus(self.win.vB) raise ValueError except ValueError: self.win.setStatus(_("Values must be hexa numbers between 0x00 - 0xff")) return self.hide() self.color = (r, g, b) if self.confirmAction: self.confirmAction() def onCancel(self, widget, action, data): self.color = None self.hide() def createUI(self): w, h = gdata.scrnSize cols = 14 rows = 8 width = cols * 20 + 5 height = rows * 20 + 4 self.win = 
ui.Window(self.app, modal = 1, escKeyClose = 1, movable = 0, title = _('Color Definition'), rect = ui.Rect((w - width) / 2, (h - height) / 2, width, height), layoutManager = ui.SimpleGridLM(), tabChange = True,<|fim▁hole|> # creating dialog window self.win.subscribeAction('*', self) # R ui.Label(self.win,text = _("Red:"), align = ui.ALIGN_W, layout = (0, 0, 3, 1)) ui.Entry(self.win, id = 'vR',align = ui.ALIGN_W,layout = (7, 0, 3, 1), orderNo = 1, reportValueChanged = True,) ui.Scrollbar(self.win,layout = ( 0,1,10,1), id='vRS',action = "onChangeRed") # G ui.Label(self.win,text = _("Green:"),align = ui.ALIGN_W,layout = (0, 2, 3, 1)) ui.Entry(self.win, id = 'vG',align = ui.ALIGN_W,layout = (7, 2, 3, 1), orderNo = 2, reportValueChanged = True,) ui.Scrollbar(self.win,layout = (0,3,10,1), id='vGS',action = "onChangeGreen") # B ui.Label(self.win,text = _("Blue:"),align = ui.ALIGN_W,layout = (0, 4, 3, 1)) ui.Entry(self.win, id = 'vB',align = ui.ALIGN_W,layout = (7, 4, 3, 1), orderNo = 3, reportValueChanged = True,) ui.Scrollbar(self.win,layout = (0,5,10,1), id='vBS',action = "onChangeBlue") # color example ui.ColorBox(self.win, id = 'vColor', layout = (10, 0, 4, 6), margins = (4, 3, 4, 4)) #i.Title(self.win, layout = (0, 4, 2, 1)) ui.TitleButton(self.win, layout = (0, 6, 7, 1), text = _("Cancel"), action = "onCancel") okBtn = ui.TitleButton(self.win, layout = (7, 6, 7, 1), text = _("OK"), action = 'onOK') self.win.acceptButton = okBtn def onValueChanged(self, widget, action, data): try: r = int(self.win.vR.text,16) g = int(self.win.vG.text,16) b = int(self.win.vB.text,16) except: return if not r in range(0,256) or not g in range(0,256) or not b in range(0,256): return self.win.vColor.color = (r, g, b) self.win.vRS.slider.position = r self.win.vGS.slider.position = g self.win.vBS.slider.position = b<|fim▁end|>
)
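A hypothetical way to wire the dialog above into application code. The app object is assumed to be an initialized pygameui application and the callback name is mine; display() falls back to white when no colour is given:

dlg = ColorDefinitionDlg(app)

def on_confirm():
    # After OK, dlg.color holds the chosen (r, g, b) tuple;
    # after Cancel it is None.
    print("Chosen RGB:", dlg.color)

dlg.display(color=(0x20, 0x40, 0x80), confirmAction=on_confirm)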
<|file_name|>test_hdf5_filters.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013-2021, Freja Nordsiek # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import os.path import random import tempfile import h5py import pytest import hdf5storage from asserts import assert_equal from make_randoms import random_numpy, random_numpy_shape, \ max_array_axis_length, dtypes, random_name random.seed() @pytest.mark.parametrize( 'compression,shuffle,fletcher32,gzip_level', [(compression, shuffle, fletcher32, level) for compression in ('gzip', 'lzf') for shuffle in (True, False) for fletcher32 in (True, False) for level in range(10)]) def test_read_filtered_data(compression, shuffle, fletcher32, gzip_level): # Make the filters dict. filts = {'compression': compression, 'shuffle': shuffle, 'fletcher32': fletcher32} if compression == 'gzip': filts['compression_opts'] = gzip_level # Make some random data. dims = random.randint(1, 4) data = random_numpy(shape=random_numpy_shape(dims, max_array_axis_length), dtype=random.choice(tuple( set(dtypes) - set(['U'])))) # Make a random name. name = random_name() # Write the data to the file with the given name with the provided # filters and read it back. with tempfile.TemporaryDirectory() as folder: filename = os.path.join(folder, 'data.h5') with h5py.File(filename, mode='w') as f: f.create_dataset(name, data=data, chunks=True, **filts) out = hdf5storage.read(path=name, filename=filename, matlab_compatible=False) # Compare assert_equal(out, data) @pytest.mark.parametrize( 'compression,shuffle,fletcher32,gzip_level', [(compression, shuffle, fletcher32, level) for compression in ('gzip', 'lzf') for shuffle in (True, False) for fletcher32 in (True, False) for level in range(10)]) def test_write_filtered_data(compression, shuffle, fletcher32, gzip_level): # Make some random data. The dtype must be restricted so that it can # be read back reliably. dims = random.randint(1, 4) dts = tuple(set(dtypes) - set(['U', 'S', 'bool', 'complex64', \ 'complex128'])) data = random_numpy(shape=random_numpy_shape(dims, max_array_axis_length), dtype=random.choice(dts)) # Make a random name. name = random_name() # Write the data to the file with the given name with the provided # filters and read it back. 
with tempfile.TemporaryDirectory() as folder: filename = os.path.join(folder, 'data.h5') hdf5storage.write(data, path=name, filename=filename, store_python_metadata=False, matlab_compatible=False, compress=True, compress_size_threshold=0, compression_algorithm=compression, gzip_compression_level=gzip_level, shuffle_filter=shuffle, compressed_fletcher32_filter=fletcher32) with h5py.File(filename, mode='r') as f:<|fim▁hole|> d = f[name] filts = {'fletcher32': d.fletcher32, 'shuffle': d.shuffle, 'compression': d.compression, 'gzip_level': d.compression_opts} out = d[...] # Check the filters assert fletcher32 == filts['fletcher32'] assert shuffle == filts['shuffle'] assert compression == filts['compression'] if compression == 'gzip': assert gzip_level == filts['gzip_level'] # Compare assert_equal(out, data) @pytest.mark.parametrize( 'method,uncompressed_fletcher32_filter,compression,shuffle,' 'fletcher32,gzip_level', [(method, uf, compression, shuffle, fletcher32, level) for method in ('compression_disabled', 'data_too_small') for uf in (True, False) for compression in ('gzip', 'lzf') for shuffle in (True, False) for fletcher32 in (True, False) for level in range(10)]) def test_uncompressed_write_filtered_data( method, uncompressed_fletcher32_filter, compression, shuffle, fletcher32, gzip_level): # Make the filters dict. filts = {'compression': compression, 'shuffle': shuffle, 'fletcher32': fletcher32, 'gzip_level': gzip_level} # Make some random data. The dtype must be restricted so that it can # be read back reliably. dims = random.randint(1, 4) dts = tuple(set(dtypes) - set(['U', 'S', 'bool', 'complex64', \ 'complex128'])) data = random_numpy(shape=random_numpy_shape(dims, max_array_axis_length), dtype=random.choice(dts)) # Make a random name. name = random_name() # Make the options to disable compression by the method specified, # which is either that it is outright disabled or that the data is # smaller than the compression threshold. if method == 'compression_disabled': opts = {'compress': False, 'compress_size_threshold': 0} else: opts = {'compress': True, 'compress_size_threshold': data.nbytes + 1} # Write the data to the file with the given name with the provided # filters and read it back. with tempfile.TemporaryDirectory() as folder: filename = os.path.join(folder, 'data.h5') hdf5storage.write(data, path=name, filename=filename, \ store_python_metadata=False, matlab_compatible=False, \ compression_algorithm=filts['compression'], \ gzip_compression_level=filts['gzip_level'], \ shuffle_filter=filts['shuffle'], \ compressed_fletcher32_filter=filts['fletcher32'], \ uncompressed_fletcher32_filter= \ uncompressed_fletcher32_filter, \ **opts) with h5py.File(filename, mode='r') as f: d = f[name] fletcher32 = d.fletcher32 shuffle = d.shuffle compression = d.compression gzip_level = d.compression_opts out = d[...] # Check the filters assert compression is None assert shuffle is False assert fletcher32 == uncompressed_fletcher32_filter # Compare assert_equal(out, data)<|fim▁end|>
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "juisapp.settings") from django.core.management import execute_from_command_line<|fim▁hole|> execute_from_command_line(sys.argv)<|fim▁end|>
<|file_name|>newtype_index.rs<|end_file_name|><|fim▁begin|>#![feature(rustc_attrs, rustc_private, step_trait, min_const_unsafe_fn)] #[macro_use] extern crate rustc_data_structures; extern crate rustc_serialize; use rustc_data_structures::indexed_vec::Idx; <|fim▁hole|> use std::mem::size_of; fn main() { assert_eq!(size_of::<MyIdx>(), 4); assert_eq!(size_of::<Option<MyIdx>>(), 4); assert_eq!(size_of::<Option<Option<MyIdx>>>(), 4); assert_eq!(size_of::<Option<Option<Option<MyIdx>>>>(), 4); assert_eq!(size_of::<Option<Option<Option<Option<MyIdx>>>>>(), 4); assert_eq!(size_of::<Option<Option<Option<Option<Option<MyIdx>>>>>>(), 4); assert_eq!(size_of::<Option<Option<Option<Option<Option<Option<MyIdx>>>>>>>(), 8); }<|fim▁end|>
newtype_index!(struct MyIdx { MAX = 0xFFFF_FFFA });
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># requests-oauth 0.4.0<|fim▁hole|># Original author: Miguel Araujo # Forked from https://github.com/maraujop/requests_oauth # Original license: 3-clause BSD from hook import OAuthHook<|fim▁end|>
# Hacked to support RSA-SHA1 encryption for Atlassian OAuth.
<|file_name|>TimedRebeca2ROSExpressionTransformer.java<|end_file_name|><|fim▁begin|>package org.rebecalang.modeltransformer.ros.timedrebeca; import java.util.HashMap; import java.util.Map; import org.rebecalang.compiler.modelcompiler.corerebeca.CoreRebecaTypeSystem; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.BinaryExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.CastExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.DotPrimary; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.Expression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.FieldDeclaration; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.Literal; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.MsgsrvDeclaration; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.NonDetExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.PlusSubExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.PrimaryExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.ReactiveClassDeclaration; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.RebecInstantiationPrimary; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.RebecaModel; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.TermPrimary; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.TernaryExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.UnaryExpression; import org.rebecalang.compiler.modelcompiler.corerebeca.objectmodel.VariableDeclarator; import org.rebecalang.compiler.modelcompiler.timedrebeca.TimedRebecaTypeSystem; import org.rebecalang.compiler.utils.CodeCompilationException; import org.rebecalang.compiler.utils.ExceptionContainer; import org.rebecalang.compiler.utils.Pair; import org.rebecalang.modeltransformer.StatementTransformingException; import org.rebecalang.modeltransformer.ros.Utilities; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class TimedRebeca2ROSExpressionTransformer { public final static String NEW_LINE = "\r\n"; public final static String TAB = "\t"; static Integer i = 0; private String modelName; private ReactiveClassDeclaration rc; private RebecaModel rebecaModel; private Map <Pair<String, String>, String> methodCalls = new HashMap<Pair<String, String>, String>(); @Autowired TimedRebecaTypeSystem timedRebecaTypeSystem; @Autowired ExceptionContainer exceptionContainer; public void prepare(String modelName, ReactiveClassDeclaration rc, RebecaModel rebecaModel) { this.modelName = modelName; this.rebecaModel = rebecaModel; this.rc = rc; } public String translate(Expression expression) { String retValue = ""; if (expression instanceof TernaryExpression) { TernaryExpression tExpression = (TernaryExpression)expression; Expression condition = tExpression.getCondition(); retValue = "(" + (translate(condition)) + ")"; retValue += " ? 
" + "(" + translate(tExpression.getLeft()) + ")"; retValue += " : " + "(" + translate(tExpression.getRight()) + ")"; } else if (expression instanceof BinaryExpression) { BinaryExpression bExpression = (BinaryExpression) expression; String op = bExpression.getOperator(); retValue = translate(bExpression.getLeft()) + " " + op + " " + translate(bExpression.getRight()); } else if (expression instanceof UnaryExpression) { UnaryExpression uExpression = (UnaryExpression) expression; retValue = uExpression.getOperator() + " " + translate(uExpression.getExpression()); } else if (expression instanceof CastExpression) { exceptionContainer.addException(new StatementTransformingException("This version of transformer does not supprt " + "\"cast\" expression.", expression.getLineNumber(), expression.getCharacter())); } else if (expression instanceof NonDetExpression) { NonDetExpression nonDetExpression = (NonDetExpression)expression; int numberOfChoices = nonDetExpression.getChoices().size(); retValue += nonDetExpression.getType().getTypeName(); retValue += "int numberOfChoices = " + Integer.toString(numberOfChoices) + ";" + NEW_LINE; retValue += "int choice = " + "rand() % " + Integer.toString(numberOfChoices) + ";" + NEW_LINE; int index = numberOfChoices; for (Expression nonDetChoice : ((NonDetExpression)expression).getChoices()) { retValue += "if (" + "choice ==" + Integer.toString(numberOfChoices - index) + ")" + NEW_LINE; retValue += ((NonDetExpression)nonDetChoice); index ++; } } else if (expression instanceof Literal) { Literal lExpression = (Literal) expression; retValue = lExpression.getLiteralValue(); if (retValue.equals("null")) retValue = "\"dummy\""; } else if (expression instanceof PlusSubExpression) { retValue = translate(((PlusSubExpression)expression).getValue()) + ((PlusSubExpression)expression).getOperator(); } else if (expression instanceof PrimaryExpression) { PrimaryExpression pExpression = (PrimaryExpression) expression; retValue = translatePrimaryExpression(pExpression); } else { exceptionContainer.addException( new StatementTransformingException("Unknown translation rule for expression type " + expression.getClass(), expression.getLineNumber(), expression.getCharacter())); } return retValue; } protected String translatePrimaryExpression(PrimaryExpression pExpression) { String retValue = ""; if (pExpression instanceof DotPrimary) { DotPrimary dotPrimary = (DotPrimary) pExpression; retValue = translateDotPrimary(dotPrimary); } else if (pExpression instanceof TermPrimary) { retValue = translatePrimaryTermExpression((TermPrimary) pExpression); } else if (pExpression instanceof RebecInstantiationPrimary) { RebecInstantiationPrimary rip = (RebecInstantiationPrimary) pExpression; boolean hasMoreVariable = false; String args = ""; try { ReactiveClassDeclaration rcd = (ReactiveClassDeclaration) timedRebecaTypeSystem.getMetaData(rip.getType()); if (!rcd.getStatevars().isEmpty()) { args += " , "; for (FieldDeclaration fd : rcd.getStatevars()) { for (VariableDeclarator vd : fd.getVariableDeclarators()) { hasMoreVariable = true; String typeInit = fd.getType() == CoreRebecaTypeSystem.BOOLEAN_TYPE ? "false" : fd.getType().canTypeCastTo(CoreRebecaTypeSystem.INT_TYPE) ? 
"0" : "\"dummy\""; args += "(" + rcd.getName() + "-" + vd.getVariableName() + " |-> " + typeInit + ") " ; } } } if (!hasMoreVariable) args += "emptyValuation"; } catch (CodeCompilationException e) { e.printStackTrace(); } args += ","; hasMoreVariable = false; String typeName = rip.getType().getTypeName(); for (Expression expression : rip.getBindings()) { args += " arg(" + translate(expression) + ")"; hasMoreVariable = true; } for (Expression expression : rip.getArguments()) { args += " arg(" + translate(expression) + ")"; hasMoreVariable = true; } if (!hasMoreVariable) args += "noArg"; retValue = " new (" + typeName + args + ")"; } else { exceptionContainer.addException(new StatementTransformingException("Unknown translation rule for initializer type " + pExpression.getClass(), pExpression.getLineNumber(), pExpression.getCharacter())); } return retValue; } private String translateDotPrimary(DotPrimary dotPrimary) { String retValue = ""; if (!(dotPrimary.getLeft() instanceof TermPrimary) || !(dotPrimary.getRight() instanceof TermPrimary)) { exceptionContainer.addException(new StatementTransformingException("This version of transformer does not supprt " + "nested record access expression.", dotPrimary.getLineNumber(), dotPrimary.getCharacter())); } else { // TODO: Modified by Ehsan as the return vlaue type of message servers is always set to MSGSRV_TYPE // if(TypesUtilities.getInstance().getSuperType(dotPrimary.getRight().getType()) == TypesUtilities.MSGSRV_TYPE) { if(dotPrimary.getRight().getType() == CoreRebecaTypeSystem.MSGSRV_TYPE) { retValue = mapToROSPublishing(dotPrimary); } } return retValue; } private String mapToROSPublishing(DotPrimary dotPrimary) { String retValue = ""; /* map to ROS Publishing */ retValue = modelName + "::" + ((TermPrimary)dotPrimary.getRight()).getName() + " " + "pubMsg" + i.toString() + ";" + NEW_LINE; /* fill the ROS message fields with the arguments to be published */ int argumentIndex = 0; for (Expression expression : ((TermPrimary)dotPrimary.getRight()).getParentSuffixPrimary().getArguments()) { ReactiveClassDeclaration toClass = null; TermPrimary toRebec = (TermPrimary)dotPrimary.getLeft(); toClass = Utilities.findKnownReactiveClass(rc, toRebec.getName(), rebecaModel); String toMsgsrvName = ((TermPrimary)dotPrimary.getRight()).getName(); MsgsrvDeclaration toMsgsrv = Utilities.findTheMsgsrv(toClass, toMsgsrvName); String argumentName = toMsgsrv.getFormalParameters().get(argumentIndex).getName(); retValue += "pubMsg" + i.toString() + "." + argumentName + " = " + translate(expression) + ";" + NEW_LINE; argumentIndex ++; } retValue += "pubMsg" + i.toString() + "." + "sender" + "=" + "sender" + ";" + NEW_LINE; retValue += ((TermPrimary) dotPrimary.getLeft()).getName() + "_" + ((TermPrimary)dotPrimary.getRight()).getName() + "_pub" + "." 
+ "publish(" + "pubMsg" + i.toString() + ")" + ";" + NEW_LINE; i ++; /* to prevent from repeated names */ /* end of publishing */ /* storing the name of callee rebec and the name of called msgsrv in order to declare publishers */ Pair<String, String> methodCall = new Pair<String, String>( ((TermPrimary)dotPrimary.getLeft()).getName(), ((TermPrimary)dotPrimary.getRight()).getName() ); methodCalls.put(methodCall, ""); //ReactiveClassDeclaration rcd = (ReactiveClassDeclaration) TransformingContext.getInstance().lookupInContext("current-reactive-class"); //retValue = ((TermPrimary) dotPrimary.getLeft()).getName(); //String typeName = TypesUtilities.getTypeName(((TermPrimary) dotPrimary.getLeft()).getType()); //System.out.println(typeName); return retValue; } private String translatePrimaryTermExpression(TermPrimary pExpression) { String retValue = ""; if(pExpression.getName().equals("assertion") || pExpression.getName().equals("after") || pExpression.getName().equals("deadline")){ return retValue; } if(pExpression.getName().equals("delay")) retValue += "sleep"; else if(pExpression.getName().equals("sender")) return "thisMsg.sender"; else retValue += pExpression.getName(); if( pExpression.getParentSuffixPrimary() != null) {<|fim▁hole|> retValue += translate(argument) + ","; } if(! pExpression.getParentSuffixPrimary().getArguments().isEmpty()) { retValue = retValue.substring(0, retValue.length() - 1); } retValue += ")"; } //To support movement in ROS if (retValue.compareTo("Move(1,0)")==0) { //ROSCode to publish on CM_Vel topic } else if (retValue.compareTo("Move(0,1)")==0) { //ROSCode to publish on CM_Vel topic } else if (retValue.compareTo("Move(-1,0)")==0) { //ROSCode to publish on CM_Vel topic } else if (retValue.compareTo("Move(0,-1)")==0) { //ROSCode to publish on CM_Vel topic } else if(retValue.compareTo("Move(1,1)")==0) { //ROSCode to publish on CM_Vel topic } else if(retValue.compareTo("Move(1,-1)")==0) { //ROSCode to publish on CM_Vel topic } else if(retValue.compareTo("Move(-1,1)")==0) { //ROSCode to publish on CM_Vel topic } else if(retValue.compareTo("Move(-1,-1)")==0) { //ROSCode to publish on CM_Vel topic } /* to support arrays */ for(Expression ex: pExpression.getIndices()) { retValue += "[" + translate(ex) + "]"; } return retValue; } public Map <Pair<String, String>, String> getMethodCalls() { return methodCalls; } }<|fim▁end|>
retValue += "("; for(Expression argument: pExpression.getParentSuffixPrimary().getArguments()) {
<|file_name|>TokenMatcherFactory.java<|end_file_name|><|fim▁begin|>/* * SonarQube * Copyright (C) 2009-2022 SonarSource SA * mailto:info AT sonarsource DOT com * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 3 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ package org.sonar.duplications.statement; import org.sonar.duplications.statement.matcher.AnyTokenMatcher; import org.sonar.duplications.statement.matcher.BridgeTokenMatcher; import org.sonar.duplications.statement.matcher.ExactTokenMatcher; import org.sonar.duplications.statement.matcher.ForgetLastTokenMatcher; import org.sonar.duplications.statement.matcher.OptTokenMatcher; import org.sonar.duplications.statement.matcher.TokenMatcher; import org.sonar.duplications.statement.matcher.UptoTokenMatcher; public final class TokenMatcherFactory { private TokenMatcherFactory() { } public static TokenMatcher from(String token) { return new ExactTokenMatcher(token); } public static TokenMatcher to(String... tokens) { return new UptoTokenMatcher(tokens); } public static TokenMatcher bridge(String lToken, String rToken) { return new BridgeTokenMatcher(lToken, rToken);<|fim▁hole|> return new AnyTokenMatcher(); } public static TokenMatcher opt(TokenMatcher optMatcher) { return new OptTokenMatcher(optMatcher); } public static TokenMatcher forgetLastToken() { // TODO Godin: we can return singleton instance return new ForgetLastTokenMatcher(); } public static TokenMatcher token(String token) { return new ExactTokenMatcher(token); } }<|fim▁end|>
} public static TokenMatcher anyToken() { // TODO Godin: we can return singleton instance
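
TokenMatcherFactory hides each TokenMatcher subclass behind a small static-method DSL so statement rules read as combinator chains (from/to/bridge/opt/token). A rough Python sketch of the same idea; these matcher classes are simplified stand-ins for SonarQube's hierarchy, not its real API:

# Rough sketch of the matcher-factory DSL above; simplified stand-ins only.
class ExactTokenMatcher(object):
    def __init__(self, token):
        self.token = token
    def matches(self, stream):
        # Consume one token if it equals the expected one.
        return bool(stream) and stream.pop(0) == self.token

class OptTokenMatcher(object):
    def __init__(self, inner):
        self.inner = inner
    def matches(self, stream):
        # Try the inner matcher on a copy; an optional matcher never fails.
        trial = list(stream)
        if self.inner.matches(trial):
            stream[:] = trial
        return True

def token(tok):
    return ExactTokenMatcher(tok)

def opt(inner):
    return OptTokenMatcher(inner)

stream = ["static", "int"]
assert token("static").matches(stream)       # consumes "static"
assert opt(token("final")).matches(stream)   # "final" absent: still succeeds
assert token("int").matches(stream)          # consumes "int"
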
<|file_name|>HTTP_Service_Handler.cpp<|end_file_name|><|fim▁begin|>// $Id: HTTP_Service_Handler.cpp 82739 2008-09-16 12:20:46Z johnnyw $ #define ACE_BUILD_SVC_DLL #include "ace/OS.h" #include "ace/Get_Opt.h" #include "jaws3/Concurrency.h" #include "HTTP_Service_Handler.h" #include "HTTP_States.h" #include "HTTP_Data.h" JAWS_HTTP_Service_Handler::JAWS_HTTP_Service_Handler (void) : JAWS_Protocol_Handler (JAWS_HTTP_Read_Request::instance (), & this->data_) , data_ (this) { } int<|fim▁hole|> if (result < 0) return -1; return 0; } int JAWS_HTTP_Service_Handler::close (unsigned long) { delete this; return 0; } int JAWS_HTTP_Acceptor::init (int argc, ACE_TCHAR *argv[]) { ACE_Get_Opt opt (argc, argv, ACE_TEXT("p:")); unsigned short p = 0; int c; while ((c = opt ()) != -1) switch (c) { case 'p': p = (unsigned short) ACE_OS::atoi (opt.optarg); break; default: break; } if (p == 0) p = 8000; if (this->open (ACE_INET_Addr (p)) == -1) { ACE_DEBUG ((LM_DEBUG, "%p\n", "ACE_Acceptor::open")); return -1; } return 0; } ACE_SVC_FACTORY_DEFINE (JAWS_HTTP_Acceptor)<|fim▁end|>
JAWS_HTTP_Service_Handler::open (void *) { int result = JAWS_Concurrency::instance ()->putq (this);
<|file_name|>darts_game.js<|end_file_name|><|fim▁begin|>var mongoose = require('mongoose'), _ = require('underscore'), Schema = mongoose.Schema, ObjectId = Schema.ObjectId; var Throw = new Schema({ score: { type: Number, required: true, min: 0, max: 25 }, modifier: { type: Number, required: true, min: 1, max: 3 }, }); var DartsPlayer = new Schema({ name: { type: String, required: true }, throws: [Throw], }); DartsPlayer.virtual('score') .get( function() { var game = this.parentArray._parent; if (_.isEmpty(this.throws)) return +game.startingScore; return _.reduce(this.throws, function(memo, t) { var potentialScore = memo - t.score * t.modifier; if (potentialScore < 0 || potentialScore == 0 && game.out == 2 && t.modifier != 2 || potentialScore == 1 && game.out == 2) return memo; else return potentialScore; }, +game.startingScore); }); var DartsGame = new Schema({ startingScore: { type: Number, required: true, min: 301, max: 1001, default: 501 }, out: { type: Number, required: true, min: 1, max: 2, default: 2 }, players: [DartsPlayer], throwNumber: { type: Number, required: true, min: 0, max: 2, default: 0 }, currentPlayer: { type: Number, required: true, default: 0 }, userId: { type: ObjectId, required: true }, }); DartsGame.method('setPlayers', function(players) { for (var i in players) { this.players.push({ id: players[i].id, name: players[i].name, }); } }); DartsGame.method('throw', function(score, modifier) { function validate(score, modifier) { if (score == 25 && (modifier == 1 || modifier == 2)) return; if (score > 20) throw 'Can\'t score higher than 20'; if (score < 0) throw 'Can\'t score lower than 0'; if (modifier > 3) throw 'Modifier bigger than 3 is not allowed'; if (modifier < 0) throw 'Negative modifier is not allowed'; }; function nextThrow(game) { if (game.throwNumber == 2) { game.throwNumber = 0; if (game.currentPlayer == game.players.length - 1) { game.currentPlayer = 0; } else game.currentPlayer++; } else game.throwNumber++; } if (!this.isOver()) { if (modifier == null) modifier = 1; validate(score, modifier); var player = this.players[this.currentPlayer]; player.throws.push({score: score, modifier: modifier}); nextThrow(this); } }); String.prototype.startsWith = function(str) { return (this.indexOf(str) === 0); }; DartsGame.method('parseThrow', function(score) { if (score.startsWith('D')) { this.throw(score.substring(1), 2); } else if (score.startsWith('T')) { this.throw(score.substring(1), 3); } else { if (!_.isNaN(+score)) this.throw(+score); else throw 'Not a legal score'; } }); DartsGame.method('isOver', function() { return _.any(this.players, function(player) { return player.score == 0; }); }); DartsGame.method('winner', function() { return _.detect(this.players, function(player) { return player.score == 0; }); }); DartsGame.method('isStarted', function() { return _.any(this.players, function(player) { return !_.isEmpty(player.throws); }); }); DartsGame.method('lastThrower', function() { if (this.throwNumber == 0) { if (this.currentPlayer == 0) { return this.players[this.players.length - 1]; } return this.players[this.currentPlayer - 1]; } else { return this.players[this.currentPlayer]; } }); DartsGame.method('undoThrow', function() {<|fim▁hole|> if (this.isStarted()) { if (this.throwNumber == 0) { this.throwNumber = 2; if (this.currentPlayer == 0) { this.currentPlayer = this.players.length - 1; } else { this.currentPlayer--; } } else { this.throwNumber--; } _.last(this.players[this.currentPlayer].throws).remove(); } }); mongoose.model('DartsGame',
DartsGame);<|fim▁end|>
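
The DartsPlayer 'score' virtual above folds every throw against the starting score and silently skips busts: a throw that would drop below zero, reach zero without a double while double-out is in force, or leave exactly 1. The same rule isolated as a standalone Python function (the names are mine, not the schema's):

# The x01 bust rule from the 'score' virtual, as a plain function.
def remaining_score(throws, starting_score=501, out=2):
    score = starting_score
    for value, modifier in throws:
        candidate = score - value * modifier
        bust = (candidate < 0
                or (candidate == 0 and out == 2 and modifier != 2)
                or (candidate == 1 and out == 2))
        if not bust:
            score = candidate  # busted throws leave the score untouched
    return score

print(remaining_score([(20, 3), (20, 3), (20, 3)]))   # 501 -> 321
print(remaining_score([(19, 2)], starting_score=39))  # would leave 1: bust, stays 39
print(remaining_score([(16, 2)], starting_score=32))  # double 16 checks out: 0
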
<|file_name|>global_spec.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */<|fim▁hole|>declare var globalThis: any /** TODO #9100 */; { describe('global', () => { it('should be global this value', () => { const _global = new Function('return this')(); expect(global).toBe(_global); }); if (typeof globalThis !== 'undefined') { it('should use globalThis as global reference', () => { expect(global).toBe(globalThis); }); } }); }<|fim▁end|>
import {global} from '../../src/util/global'; // Not yet available in TypeScript: https://github.com/Microsoft/TypeScript/pull/29332
<|file_name|>dassault.cpp<|end_file_name|><|fim▁begin|>// license:BSD-3-Clause // copyright-holders:Bryan McPhail /*************************************************************************** Desert Assault Video emulation - Bryan McPhail, [email protected] I'm not sure if one of the alpha blending effects is correct (mode 0x8000, the usual mode 0x4000 should be correct). It may be some kind of orthogonal priority effect where it should cut a hole in other higher priority sprites to reveal a non-alpha'd hole, or alpha against a further back tilemap. (is this the helicopter shadow at the end of lv.1 ?) Also, some priorities are still a little questionable. ****************************************************************************/ #include "emu.h" #include "includes/dassault.h" /******************************************************************************/ void dassault_state::video_start() { m_sprgen1->alloc_sprite_bitmap(); m_sprgen2->alloc_sprite_bitmap(); } void dassault_state::mixdassaultlayer(bitmap_rgb32 &bitmap, bitmap_ind16* sprite_bitmap, const rectangle &cliprect, UINT16 pri, UINT16 primask, UINT16 penbase, UINT8 alpha) { int y, x; const pen_t *paldata = &m_palette->pen(0); UINT16* srcline; UINT32* dstline; for (y=cliprect.min_y;y<=cliprect.max_y;y++) { srcline=&sprite_bitmap->pix16(y,0); dstline=&bitmap.pix32(y,0); for (x=cliprect.min_x;x<=cliprect.max_x;x++) { UINT16 pix = srcline[x]; if ((pix & primask) != pri) continue; if (pix&0xf) { UINT16 pen = pix&0x1ff; if (pix & 0x800) pen += 0x200; if (alpha!=0xff) { if (pix&0x600) { UINT32 base = dstline[x]; dstline[x] = alpha_blend_r32(base, paldata[pen+penbase], alpha); } else { dstline[x] = paldata[pen+penbase]; } } else { dstline[x] = paldata[pen+penbase]; } } } } } /* are the priorities 100% correct? they're the same as they were before conversion to DECO52 sprite device, but if (for example) you walk to the side of the crates in the first part of the game you appear over them... */ UINT32 dassault_state::screen_update_dassault(screen_device &screen, bitmap_rgb32 &bitmap, const rectangle &cliprect) { address_space &space = machine().driver_data()->generic_space(); UINT16 flip = m_deco_tilegen1->pf_control_r(space, 0, 0xffff); UINT16 priority = m_decocomn->priority_r(space, 0, 0xffff); m_sprgen2->draw_sprites(bitmap, cliprect, m_spriteram2->buffer(), 0x400, false); m_sprgen1->draw_sprites(bitmap, cliprect, m_spriteram->buffer(), 0x400, false); bitmap_ind16* sprite_bitmap1 = &m_sprgen1->get_sprite_temp_bitmap(); bitmap_ind16* sprite_bitmap2 = &m_sprgen2->get_sprite_temp_bitmap(); /* Update tilemaps */ flip_screen_set(BIT(flip, 7)); m_deco_tilegen1->pf_update(nullptr, m_pf2_rowscroll); m_deco_tilegen2->pf_update(nullptr, m_pf4_rowscroll); /* Draw playfields/update priority bitmap */ screen.priority().fill(0, cliprect); bitmap.fill(m_palette->pen(3072), cliprect); m_deco_tilegen2->tilemap_2_draw(screen, bitmap, cliprect, TILEMAP_DRAW_OPAQUE, 0); /* The middle playfields can be swapped priority-wise */ if ((priority & 3) == 0) { mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0600, 0x0600, 0x400, 0xff); // 1<|fim▁hole|> mixdassaultlayer(bitmap, sprite_bitmap2, cliprect, 0x0000, 0x0000, 0x800, 0x80); // 64? 
mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0000, 0x0600, 0x400, 0xff); // 128 } else if ((priority & 3) == 1) { mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0600, 0x0600, 0x400, 0xff); // 1 m_deco_tilegen2->tilemap_1_draw(screen, bitmap, cliprect, 0, 2); // 2 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0400, 0x0600, 0x400, 0xff); // 8 mixdassaultlayer(bitmap, sprite_bitmap2, cliprect, 0x0000, 0x0000, 0x800, 0x80); // 16? mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0200, 0x0600, 0x400, 0xff); // 32 m_deco_tilegen1->tilemap_2_draw(screen, bitmap, cliprect, 0, 64); // 64 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0000, 0x0600, 0x400, 0xff); // 128 } else if ((priority & 3) == 3) { mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0600, 0x0600, 0x400, 0xff); // 1 m_deco_tilegen2->tilemap_1_draw(screen, bitmap, cliprect, 0, 2); // 2 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0400, 0x0600, 0x400, 0xff); // 8 m_deco_tilegen1->tilemap_2_draw(screen, bitmap, cliprect, 0, 16); // 16 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0200, 0x0600, 0x400, 0xff); // 32 mixdassaultlayer(bitmap, sprite_bitmap2, cliprect, 0x0000, 0x0000, 0x800, 0x80); // 64? mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0000, 0x0600, 0x400, 0xff); // 128 } else { /* Unused */ } m_deco_tilegen1->tilemap_1_draw(screen, bitmap, cliprect, 0, 0); return 0; }<|fim▁end|>
m_deco_tilegen1->tilemap_2_draw(screen, bitmap, cliprect, 0, 2); // 2 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0400, 0x0600, 0x400, 0xff); // 8 m_deco_tilegen2->tilemap_1_draw(screen, bitmap, cliprect, 0, 16); // 16 mixdassaultlayer(bitmap, sprite_bitmap1, cliprect, 0x0200, 0x0600, 0x400, 0xff); // 32
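
mixdassaultlayer keeps only sprite pixels whose priority bits match the requested group, then either copies the palette colour or alpha-blends it over the framebuffer. A sketch of that per-pixel rule in Python; the palette and constants below are illustrative, not MAME's internals (colours are (r, g, b) tuples):

# Per-pixel sprite mixing as in mixdassaultlayer: select by priority bits,
# then copy or alpha-blend the palette colour.
def alpha_blend(dst, src, alpha):
    # Per-channel linear interpolation; alpha is 0..255 (255 = opaque src).
    return tuple((s * alpha + d * (255 - alpha)) // 255 for s, d in zip(src, dst))

def mix_pixel(dst_rgb, pix, palette, pri, primask, penbase, alpha):
    if (pix & primask) != pri:   # pixel belongs to another priority group
        return dst_rgb
    if not (pix & 0xF):          # pen 0 within the group is transparent
        return dst_rgb
    pen = pix & 0x1FF
    if pix & 0x800:              # second palette bank, as in the C++ above
        pen += 0x200
    src_rgb = palette[pen + penbase]
    if alpha != 0xFF and (pix & 0x600):
        return alpha_blend(dst_rgb, src_rgb, alpha)
    return src_rgb

palette = {0x401: (255, 0, 0)}
print(mix_pixel((0, 0, 255), 0x201, palette, 0x0200, 0x0600, 0x400, 0x80))
# -> (128, 0, 127): red sprite blended at ~50% over a blue background
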
<|file_name|>providers.py<|end_file_name|><|fim▁begin|>""" Helper functions related to the creation, listing, filtering and destruction of providers The list_providers function in this module depends on a (by default global) dict of filters. If you are writing tests or fixtures, you want to depend on this function as a de facto gateway. The rest of the functions, such as get_mgmt, get_crud, get_provider_keys etc. ignore this global dict and will provide you with whatever you ask for with no limitations. The main clue to know what is limited by the filters and what isn't is the 'filters' parameter. """ import operator import six from collections import Mapping, OrderedDict from copy import copy from cfme.common.provider import all_types from cfme.exceptions import UnknownProviderType from utils import conf, version from utils.log import logger providers_data = conf.cfme_data.get("management_systems", {}) # Dict of active provider filters {name: ProviderFilter} global_filters = {} def load_setuptools_entrypoints(): """ Load modules from querying the specified setuptools entrypoint name.""" from pkg_resources import (iter_entry_points, DistributionNotFound, VersionConflict) for ep in iter_entry_points('manageiq_integration_tests'): # is the plugin registered or blocked? try: ep.load() except DistributionNotFound: continue except VersionConflict as e: raise Exception( "Plugin {} could not be loaded: {}!".format(ep.name, e)) class ProviderFilter(object): """ Filter used to obtain only providers matching given requirements Args: keys: List of acceptable provider keys, all if `None` categories: List of acceptable provider categories, all if `None` types: List of acceptable provider types, all if `None` required_fields: List of required fields, see :py:func:`providers_by_class` restrict_version: Checks provider version in yamls if `True` required_tags: List of tags that must be set in yamls inverted: Inclusive if `False`, exclusive otherwise conjunctive: If true, all subfilters are applied and all must match (default) If false (disjunctive), at least one of the subfilters must match """ _version_operator_map = OrderedDict([('>=', operator.ge), ('<=', operator.le), ('==', operator.eq), ('!=', operator.ne), ('>', operator.gt), ('<', operator.lt)]) def __init__(self, keys=None, classes=None, required_fields=None, required_tags=None, required_flags=None, restrict_version=False, inverted=False, conjunctive=True): self.keys = keys self.classes = classes self.required_fields = required_fields self.required_tags = required_tags self.required_flags = required_flags self.restrict_version = restrict_version self.inverted = inverted self.conjunctive = conjunctive def _filter_keys(self, provider): """ Filters by provider keys """ if self.keys is None: return None return provider.key in self.keys def _filter_classes(self, provider): """ Filters by provider (base) classes """ if self.classes is None: return None return any([provider.one_of(prov_class) for prov_class in self.classes]) def _filter_required_fields(self, provider): """ Filters by required yaml fields (specified usually during test parametrization) """ if self.required_fields is None: return None for field_or_fields in self.required_fields: if isinstance(field_or_fields, tuple): field_ident, field_value = field_or_fields else: field_ident, field_value = field_or_fields, None if isinstance(field_ident, six.string_types): if field_ident not in provider.data: return False else: if field_value: if provider.data[field_ident] != field_value: return False
else: o = provider.data try: for field in field_ident: o = o[field] if field_value: if o != field_value: return False except (IndexError, KeyError): return False return True def _filter_required_tags(self, provider): """ Filters by required yaml tags """ prov_tags = provider.data.get('tags', []) if self.required_tags is None: return None if set(self.required_tags) & set(prov_tags): return True return False def _filter_required_flags(self, provider): """ Filters by required yaml flags """ if self.required_flags is None: return None if self.required_flags: test_flags = [flag.strip() for flag in self.required_flags] defined_flags = conf.cfme_data.get('test_flags', '') if isinstance(defined_flags, six.string_types): defined_flags = defined_flags.split(',') defined_flags = [flag.strip() for flag in defined_flags] excluded_flags = provider.data.get('excluded_test_flags', '') if isinstance(excluded_flags, six.string_types): excluded_flags = excluded_flags.split(',') excluded_flags = [flag.strip() for flag in excluded_flags] allowed_flags = set(defined_flags) - set(excluded_flags) if set(test_flags) - allowed_flags: logger.info("Filtering Provider %s out because it does not have the right flags, " "%s does not contain %s", provider.name, list(allowed_flags), list(set(test_flags) - allowed_flags)) return False return True def _filter_restricted_version(self, provider): """ Filters by yaml version restriction; not applied if SSH is not available """ if self.restrict_version: # TODO # get rid of this since_version hotfix by translating since_version # to restricted_version; in addition, restricted_version should turn into # "version_restrictions" and it should be a sequence of restrictions with operators # so that we can create ranges like ">= 5.6" and "<= 5.8" version_restrictions = [] since_version = provider.data.get('since_version') if since_version: version_restrictions.append('>= {}'.format(since_version)) restricted_version = provider.data.get('restricted_version') if restricted_version: version_restrictions.append(restricted_version) for restriction in version_restrictions: for op, comparator in ProviderFilter._version_operator_map.items(): # split string by op; if the split works, version won't be empty head, op, ver = restriction.partition(op) if not ver: # This means that the operator was not found continue try: curr_ver = version.current_version() except: return True if not comparator(curr_ver, ver): return False break else: raise Exception('Operator not found in {}'.format(restriction)) return None def __call__(self, provider): """ Applies this filter on a given provider Usage: pf = ProviderFilter('cloud_infra', categories=['cloud', 'infra']) providers = list_providers([pf]) pf2 = ProviderFilter( classes=[GCEProvider, EC2Provider], required_fields=['small_template']) provider_keys = [prov.key for prov in list_providers([pf, pf2])] ^ this will list keys of all GCE and EC2 providers ...or... pf = ProviderFilter(required_tags=['openstack', 'complete']) pf_inverted = ProviderFilter(required_tags=['disabled'], inverted=True) providers = list_providers([pf, pf_inverted]) ^ this will return providers that have both the "openstack" and "complete" tags set and at the same time don't have the "disabled" tag ...or... 
pf = ProviderFilter(keys=['rhevm34'], classes=[CloudProvider], conjunctive=False) providers = list_providers([pf]) ^ this will list all providers that either have the 'rhevm34' key or are an instance of the CloudProvider class and therefore are a cloud provider Returns: `True` if provider passed all checks and was not filtered out, `False` otherwise. The result is opposite if the 'inverted' attribute is set to `True`. """ keys_l = self._filter_keys(provider) classes_l = self._filter_classes(provider) fields_l = self._filter_required_fields(provider) tags_l = self._filter_required_tags(provider) flags_l = self._filter_required_flags(provider) version_l = self._filter_restricted_version(provider) results = [keys_l, classes_l, fields_l, tags_l, flags_l, version_l] relevant_results = [res for res in results if res in [True, False]] compiling_fn = all if self.conjunctive else any # If all / any filters return true, the provider was not blocked (unless inverted) if compiling_fn(relevant_results): return not self.inverted return self.inverted def copy(self): return copy(self) # Only providers without the 'disabled' tag global_filters['enabled_only'] = ProviderFilter(required_tags=['disabled'], inverted=True) # Only providers relevant for current appliance version (requires SSH access when used) global_filters['restrict_version'] = ProviderFilter(restrict_version=True) def list_providers(filters=None, use_global_filters=True, appliance=None): """ Lists provider crud objects, global filter optional Args: filters: List of :py:class:`ProviderFilter` or None use_global_filters: Will apply global filters as well if `True`, will not otherwise appliance: Optional :py:class:`utils.appliance.IPAppliance` to be passed to provider CRUD objects Note: Requires the framework to be pointed at an appliance to succeed. Returns: List of provider crud objects. """ if isinstance(filters, six.string_types): raise TypeError( 'You are probably using the old-style invocation of provider setup functions! ' 'You need to change it appropriately.') filters = filters or [] if use_global_filters: filters = filters + global_filters.values() providers = [get_crud(prov_key, appliance=appliance) for prov_key in providers_data] for prov_filter in filters: providers = filter(prov_filter, providers) return providers def list_providers_by_class(prov_class, use_global_filters=True, appliance=None): """ Lists provider crud objects of a specific class (or its subclasses), global filter optional Args: prov_class: Provider class to apply for filtering use_global_filters: See :py:func:`list_providers` appliance: Optional :py:class:`utils.appliance.IPAppliance` to be passed to provider CRUD objects Note: Requires the framework to be pointed at an appliance to succeed. Returns: List of provider crud objects. """ pf = ProviderFilter(classes=[prov_class]) return list_providers(filters=[pf], use_global_filters=use_global_filters, appliance=appliance) def list_provider_keys(provider_type=None): """ Lists provider keys from conf (yamls) Args: provider_type: Optional filtering by 'type' string (from yaml); disabled by default Note: Doesn't require the framework to be pointed at an appliance to succeed. Returns: List of provider keys (strings).
""" try: all_keys = conf.cfme_data.management_systems.keys() except: all_keys = [] if provider_type: filtered_keys = [] for key in all_keys: if conf.cfme_data.management_systems[key].type == provider_type: filtered_keys.append(key) return filtered_keys else: return all_keys def get_class_from_type(prov_type): try: return all_types()[prov_type] except KeyError: raise UnknownProviderType("Unknown provider type: {}!".format(prov_type)) def get_crud(provider_key, appliance=None): """ Creates a Provider object given a management_system key in cfme_data. Usage: get_crud('ec2east') Returns: A Provider object that has methods that operate on CFME """ prov_config = providers_data[provider_key] prov_type = prov_config.get('type') return get_class_from_type(prov_type).from_config( prov_config, provider_key, appliance=appliance) def get_crud_by_name(provider_name, appliance=None): """ Creates a Provider object given a management_system name in cfme_data. <|fim▁hole|> Usage: get_crud_by_name('My RHEV 3.6 Provider') Returns: A Provider object that has methods that operate on CFME """ for provider_key, provider_data in providers_data.items(): if provider_data.get("name") == provider_name: return get_crud(provider_key, appliance=appliance) raise NameError("Could not find provider {}".format(provider_name)) def get_mgmt(provider_key, providers=None, credentials=None): """ Provides a ``wrapanapi`` object, based on the request. Args: provider_key: The name of a provider, as supplied in the yaml configuration files. You can also use the dictionary if you want to pass the provider data directly. providers: A set of data in the same format as the ``management_systems`` section in the configuration yamls. If ``None`` then the configuration is loaded from the default locations. Expects a dict. credentials: A set of credentials in the same format as the ``credentials`` yamls files. If ``None`` then credentials are loaded from the default locations. Expects a dict. 
Return: A provider instance of the appropriate ``wrapanapi.WrapanapiAPIBase`` subclass """ if providers is None: providers = providers_data # provider_key can also be provider_data for some reason # TODO rename the parameter; might break things if isinstance(provider_key, Mapping): provider_data = provider_key else: provider_data = providers[provider_key] if credentials is None: # We need to handle the in-place credentials if provider_data.get('endpoints'): credentials = provider_data['endpoints']['default']['credentials'] else: credentials = provider_data['credentials'] # If it is not a mapping, it most likely points to a credentials yaml (as by default) if not isinstance(credentials, Mapping): credentials = conf.credentials[credentials] # Otherwise it is a mapping and therefore we consider it credentials # Munge together provider dict and creds, # Let the provider do whatever they need with them provider_kwargs = provider_data.copy() provider_kwargs.update(credentials) if not provider_kwargs.get('username') and provider_kwargs.get('principal'): provider_kwargs['username'] = provider_kwargs['principal'] provider_kwargs['password'] = provider_kwargs['secret'] if isinstance(provider_key, six.string_types): provider_kwargs['provider_key'] = provider_key provider_kwargs['logger'] = logger return get_class_from_type(provider_data['type']).mgmt_class(**provider_kwargs) class UnknownProvider(Exception): def __init__(self, provider_key, *args, **kwargs): super(UnknownProvider, self).__init__(provider_key, *args, **kwargs) self.provider_key = provider_key def __str__(self): return ('Unknown provider: "{}"'.format(self.provider_key))<|fim▁end|>
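
Since list_providers is meant as the de facto gateway for tests, a short usage sketch may help. It assumes this module is importable as utils.providers (path assumed); 'InfraProvider', its import path, and the key 'vsphere55' are hypothetical examples, not names from the yamls:

# Hedged usage sketch for the filtering gateway above; names are assumed.
from utils.providers import ProviderFilter, list_providers, get_mgmt
from cfme.infrastructure.provider import InfraProvider  # assumed import path

# Keep infra providers tagged 'smoke', but drop anything tagged 'flaky'.
pf_smoke = ProviderFilter(classes=[InfraProvider], required_tags=['smoke'])
pf_not_flaky = ProviderFilter(required_tags=['flaky'], inverted=True)

for provider in list_providers([pf_smoke, pf_not_flaky]):
    print(provider.key, provider.data.get('tags', []))

# Low-level wrapanapi backend for a single yaml key, bypassing global filters:
mgmt = get_mgmt('vsphere55')  # hypothetical key from the cfme_data yamls
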
<|file_name|>prometheus_defaults.go<|end_file_name|><|fim▁begin|>/* Copyright 2020 The Knative Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( "context" ) func (s *PrometheusSource) SetDefaults(ctx context.Context) {<|fim▁hole|>func (s *PrometheusSourceSpec) SetDefaults(ctx context.Context) { // Nothing yet. }<|fim▁end|>
s.Spec.SetDefaults(ctx) }
<|file_name|>timezone.py<|end_file_name|><|fim▁begin|>"""Phone number to time zone mapping functionality >>> import phonenumbers >>> from phonenumbers.timezone import time_zones_for_number >>> ro_number = phonenumbers.parse("+40721234567", "RO") >>> tzlist = time_zones_for_number(ro_number) >>> len(tzlist) 1 >>> str(tzlist[0]) 'Europe/Bucharest' >>> mx_number = phonenumbers.parse("+523291234567", "GB") >>> tzlist = time_zones_for_number(mx_number) >>> len(tzlist) 2 >>> str(tzlist[0]) 'America/Mazatlan' >>> str(tzlist[1]) 'America/Mexico_City' """ # Based very loosely on original Java code: # java/geocoder/src/com/google/i18n/phonenumbers/PhoneNumberToTimeZonesMapper.java # Copyright (C) 2013 The Libphonenumber Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .util import prnt, u, U_PLUS from .phonenumberutil import PhoneNumberType, number_type from .phonenumberutil import PhoneNumberFormat, format_number from .phonenumberutil import is_number_type_geographical try: from .tzdata import TIMEZONE_DATA, TIMEZONE_LONGEST_PREFIX except ImportError: # pragma no cover # Before the generated code exists, the carrierdata/ directory is empty. # The generation process imports this module, creating a circular # dependency. The hack below works around this. import os import sys if (os.path.basename(sys.argv[0]) == "buildmetadatafromxml.py" or os.path.basename(sys.argv[0]) == "buildprefixdata.py"): prnt("Failed to import generated data (but OK as during autogeneration)", file=sys.stderr) TIMEZONE_DATA = {'4411': u('Europe/London')} TIMEZONE_LONGEST_PREFIX = 4 else: raise __all__ = ['UNKNOWN_TIMEZONE', 'time_zones_for_geographical_number', 'time_zones_for_number'] # This is defined by ICU as the unknown time zone. UNKNOWN_TIMEZONE = u("Etc/Unknown") _UNKNOWN_TIME_ZONE_LIST = (UNKNOWN_TIMEZONE,) def time_zones_for_geographical_number(numobj): """Returns a list of time zones to which a phone number belongs. This method assumes the validity of the number passed in has already been checked, and that the number is geo-localizable. We consider fixed-line and mobile numbers possible candidates for geo-localization. Arguments: numobj -- a valid phone number for which we want to get the time zones to which it belongs Returns a list of the corresponding time zones or a single element list with the default unknown time zone if no other time zone was found or if<|fim▁hole|> the number was invalid""" e164_num = format_number(numobj, PhoneNumberFormat.E164) if not e164_num.startswith(U_PLUS): # pragma no cover # Can only hit this arm if there's an internal error in the rest of # the library raise Exception("Expect E164 number to start with +") for prefix_len in range(TIMEZONE_LONGEST_PREFIX, 0, -1): prefix = e164_num[1:(1 + prefix_len)] if prefix in TIMEZONE_DATA: return TIMEZONE_DATA[prefix] return _UNKNOWN_TIME_ZONE_LIST def time_zones_for_number(numobj): """As time_zones_for_geographical_number() but explicitly checks the validity of the number passed in. 
Arguments: numobj -- a valid phone number for which we want to get the time zones to which it belongs Returns a list of the corresponding time zones or a single element list with the default unknown time zone if no other time zone was found or if the number was invalid""" ntype = number_type(numobj) if ntype == PhoneNumberType.UNKNOWN: return _UNKNOWN_TIME_ZONE_LIST elif not is_number_type_geographical(ntype, numobj.country_code): return _country_level_time_zones_for_number(numobj) return time_zones_for_geographical_number(numobj) def _country_level_time_zones_for_number(numobj): """Returns the list of time zones corresponding to the country calling code of a number. Arguments: numobj -- the phone number to look up Returns a list of the corresponding time zones or a single element list with the default unknown time zone if no other time zone was found or if the number was invalid""" cc = str(numobj.country_code) for prefix_len in range(TIMEZONE_LONGEST_PREFIX, 0, -1): prefix = cc[:(1 + prefix_len)] if prefix in TIMEZONE_DATA: return TIMEZONE_DATA[prefix] return _UNKNOWN_TIME_ZONE_LIST if __name__ == '__main__': # pragma no cover import doctest doctest.testmod()<|fim▁end|>
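
Both lookup paths above scan number prefixes longest-first against the generated TIMEZONE_DATA table. The core idea reduced to a few lines, run against a toy prefix map instead of the generated data:

# Longest-prefix-first lookup, as in time_zones_for_geographical_number.
TOY_TIMEZONE_DATA = {'44': ('Europe/London',), '1': ('America/New_York',)}
TOY_LONGEST_PREFIX = 2

def toy_time_zones(e164_number):
    assert e164_number.startswith('+')
    digits = e164_number[1:]
    for prefix_len in range(TOY_LONGEST_PREFIX, 0, -1):
        prefix = digits[:prefix_len]   # most specific prefix wins
        if prefix in TOY_TIMEZONE_DATA:
            return TOY_TIMEZONE_DATA[prefix]
    return ('Etc/Unknown',)

print(toy_time_zones('+442071234567'))  # ('Europe/London',)
print(toy_time_zones('+12125551234'))   # ('America/New_York',)
print(toy_time_zones('+7999123456'))    # ('Etc/Unknown',)
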
<|file_name|>DKTSTruncation.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) Mike Espig * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ // DKTSTruncation.cpp: Implementation of the DKTSTruncation class. // ////////////////////////////////////////////////////////////////////// #include "DKTSTruncation.hpp" DKTSTruncation::DKTSTruncation(const LongInt& d, const LongInt& r, const LongInt& R, const LongInt& n) { (*this) .allocateDataSpace(d, r, R, n) .generateRandomExample(d, r, R, n) .setDefaultParameter() ; } DKTSTruncation::DKTSTruncation(const IString& fileName, const LongInt& r) { DKTS A; A.readDataFrom(fileName); const LongInt R = A.k(); attr_indexSet = (LongIntPointer) new LongInt[R]; attr_values = (LongRealPointer) new LongReal[R]; (*this) .setInputFileName(fileName) .prepareInputData(A) .setInitialGuestBiggestSum(r) .setDefaultParameter() ; } DKTSTruncation::DKTSTruncation(const IString& fileName, const IString& coFileName, const IString& ortFileName, const LongInt& r) { attr_Z .readDataFrom(ortFileName) ; attr_a .readDataFrom(coFileName) ; const LongInt R = attr_a.k(); (*this) .setOriginalRank(R) ; attr_indexSet = (LongIntPointer) new LongInt[R]; attr_values = (LongRealPointer) new LongReal[R]; (*this) .setInputFileName(fileName) .sortIndexSet() //.computeMainPart(1.0e-16) .setInitialGuestBiggestSum(r) .setDefaultParameter() ; } DKTSTruncation::DKTSTruncation(const DKTS& A, const LongInt& r) { const LongInt R = A.k(); attr_indexSet = (LongIntPointer) new LongInt[R]; attr_values = (LongRealPointer) new LongReal[R]; (*this) .prepareInputData(A) .sortIndexSet() .setInitialGuestBiggestSum(r) .setDefaultParameter() ; } DKTSTruncation::DKTSTruncation(const DKTS& a, const DKTS& A, const DKTS& Z, const LongInt& r) { attr_a = a; attr_Z = Z; const LongInt R = attr_a.k(); attr_indexSet = (LongIntPointer) new LongInt[R]; attr_values = (LongRealPointer) new LongReal[R]; (*this) .sortIndexSet() .setInitialGuestBiggestSum(r) .setDefaultParameter() ; } DKTSTruncation::~DKTSTruncation() { (*this) .deleteDataSpace() ; } DKTSTruncation& DKTSTruncation::setDefaultParameter() { ProtocolFormat pF; pF .setTablePosition("H") .setMaxTableSize(25) ; ProtocolProperties pP(pF); pP .setTopicString("R1 Updates") .setTeXFileName("lastRunR1.tex") ; attr_truncationLogR1.setProperties(pP); return (*this); } DKTSDIterationInfo DKTSTruncation::bestR1Truncation(const LongInt& r, DKTS& a, DKTS& x, const LongReal& normD) { Random rGenerator; decomposer() .setPrintCout(false) ; LongReal error = 1.0e20; LongReal errorMin = error; Timer time; const LongInt d = a.d(); const LongInt n = a.n(); x.resize(d, 1, n); DKTS x0(x); DKTSDIterationInfo infoEntry; time.startTiming(); const LongInt R = attr_a.k(); const LongInt mR = MIN(7+1, R); LongInt index = 0; LongInt max = 0; LongInt maxR = 23; LongInt quality = -1; bool noOne = true; DKTSDDataBlock dataBlock; addBeginTrancationR1(r, attr_truncationLogR1); cout << "Initial guess : ";
for(LongInt i=1; i<mR || max==maxR; i++) { cout << "."; addTrancationR1(i, r, attr_truncationLogR1); const LongInt rIndex = rGenerator.randomLongInt(R); index = rIndex; x0.copyFromTo(attr_a, -index, index, index); const LongReal normX0 = frobeniusNorm(x0); x0 *= (normD/normX0); dataBlock.removeAllEntrys(); infoEntry = attr_decomposer.decompose(a, x0, normD, dataBlock); attr_truncationLogR1.add(dataBlock); error = infoEntry.error(); if(1.0 < error && noOne) { i--; } else { if(error < errorMin) { noOne = false; x = x0; errorMin = error; quality = attr_decomposer.quality(); if(errorMin < 0.92) { i = mR; } } } } LongReal sec = time.elapsedTimeSec(); cout << errorMin << ", (" << quality << "), " << sec << "." << endl << endl; addEndTrancation(infoEntry, r, attr_truncationLogR1); infoEntry.setCalculationTime(sec); decomposer() .setPrintCout(true) ; return infoEntry; } /* DKTSDIterationInfo DKTSTruncation::bestR1Truncation(const LongInt& r, DKTS& a, DKTS& x, const LongReal& normD) { decomposer() .setPrintCout(false) ; LongReal error = 1.0e20; Timer time; const LongInt d = a.d(); const LongInt R = a.k(); const LongInt n = a.n(); LongInt quality = -1; x.resize(d, 1, n); for(LongInt mu=0; mu<d; mu++) { DKTVector& v = x(0,mu); for(LongInt i=0; i<R; i++) { v += a(i,mu); } v.normalized(); } //x *= normD; DKTSDIterationInfo infoEntry; time.startTiming(); DKTSDDataBlock dataBlock; addBeginTrancationR1(r, attr_truncationLogR1); cout << "Initial guess : "; addTrancationR1(1, r, attr_truncationLogR1); dataBlock.removeAllEntrys(); infoEntry = attr_decomposer.decompose(a, x, normD, dataBlock); attr_truncationLogR1.add(dataBlock); error = infoEntry.error(); quality = attr_decomposer.quality(); LongReal sec = time.elapsedTimeSec(); cout << error << ", (" << quality << "), " << sec << "." 
<< endl << endl; addEndTrancation(infoEntry, r, attr_truncationLogR1); infoEntry.setCalculationTime(sec); decomposer() .setPrintCout(true) ; return infoEntry; } */ DKTSDIterationInfo DKTSTruncation::truncate2(const LongInt& rT, DKTS& a, DKTS& x, const LongReal& normA)//, Protocol& protocol) { const LongInt d = a.d(); const LongInt R = a.k(); const LongInt n = a.n(); DKTSDIterationInfo infoEntry; DKTSDDataBlock dataBlock; infoEntry = bestR1Truncation(1, a, x, normA); LongReal error = infoEntry.error(); DKTS x0(d, 1, n), residuum(d, R, n); for(LongInt r=1; r<rT; r++) { residuum.setSumOf(1.0, a, -1.0, x); bestR1Truncation(r, residuum, x0, normA*error); DKTS xt(x); x.setSumOf(1.0, xt, 1.0, x0); x.reScaled(); decomposer() .setPrintCout(false) ; infoEntry = attr_decomposer.decompose(a, x, normA, dataBlock); error = infoEntry.error(); } decomposer() .setPrintCout(true) ; return infoEntry; } DKTSDIterationInfo DKTSTruncation::startTruncation(DKTS& a, DKTS& x, const LongReal& normA) { DKTSDIterationInfo infoEntry; Timer time; DKTSDDataBlock dataBlock; time.startTiming(); infoEntry = attr_decomposer.decompose(a, x, normA, dataBlock); LongReal sec = time.elapsedTimeSec(); if(attr_decomposer.printCout()) { cout << "Time [sec] = " << sec << endl; } infoEntry.setCalculationTime(sec); addDataBlock(dataBlock, infoEntry, x.k(), attr_truncationLog); return infoEntry; } DKTSDIterationInfo DKTSTruncation::truncate() { if(attr_decomposer.printCout()) { writeParameter(cout); } return startTruncation(attr_a, attr_x, normA()); } DKTSTruncation& DKTSTruncation::writeSolutionTo(const IString& fileName) { DKTS X; X .regeneratedBy(attr_x, attr_Z) .reScaled() .writeDataTo(fileName) ; return (*this); } DKTSTruncation& DKTSTruncation::allocateDataSpace(const LongInt& d, const LongInt& r, const LongInt& R, const LongInt& n) { attr_indexSet = (LongIntPointer) new LongInt [R]; attr_values = (LongRealPointer) new LongReal[R]; attr_Z.resize(d, R, n); attr_a.resize(d, R, R); attr_x.resize(d, r, R); return (*this); } DKTSTruncation& DKTSTruncation::deleteDataSpace() { delete [] attr_indexSet; delete [] attr_values; return (*this); } DKTSTruncation& DKTSTruncation::resize(const LongInt& d, const LongInt& r, const LongInt& R, const LongInt& n) { const LongReal d1 = attr_a.d(); const LongReal R1 = attr_a.k(); const LongReal r1 = attr_x.k(); const LongReal n1 = attr_Z.n(); if(d1==d && r1==r && R1==R && n1==n) { attr_a.setNull(); attr_x.setNull(); attr_Z.setNull(); } else { (*this) .deleteDataSpace() .allocateDataSpace(d, r, R, n) ; } return (*this); } DKTSTruncation& DKTSTruncation::prepareInputData(const DKTS& A) { (*this) .prepareInputData(A, 1) ; return (*this); } DKTSTruncation& DKTSTruncation::prepareInputData(const DKTS& A, const LongInt& r) { const LongInt d = A.d(); const LongInt R = A.k(); const LongInt n = A.n(); (*this) .resize(d, r, R, n) .setOriginalRank(R) ; Timer time; time.startTiming(); IString date(time.date()); attr_Z.setOrthogonal2(A); attr_a.setCoefficientsSystemOf(A, attr_Z); const LongReal sec = time.elapsedTimeSec(); (*this) .setPreCalculationTime(sec) .sortIndexSet() //.computeMainPart(1.0e-16); ; return (*this); } DKTSTruncation& DKTSTruncation::computeMainPart(const LongReal& eps) { const LongInt R = attr_a.k(); // compute the new Rank LongReal normA = DKTSTruncation::normA(); LongReal epsA = eps*normA; LongReal rest = attr_values[attr_indexSet[R-1]]; LongInt diff = 0; LongInt i = R-2; while(rest<epsA && 0<i) { rest += attr_values[attr_indexSet[i]]; diff++; i--; } const LongInt rNew = R - diff; 
if(0<rNew) { setOriginalRank(R); LongIntPointer indexSet = (LongIntPointer) new LongInt [rNew]; LongRealPointer values = (LongRealPointer) new LongReal[rNew]; for(i=0; i<rNew; i++) { indexSet[i] = attr_indexSet[i]; values[i] = attr_values[i]; } delete [] attr_indexSet; delete [] attr_values; attr_indexSet = (LongIntPointer) new LongInt [rNew]; attr_values = (LongRealPointer) new LongReal[rNew]; for(i=0; i<rNew; i++) { attr_indexSet[i] = i; attr_values[i] = values[i]; } DKTS a(attr_a); const LongInt d = a.d(); attr_a.resize(d, rNew, a.n()); for(LongInt mu=0; mu<d; mu++) { for(i=0; i<rNew; i++) { attr_a(i, mu) = a(indexSet[i], mu); } } delete [] indexSet; delete [] values; } return (*this); } DKTSTruncation& DKTSTruncation::prepareInputData(const DKTS& A, const DKTS& X) { const LongInt d = A.d(); const LongInt R = A.k(); const LongInt r = X.k(); const LongInt n = A.n(); (*this) .resize(d, r, R, n) ; Timer time; time.startTiming(); IString date(time.date()); attr_Z.setOrthogonal2(A); attr_a.setCoefficientsSystemOf(A, attr_Z); attr_x.setCoefficientsSystemOf(X, attr_Z); attr_x.reScaled(); const LongReal sec = time.elapsedTimeSec(); (*this) .sortIndexSet() .setPreCalculationTime(sec) //.computeMainPart(1.0e-16); ; return (*this); } DKTSTruncation& DKTSTruncation::generateRandomExample(const LongInt& d, const LongInt& r, const LongInt& R, const LongInt& n, const LongReal& eps) { DKTS A(d, R, n); A .setRand() .scale(eps) ; (*this) .prepareInputData(A, r) .setInitialGuestBiggestSum(r) ; return (*this); } DKTSTruncation& DKTSTruncation::writePreComputationDataTo (const IString& fileNameCt, const IString& fileNameOb) { (*this) .writeCoefficientSystemTo(fileNameCt) .writeOrthogonalBasisTo(fileNameOb) ; return (*this); } DKTSTruncation& DKTSTruncation::resizeInitialGuest(const LongInt& d, const LongInt& r, const LongInt& R, const LongInt& n) { const LongInt rank = MIN(r, R); attr_x.resize(d, rank, n); return (*this); } DKTSTruncation& DKTSTruncation::writeCoefficientSystemTo(const IString& fileName) { attr_a.writeDataTo(fileName); return (*this); } DKTSTruncation& DKTSTruncation::writeOrthogonalBasisTo(const IString& fileName) { attr_Z.writeDataTo(fileName); return (*this); } DKTSTruncation& DKTSTruncation::setInitialGuestBiggestSum(const LongInt& r) { resizeInitialGuest(attr_a.d(), r, attr_a.k(), attr_a.n()); const LongInt k = MIN(attr_x.k(), attr_a.k()); const LongInt d = attr_x.d(); for(LongInt j=0; j<k; j++) { const LongInt index = attr_indexSet[j]; for(LongInt mu=0; mu<d; mu++) { attr_x(j,mu) = attr_a(index,mu); } } attr_x.reScaled(); return (*this); } DKTSTruncation& DKTSTruncation::readInitialGuestFrom(const IString& fileName) { DKTS X; X.readDataFrom(fileName); X.writeIndexSet(cout); attr_x.setCoefficientsSystemOf(X, attr_Z); attr_x.reScaled(); return (*this); } DKTSTruncation& DKTSTruncation::sortIndexSet() { const LongInt R = attr_a.k(); const LongReal normA = frobeniusNorm(attr_a); (*this) .setNormA(normA) ; for(LongInt i=0; i<R; i++) { attr_values[i] = attr_a.frobeniusNormOfSummand(i); attr_indexSet[i] = i; } quickSort(0, R-1, attr_values); return (*this); } LongInt DKTSTruncation::partition(LongInt low, LongInt high, LongRealPointer f) { LongInt i,j; LongReal pivot = f[attr_indexSet[low]]; i = low; for(j=low+1; j<=high; j++) { if (pivot<=f[attr_indexSet[j]]) { i++; swapIndex(i, j); } } swapIndex(i, low); return i; } DKTSTruncation& DKTSTruncation::quickSort(LongInt low, LongInt high, LongRealPointer f) { LongInt m = 0; if (low < high) { m = partition(low, high, f); quickSort(low, m-1, f); 
quickSort(m+1, high, f); } return (*this); } DKTSTruncation& DKTSTruncation::swapIndex (const LongInt& i, const LongInt& j) { LongInt& a_i = attr_indexSet[i]; LongInt& a_j = attr_indexSet[j]; const LongInt temp = a_i; a_i = a_j; a_j = temp; return (*this); } bool DKTSTruncation::writeParameter(ostream& s) const { bool value = true; const LongInt d = attr_x.d(); const LongInt k = attr_x.k(); const LongInt m = attr_x.n(); const LongInt l = originalRank(); const LongInt n = attr_Z.n(); s << "d = " << d << endl; s << "R = " << l << endl; s << "r = " << k << endl; s << "N = " << n << endl; s << "m = " << m << endl; s << setprecision(2); s << "Data Storage Memory = " << setw(4) << (LongReal)( (2*d*l*n + d*k*n + d*k*l + d*l*l)*sizeof(LongReal))/(LongReal)1048576 << " MByte" << endl; return value; } DescriptionData DKTSTruncation::parameterDescription() const { DescriptionData dD; const LongInt d = attr_x.d(); const LongInt k = attr_x.k(); const LongInt m = attr_x.n(); const LongInt l = attr_a.k(); const LongInt n = attr_Z.n(); dD.addString (IString("d ") + IString("$ = ") + IString(d) + IString("$,") + IString("\\hspace{15pt}") + IString("R ") + IString("$ = ") + IString(l) + IString("$,") + IString("\\hspace{15pt}") + IString("r ") + IString("$ = ") + IString(k) + IString("$,") + IString("\\hspace{15pt}") + IString("N ") + IString("$ = ") + IString(n) + IString("$,") + IString("\\hspace{15pt}") + IString("m ") + IString("$ = ") + IString(m) + IString("$")); dD.addString (""); return dD; } bool DKTSTruncation::writeNormsOfA(DescriptionData& dDIn) const { const LongInt R = attr_a.k(); for(LongInt i=0; i<R; i++) { dDIn.addString (IString(attr_indexSet[i]) + IString(" : ") + IString(attr_a.frobeniusNormOfSummand(attr_indexSet[i])) + IString("\\\\")); } return true; } bool DKTSTruncation::writeNormsOfA(ostream& s) const { const LongInt R = attr_a.k(); for(LongInt i=0; i<R; i++) { s << setw(5) << attr_indexSet[i] << " : " << attr_a.frobeniusNormOfSummand(attr_indexSet[i]) << endl; } return true; } DKTSTruncation& DKTSTruncation::addInputTensorInformation(const IString& date, Protocol& truncationLog) { const LongInt d = attr_a.d(); const LongInt R = attr_a.k(); const LongInt l = attr_a.n(); const LongInt k = attr_x.k(); const LongInt n = attr_Z.n(); const LongInt oR = originalRank(); DescriptionData dD; dD.addString(IString("\\chapter{Input Tensor Sum Information}")); dD.addString(IString("Date : ") + IString(date) + IString("\\\\")); // toDo kein _ im FileName dD.addString(IString("Reading Initial-Tensor from : ") + IString("$") + IString("tensor\\_input.ten") + IString("$")); dD.addString(IString("")); dD.addString(IString("Time for Data Precalculation [sec.]") + IString("$\\ =\\ ") + IString(preCalculationTime()) + IString("$")); dD.addString(IString("")); dD.addString( IString("$\\|A\\| = $") + IString(normA())); dD.addString(""); dD.addString (IString("d ") + IString("$ = ") + IString(d) + IString(",$") + IString("\\hspace{15pt}") + IString("R ") + IString("$ = ") + IString(R) + IString(",$") + IString("\\hspace{15pt}") + IString("oR ") + IString("$ = ") + IString(oR) + IString(",$") + IString("\\hspace{15pt}") + IString("n ") + IString("$ = ") + IString(n) + IString("$")); dD.addString(""); dD.addString( IString("Data\\ Storage\\ Memory\\ ") + IString("$\\ =\\ ") + IString((LongReal)( (2*d*l*n + d*k*n + d*k*l + d*l*l)*sizeof(LongReal))/(LongReal)1048576) + IString("$") + IString("\\ MByte")); dD.addString(""); dD.addString(IString("$i:\\|A_i\\|$") + IString("\\\\")); (*this) .writeNormsOfA(dD) ; 
dD.addString(IString("")); dD.addString(IString("$i:\\|A_i\\|$") + IString("\\\\")); attr_a.writeIndexSet(dD); truncationLog.add(dD); return (*this); } DKTSTruncation& DKTSTruncation::addBeginTrancation (const LongInt& r, Protocol& truncationLog) { DescriptionData dD; dD.addString(IString("\\chapter{Best Rank ") + IString(r) + IString(" Truncation}")); truncationLog.add(dD); return (*this); } DKTSTruncation& DKTSTruncation::addBeginTrancationR1(const LongInt& r, Protocol& truncationLog) { DescriptionData dD; dD.addString(IString("\\chapter{Compute Initial Guest for Best ") + IString(r) + IString(" Truncation}")); truncationLog.add(dD); return (*this); } DKTSTruncation& DKTSTruncation::addTrancationR1(const LongInt& run ,const LongInt& rank, Protocol& truncationLog) { DescriptionData dD; dD.addString (IString("\\section{$ ") + IString(run) + IString(" ^{st}$ Test ") + IString("Best ") + IString(rank) + IString(" Truncation}")); truncationLog.add(dD); return (*this); } DKTSTruncation& DKTSTruncation::addDataBlock(const DKTSDDataBlock& dataBlock, const DKTSDIterationInfo& infoBlock, const LongInt& k, Protocol& truncationLog) { addBeginTrancation(k, truncationLog); truncationLog.add(dataBlock); addEndTrancation(infoBlock, k, truncationLog); return (*this); } DKTSTruncation& DKTSTruncation::addEndTrancation(const DKTSDIterationInfo& infoBlock, const LongInt& r, Protocol& truncationLog) { DescriptionData dD; const LongInt nSte = infoBlock.numberOfNewtonSteps(); const LongReal dOld = infoBlock.startError(); const LongReal dNew = infoBlock.error(); const LongReal diff = dOld-dNew; const LongReal time = infoBlock.calculationTime(); dD.addString(IString("Working Memory\\ \\ =\\ ") + IString("$\\ \\ ") + IString((LongReal)(attr_decomposer.memory()*sizeof(LongReal))/(LongReal)1048576) + IString("$ MByte")); <|fim▁hole|> dD.addString("\\\\"); dD.addString(IString("Time\\ [sec]\\ =\\ ") + IString("$\\ \\ ") + IString(time) + IString("$")); dD.addString(""); dD.addString( IString("$") + IString("\\frac{\\|A-X_0\\|}{\\|A\\|} = ") + IString(dOld) + IString("$") + IString("\\hspace{15pt}") + IString("$") + IString("\\frac{\\|A-X_{") + IString(nSte) + IString("}\\|}{\\|A\\|} = ") + IString(dNew) + IString("$") + IString("\\hspace{15pt}") + IString("\\\\")); dD.addString( IString("diff ") + IString("$\\ =\\ ") + IString(diff) + IString("$") + IString("\\hspace{15pt}") + IString("diff[\\%] ") + IString("$\\ =\\ ") + IString(diff/dOld*100) + IString("$") + IString("\\%")); dD.addString(""); dD.addMathString("i:\\|X_i\\|"); dD.addString(""); attr_x.writeIndexSet(dD); //dD.addString(IString("Write Solution to\\ File\\ :\\ ") + IString("tensor\\_best\\_r=") + IString(r) + IString("\\_.ten")); truncationLog.add(dD); return (*this); } DKTSTruncation& DKTSTruncation::addInfoBlock(const DKTSDIterationInfoBlock& infoBlock, const LongReal& totalTime, const LongReal& eps, const LongReal& epsN, const LongReal& preC) { const LongInt d = attr_a.d(); const LongInt R = originalRank(); const LongInt l = attr_a.n(); const LongInt k = attr_x.k(); const LongInt n = attr_Z.n(); DescriptionData dD, dE; dD.addString(IString("\\chapter{Summary}")); dE.addString (IString("d ") + IString("$ = ") + IString(d) + IString(",$") + IString("\\hspace{15pt}") + IString("R ") + IString("$ = ") + IString(R) + IString(",$") + IString("\\hspace{15pt}") + IString("n ") + IString("$ = ") + IString(n) + IString("$")); dE.addString (""); dE.addString (IString("eps ") + IString("$ = ") + IString(eps) + IString("$") + IString("\\\\")); dE.addString 
(IString("minPrecision ") + IString("$ = ") + IString(preC) + IString("$") + IString("\\\\")); dE.addString (IString("epsNewton ") + IString("$ = ") + IString(epsN) + IString("$") + IString("\\\\")); dE.addString (IString("Total Time\\ [sec]\\ \\ ") + IString("$\\ =\\ ") + IString(totalTime) + IString("$")); attr_truncationLog.add(dD); attr_truncationLog.add(infoBlock); attr_truncationLog.add(dE); return (*this); } DKTSTruncation& DKTSTruncation::addInfoBlockR1(const DKTSDIterationInfoBlock& infoBlock) { DescriptionData dD; dD.addString(IString("\\appendix")); dD.addString(IString("\\chapter{Best Rank 1 Updates}")); attr_truncationLog.add(dD); attr_truncationLog.add(infoBlock); return (*this); }<|fim▁end|>
<|file_name|>kubefed.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package app import ( "fmt" "os" "k8s.io/kubernetes/federation/pkg/kubefed" _ "k8s.io/kubernetes/pkg/client/metrics/prometheus" // for client metric registration cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/util/logs" "k8s.io/kubernetes/pkg/version" _ "k8s.io/kubernetes/pkg/version/prometheus" // for version metric registration ) const hyperkubeImageName = "gcr.io/google_containers/hyperkube-amd64" func Run() error { logs.InitLogs() defer logs.FlushLogs() defaultImage := fmt.Sprintf("%s:%s", hyperkubeImageName, version.Get()) cmd := kubefed.NewKubeFedCommand(cmdutil.NewFactory(nil), os.Stdin, os.Stdout, os.Stderr, defaultImage) return cmd.Execute() }<|fim▁end|>
you may not use this file except in compliance with the License. You may obtain a copy of the License at
<|file_name|>filereader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::cell::DOMRefCell; use dom::bindings::codegen::Bindings::BlobBinding::BlobMethods; use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull; use dom::bindings::codegen::Bindings::FileReaderBinding::{self, FileReaderConstants, FileReaderMethods}; use dom::bindings::error::{Error, ErrorResult, Fallible}; use dom::bindings::global::{GlobalField, GlobalRef}; use dom::bindings::inheritance::Castable; use dom::bindings::js::{JS, MutNullableHeap, Root}; use dom::bindings::refcounted::Trusted; use dom::bindings::reflector::{Reflectable, reflect_dom_object}; use dom::blob::{Blob, DataSlice}; use dom::domexception::{DOMErrorName, DOMException}; use dom::event::{Event, EventBubbles, EventCancelable}; use dom::eventtarget::EventTarget; use dom::progressevent::ProgressEvent; use encoding::all::UTF_8; use encoding::label::encoding_from_whatwg_label; use encoding::types::{DecoderTrap, EncodingRef}; use hyper::mime::{Attr, Mime}; use rustc_serialize::base64::{CharacterSet, Config, Newline, ToBase64}; use script_thread::ScriptThreadEventCategory::FileRead; use script_thread::{CommonScriptMsg, Runnable, ScriptChan}; use std::cell::Cell; use string_cache::Atom; use util::str::DOMString; use util::thread::spawn_named; #[derive(PartialEq, Clone, Copy, JSTraceable, HeapSizeOf)] pub enum FileReaderFunction { ReadAsText, ReadAsDataUrl, } pub type TrustedFileReader = Trusted<FileReader>; #[derive(Clone, HeapSizeOf)] pub struct ReadMetaData { pub blobtype: String, pub label: Option<String>, pub function: FileReaderFunction } impl ReadMetaData { pub fn new(blobtype: String, label: Option<String>, function: FileReaderFunction) -> ReadMetaData { ReadMetaData { blobtype: blobtype, label: label, function: function, } } } #[derive(PartialEq, Clone, Copy, JSTraceable, HeapSizeOf)] pub struct GenerationId(u32); #[repr(u16)] #[derive(Copy, Clone, Debug, PartialEq, JSTraceable, HeapSizeOf)] pub enum FileReaderReadyState { Empty = FileReaderConstants::EMPTY, Loading = FileReaderConstants::LOADING, Done = FileReaderConstants::DONE, }<|fim▁hole|> global: GlobalField, ready_state: Cell<FileReaderReadyState>, error: MutNullableHeap<JS<DOMException>>, result: DOMRefCell<Option<DOMString>>, generation_id: Cell<GenerationId>, } impl FileReader { pub fn new_inherited(global: GlobalRef) -> FileReader { FileReader { eventtarget: EventTarget::new_inherited(),//? global: GlobalField::from_rooted(&global), ready_state: Cell::new(FileReaderReadyState::Empty), error: MutNullableHeap::new(None), result: DOMRefCell::new(None), generation_id: Cell::new(GenerationId(0)), } } pub fn new(global: GlobalRef) -> Root<FileReader> { reflect_dom_object(box FileReader::new_inherited(global), global, FileReaderBinding::Wrap) } pub fn Constructor(global: GlobalRef) -> Fallible<Root<FileReader>> { Ok(FileReader::new(global)) } //https://w3c.github.io/FileAPI/#dfn-error-steps pub fn process_read_error(filereader: TrustedFileReader, gen_id: GenerationId, error: DOMErrorName) { let fr = filereader.root(); macro_rules! 
return_on_abort( () => ( if gen_id != fr.generation_id.get() { return } ); ); return_on_abort!(); // Step 1 fr.change_ready_state(FileReaderReadyState::Done); *fr.result.borrow_mut() = None; let global = fr.global.root(); let exception = DOMException::new(global.r(), error); fr.error.set(Some(&exception)); fr.dispatch_progress_event(atom!("error"), 0, None); return_on_abort!(); // Step 3 fr.dispatch_progress_event(atom!("loadend"), 0, None); return_on_abort!(); // Step 4 fr.terminate_ongoing_reading(); } // https://w3c.github.io/FileAPI/#dfn-readAsText pub fn process_read_data(filereader: TrustedFileReader, gen_id: GenerationId) { let fr = filereader.root(); macro_rules! return_on_abort( () => ( if gen_id != fr.generation_id.get() { return } ); ); return_on_abort!(); //FIXME Step 7 send current progress fr.dispatch_progress_event(atom!("progress"), 0, None); } // https://w3c.github.io/FileAPI/#dfn-readAsText pub fn process_read(filereader: TrustedFileReader, gen_id: GenerationId) { let fr = filereader.root(); macro_rules! return_on_abort( () => ( if gen_id != fr.generation_id.get() { return } ); ); return_on_abort!(); // Step 6 fr.dispatch_progress_event(atom!("loadstart"), 0, None); } // https://w3c.github.io/FileAPI/#dfn-readAsText pub fn process_read_eof(filereader: TrustedFileReader, gen_id: GenerationId, data: ReadMetaData, blob_contents: DataSlice) { let fr = filereader.root(); macro_rules! return_on_abort( () => ( if gen_id != fr.generation_id.get() { return } ); ); return_on_abort!(); // Step 8.1 fr.change_ready_state(FileReaderReadyState::Done); // Step 8.2 let bytes = blob_contents.get_bytes(); let output = match data.function { FileReaderFunction::ReadAsDataUrl => FileReader::perform_readasdataurl(data, bytes), FileReaderFunction::ReadAsText => FileReader::perform_readastext(data, bytes), }; *fr.result.borrow_mut() = Some(output); // Step 8.3 fr.dispatch_progress_event(atom!("load"), 0, None); return_on_abort!(); // Step 8.4 if fr.ready_state.get() != FileReaderReadyState::Loading { fr.dispatch_progress_event(atom!("loadend"), 0, None); } return_on_abort!(); // Step 9 fr.terminate_ongoing_reading(); } // https://w3c.github.io/FileAPI/#dfn-readAsText fn perform_readastext(data: ReadMetaData, blob_bytes: &[u8]) -> DOMString { let blob_label = &data.label; let blob_type = &data.blobtype; //https://w3c.github.io/FileAPI/#encoding-determination // Steps 1 & 2 & 3 let mut encoding = blob_label.as_ref() .map(|string| &**string) .and_then(encoding_from_whatwg_label); // Step 4 & 5 encoding = encoding.or_else(|| { let resultmime = blob_type.parse::<Mime>().ok(); resultmime.and_then(|Mime(_, _, ref parameters)| { parameters.iter() .find(|&&(ref k, _)| &Attr::Charset == k) .and_then(|&(_, ref v)| encoding_from_whatwg_label(&v.to_string())) }) }); // Step 6 let enc = encoding.unwrap_or(UTF_8 as EncodingRef); let convert = blob_bytes; // Step 7 let output = enc.decode(convert, DecoderTrap::Replace).unwrap(); DOMString::from(output) } //https://w3c.github.io/FileAPI/#dfn-readAsDataURL fn perform_readasdataurl(data: ReadMetaData, bytes: &[u8]) -> DOMString { let config = Config { char_set: CharacterSet::UrlSafe, newline: Newline::LF, pad: true, line_length: None }; let base64 = bytes.to_base64(config); let output = if data.blobtype.is_empty() { format!("data:base64,{}", base64) } else { format!("data:{};base64,{}", data.blobtype, base64) }; DOMString::from(output) } } impl FileReaderMethods for FileReader { // https://w3c.github.io/FileAPI/#dfn-onloadstart event_handler!(loadstart, 
GetOnloadstart, SetOnloadstart); // https://w3c.github.io/FileAPI/#dfn-onprogress event_handler!(progress, GetOnprogress, SetOnprogress); // https://w3c.github.io/FileAPI/#dfn-onload event_handler!(load, GetOnload, SetOnload); // https://w3c.github.io/FileAPI/#dfn-onabort event_handler!(abort, GetOnabort, SetOnabort); // https://w3c.github.io/FileAPI/#dfn-onerror event_handler!(error, GetOnerror, SetOnerror); // https://w3c.github.io/FileAPI/#dfn-onloadend event_handler!(loadend, GetOnloadend, SetOnloadend); //TODO https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer // https://w3c.github.io/FileAPI/#dfn-readAsDataURL fn ReadAsDataURL(&self, blob: &Blob) -> ErrorResult { self.read(FileReaderFunction::ReadAsDataUrl, blob, None) } // https://w3c.github.io/FileAPI/#dfn-readAsText fn ReadAsText(&self, blob: &Blob, label: Option<DOMString>) -> ErrorResult { self.read(FileReaderFunction::ReadAsText, blob, label) } // https://w3c.github.io/FileAPI/#dfn-abort fn Abort(&self) { // Step 2 if self.ready_state.get() == FileReaderReadyState::Loading { self.change_ready_state(FileReaderReadyState::Done); } // Steps 1 & 3 *self.result.borrow_mut() = None; let global = self.global.root(); let exception = DOMException::new(global.r(), DOMErrorName::AbortError); self.error.set(Some(&exception)); self.terminate_ongoing_reading(); // Steps 5 & 6 self.dispatch_progress_event(atom!("abort"), 0, None); self.dispatch_progress_event(atom!("loadend"), 0, None); } // https://w3c.github.io/FileAPI/#dfn-error fn GetError(&self) -> Option<Root<DOMException>> { self.error.get() } // https://w3c.github.io/FileAPI/#dfn-result fn GetResult(&self) -> Option<DOMString> { self.result.borrow().clone() } // https://w3c.github.io/FileAPI/#dfn-readyState fn ReadyState(&self) -> u16 { self.ready_state.get() as u16 } } impl FileReader { fn dispatch_progress_event(&self, type_: Atom, loaded: u64, total: Option<u64>) { let global = self.global.root(); let progressevent = ProgressEvent::new(global.r(), type_, EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, total.is_some(), loaded, total.unwrap_or(0)); progressevent.upcast::<Event>().fire(self.upcast()); } fn terminate_ongoing_reading(&self) { let GenerationId(prev_id) = self.generation_id.get(); self.generation_id.set(GenerationId(prev_id + 1)); } fn read(&self, function: FileReaderFunction, blob: &Blob, label: Option<DOMString>) -> ErrorResult { let root = self.global.root(); let global = root.r(); // Step 1 if self.ready_state.get() == FileReaderReadyState::Loading { return Err(Error::InvalidState); } // Step 2 if blob.IsClosed() { let global = self.global.root(); let exception = DOMException::new(global.r(), DOMErrorName::InvalidStateError); self.error.set(Some(&exception)); self.dispatch_progress_event(atom!("error"), 0, None); return Ok(()); } // Step 3 self.change_ready_state(FileReaderReadyState::Loading); // Step 4 let blob_contents = blob.get_data().clone(); let type_ = blob.Type(); let load_data = ReadMetaData::new(String::from(type_), label.map(String::from), function); let fr = Trusted::new(self, global.file_reading_thread_source()); let gen_id = self.generation_id.get(); let script_chan = global.file_reading_thread_source(); spawn_named("file reader async operation".to_owned(), move || { perform_annotated_read_operation(gen_id, load_data, blob_contents, fr, script_chan) }); Ok(()) } fn change_ready_state(&self, state: FileReaderReadyState) { self.ready_state.set(state); } } #[derive(Clone)] pub enum FileReaderEvent { ProcessRead(TrustedFileReader, 
GenerationId), ProcessReadData(TrustedFileReader, GenerationId), ProcessReadError(TrustedFileReader, GenerationId, DOMErrorName), ProcessReadEOF(TrustedFileReader, GenerationId, ReadMetaData, DataSlice) } impl Runnable for FileReaderEvent { fn handler(self: Box<FileReaderEvent>) { let file_reader_event = *self; match file_reader_event { FileReaderEvent::ProcessRead(filereader, gen_id) => { FileReader::process_read(filereader, gen_id); }, FileReaderEvent::ProcessReadData(filereader, gen_id) => { FileReader::process_read_data(filereader, gen_id); }, FileReaderEvent::ProcessReadError(filereader, gen_id, error) => { FileReader::process_read_error(filereader, gen_id, error); }, FileReaderEvent::ProcessReadEOF(filereader, gen_id, data, blob_contents) => { FileReader::process_read_eof(filereader, gen_id, data, blob_contents); } } } } // https://w3c.github.io/FileAPI/#thread-read-operation fn perform_annotated_read_operation(gen_id: GenerationId, data: ReadMetaData, blob_contents: DataSlice, filereader: TrustedFileReader, script_chan: Box<ScriptChan + Send>) { let chan = &script_chan; // Step 4 let thread = box FileReaderEvent::ProcessRead(filereader.clone(), gen_id); chan.send(CommonScriptMsg::RunnableMsg(FileRead, thread)).unwrap(); let thread = box FileReaderEvent::ProcessReadData(filereader.clone(), gen_id); chan.send(CommonScriptMsg::RunnableMsg(FileRead, thread)).unwrap(); let thread = box FileReaderEvent::ProcessReadEOF(filereader, gen_id, data, blob_contents); chan.send(CommonScriptMsg::RunnableMsg(FileRead, thread)).unwrap(); }<|fim▁end|>
#[dom_struct] pub struct FileReader { eventtarget: EventTarget,
<|file_name|>lessonEntity.py<|end_file_name|><|fim▁begin|>__author__ = 'Paolo Bellagente' # Documentation for this module. # # More details. ################################## DATABASE ############################################## from sqlalchemy import * from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import relationship, sessionmaker import datetime ## Database name db_name = "testDatabase" ## Database user db_uid = "root" ## Database user's password db_passwd = "" ## Database host db_host = "localhost" ## # set the database connection engine engine = create_engine('mysql+pymysql://'+db_uid+':'+db_passwd+'@'+db_host+'/'+db_name) ## Base class for table inheritance # # Allows the base class to be instantiated once and reused Base = declarative_base() class Lesson(Base): __tablename__ = 'lessons' id = Column(INTEGER, primary_key=True) semesterStartDate = Column(DATE) semesterEndDate = Column(DATE)<|fim▁hole|> subject = Column(VARCHAR(200)) rooms = Column(VARCHAR(30)) address = Column(VARCHAR(50)) teacher = Column(VARCHAR(50)) def __init__(self): self.teacher = '' # persist the entity into the database def persist(self): Session = sessionmaker(bind=engine) session = Session() session.add(self) session.commit() session.close() # todo: create new entity here ## Create the necessary tables in the database Base.metadata.create_all(engine)<|fim▁end|>
# lesson's start hour hour = Column(TIME) # lesson's day of the week, coded from 0 to 6, where 0 is Monday and 6 is Sunday. day = Column(INTEGER)
<|file_name|>errors.js<|end_file_name|><|fim▁begin|>/** * Error for services to throw when they encounter a problem with the request. * Distinguishes between a bad service request and a general error */ function ServiceError(message) { this.name = "ServiceError"; this.message = (message || ""); } ServiceError.prototype = Object.create(Error.prototype, { constructor: {value: ServiceError} }); /** * Error for when an item is not found<|fim▁hole|> this.name = "NotFoundError"; this.message = (message || "Not found"); } NotFoundError.prototype = Object.create(ServiceError.prototype, { constructor: { value: NotFoundError} }); exports.ServiceError = ServiceError; exports.NotFoundError = NotFoundError;<|fim▁end|>
*/ function NotFoundError(message) {
<|file_name|>leetcode.125.valid-palindrome.py<|end_file_name|><|fim▁begin|>class Solution(object): def isPalindrome(self, s): """ :type s: str :rtype: bool """ if not s: return True start = 0<|fim▁hole|> end = len(s)-1 s = s.lower() while start < end: while start < end and not s[start].isalnum(): start += 1 while start < end and not s[end].isalnum(): end -= 1 if s[start] == s[end]: start += 1 end -= 1 else: return False return True<|fim▁end|>
<|file_name|>Entry.ts<|end_file_name|><|fim▁begin|>export interface Entry { title: string date: string slug: string url?: string tags: string[] type: EntryType } export enum EntryType { CombinedGitHubReleases, GitHubPullRequest, GithubRelease, GithubRepository, StackOverflowEntry, BlogPost,<|fim▁hole|>}<|fim▁end|>
BlogPostPreview, AppPreview, AppRelease,
<|file_name|>sre_constants.py<|end_file_name|><|fim▁begin|># # Secret Labs' Regular Expression Engine # # various symbols used by the regular expression engine. # run this script to update the _sre include files! # # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. # # See the sre.py file for information on usage and redistribution. # """Internal support module for sre""" # update when constants are added or removed MAGIC = 20140917 from _sre import MAXREPEAT, MAXGROUPS # SRE standard exception (access as sre.error) # should this really be here? class error(Exception): """Exception raised for invalid regular expressions. Attributes: msg: The unformatted error message pattern: The regular expression pattern pos: The index in the pattern where compilation failed (may be None) lineno: The line corresponding to pos (may be None) colno: The column corresponding to pos (may be None) """ def __init__(self, msg, pattern=None, pos=None): self.msg = msg self.pattern = pattern self.pos = pos if pattern is not None and pos is not None: msg = '%s at position %d' % (msg, pos) if isinstance(pattern, str): newline = '\n' else: newline = b'\n' self.lineno = pattern.count(newline, 0, pos) + 1 self.colno = pos - pattern.rfind(newline, 0, pos) if newline in pattern: msg = '%s (line %d, column %d)' % (msg, self.lineno, self.colno) else: self.lineno = self.colno = None super().__init__(msg) class _NamedIntConstant(int): def __new__(cls, value, name): self = super(_NamedIntConstant, cls).__new__(cls, value) self.name = name return self def __str__(self): return self.name __repr__ = __str__ MAXREPEAT = _NamedIntConstant(MAXREPEAT, 'MAXREPEAT') def _makecodes(names): names = names.strip().split() items = [_NamedIntConstant(i, name) for i, name in enumerate(names)] globals().update({item.name: item for item in items}) return items # operators # failure=0 success=1 (just because it looks better that way :-) OPCODES = _makecodes(""" FAILURE SUCCESS ANY ANY_ALL ASSERT ASSERT_NOT AT BRANCH CALL CATEGORY CHARSET BIGCHARSET GROUPREF GROUPREF_EXISTS GROUPREF_IGNORE IN IN_IGNORE INFO JUMP LITERAL LITERAL_IGNORE MARK MAX_UNTIL MIN_UNTIL NOT_LITERAL NOT_LITERAL_IGNORE NEGATE RANGE REPEAT REPEAT_ONE SUBPATTERN MIN_REPEAT_ONE RANGE_IGNORE MIN_REPEAT MAX_REPEAT """) del OPCODES[-2:] # remove MIN_REPEAT and MAX_REPEAT # positions ATCODES = _makecodes(""" AT_BEGINNING AT_BEGINNING_LINE AT_BEGINNING_STRING AT_BOUNDARY AT_NON_BOUNDARY AT_END AT_END_LINE AT_END_STRING AT_LOC_BOUNDARY AT_LOC_NON_BOUNDARY AT_UNI_BOUNDARY AT_UNI_NON_BOUNDARY """) # categories CHCODES = _makecodes(""" CATEGORY_DIGIT CATEGORY_NOT_DIGIT CATEGORY_SPACE CATEGORY_NOT_SPACE CATEGORY_WORD CATEGORY_NOT_WORD CATEGORY_LINEBREAK CATEGORY_NOT_LINEBREAK CATEGORY_LOC_WORD CATEGORY_LOC_NOT_WORD CATEGORY_UNI_DIGIT CATEGORY_UNI_NOT_DIGIT CATEGORY_UNI_SPACE CATEGORY_UNI_NOT_SPACE CATEGORY_UNI_WORD CATEGORY_UNI_NOT_WORD CATEGORY_UNI_LINEBREAK CATEGORY_UNI_NOT_LINEBREAK """) # replacement operations for "ignore case" mode OP_IGNORE = { GROUPREF: GROUPREF_IGNORE, IN: IN_IGNORE, LITERAL: LITERAL_IGNORE, NOT_LITERAL: NOT_LITERAL_IGNORE, RANGE: RANGE_IGNORE, } AT_MULTILINE = { AT_BEGINNING: AT_BEGINNING_LINE, AT_END: AT_END_LINE } AT_LOCALE = { AT_BOUNDARY: AT_LOC_BOUNDARY, AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY } AT_UNICODE = { AT_BOUNDARY: AT_UNI_BOUNDARY, AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY } CH_LOCALE = { CATEGORY_DIGIT: CATEGORY_DIGIT, CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT, CATEGORY_SPACE: CATEGORY_SPACE, CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE, 
CATEGORY_WORD: CATEGORY_LOC_WORD, CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD, CATEGORY_LINEBREAK: CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK } CH_UNICODE = { CATEGORY_DIGIT: CATEGORY_UNI_DIGIT, CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT, CATEGORY_SPACE: CATEGORY_UNI_SPACE, CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE, CATEGORY_WORD: CATEGORY_UNI_WORD, CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD, CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK, CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK } # flags SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking) SRE_FLAG_IGNORECASE = 2 # case insensitive SRE_FLAG_LOCALE = 4 # honour system locale SRE_FLAG_MULTILINE = 8 # treat target as multiline string SRE_FLAG_DOTALL = 16 # treat target as a single string SRE_FLAG_UNICODE = 32 # use unicode "locale" SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments SRE_FLAG_DEBUG = 128 # debugging SRE_FLAG_ASCII = 256 # use ascii "locale" # flags for INFO primitive SRE_INFO_PREFIX = 1 # has prefix SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix) <|fim▁hole|> items = sorted(d) for item in items: f.write("#define %s_%s %d\n" % (prefix, item, item)) with open("sre_constants.h", "w") as f: f.write("""\ /* * Secret Labs' Regular Expression Engine * * regular expression matching engine * * NOTE: This file is generated by sre_constants.py. If you need * to change anything in here, edit sre_constants.py and run it. * * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. * * See the _sre.c file for information on usage and redistribution. */ """) f.write("#define SRE_MAGIC %d\n" % MAGIC) dump(f, OPCODES, "SRE_OP") dump(f, ATCODES, "SRE") dump(f, CHCODES, "SRE") f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE) f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE) f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE) f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE) f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL) f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE) f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE) f.write("#define SRE_FLAG_DEBUG %d\n" % SRE_FLAG_DEBUG) f.write("#define SRE_FLAG_ASCII %d\n" % SRE_FLAG_ASCII) f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX) f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL) f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET) print("done")<|fim▁end|>
SRE_INFO_CHARSET = 4 # pattern starts with character from given set if __name__ == "__main__": def dump(f, d, prefix):
<|file_name|>do_nothing.py<|end_file_name|><|fim▁begin|>from controlscript import * print "This is a simple control script. It just does nothing and exits successfully." print "Start parameter is %s, additional parameters are %s" % (start, arguments) class DoNothing(ControlAction): """ Control script action that does nothing on start and stop """ def __init__(self): ControlAction.__init__(self, "Do nothing") def start(self): print "Do nothing on start" print def stop(self): print "Do nothing on stop"<|fim▁hole|>
print
<|file_name|>asset_assign_category.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2012-2014 Didotech (<http://www.didotech.com>) # All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import orm, fields class asset_assign_category(orm.TransientModel): _name = 'asset.assign.category' _description = 'Assign category to a new asset product' _columns = { 'category_id': fields.many2one('asset.category', 'Asset Category', required=False), } def assign_category(self, cr, uid, ids, context=None): category = self.browse(cr, uid, ids, context=context)[0].category_id # add row to assets_product table asset_product_id = self.pool['asset.product'].create(cr, uid, { 'create_uid': uid, 'has_date_option': False, 'asset_category_id': category.id, 'product_product_id': context['product_id'], }) ## create asset.asset self.pool['asset.asset'].create(cr, uid, { 'asset_product_id': asset_product_id, 'serial_number': context['serial_number'], 'company_id': context['company_id'],<|fim▁hole|> 'location': context['location'], 'has_date_option': False, }) new_context = { 'lang': context['lang'], 'tz': context['tz'], 'uid': context['uid'], 'section_id': context['section_id'], 'project_id': context['project_id'], 'department_id': context['department_id'], 'asset_created': True } self.pool.get('stock.move').action_done(cr, uid, context['move_ids'], new_context) return {'type': 'ir.actions.act_window_close'}<|fim▁end|>
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core'; import { Cordova, CordovaInstance, CordovaCheck, Plugin, InstanceProperty, InstanceCheck, checkAvailability, IonicNativePlugin } from '@ionic-native/core'; import { Observable } from 'rxjs/Observable'; import 'rxjs/add/observable/fromEvent'; declare var plugin: any; /** * @hidden * You can listen to these events where appropriate */ export const GoogleMapsEvent = { MAP_CLICK: 'click', MAP_LONG_CLICK: 'long_click', MY_LOCATION_CHANGE: 'my_location_change', MY_LOCATION_BUTTON_CLICK: 'my_location_button_click', INDOOR_BUILDING_FOCUSED: 'indoor_building_focused', INDOOR_LEVEL_ACTIVATED: 'indoor_level_activated', CAMERA_CHANGE: 'camera_change', CAMERA_IDLE: 'camera_idle', MAP_READY: 'map_ready', MAP_LOADED: 'map_loaded', MAP_WILL_MOVE: 'will_move', MAP_CLOSE: 'map_close', MARKER_CLICK: 'click', OVERLAY_CLICK: 'overlay_click', INFO_CLICK: 'info_click', MARKER_DRAG: 'drag', MARKER_DRAG_START: 'drag_start', MARKER_DRAG_END: 'drag_end' }; /** * @hidden */ export const GoogleMapsAnimation = { BOUNCE: 'BOUNCE', DROP: 'DROP' }; /** * @hidden */ export const GoogleMapsMapTypeId = { HYBRID: 'MAP_TYPE_HYBRID', NONE: 'MAP_TYPE_NONE', NORMAL: 'MAP_TYPE_NORMAL', ROADMAP: 'MAP_TYPE_ROADMAP', SATELLITE: 'MAP_TYPE_SATELLITE', TERAIN: 'MAP_TYPE_TERRAIN' }; /** * @hidden */ @Plugin({ pluginName: 'GoogleMaps', plugin: 'cordova-plugin-googlemaps' }) export class GoogleMap { _objectInstance: any; constructor(element: string | HTMLElement, options?: any) { if (checkAvailability('plugin.google.maps.Map', null, 'GoogleMaps') === true) { if (typeof element === 'string') { element = document.getElementById(<string>element); } this._objectInstance = plugin.google.maps.Map.getMap(element, options); } } /** * Adds an event listener. * * @returns {Observable<any>} */ @InstanceCheck() addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ @InstanceCheck() addListenerOnce(eventName: string): Promise<any> { return new Promise<any>(resolve => this._objectInstance.addListenerOnce(eventName, resolve)); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ @InstanceCheck({ observable: true }) on(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Listen to a map event only once. * * @returns {Promise<any>} */ @InstanceCheck() one(eventName: string): Promise<any> { return new Promise<any>(resolve => this._objectInstance.one(eventName, resolve)); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) setDebuggable(isDebuggable: boolean): void { } @CordovaInstance({ sync: true }) setClickable(isClickable: boolean): void { } /** * Get the position of the camera. * * @returns {Promise<CameraPosition>} */ @CordovaInstance() getCameraPosition(): Promise<CameraPosition> { return; } /** * Get the location of the user. * * @returns {Promise<MyLocation>} */ @CordovaInstance() getMyLocation(options?: MyLocationOptions): Promise<MyLocation> { return; } /** * Get the visible region. 
* * @returns {Promise<VisibleRegion>} */ @CordovaInstance() getVisibleRegion(): Promise<VisibleRegion> { return; } @CordovaInstance({ sync: true }) showDialog(): void { } @CordovaInstance({ sync: true }) closeDialog(): void { } @CordovaInstance() getLicenseInfo(): Promise<string> { return; } @CordovaInstance({ sync: true }) setCenter(latLng: LatLng): void { } @CordovaInstance({ sync: true }) setZoom(zoomLevel: number): void { } @CordovaInstance({ sync: true }) setMapTypeId(mapTypeId: string): void { } @CordovaInstance({ sync: true }) setTilt(tiltLevel: number): void { } /** * @returns {Promise<any>} */ @CordovaInstance() animateCamera(animateCameraOptions: AnimateCameraOptions): Promise<any> { return; } /** * @returns {Promise<any>} */ @CordovaInstance() moveCamera(cameraPosition: CameraPosition): Promise<any> { return; } @CordovaInstance({ sync: true })<|fim▁hole|> @CordovaInstance({ sync: true }) setIndoorEnabled(enabled: boolean): void { } @CordovaInstance({ sync: true }) setTrafficEnabled(enabled: boolean): void { } @CordovaInstance({ sync: true }) setCompassEnabled(enabled: boolean): void { } @CordovaInstance({ sync: true }) setAllGesturesEnabled(enabled: boolean): void { } /** * @returns {Promise<Marker | any>} */ @InstanceCheck() addMarker(options: MarkerOptions): Promise<Marker | any> { return new Promise<Marker>((resolve, reject) => { this._objectInstance.addMarker(options, (marker: any) => { if (marker) { resolve(new Marker(marker)); } else { reject(); } }); }); } /** * @returns {Promise<Circle | any>} */ @InstanceCheck() addCircle(options: CircleOptions): Promise<Circle | any> { return new Promise<Circle>((resolve, reject) => { this._objectInstance.addCircle(options, (circle: any) => { if (circle) { resolve(new Circle(circle)); } else { reject(); } }); }); } /** * @returns {Promise<Polygon | any>} */ @InstanceCheck() addPolygon(options: PolygonOptions): Promise<Polygon | any> { return new Promise<Polygon>((resolve, reject) => { this._objectInstance.addPolygon(options, (polygon: any) => { if (polygon) { resolve(new Polygon(polygon)); } else { reject(); } }); }); } /** * @returns {Promise<Polyline | any>} */ @InstanceCheck() addPolyline(options: PolylineOptions): Promise<Polyline | any> { return new Promise<Polyline>((resolve, reject) => { this._objectInstance.addPolyline(options, (polyline: any) => { if (polyline) { resolve(new Polyline(polyline)); } else { reject(); } }); }); } /** * @returns {Promise<TileOverlay | any>} */ @InstanceCheck() addTileOverlay(options: TileOverlayOptions): Promise<TileOverlay | any> { return new Promise<TileOverlay>((resolve, reject) => { this._objectInstance.addTileOverlay(options, (tileOverlay: any) => { if (tileOverlay) { resolve(new TileOverlay(tileOverlay)); } else { reject(); } }); }); } /** * @returns {Promise<GroundOverlay | any>} */ @InstanceCheck() addGroundOverlay(options: GroundOverlayOptions): Promise<GroundOverlay | any> { return new Promise<GroundOverlay>((resolve, reject) => { this._objectInstance.addGroundOverlay(options, (groundOverlay: any) => { if (groundOverlay) { resolve(new GroundOverlay(groundOverlay)); } else { reject(); } }); }); } /** * @returns {Promise<KmlOverlay | any>} */ @InstanceCheck() addKmlOverlay(options: KmlOverlayOptions): Promise<KmlOverlay | any> { return new Promise<KmlOverlay>((resolve, reject) => { this._objectInstance.addKmlOverlay(options, (kmlOverlay: any) => { if (kmlOverlay) { resolve(new KmlOverlay(kmlOverlay)); } else { reject(); } }); }); } @CordovaInstance({ sync: true }) setDiv(domNode: 
HTMLElement): void { } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) setOptions(options: any): void { } @CordovaInstance({ sync: true }) setBackgroundColor(backgroundColor: string): void { } @CordovaInstance({ sync: true }) setPadding(top?: number, right?: number, bottom?: number, left?: number): void { } @CordovaInstance({ sync: true }) clear(): void { } @CordovaInstance({ sync: true }) refreshLayout(): void { } /** * @returns {Promise<any>} */ @CordovaInstance() fromLatLngToPoint(latLng: LatLng, point: any): Promise<any> { return; } /** * @returns {Promise<LatLng>} */ @CordovaInstance() fromPointToLatLng(point: any, latLng: LatLng): Promise<LatLng> { return; } /** * @returns {Promise<any>} */ @CordovaInstance() toDataURL(): Promise<any> { return; } @CordovaInstance({ sync: true }) remove(): void { } @CordovaInstance({ sync: true }) panBy(): void { } } /** * @name Google Maps * @description This plugin uses the native Google Maps SDK * @usage * ```typescript * import { * GoogleMaps, * GoogleMap, * GoogleMapsEvent, * LatLng, * CameraPosition, * MarkerOptions, * Marker * } from '@ionic-native/google-maps'; * * export class MapPage { * constructor(private googleMaps: GoogleMaps) {} * * // Load map only after view is initialized * ngAfterViewInit() { * this.loadMap(); * } * * loadMap() { * // make sure to create following structure in your view.html file * // and add a height (for example 100%) to it, else the map won't be visible * // <ion-content> * // <div #map id="map" style="height:100%;"></div> * // </ion-content> * * // create a new map by passing HTMLElement * let element: HTMLElement = document.getElementById('map'); * * let map: GoogleMap = this.googleMaps.create(element); * * // listen to MAP_READY event * // You must wait for this event to fire before adding something to the map or modifying it in anyway * map.one(GoogleMapsEvent.MAP_READY).then(() => console.log('Map is ready!')); * * // create LatLng object * let ionic: LatLng = new LatLng(43.0741904,-89.3809802); * * // create CameraPosition * let position: CameraPosition = { * target: ionic, * zoom: 18, * tilt: 30 * }; * * // move the map's camera to position * map.moveCamera(position); * * // create new marker * let markerOptions: MarkerOptions = { * position: ionic, * title: 'Ionic' * }; * * const marker: Marker = map.addMarker(markerOptions) * .then((marker: Marker) => { * marker.showInfoWindow(); * }); * } * * } * ``` * @classes * GoogleMap * Marker * LatLng * Geocoder * @interfaces * AnimateCameraOptions * MarkerOptions * MyLocation * MyLocationOptions * VisibleRegion * */ @Plugin({ pluginName: 'GoogleMaps', pluginRef: 'plugin.google.maps.Map', plugin: 'cordova-plugin-googlemaps', repo: 'https://github.com/mapsplugin/cordova-plugin-googlemaps', install: 'ionic plugin add cordova-plugin-googlemaps --variable API_KEY_FOR_ANDROID="YOUR_ANDROID_API_KEY_IS_HERE" --variable API_KEY_FOR_IOS="YOUR_IOS_API_KEY_IS_HERE"', installVariables: ['API_KEY_FOR_ANDROID', 'API_KEY_FOR_IOS'], platforms: ['Android', 'iOS'] }) @Injectable() export class GoogleMaps extends IonicNativePlugin { /** * Checks if a map object has been created and is available. 
* * @returns {Promise<boolean>} */ @Cordova() isAvailable(): Promise<boolean> { return; } /** * Creates a new GoogleMap instance * @param element {string | HTMLElement} Element ID or reference to attach the map to * @param options {any} Options * @returns {GoogleMap} */ create(element: string | HTMLElement, options?: any): GoogleMap { return new GoogleMap(element, options); } } /** * @hidden */ export interface AnimateCameraOptions { target?: LatLng | Array<Marker> | LatLngBounds; tilt?: number; zoom?: number; bearing?: number; duration?: number; } /** * @hidden */ export interface CameraPosition { target?: LatLng | LatLngBounds | LatLng[]; zoom?: number; tilt?: number; bearing?: number; } /** * @hidden */ export interface MyLocation { latLng?: LatLng; speed?: number; time?: string; bearing?: number; } /** * @hidden */ export interface MyLocationOptions { enableHighAccuracy?: boolean; } /** * @hidden */ export interface VisibleRegion { northeast?: any; southwest?: any; } /** * @hidden */ export interface MarkerOptions { /** * The icon image url or properties. Also you can specify HTML Color values. Alternatively you can specify the image as Base64 */ icon?: any; /** * The content of the infoWindow. */ title?: string; /** * The snippet of the infoWindow. */ snippet?: string; /** * The position of the marker. */ position?: LatLng; /** * Specify the anchor of the InfoWindow */ infoWindowAnchor?: number[]; /** * Set true if you want to enable to drag the marker. (Default: false) Important! Drag starts after long pressed on the marker. */ draggable?: boolean; /** * Set true if you want to use a flat marker. (Default: false) */ flat?: boolean; /** * Set rotation angle. (Default: 0) */ rotation?: number; /** * Set false if you want to hide. (Default: true) */ visible?: boolean; /** * Specify the options for title. */ styles?: any; /** * Which animation to play when marker is added to a map. */ animation?: string; /** * iOS only, Plugin Version >= 1.3.3 Higher zIndex value overlays will be drawn on top of lower zIndex value tile layers and overlays. (You're able to run this on Android, but it will have no effect) */ zIndex?: number; /** * Set to true to disable auto panning when the marker is clicked. */ disableAutoPan?: boolean; /** * Function to be invoked when the user clicks on the marker */ markerClick?: Function; /** * Function to be invoked when the user clicks on the info box */ infoClick?: Function; } /** * @hidden */ export interface MarkerIcon { url?: string; size?: { width?: number; height?: number; }; } /** * @hidden */ export class Marker { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. 
* * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. * * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } /** * Return true if the marker is visible */ @CordovaInstance({ sync: true }) isVisible(): boolean { return; } /** * Set false if you want to hide the marker. * @param visible */ @CordovaInstance() setVisible(visible: boolean): void { } /** * Return the marker hash code. * @return {string} Marker hash code */ @CordovaInstance({ sync: true }) getHashCode(): string { return; } /** * Remove the marker completely. */ @CordovaInstance({ sync: true }) remove(): void { } /** * Change the marker opacity. * @param alpha {number} Opacity */ @CordovaInstance({ sync: true }) setOpacity(alpha: number): void { } /** * Return the marker opacity. * @return {number} Opacity */ @CordovaInstance({ sync: true }) getOpacity(): number { return; } /** * iOS only, Plugin Version >= 1.3.3 Higher zIndex value overlays will be drawn on top of lower zIndex value tile layers and overlays. (You're able to run this on Android, but it will have no effect) * @return {number} */ @CordovaInstance({ sync: true }) setZIndex(): number { return; } /** * Change the info window anchor. This defaults to 50% from the left of the image and at the bottom of the image. * @param x {number} * @param y {number} */ @CordovaInstance({ sync: true }) setIconAnchor(x: number, y: number): void { } /** * Change the info window anchor. This defaults to 50% from the left of the image and at the top of the image. * @param x {number} * @param y {number} */ @CordovaInstance({ sync: true }) setInfoWindowAnchor(x: number, y: number): void { } /** * Set true if you allows all users to drag the marker. * @param draggable {boolean} */ @CordovaInstance({ sync: true }) setDraggable(draggable: boolean): void { } /** * Return true if the marker drag is enabled. * @return {boolean} */ @CordovaInstance({ sync: true }) isDraggable(): boolean { return; } /** * Set true if you want to be flat marker. * @param flat {boolean} */ @CordovaInstance({ sync: true }) setFlat(flat: boolean): void { return; } /** * Change icon url and/or size * @param icon */ @CordovaInstance({ sync: true }) setIcon(icon: MarkerIcon): void { return; } /** * Change title of the infoWindow. * @param title {string} */ @CordovaInstance({ sync: true }) setTitle(title: string): void { } /** * Return the title strings. * @return {string} */ @CordovaInstance({ sync: true }) getTitle(): string { return; } /** * Change snippet of the infoWindow. * @param snippet {string} */ @CordovaInstance({ sync: true }) setSnippet(snippet: string): void { } /** * Return the snippet strings. * @return {string} */ @CordovaInstance({ sync: true }) getSnippet(): string { return; } /** * Set the marker rotation angle. * @param rotation {number} */ @CordovaInstance({ sync: true }) setRotation(rotation: number): void { } /** * Return the marker rotation angle. 
* @return {number} */ @CordovaInstance({ sync: true }) getRotation(): number { return; } /** * Show the infoWindow of the marker. * @return {number} */ @CordovaInstance({ sync: true }) showInfoWindow(): number { return; } /** * Hide the infoWindow of the marker. * @return {number} */ @CordovaInstance({ sync: true }) hideInfoWindow(): number { return; } /** * Set the marker position. * @param latLng {GoogleMapLatLng} */ @CordovaInstance({ sync: true }) setPosition(latLng: LatLng): void { return; } /** * Return the marker position. * @return {Promise<GoogleMapLatLng>} */ @CordovaInstance() getPosition(): Promise<LatLng> { return; } /** * Return the map instance. * @return {GoogleMap} */ @CordovaInstance({ sync: true }) getMap(): GoogleMap { return; } /** * Specify the animation either `DROP` or `BOUNCE` * @param animation {string} */ @CordovaInstance({ sync: true }) setAnimation(animation: string): void { } } /** * @hidden */ export interface CircleOptions { center?: LatLng; radius?: number; strokeColor?: string; strokeWidth?: number; fillColor?: string; visible?: boolean; zIndex?: number; } /** * @hidden */ export class Circle { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. 
* * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) getCenter(): LatLng { return; } @CordovaInstance({ sync: true }) getRadius(): number { return; } @CordovaInstance({ sync: true }) getStrokeColor(): string { return; } @CordovaInstance({ sync: true }) getVisible(): boolean { return; } @CordovaInstance({ sync: true }) getZIndex(): number { return; } @CordovaInstance({ sync: true }) remove(): void { } @CordovaInstance({ sync: true }) setCenter(latLng: LatLng): void { } @CordovaInstance({ sync: true }) setFillColor(fillColor: string): void { } @CordovaInstance({ sync: true }) setStrokeColor(strokeColor: string): void { } @CordovaInstance({ sync: true }) setStrokeWidth(strokeWidth: number): void { } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) setZIndex(zIndex: number): void { } @CordovaInstance({ sync: true }) setRadius(radius: number): void { } @CordovaInstance({ sync: true }) getMap(): GoogleMap { return; } } /** * @hidden */ export interface PolylineOptions { points?: Array<LatLng>; visible?: boolean; geodesic?: boolean; color?: string; width?: number; zIndex?: number; } /** * @hidden */ export class Polyline { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. 
* * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) getPoints(): Array<LatLng> { return; } @CordovaInstance({ sync: true }) getCOlor(): string { return; } @CordovaInstance({ sync: true }) getWidth(): number { return; } @CordovaInstance({ sync: true }) getGeodesic(): boolean { return; } @CordovaInstance({ sync: true }) getZIndex(): number { return; } @CordovaInstance({ sync: true }) remove(): void { } @CordovaInstance({ sync: true }) setPoints(points: Array<LatLng>): void { } @CordovaInstance({ sync: true }) setColor(color: string): void { } @CordovaInstance({ sync: true }) setWidth(width: number): void { } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) setZIndex(zIndex: number): void { } @CordovaInstance({ sync: true }) setGeoDesic(geoDesic: boolean): void { } @CordovaInstance({ sync: true }) getMap(): GoogleMap { return; } } /** * @hidden */ export interface PolygonOptions { points?: Array<LatLng>; geodesic?: boolean; strokeColor?: string; strokeWidth?: number; fillColor?: string; visible?: boolean; zIndex?: number; addHole?: Array<LatLng>; } /** * @hidden */ export class Polygon { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. 
* * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) getPoints(): Array<LatLng> { return; } @CordovaInstance({ sync: true }) getStrokeColor(): string { return; } @CordovaInstance({ sync: true }) getFillColor(): string { return; } @CordovaInstance({ sync: true }) getStrokeWidth(): number { return; } @CordovaInstance({ sync: true }) getGeodesic(): boolean { return; } @CordovaInstance({ sync: true }) getVisible(): boolean { return; } @CordovaInstance({ sync: true }) getZIndex(): boolean { return; } @CordovaInstance({ sync: true }) remove(): void { } @CordovaInstance({ sync: true }) setPoints(points: Array<LatLng>): void { } @CordovaInstance({ sync: true }) setStrokeColor(strokeColor: string): void { } @CordovaInstance({ sync: true }) setFillColor(fillColor: string): void { } @CordovaInstance({ sync: true }) setStrokeWidth(strokeWidth: number): void { } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) setZIndex(zIndex: number): void { } @CordovaInstance({ sync: true }) setGeodesic(geodesic: boolean): void { } } /** * @hidden */ export interface TileOverlayOptions { tileUrlFormat?: string; visible?: boolean; zIndex?: number; tileSize?: number; opacity?: number; } /** * @hidden */ export class TileOverlay { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. 
* * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) getVisible(): boolean { return; } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) getFadeIn(): boolean { return; } @CordovaInstance({ sync: true }) setFadeIn(fadeIn: boolean): void { } @CordovaInstance({ sync: true }) getZIndex(): number { return; } @CordovaInstance({ sync: true }) setZIndex(zIndex: number): void { } @CordovaInstance({ sync: true }) getOpacity(): number { return; } @CordovaInstance({ sync: true }) setOpacity(opacity: number): void { } @CordovaInstance({ sync: true }) clearTileCache(): void { } @CordovaInstance({ sync: true }) remove(): void { } } /** * @hidden */ export interface GroundOverlayOptions { url?: string; bounds?: Array<LatLng>; visible?: boolean; opacity?: number; bearing?: number; zIndex?: number; } /** * @hidden */ export class GroundOverlay { constructor(private _objectInstance: any) { } /** * Adds an event listener. * * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. * * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) setBearing(bearing: number): void { } @CordovaInstance({ sync: true }) getBearing(): number { return; } @CordovaInstance({ sync: true }) setOpacity(opacity: number): void { } @CordovaInstance({ sync: true }) getOpacity(): number { return; } @CordovaInstance({ sync: true }) setVisible(visible: boolean): void { } @CordovaInstance({ sync: true }) getVisible(): boolean { return; } @CordovaInstance({ sync: true }) setImage(image: string): void { }; @CordovaInstance({ sync: true }) remove(): void { } } /** * @hidden */ export interface KmlOverlayOptions { url?: string; preserveViewport?: boolean; animation?: boolean; } /** * @hidden */ export class KmlOverlay { constructor(private _objectInstance: any) { } /** * Adds an event listener. 
* * @returns {Observable<any>} */ addEventListener(eventName: string): Observable<any> { return Observable.fromEvent(this._objectInstance, eventName); } /** * Adds an event listener that works once. * * @returns {Promise<any>} */ addListenerOnce(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.addListenerOnce(eventName, resolve) ); } /** * Gets a value * @param key */ @CordovaInstance({ sync: true }) get(key: string): any { return; } /** * Sets a value * @param key * @param value */ @CordovaInstance({ sync: true }) set(key: string, value: any): void { } /** * Listen to a map event. * * @returns {Observable<any>} */ on(eventName: string): Observable<any> { if (!this._objectInstance) { return new Observable((observer) => { observer.error({ error: 'plugin_not_installed' }); }); } return new Observable( (observer) => { this._objectInstance.on(eventName, observer.next.bind(observer)); return () => this._objectInstance.off(event); } ); } /** * Listen to a map event only once. * * @returns {Promise<any>} */ one(eventName: string): Promise<any> { if (!this._objectInstance) { return Promise.reject({ error: 'plugin_not_installed' }); } return new Promise<any>( resolve => this._objectInstance.one(eventName, resolve) ); } /** * Clears all stored values */ @CordovaInstance({ sync: true }) empty(): void { } @CordovaInstance({ sync: true }) remove(): void { } @CordovaInstance({ sync: true }) getOverlays(): Array<Polyline | Polygon | Marker> { return; } } /** * @hidden */ export class LatLngBounds { private _objectInstance: any; @InstanceProperty northeast: LatLng; @InstanceProperty southwest: LatLng; @InstanceProperty type: string; constructor(southwestOrArrayOfLatLng: LatLng | LatLng[], northeast?: LatLng) { let args = !!northeast ? 
[southwestOrArrayOfLatLng, northeast] : southwestOrArrayOfLatLng; this._objectInstance = new plugin.google.maps.LatLngBounds(args); } @CordovaInstance({ sync: true }) toString(): string { return; } @CordovaInstance({ sync: true }) toUrlValue(precision?: number): string { return; } @CordovaInstance({ sync: true }) extend(LatLng: LatLng): void { } @CordovaInstance({ sync: true }) contains(LatLng: LatLng): boolean { return; } @CordovaInstance({ sync: true }) getCenter(): LatLng { return; } } /** * @hidden */ export class LatLng { lat: number; lng: number; constructor(lat: number, lng: number) { this.lat = lat; this.lng = lng; } equals(other: LatLng): boolean { return this.lat === other.lat && this.lng === other.lng; } toString(): string { return this.lat + ',' + this.lng; } toUrlValue(precision?: number): string { precision = precision || 6; return this.lat.toFixed(precision) + ',' + this.lng.toFixed(precision); } } /** * @hidden */ export interface GeocoderRequest { address?: string; bounds?: LatLng[]; position?: { lat: number; lng: number }; } /** * @hidden */ export interface GeocoderResult { adminArea?: string; country?: string; countryCode?: string; extra?: { featureName?: string; lines?: Array<string>; permises?: string; phone?: string; url?: string }; locale?: string; locality?: string; position?: { lat: number; lng: number }; postalCode?: string; subAdminArea?: string; subLocality?: string; subThoroughfare?: string; thoroughfare?: string; } /** * @hidden */ @Plugin({ pluginName: 'Geocoder', pluginRef: 'plugin.google.maps.Geocoder', plugin: 'cordova-plugin-googlemaps', repo: '' }) export class Geocoder { /** * Converts position to address and vice versa * @param {GeocoderRequest} request Request object with either an address or a position * @returns {Promise<GeocoderResult[]>} */ @CordovaCheck() geocode(request: GeocoderRequest): Promise<GeocoderResult[] | any> { return new Promise<GeocoderResult[]>(resolve => { plugin.google.maps.Geocoder.geocode(request, resolve); }); } }<|fim▁end|>
setMyLocationEnabled(enabled: boolean): void { }
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>from unittest import TestCase from paramiko import SSHException from pyinfra.api import Config, State from pyinfra.api.connect import connect_all from pyinfra.api.exceptions import NoGroupError, NoHostError, PyinfraError from ..paramiko_util import PatchSSHTestCase from ..util import make_inventory <|fim▁hole|> def test_inventory_creation(self): inventory = make_inventory() # Check length assert len(inventory.hosts) == 2 # Get a host host = inventory.get_host('somehost') assert host.data.ssh_user == 'vagrant' # Check our group data assert inventory.get_group_data('test_group') == { 'group_data': 'hello world', } def test_tuple_host_group_inventory_creation(self): inventory = make_inventory( hosts=[ ('somehost', {'some_data': 'hello'}), ], tuple_group=([ ('somehost', {'another_data': 'world'}), ], { 'tuple_group_data': 'word', }), ) # Check host data host = inventory.get_host('somehost') assert host.data.some_data == 'hello' assert host.data.another_data == 'world' # Check group data assert host.data.tuple_group_data == 'word' def test_host_and_group_errors(self): inventory = make_inventory() with self.assertRaises(NoHostError): inventory.get_host('i-dont-exist') with self.assertRaises(NoGroupError): inventory.get_group('i-dont-exist') class TestStateApi(PatchSSHTestCase): def test_fail_percent(self): inventory = make_inventory(( 'somehost', ('thinghost', {'ssh_hostname': SSHException}), 'anotherhost', )) state = State(inventory, Config(FAIL_PERCENT=1)) # Ensure we would fail at this point with self.assertRaises(PyinfraError) as context: connect_all(state) assert context.exception.args[0] == 'Over 1% of hosts failed (33%)' # Ensure the other two did connect assert len(state.active_hosts) == 2<|fim▁end|>
class TestInventoryApi(TestCase):
<|file_name|>struct-no-fields-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern: Unit-like struct construction is written with no trailing `{ }` struct Foo; fn f2() {<|fim▁hole|> let _end_stmt = Foo { }; } fn main() {}<|fim▁end|>
<|file_name|>UTF32_Encoding_Converter.cpp<|end_file_name|><|fim▁begin|>// $Id: UTF32_Encoding_Converter.cpp 80826 2008-03-04 14:51:23Z wotte $ // ====================================================================== // // The actual conversion methods are covered by the copyright information // below. It is not the actual code provided by Unicode, Inc. but is an // ACE-ified and only slightly modified version. // // Chad Elliott 4/28/2005 // // Copyright 2001-2004 Unicode, Inc. // // Limitations on Rights to Redistribute This Code // // Unicode, Inc. hereby grants the right to freely use the information // supplied in this file in the creation of products supporting the // Unicode Standard, and to make copies of this file in any form // for internal or external distribution as long as this notice // remains attached. // // ====================================================================== #include "ace/UTF32_Encoding_Converter.h" #if defined (ACE_USES_WCHAR) #include "ace/OS_NS_stdio.h" #include "ace/OS_Memory.h" #include "ace/Min_Max.h" ACE_BEGIN_VERSIONED_NAMESPACE_DECL static const ACE_UINT32 UNI_MAX_LEGAL_UTF32 = 0x0010FFFF; ACE_UTF32_Encoding_Converter::ACE_UTF32_Encoding_Converter (bool swap) : ACE_UTF16_Encoding_Converter (swap) { } ACE_UTF32_Encoding_Converter::~ACE_UTF32_Encoding_Converter (void) { } ACE_UTF32_Encoding_Converter::Result ACE_UTF32_Encoding_Converter::to_utf8 (const void* source, size_t source_size, ACE_Byte* target, size_t target_size, bool strict) { static const ACE_UINT32 byteMask = 0xBF; static const ACE_UINT32 byteMark = 0x80; static const ACE_UINT32 UNI_SUR_HIGH_START = get_UNI_SUR_HIGH_START (); static const ACE_UINT32 UNI_SUR_LOW_END = get_UNI_SUR_LOW_END (); static const ACE_Byte* firstByteMark = get_first_byte_mark (); Result result = CONVERSION_OK; ACE_Byte* targetEnd = target + target_size; const ACE_UINT32* sourceStart = static_cast<const ACE_UINT32*> (source); const ACE_UINT32* sourceEnd = sourceStart + (source_size / sizeof (ACE_UINT32)); while (sourceStart < sourceEnd) { ACE_UINT32 nw = *sourceStart++; ACE_UINT32 ch = (this->swap_ ? ACE_SWAP_LONG (nw) : nw); unsigned short bytesToWrite = 0; if (strict) { // UTF-16 surrogate values are illegal in UTF-32 if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { result = SOURCE_ILLEGAL; break; } } // Figure out how many bytes the result will require. Turn any // illegally large ACE_UINT32 things (> Plane 17) into replacement // chars. if (ch < 0x80) { bytesToWrite = 1; } else if (ch < 0x800) { bytesToWrite = 2; } else if (ch < 0x10000) { bytesToWrite = 3; } else if (ch <= UNI_MAX_LEGAL_UTF32) { bytesToWrite = 4; } else { result = SOURCE_ILLEGAL; break; } target += bytesToWrite; if (target > targetEnd) { result = TARGET_EXHAUSTED; break; } // NOTE: everything falls through. 
switch (bytesToWrite) { case 4: *--target = (ACE_Byte)((ch | byteMark) & byteMask); ch >>= 6; case 3: *--target = (ACE_Byte)((ch | byteMark) & byteMask); ch >>= 6; case 2: *--target = (ACE_Byte)((ch | byteMark) & byteMask); ch >>= 6; case 1: <|fim▁hole|> *--target = (ACE_Byte) (ch | firstByteMark[bytesToWrite]); } target += bytesToWrite; } return result; } ACE_UTF32_Encoding_Converter::Result ACE_UTF32_Encoding_Converter::from_utf8 (const ACE_Byte* source, size_t source_size, void* target, size_t target_size, bool strict) { static const ACE_UINT32 UNI_SUR_HIGH_START = get_UNI_SUR_HIGH_START (); static const ACE_UINT32 UNI_SUR_LOW_END = get_UNI_SUR_LOW_END (); static const ACE_UINT32 UNI_REPLACEMENT_CHAR = get_UNI_REPLACEMENT_CHAR (); static const ACE_Byte* trailingBytesForUTF8 = get_trailing_bytes_for_utf8 (); static const ACE_UINT32* offsetsFromUTF8 = get_offsets_from_utf8 (); Result result = CONVERSION_OK; const ACE_Byte* sourceEnd = source + source_size; ACE_UINT32* targetStart = static_cast<ACE_UINT32*> (target); ACE_UINT32* targetEnd = targetStart + target_size; while (source < sourceEnd) { ACE_UINT32 ch = 0; unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; if (source + extraBytesToRead >= sourceEnd) { result = SOURCE_EXHAUSTED; break; } // Do this check whether lenient or strict if (!this->is_legal_utf8 (source, extraBytesToRead + 1)) { result = SOURCE_ILLEGAL; break; } // The cases all fall through. See "Note A" below. switch (extraBytesToRead) { case 5: ch += *source++; ch <<= 6; case 4: ch += *source++; ch <<= 6; case 3: ch += *source++; ch <<= 6; case 2: ch += *source++; ch <<= 6; case 1: ch += *source++; ch <<= 6; case 0: ch += *source++; } ch -= offsetsFromUTF8[extraBytesToRead]; if (targetStart >= targetEnd) { result = TARGET_EXHAUSTED; break; } if (ch <= UNI_MAX_LEGAL_UTF32) { // UTF-16 surrogate values are illegal in UTF-32, and anything // over Plane 17 (> 0x10FFFF) is illegal. if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { if (strict) { result = SOURCE_ILLEGAL; break; } else { *targetStart++ = UNI_REPLACEMENT_CHAR; } } else { *targetStart++ = ch; } } else { result = SOURCE_ILLEGAL; break; } } return result; } ACE_UTF32_Encoding_Converter* ACE_UTF32_Encoding_Converter::encoded (const ACE_Byte* source, size_t source_size) { static const size_t begin = 16; static const size_t converted = begin * 4; ACE_Byte target[converted]; ACE_UTF32_Encoding_Converter* converter = 0; ACE_NEW_RETURN (converter, ACE_UTF32_Encoding_Converter (false), 0); if (converter->to_utf8 (source, ACE_MIN (begin, source_size), target, converted) == CONVERSION_OK) { return converter; } else { delete converter; } return 0; } ACE_END_VERSIONED_NAMESPACE_DECL #endif /* ACE_USES_WCHAR */<|fim▁end|>
<|file_name|>fragment.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `Fragment` type, which represents the leaves of the layout tree. #![deny(unsafe_code)] use StyleArc; use app_units::Au; use canvas_traits::CanvasMsg; use context::{LayoutContext, with_thread_local_font_context}; use euclid::{Transform3D, Point2D, Vector2D, Radians, Rect, Size2D}; use floats::ClearType; use flow::{self, ImmutableFlowUtils}; use flow_ref::FlowRef; use gfx; use gfx::display_list::{BLUR_INFLATION_FACTOR, OpaqueNode}; use gfx::text::glyph::ByteIndex; use gfx::text::text_run::{TextRun, TextRunSlice}; use gfx_traits::StackingContextId; use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFragmentContext, InlineFragmentNodeInfo}; use inline::{InlineMetrics, LAST_FRAGMENT_OF_ELEMENT, LineMetrics}; use ipc_channel::ipc::IpcSender; #[cfg(debug_assertions)] use layout_debug; use model::{self, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, SizeConstraint}; use model::{style_length, ToGfxMatrix}; use msg::constellation_msg::{BrowsingContextId, PipelineId}; use net_traits::image::base::{Image, ImageMetadata}; use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder}; use range::*; use script_layout_interface::HTMLCanvasData; use script_layout_interface::SVGSVGData; use script_layout_interface::wrapper_traits::{PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode}; use serde::ser::{Serialize, SerializeStruct, Serializer}; use servo_url::ServoUrl; use std::{f32, fmt}; use std::borrow::ToOwned; use std::cmp::{Ordering, max, min}; use std::collections::LinkedList; use std::sync::{Arc, Mutex}; use style::computed_values::{border_collapse, box_sizing, clear, color, display, mix_blend_mode}; use style::computed_values::{overflow_wrap, overflow_x, position, text_decoration_line, transform}; use style::computed_values::{transform_style, vertical_align, white_space, word_break}; use style::computed_values::content::ContentItem; use style::logical_geometry::{Direction, LogicalMargin, LogicalRect, LogicalSize, WritingMode}; use style::properties::ServoComputedValues; use style::selector_parser::RestyleDamage; use style::servo::restyle_damage::RECONSTRUCT_FLOW; use style::str::char_is_whitespace; use style::values::{self, Either, Auto}; use style::values::computed::{LengthOrPercentage, LengthOrPercentageOrAuto}; use text; use text::TextRunScanner; use wrapper::ThreadSafeLayoutNodeHelpers; // From gfxFontConstants.h in Firefox. static FONT_SUBSCRIPT_OFFSET_RATIO: f32 = 0.20; static FONT_SUPERSCRIPT_OFFSET_RATIO: f32 = 0.34; // https://drafts.csswg.org/css-images/#default-object-size static DEFAULT_REPLACED_WIDTH: i32 = 300; static DEFAULT_REPLACED_HEIGHT: i32 = 150; /// Fragments (`struct Fragment`) are the leaves of the layout tree. They cannot position /// themselves. In general, fragments do not have a simple correspondence with CSS fragments in the /// specification: /// /// * Several fragments may correspond to the same CSS box or DOM node. For example, a CSS text box /// broken across two lines is represented by two fragments. /// /// * Some CSS fragments are not created at all, such as some anonymous block fragments induced by /// inline fragments with block-level sibling fragments. 
In that case, Servo uses an `InlineFlow` /// with `BlockFlow` siblings; the `InlineFlow` is block-level, but not a block container. It is /// positioned as if it were a block fragment, but its children are positioned according to /// inline flow. /// /// A `SpecificFragmentInfo::Generic` is an empty fragment that contributes only borders, margins, /// padding, and backgrounds. It is analogous to a CSS nonreplaced content box. /// /// A fragment's type influences how its styles are interpreted during layout. For example, /// replaced content such as images are resized differently from tables, text, or other content. /// Different types of fragments may also contain custom data; for example, text fragments contain /// text. /// /// Do not add fields to this structure unless they're really really mega necessary! Fragments get /// moved around a lot and thus their size impacts performance of layout quite a bit. /// /// FIXME(#2260, pcwalton): This can be slimmed down some by (at least) moving `inline_context` /// to be on `InlineFlow` only. #[derive(Clone)] pub struct Fragment { /// An opaque reference to the DOM node that this `Fragment` originates from. pub node: OpaqueNode, /// The CSS style of this fragment. pub style: StyleArc<ServoComputedValues>, /// The CSS style of this fragment when it's selected pub selected_style: StyleArc<ServoComputedValues>, /// The position of this fragment relative to its owning flow. The size includes padding and /// border, but not margin. /// /// NB: This does not account for relative positioning. /// NB: Collapsed borders are not included in this. pub border_box: LogicalRect<Au>, /// The sum of border and padding; i.e. the distance from the edge of the border box to the /// content edge of the fragment. pub border_padding: LogicalMargin<Au>, /// The margin of the content box. pub margin: LogicalMargin<Au>, /// Info specific to the kind of fragment. Keep this enum small. pub specific: SpecificFragmentInfo, /// Holds the style context information for fragments that are part of an inline formatting /// context. pub inline_context: Option<InlineFragmentContext>, /// How damaged this fragment is since last reflow. pub restyle_damage: RestyleDamage, /// The pseudo-element that this fragment represents. pub pseudo: PseudoElementType<()>, /// Various flags for this fragment. pub flags: FragmentFlags, /// A debug ID that is consistent for the life of this fragment (via transform etc). /// This ID should not be considered stable across multiple layouts or fragment /// manipulations. debug_id: DebugId, /// The ID of the StackingContext that contains this fragment. This is initialized /// to 0, but it assigned during the collect_stacking_contexts phase of display /// list construction. pub stacking_context_id: StackingContextId, } impl Serialize for Fragment { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { let mut serializer = serializer.serialize_struct("fragment", 3)?; serializer.serialize_field("id", &self.debug_id)?; serializer.serialize_field("border_box", &self.border_box)?; serializer.serialize_field("margin", &self.margin)?; serializer.end() } } /// Info specific to the kind of fragment. /// /// Keep this enum small. As in, no more than one word. Or pcwalton will yell at you. #[derive(Clone)] pub enum SpecificFragmentInfo { Generic, /// A piece of generated content that cannot be resolved into `ScannedText` until the generated /// content resolution phase (e.g. an ordered list item marker). 
GeneratedContent(Box<GeneratedContentInfo>), Iframe(IframeFragmentInfo), Image(Box<ImageFragmentInfo>), Canvas(Box<CanvasFragmentInfo>), Svg(Box<SvgFragmentInfo>), /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was /// declared with `display: inline;`. InlineAbsoluteHypothetical(InlineAbsoluteHypotheticalFragmentInfo), InlineBlock(InlineBlockFragmentInfo), /// An inline fragment that establishes an absolute containing block for its descendants (i.e. /// a positioned inline fragment). InlineAbsolute(InlineAbsoluteFragmentInfo), ScannedText(Box<ScannedTextFragmentInfo>), Table, TableCell, TableColumn(TableColumnFragmentInfo), TableRow, TableWrapper, Multicol, MulticolColumn, UnscannedText(Box<UnscannedTextFragmentInfo>), /// A container for a fragment that got truncated by text-overflow. /// "Totally truncated fragments" are not rendered at all. /// Text fragments may be partially truncated (in which case this renders like a text fragment). /// Other fragments can only be totally truncated or not truncated at all. TruncatedFragment(Box<TruncatedFragmentInfo>), } impl SpecificFragmentInfo { fn restyle_damage(&self) -> RestyleDamage { let flow = match *self { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::UnscannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Generic => return RestyleDamage::empty(), SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => &info.flow_ref, SpecificFragmentInfo::InlineAbsolute(ref info) => &info.flow_ref, SpecificFragmentInfo::InlineBlock(ref info) => &info.flow_ref, }; flow::base(&**flow).restyle_damage } pub fn get_type(&self) -> &'static str { match *self { SpecificFragmentInfo::Canvas(_) => "SpecificFragmentInfo::Canvas", SpecificFragmentInfo::Generic => "SpecificFragmentInfo::Generic", SpecificFragmentInfo::GeneratedContent(_) => "SpecificFragmentInfo::GeneratedContent", SpecificFragmentInfo::Iframe(_) => "SpecificFragmentInfo::Iframe", SpecificFragmentInfo::Image(_) => "SpecificFragmentInfo::Image", SpecificFragmentInfo::InlineAbsolute(_) => "SpecificFragmentInfo::InlineAbsolute", SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => { "SpecificFragmentInfo::InlineAbsoluteHypothetical" } SpecificFragmentInfo::InlineBlock(_) => "SpecificFragmentInfo::InlineBlock", SpecificFragmentInfo::ScannedText(_) => "SpecificFragmentInfo::ScannedText", SpecificFragmentInfo::Svg(_) => "SpecificFragmentInfo::Svg", SpecificFragmentInfo::Table => "SpecificFragmentInfo::Table", SpecificFragmentInfo::TableCell => "SpecificFragmentInfo::TableCell", SpecificFragmentInfo::TableColumn(_) => "SpecificFragmentInfo::TableColumn", SpecificFragmentInfo::TableRow => "SpecificFragmentInfo::TableRow", SpecificFragmentInfo::TableWrapper => "SpecificFragmentInfo::TableWrapper", SpecificFragmentInfo::Multicol => "SpecificFragmentInfo::Multicol", SpecificFragmentInfo::MulticolColumn => "SpecificFragmentInfo::MulticolColumn", SpecificFragmentInfo::UnscannedText(_) => "SpecificFragmentInfo::UnscannedText", SpecificFragmentInfo::TruncatedFragment(_) => "SpecificFragmentInfo::TruncatedFragment" 
} } } impl fmt::Debug for SpecificFragmentInfo { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { SpecificFragmentInfo::ScannedText(ref info) => write!(f, "{:?}", info.text()), SpecificFragmentInfo::UnscannedText(ref info) => write!(f, "{:?}", info.text), _ => Ok(()) } } } /// Information for generated content. #[derive(Clone)] pub enum GeneratedContentInfo { ListItem, ContentItem(ContentItem), /// Placeholder for elements with generated content that did not generate any fragments. Empty, } /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was declared /// with `display: inline;`. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineAbsoluteHypotheticalFragmentInfo { pub flow_ref: FlowRef, } impl InlineAbsoluteHypotheticalFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineAbsoluteHypotheticalFragmentInfo { InlineAbsoluteHypotheticalFragmentInfo { flow_ref: flow_ref, } } } /// A fragment that represents an inline-block element. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineBlockFragmentInfo { pub flow_ref: FlowRef, } impl InlineBlockFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineBlockFragmentInfo { InlineBlockFragmentInfo { flow_ref: flow_ref, } } } /// An inline fragment that establishes an absolute containing block for its descendants (i.e. /// a positioned inline fragment). /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineAbsoluteFragmentInfo { pub flow_ref: FlowRef, } impl InlineAbsoluteFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineAbsoluteFragmentInfo { InlineAbsoluteFragmentInfo { flow_ref: flow_ref, } } } #[derive(Clone)] pub struct CanvasFragmentInfo { pub ipc_renderer: Option<Arc<Mutex<IpcSender<CanvasMsg>>>>, pub dom_width: Au, pub dom_height: Au, } impl CanvasFragmentInfo { pub fn new(data: HTMLCanvasData) -> CanvasFragmentInfo { CanvasFragmentInfo { ipc_renderer: data.ipc_renderer .map(|renderer| Arc::new(Mutex::new(renderer))), dom_width: Au::from_px(data.width as i32), dom_height: Au::from_px(data.height as i32), } } } #[derive(Clone)] pub struct SvgFragmentInfo { pub dom_width: Au, pub dom_height: Au, } impl SvgFragmentInfo { pub fn new(data: SVGSVGData) -> SvgFragmentInfo { SvgFragmentInfo { dom_width: Au::from_px(data.width as i32), dom_height: Au::from_px(data.height as i32), } } } /// A fragment that represents a replaced content image and its accompanying borders, shadows, etc. #[derive(Clone)] pub struct ImageFragmentInfo { pub image: Option<Arc<Image>>, pub metadata: Option<ImageMetadata>, } impl ImageFragmentInfo { /// Creates a new image fragment from the given URL and local image cache. /// /// FIXME(pcwalton): The fact that image fragments store the cache in the fragment makes little /// sense to me. 
pub fn new<N: ThreadSafeLayoutNode>(url: Option<ServoUrl>, node: &N, layout_context: &LayoutContext) -> ImageFragmentInfo { let image_or_metadata = url.and_then(|url| { layout_context.get_or_request_image_or_meta(node.opaque(), url, UsePlaceholder::Yes) }); let (image, metadata) = match image_or_metadata { Some(ImageOrMetadataAvailable::ImageAvailable(i, _)) => { (Some(i.clone()), Some(ImageMetadata { height: i.height, width: i.width } )) } Some(ImageOrMetadataAvailable::MetadataAvailable(m)) => { (None, Some(m)) } None => { (None, None) } }; ImageFragmentInfo { image: image, metadata: metadata, } } pub fn tile_image_round(position: &mut Au, size: &mut Au, absolute_anchor_origin: Au, image_size: &mut Au) { if *size == Au(0) || *image_size == Au(0) { *position = Au(0); *size =Au(0); return; } let number_of_tiles = (size.to_f32_px() / image_size.to_f32_px()).round().max(1.0); *image_size = *size / (number_of_tiles as i32); ImageFragmentInfo::tile_image(position, size, absolute_anchor_origin, *image_size); } pub fn tile_image_spaced(position: &mut Au, size: &mut Au, tile_spacing: &mut Au, absolute_anchor_origin: Au, image_size: Au) { if *size == Au(0) || image_size == Au(0) { *position = Au(0); *size = Au(0); *tile_spacing = Au(0); return; } // Per the spec, if the space available is not enough for two images, just tile as // normal but only display a single tile. if image_size * 2 >= *size { ImageFragmentInfo::tile_image(position, size, absolute_anchor_origin, image_size); *tile_spacing = Au(0); *size = image_size; return; } // Take the box size, remove room for two tiles on the edges, and then calculate how many // other tiles fit in between them. let size_remaining = *size - (image_size * 2); let num_middle_tiles = (size_remaining.to_f32_px() / image_size.to_f32_px()).floor() as i32; // Allocate the remaining space as padding between tiles. background-position is ignored // as per the spec, so the position is just the box origin. We are also ignoring // background-attachment here, which seems unspecced when combined with // background-repeat: space. let space_for_middle_tiles = image_size * num_middle_tiles; *tile_spacing = (size_remaining - space_for_middle_tiles) / (num_middle_tiles + 1); } <|fim▁hole|> absolute_anchor_origin: Au, image_size: Au) { // Avoid division by zero below! if image_size == Au(0) { return } let delta_pixels = absolute_anchor_origin - *position; let image_size_px = image_size.to_f32_px(); let tile_count = ((delta_pixels.to_f32_px() + image_size_px - 1.0) / image_size_px).floor(); let offset = image_size * (tile_count as i32); let new_position = absolute_anchor_origin - offset; *size = *position - new_position + *size; *position = new_position; } } /// A fragment that represents an inline frame (iframe). This stores the frame ID so that the /// size of this iframe can be communicated via the constellation to the iframe's own layout thread. #[derive(Clone)] pub struct IframeFragmentInfo { /// The frame ID of this iframe. pub browsing_context_id: BrowsingContextId, /// The pipelineID of this iframe. pub pipeline_id: PipelineId, } impl IframeFragmentInfo { /// Creates the information specific to an iframe fragment. pub fn new<N: ThreadSafeLayoutNode>(node: &N) -> IframeFragmentInfo { let browsing_context_id = node.iframe_browsing_context_id(); let pipeline_id = node.iframe_pipeline_id(); IframeFragmentInfo { browsing_context_id: browsing_context_id, pipeline_id: pipeline_id, } } } /// A scanned text fragment represents a single run of text with a distinct style. 
A `TextFragment` /// may be split into two or more fragments across line breaks. Several `TextFragment`s may /// correspond to a single DOM text node. Split text fragments are implemented by referring to /// subsets of a single `TextRun` object. #[derive(Clone)] pub struct ScannedTextFragmentInfo { /// The text run that this represents. pub run: Arc<TextRun>, /// The intrinsic size of the text fragment. pub content_size: LogicalSize<Au>, /// The byte offset of the insertion point, if any. pub insertion_point: Option<ByteIndex>, /// The range within the above text run that this represents. pub range: Range<ByteIndex>, /// The endpoint of the above range, including whitespace that was stripped out. This exists /// so that we can restore the range to its original value (before line breaking occurred) when /// performing incremental reflow. pub range_end_including_stripped_whitespace: ByteIndex, pub flags: ScannedTextFlags, } bitflags! { pub flags ScannedTextFlags: u8 { /// Whether a line break is required after this fragment if wrapping on newlines (e.g. if /// `white-space: pre` is in effect). const REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES = 0x01, /// Is this fragment selected? const SELECTED = 0x02, } } impl ScannedTextFragmentInfo { /// Creates the information specific to a scanned text fragment from a range and a text run. pub fn new(run: Arc<TextRun>, range: Range<ByteIndex>, content_size: LogicalSize<Au>, insertion_point: Option<ByteIndex>, flags: ScannedTextFlags) -> ScannedTextFragmentInfo { ScannedTextFragmentInfo { run: run, range: range, insertion_point: insertion_point, content_size: content_size, range_end_including_stripped_whitespace: range.end(), flags: flags, } } pub fn text(&self) -> &str { &self.run.text[self.range.begin().to_usize() .. self.range.end().to_usize()] } pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool { self.flags.contains(REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES) } pub fn selected(&self) -> bool { self.flags.contains(SELECTED) } } /// Describes how to split a fragment. This is used during line breaking as part of the return /// value of `find_split_info_for_inline_size()`. #[derive(Debug, Clone)] pub struct SplitInfo { // TODO(bjz): this should only need to be a single character index, but both values are // currently needed for splitting in the `inline::try_append_*` functions. pub range: Range<ByteIndex>, pub inline_size: Au, } impl SplitInfo { fn new(range: Range<ByteIndex>, info: &ScannedTextFragmentInfo) -> SplitInfo { let inline_size = info.run.advance_for_range(&range); SplitInfo { range: range, inline_size: inline_size, } } } /// Describes how to split a fragment into two. This contains up to two `SplitInfo`s. pub struct SplitResult { /// The part of the fragment that goes on the first line. pub inline_start: Option<SplitInfo>, /// The part of the fragment that goes on the second line. pub inline_end: Option<SplitInfo>, /// The text run which is being split. pub text_run: Arc<TextRun>, } /// Describes how a fragment should be truncated. struct TruncationResult { /// The part of the fragment remaining after truncation. split: SplitInfo, /// The text run which is being truncated. text_run: Arc<TextRun>, } /// Data for an unscanned text fragment. Unscanned text fragments are the results of flow /// construction that have not yet had their inline-size determined. #[derive(Clone)] pub struct UnscannedTextFragmentInfo { /// The text inside the fragment. pub text: Box<str>, /// The selected text range. 
An empty range represents the insertion point. pub selection: Option<Range<ByteIndex>>, } impl UnscannedTextFragmentInfo { /// Creates a new instance of `UnscannedTextFragmentInfo` from the given text. #[inline] pub fn new(text: String, selection: Option<Range<ByteIndex>>) -> UnscannedTextFragmentInfo { UnscannedTextFragmentInfo { text: text.into_boxed_str(), selection: selection, } } } /// A fragment that represents a table column. #[derive(Copy, Clone)] pub struct TableColumnFragmentInfo { /// the number of columns a <col> element should span pub span: u32, } impl TableColumnFragmentInfo { /// Create the information specific to an table column fragment. pub fn new<N: ThreadSafeLayoutNode>(node: &N) -> TableColumnFragmentInfo { let element = node.as_element().unwrap(); let span = element.get_attr(&ns!(), &local_name!("span")) .and_then(|string| string.parse().ok()) .unwrap_or(0); TableColumnFragmentInfo { span: span, } } } /// A wrapper for fragments that have been truncated by the `text-overflow` property. /// This may have an associated text node, or, if the fragment was completely truncated, /// it may act as an invisible marker for incremental reflow. #[derive(Clone)] pub struct TruncatedFragmentInfo { pub text_info: Option<ScannedTextFragmentInfo>, pub full: Fragment, } impl Fragment { /// Constructs a new `Fragment` instance. pub fn new<N: ThreadSafeLayoutNode>(node: &N, specific: SpecificFragmentInfo, ctx: &LayoutContext) -> Fragment { let shared_context = ctx.shared_context(); let style = node.style(shared_context); let writing_mode = style.writing_mode; let mut restyle_damage = node.restyle_damage(); restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: node.opaque(), style: style, selected_style: node.selected_style(), restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: node.get_pseudo_element_type().strip(), flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Constructs a new `Fragment` instance from an opaque node. pub fn from_opaque_node_and_style(node: OpaqueNode, pseudo: PseudoElementType<()>, style: StyleArc<ServoComputedValues>, selected_style: StyleArc<ServoComputedValues>, mut restyle_damage: RestyleDamage, specific: SpecificFragmentInfo) -> Fragment { let writing_mode = style.writing_mode; restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: node, style: style, selected_style: selected_style, restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: pseudo, flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Creates an anonymous fragment just like this one but with the given style and fragment /// type. For the new anonymous fragment, layout-related values (border box, etc.) are reset to /// initial values. 
pub fn create_similar_anonymous_fragment(&self, style: StyleArc<ServoComputedValues>, specific: SpecificFragmentInfo) -> Fragment { let writing_mode = style.writing_mode; Fragment { node: self.node, style: style, selected_style: self.selected_style.clone(), restyle_damage: self.restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: self.pseudo, flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Transforms this fragment into another fragment of the given type, with the given size, /// preserving all the other data. pub fn transform(&self, size: LogicalSize<Au>, info: SpecificFragmentInfo) -> Fragment { let new_border_box = LogicalRect::from_point_size(self.style.writing_mode, self.border_box.start, size); let mut restyle_damage = RestyleDamage::rebuild_and_reflow(); restyle_damage.remove(RECONSTRUCT_FLOW); Fragment { node: self.node, style: self.style.clone(), selected_style: self.selected_style.clone(), restyle_damage: restyle_damage, border_box: new_border_box, border_padding: self.border_padding, margin: self.margin, specific: info, inline_context: self.inline_context.clone(), pseudo: self.pseudo.clone(), flags: FragmentFlags::empty(), debug_id: self.debug_id.clone(), stacking_context_id: StackingContextId::root(), } } /// Transforms this fragment using the given `SplitInfo`, preserving all the other data. pub fn transform_with_split_info(&self, split: &SplitInfo, text_run: Arc<TextRun>) -> Fragment { let size = LogicalSize::new(self.style.writing_mode, split.inline_size, self.border_box.size.block); // Preserve the insertion point if it is in this fragment's range or it is at line end. let (flags, insertion_point) = match self.specific { SpecificFragmentInfo::ScannedText(ref info) => { match info.insertion_point { Some(index) if split.range.contains(index) => (info.flags, info.insertion_point), Some(index) if index == ByteIndex(text_run.text.chars().count() as isize - 1) && index == split.range.end() => (info.flags, info.insertion_point), _ => (info.flags, None) } }, _ => (ScannedTextFlags::empty(), None) }; let info = box ScannedTextFragmentInfo::new( text_run, split.range, size, insertion_point, flags); self.transform(size, SpecificFragmentInfo::ScannedText(info)) } /// Transforms this fragment into an ellipsis fragment, preserving all the other data. 
pub fn transform_into_ellipsis(&self, layout_context: &LayoutContext, text_overflow_string: String) -> Fragment { let mut unscanned_ellipsis_fragments = LinkedList::new(); let mut ellipsis_fragment = self.transform( self.border_box.size, SpecificFragmentInfo::UnscannedText( box UnscannedTextFragmentInfo::new(text_overflow_string, None))); unscanned_ellipsis_fragments.push_back(ellipsis_fragment); let ellipsis_fragments = with_thread_local_font_context(layout_context, |font_context| { TextRunScanner::new().scan_for_runs(font_context, unscanned_ellipsis_fragments) }); debug_assert!(ellipsis_fragments.len() == 1); ellipsis_fragment = ellipsis_fragments.fragments.into_iter().next().unwrap(); ellipsis_fragment.flags |= IS_ELLIPSIS; ellipsis_fragment } pub fn restyle_damage(&self) -> RestyleDamage { self.restyle_damage | self.specific.restyle_damage() } pub fn contains_node(&self, node_address: OpaqueNode) -> bool { node_address == self.node || self.inline_context.as_ref().map_or(false, |ctx| { ctx.contains_node(node_address) }) } /// Adds a style to the inline context for this fragment. If the inline context doesn't exist /// yet, it will be created. pub fn add_inline_context_style(&mut self, node_info: InlineFragmentNodeInfo) { if self.inline_context.is_none() { self.inline_context = Some(InlineFragmentContext::new()); } self.inline_context.as_mut().unwrap().nodes.push(node_info); } /// Determines which quantities (border/padding/margin/specified) should be included in the /// intrinsic inline size of this fragment. fn quantities_included_in_intrinsic_inline_size(&self) -> QuantitiesIncludedInIntrinsicInlineSizes { match self.specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::Svg(_) => { QuantitiesIncludedInIntrinsicInlineSizes::all() } SpecificFragmentInfo::Table => { INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED | INTRINSIC_INLINE_SIZE_INCLUDES_PADDING | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } SpecificFragmentInfo::TableCell => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_PADDING | INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableWrapper => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS | INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableRow => { let base_quantities = INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::UnscannedText(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::MulticolColumn => { QuantitiesIncludedInIntrinsicInlineSizes::empty() } } } /// Returns the portion of the intrinsic inline-size that consists of borders/padding and /// margins, respectively. 
///
    /// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
    pub fn surrounding_intrinsic_inline_size(&self) -> (Au, Au) {
        let flags = self.quantities_included_in_intrinsic_inline_size();
        let style = self.style();

        // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
        // This will likely need to be done by pushing down definite sizes during selector
        // cascading.
        let margin = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS) {
            let margin = style.logical_margin();
            (MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() +
             MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero())
        } else {
            Au(0)
        };

        // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
        // This will likely need to be done by pushing down definite sizes during selector
        // cascading.
        let padding = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_PADDING) {
            let padding = style.logical_padding();
            (padding.inline_start.to_used_value(Au(0)) +
             padding.inline_end.to_used_value(Au(0)))
        } else {
            Au(0)
        };

        let border = if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_BORDER) {
            self.border_width().inline_start_end()
        } else {
            Au(0)
        };

        (border + padding, margin)
    }

    /// Uses the style only to estimate the intrinsic inline-sizes. These may be modified for
    /// text or replaced elements.
    pub fn style_specified_intrinsic_inline_size(&self) -> IntrinsicISizesContribution {
        let flags = self.quantities_included_in_intrinsic_inline_size();
        let style = self.style();

        // FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
        let (border_padding, margin) = self.surrounding_intrinsic_inline_size();

        let mut specified = Au(0);
        if flags.contains(INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED) {
            specified = MaybeAuto::from_style(style.content_inline_size(),
                                              Au(0)).specified_or_zero();
            specified = max(style.min_inline_size().to_used_value(Au(0)), specified);
            if let Some(max) = style.max_inline_size().to_used_value(Au(0)) {
                specified = min(specified, max)
            }

            if self.style.get_position().box_sizing == box_sizing::T::border_box {
                specified = max(Au(0), specified - border_padding);
            }
        }

        IntrinsicISizesContribution {
            content_intrinsic_sizes: IntrinsicISizes {
                minimum_inline_size: specified,
                preferred_inline_size: specified,
            },
            surrounding_size: border_padding + margin,
        }
    }

    /// Returns the intrinsic width of this replaced element.
    #[inline]
    pub fn intrinsic_width(&self) -> Au {
        match self.specific {
            SpecificFragmentInfo::Image(ref info) => {
                if let Some(ref data) = info.metadata {
                    Au::from_px(data.width as i32)
                } else {
                    Au(0)
                }
            }
            SpecificFragmentInfo::Canvas(ref info) => info.dom_width,
            SpecificFragmentInfo::Svg(ref info) => info.dom_width,
            // Note: Currently, for a replaced element with no intrinsic size, this
            // function simply returns the default object size. As long as these elements
            // do not have an intrinsic aspect ratio this should be sufficient, but we may
            // need to investigate whether this is enough for use cases like SVG.
            SpecificFragmentInfo::Iframe(_) => Au::from_px(DEFAULT_REPLACED_WIDTH),
            _ => panic!("Trying to get intrinsic width on non-replaced element!")
        }
    }

    /// Returns the intrinsic height of this replaced element.
#[inline]
    pub fn intrinsic_height(&self) -> Au {
        match self.specific {
            SpecificFragmentInfo::Image(ref info) => {
                if let Some(ref data) = info.metadata {
                    Au::from_px(data.height as i32)
                } else {
                    Au(0)
                }
            }
            SpecificFragmentInfo::Canvas(ref info) => info.dom_height,
            SpecificFragmentInfo::Svg(ref info) => info.dom_height,
            SpecificFragmentInfo::Iframe(_) => Au::from_px(DEFAULT_REPLACED_HEIGHT),
            _ => panic!("Trying to get intrinsic height on non-replaced element!")
        }
    }

    /// Whether this replaced element has an intrinsic aspect ratio.
    pub fn has_intrinsic_ratio(&self) -> bool {
        match self.specific {
            SpecificFragmentInfo::Image(_) |
            SpecificFragmentInfo::Canvas(_) |
            // TODO(stshine): According to the SVG spec, whether an SVG element has an
            // intrinsic aspect ratio is determined by the `preserveAspectRatio` attribute.
            // Since for now SVG is far from implemented, we simply choose the default
            // behavior that the intrinsic aspect ratio is preserved.
            // https://svgwg.org/svg2-draft/coords.html#PreserveAspectRatioAttribute
            SpecificFragmentInfo::Svg(_) =>
                self.intrinsic_width() != Au(0) && self.intrinsic_height() != Au(0),
            _ => false
        }
    }

    /// CSS 2.1 § 10.3.2 & 10.6.2 Calculate the used width and height of a replaced element.
    /// When a parameter is `None` it means the specified size in that direction
    /// is unconstrained. The inline containing size can also be `None` since this
    /// method is also used for calculating the intrinsic inline size contribution.
    pub fn calculate_replaced_sizes(&self,
                                    containing_inline_size: Option<Au>,
                                    containing_block_size: Option<Au>)
                                    -> (Au, Au) {
        let (intrinsic_inline_size, intrinsic_block_size) = if self.style.writing_mode.is_vertical() {
            (self.intrinsic_height(), self.intrinsic_width())
        } else {
            (self.intrinsic_width(), self.intrinsic_height())
        };

        // Make sure the sizes we use here are for the content box, since they may be
        // transferred by the intrinsic aspect ratio.
        let inline_size = style_length(self.style.content_inline_size(), containing_inline_size)
            .map(|x| x - self.box_sizing_boundary(Direction::Inline));
        let block_size = style_length(self.style.content_block_size(), containing_block_size)
            .map(|x| x - self.box_sizing_boundary(Direction::Block));
        let inline_constraint = self.size_constraint(containing_inline_size, Direction::Inline);
        let block_constraint = self.size_constraint(containing_block_size, Direction::Block);

        // https://drafts.csswg.org/css-images-3/#default-sizing
        match (inline_size, block_size) {
            // If the specified size is a definite width and height, the concrete
            // object size is given that width and height.
            (MaybeAuto::Specified(inline_size), MaybeAuto::Specified(block_size)) =>
                (inline_constraint.clamp(inline_size), block_constraint.clamp(block_size)),
            // If the specified size is only a width or height (but not both)
            // then the concrete object size is given that specified width or
            // height. The other dimension is calculated as follows:
            //
            // If the object has an intrinsic aspect ratio, the missing dimension
            // of the concrete object size is calculated using the intrinsic
            // aspect ratio and the present dimension.
            //
            // Otherwise, if the missing dimension is present in the object’s intrinsic
            // dimensions, the missing dimension is taken from the object’s intrinsic
            // dimensions. Otherwise it is taken from the default object size.
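            //
            // Illustrative example (hypothetical numbers, not from the spec text): for an
            // image with an intrinsic size of 400px × 300px and a specified inline size of
            // 200px, the block size below resolves to 200 * 300 / 400 = 150px via the
            // intrinsic aspect ratio.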
(MaybeAuto::Specified(inline_size), MaybeAuto::Auto) => {
                let inline_size = inline_constraint.clamp(inline_size);
                let block_size = if self.has_intrinsic_ratio() {
                    // Note: We cannot precompute the ratio and store it as a float, because
                    // doing so may result in a one-pixel difference in calculation for
                    // certain images, thus making some tests fail.
                    Au::new((inline_size.0 as i64 * intrinsic_block_size.0 as i64 /
                             intrinsic_inline_size.0 as i64) as i32)
                } else {
                    intrinsic_block_size
                };
                (inline_size, block_constraint.clamp(block_size))
            }
            (MaybeAuto::Auto, MaybeAuto::Specified(block_size)) => {
                let block_size = block_constraint.clamp(block_size);
                let inline_size = if self.has_intrinsic_ratio() {
                    Au::new((block_size.0 as i64 * intrinsic_inline_size.0 as i64 /
                             intrinsic_block_size.0 as i64) as i32)
                } else {
                    intrinsic_inline_size
                };
                (inline_constraint.clamp(inline_size), block_size)
            }
            // https://drafts.csswg.org/css2/visudet.html#min-max-widths
            (MaybeAuto::Auto, MaybeAuto::Auto) => {
                if self.has_intrinsic_ratio() {
                    // This approach follows the spirit of the cover and contain constraints.
                    // https://drafts.csswg.org/css-images-3/#cover-contain
                    // First, create two rectangles that keep the aspect ratio while possibly
                    // being clamped by the constraints;
                    let first_isize = inline_constraint.clamp(intrinsic_inline_size);
                    let first_bsize = Au::new((first_isize.0 as i64 * intrinsic_block_size.0 as i64 /
                                               intrinsic_inline_size.0 as i64) as i32);
                    let second_bsize = block_constraint.clamp(intrinsic_block_size);
                    let second_isize = Au::new((second_bsize.0 as i64 * intrinsic_inline_size.0 as i64 /
                                                intrinsic_block_size.0 as i64) as i32);
                    let (inline_size, block_size) =
                        match (first_isize.cmp(&intrinsic_inline_size),
                               second_isize.cmp(&intrinsic_inline_size)) {
                            (Ordering::Equal, Ordering::Equal) =>
                                (first_isize, first_bsize),
                            // When only one rectangle is clamped, use it;
                            (Ordering::Equal, _) =>
                                (second_isize, second_bsize),
                            (_, Ordering::Equal) =>
                                (first_isize, first_bsize),
                            // When both rectangles grow (smaller than the min sizes),
                            // choose the larger one;
                            (Ordering::Greater, Ordering::Greater) =>
                                if first_isize > second_isize {
                                    (first_isize, first_bsize)
                                } else {
                                    (second_isize, second_bsize)
                                },
                            // When both rectangles shrink (larger than the max sizes),
                            // choose the smaller one;
                            (Ordering::Less, Ordering::Less) =>
                                if first_isize > second_isize {
                                    (second_isize, second_bsize)
                                } else {
                                    (first_isize, first_bsize)
                                },
                            // It does not matter which we choose here, because both sizes
                            // will be clamped to the constraints;
                            (Ordering::Less, Ordering::Greater) |
                            (Ordering::Greater, Ordering::Less) =>
                                (first_isize, first_bsize)
                        };
                    // Clamp the result and we are done :-)
                    (inline_constraint.clamp(inline_size), block_constraint.clamp(block_size))
                } else {
                    (inline_constraint.clamp(intrinsic_inline_size),
                     block_constraint.clamp(intrinsic_block_size))
                }
            }
        }
    }

    /// Return a size constraint that can be used to clamp the size in the given direction.
    /// To take `box-sizing: border-box` into account, the `border_padding` field
    /// must be initialized first.
    ///
    /// TODO(stshine): Maybe there is a more convenient way.
pub fn size_constraint(&self, containing_size: Option<Au>, direction: Direction) -> SizeConstraint { let (style_min_size, style_max_size) = match direction { Direction::Inline => (self.style.min_inline_size(), self.style.max_inline_size()), Direction::Block => (self.style.min_block_size(), self.style.max_block_size()) }; let border = if self.style().get_position().box_sizing == box_sizing::T::border_box { Some(self.border_padding.start_end(direction)) } else { None }; SizeConstraint::new(containing_size, style_min_size, style_max_size, border) } /// Returns a guess as to the distances from the margin edge of this fragment to its content /// in the inline direction. This will generally be correct unless percentages are involved. /// /// This is used for the float placement speculation logic. pub fn guess_inline_content_edge_offsets(&self) -> SpeculatedInlineContentEdgeOffsets { let logical_margin = self.style.logical_margin(); let logical_padding = self.style.logical_padding(); let border_width = self.border_width(); SpeculatedInlineContentEdgeOffsets { start: MaybeAuto::from_style(logical_margin.inline_start, Au(0)).specified_or_zero() + logical_padding.inline_start.to_used_value(Au(0)) + border_width.inline_start, end: MaybeAuto::from_style(logical_margin.inline_end, Au(0)).specified_or_zero() + logical_padding.inline_end.to_used_value(Au(0)) + border_width.inline_end, } } /// Returns the sum of the inline-sizes of all the borders of this fragment. Note that this /// can be expensive to compute, so if possible use the `border_padding` field instead. #[inline] pub fn border_width(&self) -> LogicalMargin<Au> { let style_border_width = self.style().logical_border_width(); // NOTE: We can have nodes with different writing mode inside // the inline fragment context, so we need to overwrite the // writing mode to compute the child logical sizes. let writing_mode = self.style.writing_mode; let context_border = match self.inline_context { None => LogicalMargin::zero(writing_mode), Some(ref inline_fragment_context) => { inline_fragment_context.nodes.iter().fold(style_border_width, |accumulator, node| { let mut this_border_width = node.style.border_width_for_writing_mode(writing_mode); if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_start = Au(0) } if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_end = Au(0) } accumulator + this_border_width }) } }; style_border_width + context_border } /// Returns the border width in given direction if this fragment has property /// 'box-sizing: border-box'. The `border_padding` field must have been initialized. pub fn box_sizing_boundary(&self, direction: Direction) -> Au { match (self.style().get_position().box_sizing, direction) { (box_sizing::T::border_box, Direction::Inline) => { self.border_padding.inline_start_end() } (box_sizing::T::border_box, Direction::Block) => { self.border_padding.block_start_end() } _ => Au(0) } } /// Computes the margins in the inline direction from the containing block inline-size and the /// style. After this call, the inline direction of the `margin` field will be correct. /// /// Do not use this method if the inline direction margins are to be computed some other way /// (for example, via constraint solving for blocks). 
pub fn compute_inline_direction_margins(&mut self, containing_block_inline_size: Au) { match self.specific { SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => { self.margin.inline_start = Au(0); self.margin.inline_end = Au(0); return } _ => { let margin = self.style().logical_margin(); self.margin.inline_start = MaybeAuto::from_style(margin.inline_start, containing_block_inline_size).specified_or_zero(); self.margin.inline_end = MaybeAuto::from_style(margin.inline_end, containing_block_inline_size).specified_or_zero(); } } if let Some(ref inline_context) = self.inline_context { for node in &inline_context.nodes { let margin = node.style.logical_margin(); let this_inline_start_margin = if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { Au(0) } else { MaybeAuto::from_style(margin.inline_start, containing_block_inline_size).specified_or_zero() }; let this_inline_end_margin = if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { Au(0) } else { MaybeAuto::from_style(margin.inline_end, containing_block_inline_size).specified_or_zero() }; self.margin.inline_start += this_inline_start_margin; self.margin.inline_end += this_inline_end_margin; } } } /// Computes the margins in the block direction from the containing block inline-size and the /// style. After this call, the block direction of the `margin` field will be correct. /// /// Do not use this method if the block direction margins are to be computed some other way /// (for example, via constraint solving for absolutely-positioned flows). pub fn compute_block_direction_margins(&mut self, containing_block_inline_size: Au) { match self.specific { SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableColumn(_) => { self.margin.block_start = Au(0); self.margin.block_end = Au(0) } _ => { // NB: Percentages are relative to containing block inline-size (not block-size) // per CSS 2.1. let margin = self.style().logical_margin(); self.margin.block_start = MaybeAuto::from_style(margin.block_start, containing_block_inline_size) .specified_or_zero(); self.margin.block_end = MaybeAuto::from_style(margin.block_end, containing_block_inline_size) .specified_or_zero(); } } } /// Computes the border and padding in both inline and block directions from the containing /// block inline-size and the style. After this call, the `border_padding` field will be /// correct. /// /// TODO(pcwalton): Remove `border_collapse`; we can figure it out from our style and specific /// fragment info. pub fn compute_border_and_padding(&mut self, containing_block_inline_size: Au, border_collapse: border_collapse::T) { // Compute border. let border = match border_collapse { border_collapse::T::separate => self.border_width(), border_collapse::T::collapse => LogicalMargin::zero(self.style.writing_mode), }; // Compute padding from the fragment's style. let padding_from_style = match self.specific { SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper => LogicalMargin::zero(self.style.writing_mode), _ => model::padding_from_style(self.style(), containing_block_inline_size, self.style().writing_mode), }; // Compute padding from the inline fragment context. 
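        // As with the border computation above, only the first fragment generated for an
        // element keeps its inline-start padding and only the last fragment keeps its
        // inline-end padding, so an element split across several fragments does not get
        // its padding repeated at every break.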
let padding_from_inline_fragment_context = match (&self.specific, &self.inline_context) { (_, &None) | (&SpecificFragmentInfo::TableColumn(_), _) | (&SpecificFragmentInfo::TableRow, _) | (&SpecificFragmentInfo::TableWrapper, _) => { LogicalMargin::zero(self.style.writing_mode) } (_, &Some(ref inline_fragment_context)) => { let writing_mode = self.style.writing_mode; let zero_padding = LogicalMargin::zero(writing_mode); inline_fragment_context.nodes.iter().fold(zero_padding, |accumulator, node| { let mut padding = model::padding_from_style(&*node.style, Au(0), writing_mode); if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { padding.inline_start = Au(0) } if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { padding.inline_end = Au(0) } accumulator + padding }) } }; self.border_padding = border + padding_from_style + padding_from_inline_fragment_context } // Return offset from original position because of `position: relative`. pub fn relative_position(&self, containing_block_size: &LogicalSize<Au>) -> LogicalSize<Au> { fn from_style(style: &ServoComputedValues, container_size: &LogicalSize<Au>) -> LogicalSize<Au> { let offsets = style.logical_position(); let offset_i = if offsets.inline_start != LengthOrPercentageOrAuto::Auto { MaybeAuto::from_style(offsets.inline_start, container_size.inline).specified_or_zero() } else { -MaybeAuto::from_style(offsets.inline_end, container_size.inline).specified_or_zero() }; let offset_b = if offsets.block_start != LengthOrPercentageOrAuto::Auto { MaybeAuto::from_style(offsets.block_start, container_size.block).specified_or_zero() } else { -MaybeAuto::from_style(offsets.block_end, container_size.block).specified_or_zero() }; LogicalSize::new(style.writing_mode, offset_i, offset_b) } // Go over the ancestor fragments and add all relative offsets (if any). let mut rel_pos = if self.style().get_box().position == position::T::relative { from_style(self.style(), containing_block_size) } else { LogicalSize::zero(self.style.writing_mode) }; if let Some(ref inline_fragment_context) = self.inline_context { for node in &inline_fragment_context.nodes { if node.style.get_box().position == position::T::relative { rel_pos = rel_pos + from_style(&*node.style, containing_block_size); } } } rel_pos } /// Always inline for SCCP. /// /// FIXME(pcwalton): Just replace with the clear type from the style module for speed? #[inline(always)] pub fn clear(&self) -> Option<ClearType> { let style = self.style(); match style.get_box().clear { clear::T::none => None, clear::T::left => Some(ClearType::Left), clear::T::right => Some(ClearType::Right), clear::T::both => Some(ClearType::Both), } } #[inline(always)] pub fn style(&self) -> &ServoComputedValues { &*self.style } #[inline(always)] pub fn selected_style(&self) -> &ServoComputedValues { &*self.selected_style } pub fn white_space(&self) -> white_space::T { self.style().get_inheritedtext().white_space } pub fn color(&self) -> color::T { self.style().get_color().color } /// Returns the text decoration line of this fragment, according to the style of the nearest ancestor /// element. /// /// NB: This may not be the actual text decoration line, because of the override rules specified in /// CSS 2.1 § 16.3.1. Unfortunately, computing this properly doesn't really fit into Servo's /// model. Therefore, this is a best lower bound approximation, but the end result may actually /// have the various decoration flags turned on afterward. 
pub fn text_decoration_line(&self) -> text_decoration_line::T { self.style().get_text().text_decoration_line } /// Returns the inline-start offset from margin edge to content edge. /// /// FIXME(#2262, pcwalton): I think this method is pretty bogus, because it won't work for /// inlines. pub fn inline_start_offset(&self) -> Au { match self.specific { SpecificFragmentInfo::TableWrapper => self.margin.inline_start, SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow => self.border_padding.inline_start, SpecificFragmentInfo::TableColumn(_) => Au(0), _ => self.margin.inline_start + self.border_padding.inline_start, } } /// Returns true if this element can be split. This is true for text fragments, unless /// `white-space: pre` or `white-space: nowrap` is set. pub fn can_split(&self) -> bool { self.is_scanned_text_fragment() && self.white_space().allow_wrap() } /// Returns true if and only if this fragment is a generated content fragment. pub fn is_unscanned_generated_content(&self) -> bool { match self.specific { SpecificFragmentInfo::GeneratedContent(box GeneratedContentInfo::Empty) => false, SpecificFragmentInfo::GeneratedContent(..) => true, _ => false, } } /// Returns true if and only if this is a scanned text fragment. pub fn is_scanned_text_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::ScannedText(..) => true, _ => false, } } /// Computes the intrinsic inline-sizes of this fragment. pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution { let mut result = self.style_specified_intrinsic_inline_size(); match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {} SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Svg(_) => { let mut inline_size = match self.style.content_inline_size() { LengthOrPercentageOrAuto::Auto | LengthOrPercentageOrAuto::Percentage(_) => { // We have to initialize the `border_padding` field first to make // the size constraints work properly. // TODO(stshine): Find a cleaner way to do this. 
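                        // `size_constraint` below reads `border_padding` when
                        // `box-sizing: border-box` is in effect, which is why both the
                        // padding and the border are folded into `border_padding` here
                        // before the replaced sizes are calculated.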
let padding = self.style.logical_padding(); self.border_padding.inline_start = padding.inline_start.to_used_value(Au(0)); self.border_padding.inline_end = padding.inline_end.to_used_value(Au(0)); self.border_padding.block_start = padding.block_start.to_used_value(Au(0)); self.border_padding.block_end = padding.block_end.to_used_value(Au(0)); let border = self.border_width(); self.border_padding.inline_start += border.inline_start; self.border_padding.inline_end += border.inline_end; self.border_padding.block_start += border.block_start; self.border_padding.block_end += border.block_end; let (result_inline, _) = self.calculate_replaced_sizes(None, None); result_inline } LengthOrPercentageOrAuto::Length(length) => length, LengthOrPercentageOrAuto::Calc(calc) => { // TODO(nox): This is probably wrong, because it accounts neither for // clamping (not sure if necessary here) nor percentage. calc.unclamped_length() }, }; let size_constraint = self.size_constraint(None, Direction::Inline); inline_size = size_constraint.clamp(inline_size); result.union_block(&IntrinsicISizes { minimum_inline_size: inline_size, preferred_inline_size: inline_size, }); } SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref text_fragment_info), .. }) | SpecificFragmentInfo::ScannedText(box ref text_fragment_info) => { let range = &text_fragment_info.range; // See http://dev.w3.org/csswg/css-sizing/#max-content-inline-size. // TODO: Account for soft wrap opportunities. let max_line_inline_size = text_fragment_info.run .metrics_for_range(range) .advance_width; let min_line_inline_size = if self.white_space().allow_wrap() { text_fragment_info.run.min_width_for_range(range) } else { max_line_inline_size }; result.union_block(&IntrinsicISizes { minimum_inline_size: min_line_inline_size, preferred_inline_size: max_line_inline_size, }) } SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: None, .. }) => return IntrinsicISizesContribution::new(), SpecificFragmentInfo::UnscannedText(..) => { panic!("Unscanned text fragments should have been scanned by now!") } }; // Take borders and padding for parent inline fragments into account. let writing_mode = self.style.writing_mode; if let Some(ref context) = self.inline_context { for node in &context.nodes { let mut border_width = node.style.logical_border_width(); let mut padding = model::padding_from_style(&*node.style, Au(0), writing_mode); let mut margin = model::specified_margin_from_style(&*node.style, writing_mode); if !node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { border_width.inline_start = Au(0); padding.inline_start = Au(0); margin.inline_start = Au(0); } if !node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { border_width.inline_end = Au(0); padding.inline_end = Au(0); margin.inline_end = Au(0); } result.surrounding_size = result.surrounding_size + border_width.inline_start_end() + padding.inline_start_end() + margin.inline_start_end(); } } result } /// Returns the narrowest inline-size that the first splittable part of this fragment could /// possibly be split to. (In most cases, this returns the inline-size of the first word in /// this fragment.) pub fn minimum_splittable_inline_size(&self) -> Au { match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref text), .. }) | SpecificFragmentInfo::ScannedText(box ref text) => { text.run.minimum_splittable_inline_size(&text.range) } _ => Au(0), } } /// Returns the dimensions of the content box. 
/// /// This is marked `#[inline]` because it is frequently called when only one or two of the /// values are needed and that will save computation. #[inline] pub fn content_box(&self) -> LogicalRect<Au> { self.border_box - self.border_padding } /// Attempts to find the split positions of a text fragment so that its inline-size is no more /// than `max_inline_size`. /// /// A return value of `None` indicates that the fragment could not be split. Otherwise the /// information pertaining to the split is returned. The inline-start and inline-end split /// information are both optional due to the possibility of them being whitespace. pub fn calculate_split_position(&self, max_inline_size: Au, starts_line: bool) -> Option<SplitResult> { let text_fragment_info = match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment_info) => text_fragment_info, _ => return None, }; let mut flags = SplitOptions::empty(); if starts_line { flags.insert(STARTS_LINE); if self.style().get_inheritedtext().overflow_wrap == overflow_wrap::T::break_word { flags.insert(RETRY_AT_CHARACTER_BOUNDARIES) } } match self.style().get_inheritedtext().word_break { word_break::T::normal | word_break::T::keep_all => { // Break at normal word boundaries. keep-all forbids soft wrap opportunities. let natural_word_breaking_strategy = text_fragment_info.run.natural_word_slices_in_range(&text_fragment_info.range); self.calculate_split_position_using_breaking_strategy( natural_word_breaking_strategy, max_inline_size, flags) } word_break::T::break_all => { // Break at character boundaries. let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); flags.remove(RETRY_AT_CHARACTER_BOUNDARIES); self.calculate_split_position_using_breaking_strategy( character_breaking_strategy, max_inline_size, flags) } } } /// Truncates this fragment to the given `max_inline_size`, using a character-based breaking /// strategy. The resulting fragment will have `SpecificFragmentInfo::TruncatedFragment`, /// preserving the original fragment for use in incremental reflow. /// /// This function will panic if self is already truncated. pub fn truncate_to_inline_size(self, max_inline_size: Au) -> Fragment { if let SpecificFragmentInfo::TruncatedFragment(_) = self.specific { panic!("Cannot truncate an already truncated fragment"); } let info = self.calculate_truncate_to_inline_size(max_inline_size); let (size, text_info) = match info { Some(TruncationResult { split: SplitInfo { inline_size, range }, text_run } ) => { let size = LogicalSize::new(self.style.writing_mode, inline_size, self.border_box.size.block); // Preserve the insertion point if it is in this fragment's range or it is at line end. 
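// Concretely, per the match below: the caret is kept when it falls inside the
// retained `range`, or when it sits on the final character of the run exactly
// at the range's end (the caret-at-line-end case); otherwise it is dropped.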
let (flags, insertion_point) = match self.specific { SpecificFragmentInfo::ScannedText(ref info) => { match info.insertion_point { Some(index) if range.contains(index) => (info.flags, info.insertion_point), Some(index) if index == ByteIndex(text_run.text.chars().count() as isize - 1) && index == range.end() => (info.flags, info.insertion_point), _ => (info.flags, None) } }, _ => (ScannedTextFlags::empty(), None) }; let text_info = ScannedTextFragmentInfo::new( text_run, range, size, insertion_point, flags); (size, Some(text_info)) } None => (LogicalSize::zero(self.style.writing_mode), None) }; let mut result = self.transform(size, SpecificFragmentInfo::Generic); result.specific = SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: text_info, full: self, }); result } /// Truncates this fragment to the given `max_inline_size`, using a character-based breaking /// strategy. If no characters could fit, returns `None`. fn calculate_truncate_to_inline_size(&self, max_inline_size: Au) -> Option<TruncationResult> { let text_fragment_info = if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific { text_fragment_info } else { return None }; let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); match self.calculate_split_position_using_breaking_strategy(character_breaking_strategy, max_inline_size, SplitOptions::empty()) { None => None, Some(split_info) => { match split_info.inline_start { None => None, Some(split) => { Some(TruncationResult { split: split, text_run: split_info.text_run.clone(), }) } } } } } /// A helper method that uses the breaking strategy described by `slice_iterator` (at present, /// either natural word breaking or character breaking) to split this fragment. fn calculate_split_position_using_breaking_strategy<'a, I>( &self, slice_iterator: I, max_inline_size: Au, flags: SplitOptions) -> Option<SplitResult> where I: Iterator<Item=TextRunSlice<'a>> { let text_fragment_info = match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment_info) => text_fragment_info, _ => return None, }; let mut remaining_inline_size = max_inline_size - self.border_padding.inline_start_end(); let mut inline_start_range = Range::new(text_fragment_info.range.begin(), ByteIndex(0)); let mut inline_end_range = None; let mut overflowing = false; debug!("calculate_split_position_using_breaking_strategy: splitting text fragment \ (strlen={}, range={:?}, max_inline_size={:?})", text_fragment_info.run.text.len(), text_fragment_info.range, max_inline_size); for slice in slice_iterator { debug!("calculate_split_position_using_breaking_strategy: considering slice \ (offset={:?}, slice range={:?}, remaining_inline_size={:?})", slice.offset, slice.range, remaining_inline_size); // Use the `remaining_inline_size` to find a split point if possible. If not, go around // the loop again with the next slice. let metrics = text_fragment_info.run.metrics_for_slice(slice.glyphs, &slice.range); let advance = metrics.advance_width; // Have we found the split point? if advance <= remaining_inline_size || slice.glyphs.is_whitespace() { // Keep going; we haven't found the split point yet. debug!("calculate_split_position_using_breaking_strategy: enlarging span"); remaining_inline_size = remaining_inline_size - advance; inline_start_range.extend_by(slice.range.length()); continue } // The advance is more than the remaining inline-size, so split here. First, check to // see if we're going to overflow the line. 
If so, perform a best-effort split. let mut remaining_range = slice.text_run_range(); let split_is_empty = inline_start_range.is_empty() && !(self.requires_line_break_afterward_if_wrapping_on_newlines() && !self.white_space().allow_wrap()); if split_is_empty { // We're going to overflow the line. overflowing = true; inline_start_range = slice.text_run_range(); remaining_range = Range::new(slice.text_run_range().end(), ByteIndex(0)); remaining_range.extend_to(text_fragment_info.range.end()); } // Check to see if we need to create an inline-end chunk. let slice_begin = remaining_range.begin(); if slice_begin < text_fragment_info.range.end() { // There are still some things left over at the end of the line, so create the // inline-end chunk. let mut inline_end = remaining_range; inline_end.extend_to(text_fragment_info.range.end()); inline_end_range = Some(inline_end); debug!("calculate_split_position: splitting remainder with inline-end range={:?}", inline_end); } // If we failed to find a suitable split point, we're on the verge of overflowing the // line. if split_is_empty || overflowing { // If we've been instructed to retry at character boundaries (probably via // `overflow-wrap: break-word`), do so. if flags.contains(RETRY_AT_CHARACTER_BOUNDARIES) { let character_breaking_strategy = text_fragment_info.run .character_slices_in_range(&text_fragment_info.range); let mut flags = flags; flags.remove(RETRY_AT_CHARACTER_BOUNDARIES); return self.calculate_split_position_using_breaking_strategy( character_breaking_strategy, max_inline_size, flags) } // We aren't at the start of the line, so don't overflow. Let inline layout wrap to // the next line instead. if !flags.contains(STARTS_LINE) { return None } } break } let split_is_empty = inline_start_range.is_empty() && !self.requires_line_break_afterward_if_wrapping_on_newlines(); let inline_start = if !split_is_empty { Some(SplitInfo::new(inline_start_range, &**text_fragment_info)) } else { None }; let inline_end = inline_end_range.map(|inline_end_range| { SplitInfo::new(inline_end_range, &**text_fragment_info) }); Some(SplitResult { inline_start: inline_start, inline_end: inline_end, text_run: text_fragment_info.run.clone(), }) } /// The opposite of `calculate_split_position_using_breaking_strategy`: merges this fragment /// with the next one. pub fn merge_with(&mut self, next_fragment: Fragment) { match (&mut self.specific, &next_fragment.specific) { (&mut SpecificFragmentInfo::ScannedText(ref mut this_info), &SpecificFragmentInfo::ScannedText(ref other_info)) => { debug_assert!(Arc::ptr_eq(&this_info.run, &other_info.run)); this_info.range_end_including_stripped_whitespace = other_info.range_end_including_stripped_whitespace; if other_info.requires_line_break_afterward_if_wrapping_on_newlines() { this_info.flags.insert(REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES); } if other_info.insertion_point.is_some() { this_info.insertion_point = other_info.insertion_point; } self.border_padding.inline_end = next_fragment.border_padding.inline_end; self.margin.inline_end = next_fragment.margin.inline_end; } _ => panic!("Can only merge two scanned-text fragments!"), } self.reset_text_range_and_inline_size(); self.meld_with_next_inline_fragment(&next_fragment); } /// Restore any whitespace that was stripped from a text fragment, and recompute inline metrics /// if necessary.
pub fn reset_text_range_and_inline_size(&mut self) { if let SpecificFragmentInfo::ScannedText(ref mut info) = self.specific { if info.run.extra_word_spacing != Au(0) { Arc::make_mut(&mut info.run).extra_word_spacing = Au(0); } // FIXME (mbrubeck): Do we need to restore leading too? let range_end = info.range_end_including_stripped_whitespace; if info.range.end() == range_end { return } info.range.extend_to(range_end); info.content_size.inline = info.run.metrics_for_range(&info.range).advance_width; self.border_box.size.inline = info.content_size.inline + self.border_padding.inline_start_end(); } } /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced /// content per CSS 2.1 § 10.3.2. pub fn assign_replaced_inline_size_if_necessary(&mut self, container_inline_size: Au, container_block_size: Option<Au>) { match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: None, .. }) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have inline size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Svg(_) => {} }; match self.specific { // Inline blocks SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); block_flow.base.position.size.inline = block_flow.base.intrinsic_inline_sizes.preferred_inline_size; // This is a hypothetical box, so it takes up no space. self.border_box.size.inline = Au(0); } SpecificFragmentInfo::InlineBlock(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } // Text SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref info), .. }) | SpecificFragmentInfo::ScannedText(box ref info) => { // Scanned text fragments will have already had their content inline-sizes assigned // by this point. 
self.border_box.size.inline = info.content_size.inline + self.border_padding.inline_start_end(); } // Replaced elements _ if self.is_replaced() => { let (inline_size, block_size) = self.calculate_replaced_sizes(Some(container_inline_size), container_block_size); self.border_box.size.inline = inline_size + self.border_padding.inline_start_end(); self.border_box.size.block = block_size + self.border_padding.block_start_end(); } ref unhandled @ _ => panic!("this case should have been handled above: {:?}", unhandled), } } /// Assign block-size for this fragment if it is replaced content. The inline-size must have /// been assigned first. /// /// Ideally, this should follow CSS 2.1 § 10.6.2. pub fn assign_replaced_block_size_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: None, .. }) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have block size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(_), .. }) | SpecificFragmentInfo::Svg(_) => {} } match self.specific { // Text SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref info), .. }) | SpecificFragmentInfo::ScannedText(box ref info) => { // Scanned text fragments' content block-sizes are calculated by the text run // scanner during flow construction. self.border_box.size.block = info.content_size.block + self.border_padding.block_start_end(); } // Inline blocks SpecificFragmentInfo::InlineBlock(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } // Replaced elements _ if self.is_replaced() => {}, ref unhandled @ _ => panic!("should have been handled above: {:?}", unhandled), } } /// Returns true if this fragment is replaced content. 
pub fn is_replaced(&self) -> bool { match self.specific { SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Svg(_) => true, _ => false } } /// Returns true if this fragment is replaced content or an inline-block or false otherwise. pub fn is_replaced_or_inline_block(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) => true, _ => self.is_replaced(), } } /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment /// when used in an inline formatting context. See CSS 2.1 § 10.8.1. /// /// This does not take `vertical-align` into account. For that, use `aligned_inline_metrics()`. fn content_inline_metrics(&self, layout_context: &LayoutContext) -> InlineMetrics { // CSS 2.1 § 10.8: "The height of each inline-level box in the line box is // calculated. For replaced elements, inline-block elements, and inline-table // elements, this is the height of their margin box." // // FIXME(pcwalton): We have to handle `Generic` and `GeneratedContent` here to avoid // crashing in a couple of `css21_dev/html4/content-` WPTs, but I don't see how those two // fragment types should end up inside inlines. (In the case of `GeneratedContent`, those // fragment types should have been resolved by now…) let inline_metrics = match self.specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) => { let ascent = self.border_box.size.block + self.margin.block_end; InlineMetrics { space_above_baseline: ascent + self.margin.block_start, space_below_baseline: Au(0), ascent: ascent, } } SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref info), .. }) | SpecificFragmentInfo::ScannedText(box ref info) => { // Fragments with no glyphs don't contribute any inline metrics. // TODO: Filter out these fragments during flow construction? if info.insertion_point.is_none() && info.content_size.inline == Au(0) { return InlineMetrics::new(Au(0), Au(0), Au(0)); } // See CSS 2.1 § 10.8.1. let font_metrics = with_thread_local_font_context(layout_context, |font_context| { text::font_metrics_for_style(font_context, self.style.clone_font()) }); let line_height = text::line_height_from_style(&*self.style, &font_metrics); InlineMetrics::from_font_metrics(&info.run.font_metrics, line_height) } SpecificFragmentInfo::InlineBlock(ref info) => { inline_metrics_of_block(&info.flow_ref, &*self.style) } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => { inline_metrics_of_block(&info.flow_ref, &*self.style) } SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: None, .. }) | SpecificFragmentInfo::InlineAbsolute(_) => { InlineMetrics::new(Au(0), Au(0), Au(0)) } SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::UnscannedText(_) => { unreachable!("Shouldn't see fragments of this type here!") } }; return inline_metrics; fn inline_metrics_of_block(flow: &FlowRef, style: &ServoComputedValues) -> InlineMetrics { // CSS 2.1 § 10.8: "The height of each inline-level box in the line box is calculated. 
// For replaced elements, inline-block elements, and inline-table elements, this is the // height of their margin box." // // CSS 2.1 § 10.8.1: "The baseline of an 'inline-block' is the baseline of its last // line box in the normal flow, unless it has either no in-flow line boxes or if its // 'overflow' property has a computed value other than 'visible', in which case the // baseline is the bottom margin edge." // // NB: We must use `block_flow.fragment.border_box.size.block` here instead of // `block_flow.base.position.size.block` because sometimes the latter is late-computed // and isn't up to date at this point. let block_flow = flow.as_block(); let start_margin = block_flow.fragment.margin.block_start; let end_margin = block_flow.fragment.margin.block_end; if style.get_box().overflow_y == overflow_x::T::visible { if let Some(baseline_offset) = flow.baseline_offset_of_last_line_box_in_flow() { let ascent = baseline_offset + start_margin; let space_below_baseline = block_flow.fragment.border_box.size.block - baseline_offset + end_margin; return InlineMetrics::new(ascent, space_below_baseline, baseline_offset) } } let ascent = block_flow.fragment.border_box.size.block + end_margin; let space_above_baseline = start_margin + ascent; InlineMetrics::new(space_above_baseline, Au(0), ascent) } } /// Calculates the offset from the baseline that applies to this fragment due to /// `vertical-align`. Positive values represent downward displacement. /// /// If `actual_line_metrics` is supplied, then these metrics are used to determine the /// displacement of the fragment when `top` or `bottom` `vertical-align` values are /// encountered. If this is not supplied, then `top` and `bottom` values are ignored. fn vertical_alignment_offset(&self, layout_context: &LayoutContext, content_inline_metrics: &InlineMetrics, minimum_line_metrics: &LineMetrics, actual_line_metrics: Option<&LineMetrics>) -> Au { let mut offset = Au(0); for style in self.inline_styles() { // If any of the inline styles say `top` or `bottom`, adjust the vertical align // appropriately. // // FIXME(#5624, pcwalton): This passes our current reftests but isn't the right thing // to do. 
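// Sign convention, as the doc comment above states: a positive `offset`
// displaces the fragment downward. Hence `sub` adds to the offset, `super`
// subtracts from it, and explicit lengths/percentages subtract because
// positive `vertical-align` values raise the box.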
match style.get_box().vertical_align { vertical_align::T::baseline => {} vertical_align::T::middle => { let font_metrics = with_thread_local_font_context(layout_context, |font_context| { text::font_metrics_for_style(font_context, self.style.clone_font()) }); offset += (content_inline_metrics.ascent - content_inline_metrics.space_below_baseline - font_metrics.x_height).scale_by(0.5) } vertical_align::T::sub => { offset += minimum_line_metrics.space_needed() .scale_by(FONT_SUBSCRIPT_OFFSET_RATIO) } vertical_align::T::super_ => { offset -= minimum_line_metrics.space_needed() .scale_by(FONT_SUPERSCRIPT_OFFSET_RATIO) } vertical_align::T::text_top => { offset = self.content_inline_metrics(layout_context).ascent - minimum_line_metrics.space_above_baseline } vertical_align::T::text_bottom => { offset = minimum_line_metrics.space_below_baseline - self.content_inline_metrics(layout_context).space_below_baseline } vertical_align::T::top => { if let Some(actual_line_metrics) = actual_line_metrics { offset = content_inline_metrics.ascent - actual_line_metrics.space_above_baseline } } vertical_align::T::bottom => { if let Some(actual_line_metrics) = actual_line_metrics { offset = actual_line_metrics.space_below_baseline - content_inline_metrics.space_below_baseline } } vertical_align::T::LengthOrPercentage(LengthOrPercentage::Length(length)) => { offset -= length } vertical_align::T::LengthOrPercentage(LengthOrPercentage::Percentage( percentage)) => { offset -= minimum_line_metrics.space_needed().scale_by(percentage.0) } vertical_align::T::LengthOrPercentage(LengthOrPercentage::Calc(formula)) => { offset -= formula.to_used_value(Some(minimum_line_metrics.space_needed())).unwrap() } } } offset } /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment /// when used in an inline formatting context, taking `vertical-align` (other than `top` or /// `bottom`) into account. See CSS 2.1 § 10.8.1. /// /// If `actual_line_metrics` is supplied, then these metrics are used to determine the /// displacement of the fragment when `top` or `bottom` `vertical-align` values are /// encountered. If this is not supplied, then `top` and `bottom` values are ignored. pub fn aligned_inline_metrics(&self, layout_context: &LayoutContext, minimum_line_metrics: &LineMetrics, actual_line_metrics: Option<&LineMetrics>) -> InlineMetrics { let content_inline_metrics = self.content_inline_metrics(layout_context); let vertical_alignment_offset = self.vertical_alignment_offset(layout_context, &content_inline_metrics, minimum_line_metrics, actual_line_metrics); let mut space_above_baseline = match actual_line_metrics { None => content_inline_metrics.space_above_baseline, Some(actual_line_metrics) => actual_line_metrics.space_above_baseline, }; space_above_baseline = space_above_baseline - vertical_alignment_offset; let space_below_baseline = content_inline_metrics.space_below_baseline + vertical_alignment_offset; let ascent = content_inline_metrics.ascent - vertical_alignment_offset; InlineMetrics::new(space_above_baseline, space_below_baseline, ascent) } /// Returns true if this fragment is a hypothetical box. See CSS 2.1 § 10.3.7. pub fn is_hypothetical(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => true, _ => false, } } /// Returns true if this fragment can merge with another immediately-following fragment or /// false otherwise. 
pub fn can_merge_with_fragment(&self, other: &Fragment) -> bool { match (&self.specific, &other.specific) { (&SpecificFragmentInfo::UnscannedText(ref first_unscanned_text), &SpecificFragmentInfo::UnscannedText(_)) => { // FIXME: Should probably use a whitelist of styles that can safely differ (#3165) if self.style().get_font() != other.style().get_font() || self.text_decoration_line() != other.text_decoration_line() || self.white_space() != other.white_space() || self.color() != other.color() { return false } if first_unscanned_text.text.ends_with('\n') { return false } // If this node has any styles that have border/padding/margins on the following // side, then we can't merge with the next fragment. if let Some(ref inline_context) = self.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_end != LengthOrPercentageOrAuto::Length(Au(0)) { return false } if inline_context_node.style.logical_padding().inline_end != LengthOrPercentage::Length(Au(0)) { return false } if inline_context_node.style.logical_border_width().inline_end != Au(0) { return false } } } // If the next fragment has any styles that have border/padding/margins on the // preceding side, then it can't merge with us. if let Some(ref inline_context) = other.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_start != LengthOrPercentageOrAuto::Length(Au(0)) { return false } if inline_context_node.style.logical_padding().inline_start != LengthOrPercentage::Length(Au(0)) { return false } if inline_context_node.style.logical_border_width().inline_start != Au(0) { return false } } } true } _ => false, } } /// Returns true if and only if this is the *primary fragment* for the fragment's style object /// (conceptually, though style sharing makes this not really true, of course). The primary /// fragment is the one that draws backgrounds, borders, etc., and takes borders, padding and /// margins into account. Every style object has at most one primary fragment. /// /// At present, all fragments are primary fragments except for inline-block and table wrapper /// fragments. Inline-block fragments are not primary fragments because the corresponding block /// flow is the primary fragment, while table wrapper fragments are not primary fragments /// because the corresponding table flow is the primary fragment. pub fn is_primary_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::TableWrapper => false, SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::UnscannedText(_) => true, } } /// Determines the inline sizes of inline-block fragments. 
These cannot be fully computed until /// inline size assignment has run for the child flow: thus it is computed "late", during /// block size assignment. pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific { let block_flow = FlowRef::deref_mut(&mut inline_block_info.flow_ref).as_block(); self.border_box.size.inline = block_flow.fragment.margin_box_inline_size(); } } pub fn update_late_computed_inline_position_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) = self.specific { let position = self.border_box.start.i; FlowRef::deref_mut(&mut info.flow_ref) .update_late_computed_inline_position_if_necessary(position) } } pub fn update_late_computed_block_position_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) = self.specific { let position = self.border_box.start.b; FlowRef::deref_mut(&mut info.flow_ref) .update_late_computed_block_position_if_necessary(position) } } pub fn repair_style(&mut self, new_style: &StyleArc<ServoComputedValues>) { self.style = (*new_style).clone() } /// Given the stacking-context-relative position of the containing flow, returns the border box /// of this fragment relative to the parent stacking context. This takes `position: relative` /// into account. /// /// If `coordinate_system` is `Parent`, this returns the border box in the parent stacking /// context's coordinate system. Otherwise, if `coordinate_system` is `Own` and this fragment /// establishes a stacking context itself, this returns a border box anchored at (0, 0). (If /// this fragment does not establish a stacking context, then it always belongs to its parent /// stacking context and thus `coordinate_system` is ignored.) /// /// This is the method you should use for display list construction as well as /// `getBoundingClientRect()` and so forth. pub fn stacking_relative_border_box(&self, stacking_relative_flow_origin: &Vector2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, coordinate_system: CoordinateSystem) -> Rect<Au> { let container_size = relative_containing_block_size.to_physical(relative_containing_block_mode); let border_box = self.border_box.to_physical(self.style.writing_mode, container_size); if coordinate_system == CoordinateSystem::Own && self.establishes_stacking_context() { return Rect::new(Point2D::zero(), border_box.size) } // FIXME(pcwalton): This can double-count relative position sometimes for inlines (e.g. // `<div style="position:relative">x</div>`, because the `position:relative` trickles down // to the inline flow. Possibly we should extend the notion of "primary fragment" to fix // this. let relative_position = self.relative_position(relative_containing_block_size); border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode)) .translate(&stacking_relative_flow_origin) } /// Given the stacking-context-relative border box, returns the stacking-context-relative /// content box. 
pub fn stacking_relative_content_box(&self, stacking_relative_border_box: &Rect<Au>) -> Rect<Au> { let border_padding = self.border_padding.to_physical(self.style.writing_mode); Rect::new(Point2D::new(stacking_relative_border_box.origin.x + border_padding.left, stacking_relative_border_box.origin.y + border_padding.top), Size2D::new(stacking_relative_border_box.size.width - border_padding.horizontal(), stacking_relative_border_box.size.height - border_padding.vertical())) } /// Returns true if this fragment establishes a new stacking context and false otherwise. pub fn establishes_stacking_context(&self) -> bool { // Text fragments shouldn't create stacking contexts. match self.specific { SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::UnscannedText(_) => return false, _ => {} } if self.style().get_effects().opacity != 1.0 { return true } if !self.style().get_effects().filter.0.is_empty() { return true } if self.style().get_effects().mix_blend_mode != mix_blend_mode::T::normal { return true } if self.style().get_box().transform.0.is_some() || self.style().get_box().transform_style == transform_style::T::preserve_3d || self.style().overrides_transform_style() { return true } // TODO(mrobinson): Determine if this is necessary, since blocks with // transformations already create stacking contexts. if let Either::First(ref _length) = self.style().get_box().perspective { return true } // Fixed position blocks always create stacking contexts. if self.style.get_box().position == position::T::fixed { return true } match (self.style().get_box().position, self.style().get_position().z_index, self.style().get_box().overflow_x, self.style().get_box().overflow_y) { (position::T::absolute, Either::Second(Auto), overflow_x::T::visible, overflow_x::T::visible) | (position::T::fixed, Either::Second(Auto), overflow_x::T::visible, overflow_x::T::visible) | (position::T::relative, Either::Second(Auto), overflow_x::T::visible, overflow_x::T::visible) => false, (position::T::absolute, _, _, _) | (position::T::fixed, _, _, _) | (position::T::relative, _, _, _) => true, (position::T::static_, _, _, _) => false } } // Get the effective z-index of this fragment. Z-indices only apply to positioned elements // per CSS 2 9.9.1 (http://www.w3.org/TR/CSS2/visuren.html#z-index), so this value may differ // from the value specified in the style. pub fn effective_z_index(&self) -> i32 { match self.style().get_box().position { position::T::static_ => {}, _ => return self.style().get_position().z_index.integer_or(0), } if self.style().get_box().transform.0.is_some() { return self.style().get_position().z_index.integer_or(0); } match self.style().get_box().display { display::T::flex => self.style().get_position().z_index.integer_or(0), _ => 0, } } /// Computes the overflow rect of this fragment relative to the start of the flow. pub fn compute_overflow(&self, flow_size: &Size2D<Au>, relative_containing_block_size: &LogicalSize<Au>) -> Overflow { let mut border_box = self.border_box.to_physical(self.style.writing_mode, *flow_size); // Relative position can cause us to draw outside our border box. // // FIXME(pcwalton): I'm not a fan of the way this makes us crawl through so many styles all // the time. Can't we handle relative positioning by just adjusting `border_box`?
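// Start from the physical border box, shift it by any `position: relative`
// offset, then union in everything that can paint outside it: box shadows,
// outlines, and the overflow of any contained block flow.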
let relative_position = self.relative_position(relative_containing_block_size); border_box = border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode)); let mut overflow = Overflow::from_rect(&border_box); // Box shadows cause us to draw outside our border box. for box_shadow in &self.style().get_effects().box_shadow.0 { let offset = Vector2D::new(box_shadow.offset_x, box_shadow.offset_y); let inflation = box_shadow.spread_radius + box_shadow.blur_radius * BLUR_INFLATION_FACTOR; overflow.paint = overflow.paint.union(&border_box.translate(&offset) .inflate(inflation, inflation)) } // Outlines cause us to draw outside our border box. let outline_width = self.style.get_outline().outline_width; if outline_width != Au(0) { overflow.paint = overflow.paint.union(&border_box.inflate(outline_width, outline_width)) } // Include the overflow of the block flow, if any. match self.specific { SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); overflow.union(&flow::base(block_flow).overflow); } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); overflow.union(&flow::base(block_flow).overflow); } _ => (), } // FIXME(pcwalton): Sometimes excessively fancy glyphs can make us draw outside our border // box too. overflow } pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool { match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref scanned_text), .. }) | SpecificFragmentInfo::ScannedText(box ref scanned_text) => { scanned_text.requires_line_break_afterward_if_wrapping_on_newlines() } _ => false, } } pub fn strip_leading_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref mut scanned_text_fragment_info), .. }) | SpecificFragmentInfo::ScannedText(box ref mut scanned_text_fragment_info) => { let leading_whitespace_byte_count = scanned_text_fragment_info.text() .find(|c| !char_is_whitespace(c)) .unwrap_or(scanned_text_fragment_info.text().len()); let whitespace_len = ByteIndex(leading_whitespace_byte_count as isize); let whitespace_range = Range::new(scanned_text_fragment_info.range.begin(), whitespace_len); let text_bounds = scanned_text_fragment_info.run.metrics_for_range(&whitespace_range).bounding_box; self.border_box.size.inline = self.border_box.size.inline - text_bounds.size.width; scanned_text_fragment_info.content_size.inline = scanned_text_fragment_info.content_size.inline - text_bounds.size.width; scanned_text_fragment_info.range.adjust_by(whitespace_len, -whitespace_len); WhitespaceStrippingResult::RetainFragment } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut new_text_string = String::new(); let mut modified = false; for (i, character) in unscanned_text_fragment_info.text.char_indices() { if gfx::text::util::is_bidi_control(character) { new_text_string.push(character); continue } if char_is_whitespace(character) { modified = true; continue } // Finished processing leading control chars and whitespace. 
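// First retained character reached: copy the rest of the text verbatim and
// stop scanning. The boxed str below is only rebuilt if something was stripped.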
if modified { new_text_string.push_str(&unscanned_text_fragment_info.text[i..]); } break } if modified { unscanned_text_fragment_info.text = new_text_string.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, } } /// Returns true if the entire fragment was stripped. pub fn strip_trailing_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } match self.specific { SpecificFragmentInfo::TruncatedFragment(box TruncatedFragmentInfo { text_info: Some(ref mut scanned_text_fragment_info), .. }) | SpecificFragmentInfo::ScannedText(box ref mut scanned_text_fragment_info) => { let mut trailing_whitespace_start_byte = 0; for (i, c) in scanned_text_fragment_info.text().char_indices().rev() { if !char_is_whitespace(c) { trailing_whitespace_start_byte = i + c.len_utf8(); break; } } let whitespace_start = ByteIndex(trailing_whitespace_start_byte as isize); let whitespace_len = scanned_text_fragment_info.range.length() - whitespace_start; let mut whitespace_range = Range::new(whitespace_start, whitespace_len); whitespace_range.shift_by(scanned_text_fragment_info.range.begin()); let text_bounds = scanned_text_fragment_info.run .metrics_for_range(&whitespace_range) .bounding_box; self.border_box.size.inline -= text_bounds.size.width; scanned_text_fragment_info.content_size.inline -= text_bounds.size.width; scanned_text_fragment_info.range.extend_by(-whitespace_len); WhitespaceStrippingResult::RetainFragment } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut trailing_bidi_control_characters_to_retain = Vec::new(); let (mut modified, mut last_character_index) = (true, 0); for (i, character) in unscanned_text_fragment_info.text.char_indices().rev() { if gfx::text::util::is_bidi_control(character) { trailing_bidi_control_characters_to_retain.push(character); continue } if char_is_whitespace(character) { modified = true; continue } last_character_index = i + character.len_utf8(); break } if modified { let mut text = unscanned_text_fragment_info.text.to_string(); text.truncate(last_character_index); for character in trailing_bidi_control_characters_to_retain.iter().rev() { text.push(*character); } unscanned_text_fragment_info.text = text.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, } } pub fn inline_styles(&self) -> InlineStyleIterator { InlineStyleIterator::new(self) } /// Returns the inline-size of this fragment's margin box. pub fn margin_box_inline_size(&self) -> Au { self.border_box.size.inline + self.margin.inline_start_end() } /// Returns true if this node *or any of the nodes within its inline fragment context* have /// non-`static` `position`. pub fn is_positioned(&self) -> bool { if self.style.get_box().position != position::T::static_ { return true } if let Some(ref inline_context) = self.inline_context { for node in inline_context.nodes.iter() { if node.style.get_box().position != position::T::static_ { return true } } } false } /// Returns true if this node is absolutely positioned. pub fn is_absolutely_positioned(&self) -> bool { self.style.get_box().position == position::T::absolute } pub fn is_inline_absolute(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsolute(..) 
=> true, _ => false, } } pub fn meld_with_next_inline_fragment(&mut self, next_fragment: &Fragment) { if let Some(ref mut inline_context_of_this_fragment) = self.inline_context { if let Some(ref inline_context_of_next_fragment) = next_fragment.inline_context { for (inline_context_node_from_this_fragment, inline_context_node_from_next_fragment) in inline_context_of_this_fragment.nodes.iter_mut().rev() .zip(inline_context_of_next_fragment.nodes.iter().rev()) { if !inline_context_node_from_next_fragment.flags.contains( LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node_from_next_fragment.address != inline_context_node_from_this_fragment.address { continue } inline_context_node_from_this_fragment.flags.insert(LAST_FRAGMENT_OF_ELEMENT); } } } } pub fn meld_with_prev_inline_fragment(&mut self, prev_fragment: &Fragment) { if let Some(ref mut inline_context_of_this_fragment) = self.inline_context { if let Some(ref inline_context_of_prev_fragment) = prev_fragment.inline_context { for (inline_context_node_from_prev_fragment, inline_context_node_from_this_fragment) in inline_context_of_prev_fragment.nodes.iter().rev().zip( inline_context_of_this_fragment.nodes.iter_mut().rev()) { if !inline_context_node_from_prev_fragment.flags.contains( FIRST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node_from_prev_fragment.address != inline_context_node_from_this_fragment.address { continue } inline_context_node_from_this_fragment.flags.insert( FIRST_FRAGMENT_OF_ELEMENT); } } } } /// Returns true if any of the inline styles associated with this fragment have /// `vertical-align` set to `top` or `bottom`. pub fn is_vertically_aligned_to_top_or_bottom(&self) -> bool { match self.style.get_box().vertical_align { vertical_align::T::top | vertical_align::T::bottom => return true, _ => {} } if let Some(ref inline_context) = self.inline_context { for node in &inline_context.nodes { match node.style.get_box().vertical_align { vertical_align::T::top | vertical_align::T::bottom => return true, _ => {} } } } false } pub fn is_text_or_replaced(&self) -> bool { match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper => false, SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::UnscannedText(_) => true } } /// Returns the 4D matrix representing this fragment's transform. 
pub fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Option<Transform3D<f32>> { let operations = match self.style.get_box().transform.0 { None => return None, Some(ref operations) => operations, }; let mut transform = Transform3D::identity(); let transform_origin = &self.style.get_box().transform_origin; let transform_origin_x = transform_origin.horizontal .to_used_value(stacking_relative_border_box.size.width) .to_f32_px(); let transform_origin_y = transform_origin.vertical .to_used_value(stacking_relative_border_box.size.height) .to_f32_px(); let transform_origin_z = transform_origin.depth.to_f32_px(); let pre_transform = Transform3D::create_translation(transform_origin_x, transform_origin_y, transform_origin_z); let post_transform = Transform3D::create_translation(-transform_origin_x, -transform_origin_y, -transform_origin_z); for operation in operations { let matrix = match *operation { transform::ComputedOperation::Rotate(ax, ay, az, theta) => { let theta = 2.0f32 * f32::consts::PI - theta.radians(); Transform3D::create_rotation(ax, ay, az, Radians::new(theta)) } transform::ComputedOperation::Perspective(d) => { create_perspective_matrix(d) } transform::ComputedOperation::Scale(sx, sy, sz) => { Transform3D::create_scale(sx, sy, sz) } transform::ComputedOperation::Translate(tx, ty, tz) => { let tx = tx.to_used_value(stacking_relative_border_box.size.width).to_f32_px(); let ty = ty.to_used_value(stacking_relative_border_box.size.height).to_f32_px(); let tz = tz.to_f32_px(); Transform3D::create_translation(tx, ty, tz) } transform::ComputedOperation::Matrix(m) => { m.to_gfx_matrix() } transform::ComputedOperation::MatrixWithPercents(_) => { // `-moz-transform` is not implemented in Servo yet. unreachable!() } transform::ComputedOperation::Skew(theta_x, theta_y) => { Transform3D::create_skew(Radians::new(theta_x.radians()), Radians::new(theta_y.radians())) } transform::ComputedOperation::InterpolateMatrix { .. } | transform::ComputedOperation::AccumulateMatrix { .. } => { // TODO: Convert InterpolateMatrix/AccumulateMatrix into a valid Transform3D using // the reference box. Transform3D::identity() } }; transform = transform.pre_mul(&matrix); } Some(pre_transform.pre_mul(&transform).pre_mul(&post_transform)) } /// Returns the 4D matrix representing this fragment's perspective.
pub fn perspective_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Option<Transform3D<f32>> { match self.style().get_box().perspective { Either::First(length) => { let perspective_origin = self.style().get_box().perspective_origin; let perspective_origin = Point2D::new( perspective_origin.horizontal .to_used_value(stacking_relative_border_box.size.width) .to_f32_px(), perspective_origin.vertical .to_used_value(stacking_relative_border_box.size.height) .to_f32_px()); let pre_transform = Transform3D::create_translation(perspective_origin.x, perspective_origin.y, 0.0); let post_transform = Transform3D::create_translation(-perspective_origin.x, -perspective_origin.y, 0.0); let perspective_matrix = create_perspective_matrix(length); Some(pre_transform.pre_mul(&perspective_matrix).pre_mul(&post_transform)) } Either::Second(values::None_) => { None } } } } impl fmt::Debug for Fragment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let border_padding_string = if !self.border_padding.is_zero() { format!(" border_padding={:?}", self.border_padding) } else { "".to_owned() }; let margin_string = if !self.margin.is_zero() { format!(" margin={:?}", self.margin) } else { "".to_owned() }; let damage_string = if self.restyle_damage != RestyleDamage::empty() { format!(" damage={:?}", self.restyle_damage) } else { "".to_owned() }; write!(f, "{}({}) [{:?}] border_box={:?}{}{}{}", self.specific.get_type(), self.debug_id, self.specific, self.border_box, border_padding_string, margin_string, damage_string) } } bitflags! { flags QuantitiesIncludedInIntrinsicInlineSizes: u8 { const INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS = 0x01, const INTRINSIC_INLINE_SIZE_INCLUDES_PADDING = 0x02, const INTRINSIC_INLINE_SIZE_INCLUDES_BORDER = 0x04, const INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED = 0x08, } } bitflags! { // Various flags we can use when splitting fragments. See // `calculate_split_position_using_breaking_strategy()`. flags SplitOptions: u8 { #[doc = "True if this is the first fragment on the line."] const STARTS_LINE = 0x01, #[doc = "True if we should attempt to split at character boundaries if this split fails. \ This is used to implement `overflow-wrap: break-word`."] const RETRY_AT_CHARACTER_BOUNDARIES = 0x02, } } /// A top-down fragment border box iteration handler. pub trait FragmentBorderBoxIterator { /// The operation to perform. fn process(&mut self, fragment: &Fragment, level: i32, overflow: &Rect<Au>); /// Returns true if this fragment must be processed in-order. If this returns false, /// we skip the operation for this fragment, but continue processing siblings. fn should_process(&mut self, fragment: &Fragment) -> bool; } /// The coordinate system used in `stacking_relative_border_box()`. See the documentation of that /// method for details. #[derive(Clone, PartialEq, Debug)] pub enum CoordinateSystem { /// The border box returned is relative to the fragment's parent stacking context. Parent, /// The border box returned is relative to the fragment's own stacking context, if applicable. 
Own, } pub struct InlineStyleIterator<'a> { fragment: &'a Fragment, inline_style_index: usize, primary_style_yielded: bool, } impl<'a> Iterator for InlineStyleIterator<'a> { type Item = &'a ServoComputedValues; fn next(&mut self) -> Option<&'a ServoComputedValues> { if !self.primary_style_yielded { self.primary_style_yielded = true; return Some(&*self.fragment.style) } let inline_context = match self.fragment.inline_context { None => return None, Some(ref inline_context) => inline_context, }; let inline_style_index = self.inline_style_index; if inline_style_index == inline_context.nodes.len() { return None } self.inline_style_index += 1; Some(&*inline_context.nodes[inline_style_index].style) } } impl<'a> InlineStyleIterator<'a> { fn new(fragment: &Fragment) -> InlineStyleIterator { InlineStyleIterator { fragment: fragment, inline_style_index: 0, primary_style_yielded: false, } } } #[derive(Copy, Clone, Debug, PartialEq)] pub enum WhitespaceStrippingResult { RetainFragment, FragmentContainedOnlyBidiControlCharacters, FragmentContainedOnlyWhitespace, } impl WhitespaceStrippingResult { fn from_unscanned_text_fragment_info(info: &UnscannedTextFragmentInfo) -> WhitespaceStrippingResult { if info.text.is_empty() { WhitespaceStrippingResult::FragmentContainedOnlyWhitespace } else if info.text.chars().all(gfx::text::util::is_bidi_control) { WhitespaceStrippingResult::FragmentContainedOnlyBidiControlCharacters } else { WhitespaceStrippingResult::RetainFragment } } } /// The overflow area. We need two different notions of overflow: paint overflow and scrollable /// overflow. #[derive(Copy, Clone, Debug)] pub struct Overflow { pub scroll: Rect<Au>, pub paint: Rect<Au>, } impl Overflow { pub fn new() -> Overflow { Overflow { scroll: Rect::zero(), paint: Rect::zero(), } } pub fn from_rect(border_box: &Rect<Au>) -> Overflow { Overflow { scroll: *border_box, paint: *border_box, } } pub fn union(&mut self, other: &Overflow) { self.scroll = self.scroll.union(&other.scroll); self.paint = self.paint.union(&other.paint); } pub fn translate(&mut self, by: &Vector2D<Au>) { self.scroll = self.scroll.translate(by); self.paint = self.paint.translate(by); } } bitflags! { pub flags FragmentFlags: u8 { // TODO(stshine): find a better name since these flags can also be used for grid item. /// Whether this fragment represents a child in a row flex container. const IS_INLINE_FLEX_ITEM = 0b0000_0001, /// Whether this fragment represents a child in a column flex container. const IS_BLOCK_FLEX_ITEM = 0b0000_0010, /// Whether this fragment represents the generated text from a text-overflow clip. const IS_ELLIPSIS = 0b0000_0100, } } /// Specified distances from the margin edge of a block to its content in the inline direction. /// These are returned by `guess_inline_content_edge_offsets()` and are used in the float placement /// speculation logic. 
#[derive(Copy, Clone, Debug)] pub struct SpeculatedInlineContentEdgeOffsets { pub start: Au, pub end: Au, } #[cfg(not(debug_assertions))] #[derive(Clone)] struct DebugId; #[cfg(debug_assertions)] #[derive(Clone)] struct DebugId(u16); #[cfg(not(debug_assertions))] impl DebugId { pub fn new() -> DebugId { DebugId } } #[cfg(debug_assertions)] impl DebugId { pub fn new() -> DebugId { DebugId(layout_debug::generate_unique_debug_id()) } } #[cfg(not(debug_assertions))] impl fmt::Display for DebugId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:p}", &self) } } #[cfg(debug_assertions)] impl fmt::Display for DebugId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } #[cfg(not(debug_assertions))] impl Serialize for DebugId { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { serializer.serialize_str(&format!("{:p}", &self)) } } #[cfg(debug_assertions)] impl Serialize for DebugId { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { serializer.serialize_u16(self.0) } } // TODO(gw): The transforms spec says that perspective length must // be positive. However, there is some confusion between the spec // and browser implementations as to handling the case of 0 for the // perspective value. Until the spec bug is resolved, at least ensure // that a provided perspective value of <= 0.0 doesn't cause panics // and behaves as it does in other browsers. // See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details. #[inline] fn create_perspective_matrix(d: Au) -> Transform3D<f32> { let d = d.to_f32_px(); if d <= 0.0 { Transform3D::identity() } else { Transform3D::create_perspective(d) } }<|fim▁end|>
/// Tile an image
pub fn tile_image(position: &mut Au, size: &mut Au,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from logbot.daemonizer import Daemonizer
from logbot.irc_client import IrcClient<|fim▁hole|>from logbot.logger import Logger
from logbot.parser import Parser<|fim▁end|>
<|file_name|>LocalTests.py<|end_file_name|><|fim▁begin|>from Hypotheses import *
from ModelSelection import LinearRegression
from Test import *

sigma = 5  # observation noise sigma

##############################################################################
# Synthetic tests
##############################################################################
from Test import generate_noise_and_fit

hc = HypothesisCollection()
hc.append(PolynomialHypothesis(M=2, variance=3, noiseVariance=sigma**2))
# hc.append(PolynomialHypothesis(M=3, variance=3, noiseVariance=sigma**2))
hc.append(PolynomialHypothesis(M=6, variance=3, noiseVariance=sigma**2))
hc.append(PolynomialHypothesis(M=8, variance=3, noiseVariance=sigma**2))
hc.append(TrigonometricHypothesis(halfM=4, variance=2, noiseVariance=sigma**2))
hc.append(TrigonometricHypothesis(halfM=2, variance=2, noiseVariance=sigma**2))
# hc.append(TrigonometricHypothesis(halfM=10, variance=2, noiseVariance=sigma**2))

lr = LinearRegression(hc, sigma)

# Two tests:
generator = PolynomialHypothesis(M=6, variance=5, noiseVariance=sigma**2)
# generator=TrigonometricHypothesis(halfM=2, variance=4, noiseVariance=sigma**2)

# test_generator(generator)

# Plot generator results
generate_noise_and_fit(lr, generator, xmin=-1.0, xmax=4.0, num=100)

##############################################################################
# Interactive tests
##############################################################################
"""
from Test import select_points_and_fit

hc = HypothesisCollection()
hc.append(PolynomialHypothesis(M=2, variance=3, noiseVariance=sigma**2))
hc.append(PolynomialHypothesis(M=3, variance=3, noiseVariance=sigma**2))
hc.append(TrigonometricHypothesis(halfM=4, variance=2, noiseVariance=sigma**2))

lr = LinearRegression(hc, sigma)
select_points_and_fit(lr, num=10)
"""

##############################################################################
# Old tests
##############################################################################
"""
from Plots import *

hc = HypothesisCollection()
hc.append(PolynomialHypothesis(M=2, variance=3, noiseVariance=0.05))
hc.append(TrigonometricHypothesis(halfM=2, variance=2))

data = np.arange(0, 5)  # Shape is (5,)
pl.plot(data, hc[0].generate(data))<|fim▁hole|>
for x, t in zip(data, hc[2].generate(data)):
    #print ("Updating with (%f, %f)" % (x, t))
    lr.update_old(x, t)

wmap = [param.mean for param in lr.parameter]

### Plot
fig, (ax1, ax2) = pl.subplots(2)
updateMAPFitPlot(ax1, lr.XHist, hc, wmap, 0.05)
pl.draw()
ax1.plot(lr.XHist, lr.THist, 'ro')
pl.draw()
updateProbabilitiesPlot(ax2, lr)
pl.draw()
pl.show()
"""<|fim▁end|>
lr = LinearRegression(hc, sigma)
data = np.arange(0, 5)
<|file_name|>test_blockdown.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Use nose
`$ pip install nose`
`$ nosetests`
"""
from hyde.generator import Generator
from hyde.site import Site

from fswrap import File
from pyquery import PyQuery

TEST_SITE = File(__file__).parent.parent.child_folder('_test')


class TestBlockdown(object):

    def setUp(self):
        TEST_SITE.make()
        TEST_SITE.parent.child_folder(
            'sites/test_jinja').copy_contents_to(TEST_SITE)

    def tearDown(self):
        TEST_SITE.delete()

    def test_can_parse_blockdown(self):
        s = Site(TEST_SITE)
        s.config.plugins = ['hyde.ext.plugins.text.BlockdownPlugin']
        txt = ("This template tests to make sure blocks can be replaced "
               "with markdownish syntax.")
        templ = """
{%% extends "base.html" %%}
=====title========
%s
====/title========"""
        content = (templ.strip() % txt).strip()
        bd = File(TEST_SITE.child('content/blockdown.html'))
        bd.write(content)<|fim▁hole|>
        target = File(
            s.config.deploy_root_path.child(res.relative_deploy_path))
        assert target.exists
        text = target.read_all()
        q = PyQuery(text)
        assert q('title').text().strip() == txt.strip()<|fim▁end|>
gen = Generator(s)
gen.generate_resource_at_path(bd.path)
res = s.content.resource_from_path(bd.path)
<|file_name|>condexpr.rs<|end_file_name|><|fim▁begin|>/*
 * Copyright (c) 2017 Christoph Heiss
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

use std::env;
use std::path::Path;

use state::State;
use parser::{ExecResult, ToExecResult};

/*
 * https://www.gnu.org/software/bash/manual/html_node/Bash-Conditional-Expressions.html
 */
pub fn exec(state: &mut State, args: &[String]) -> ExecResult {
    if args.len() == 1 {
        return ExecResult::failure();
    }<|fim▁hole|>
    match args[0].as_ref() {
        "-a" => Path::new(&args[1]).exists().to_exec_result(),
        "-d" => Path::new(&args[1]).is_dir().to_exec_result(),
        "-f" => Path::new(&args[1]).is_file().to_exec_result(),
        "-h" => {
            match Path::new(&args[1]).symlink_metadata() {
                Ok(metadata) => metadata.file_type().is_symlink().to_exec_result(),
                Err(_) => ExecResult::failure(),
            }
        },
        "-n" => (!args[1].is_empty()).to_exec_result(),
        "-v" => {
            match state.var(&args[1]) {
                Some(var) => (!var.value.is_empty()).to_exec_result(),
                None => match env::var(&args[1]) {
                    Ok(var) => (!var.is_empty()).to_exec_result(),
                    Err(_) => ExecResult::failure(),
                },
            }
        },
        "-R" => {
            match state.var(&args[1]) {
                Some(var) => (!var.value.is_empty() && var.reference).to_exec_result(),
                None => ExecResult::failure(),
            }
        },
        "-z" => args[1].is_empty().to_exec_result(),
        _ => ExecResult::with_code(2),
    }
}<|fim▁end|>
let args = &args[..args.len()-1];
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import *

<|fim▁hole|>
    url(r'^$', view='browse', name='foo.browse'),

    # Detail URL
    url(r'^(?P<slug>(?!overview\-)[\w\-\_\.\,]+)/$', view='detail', name='foo.detail'),
)<|fim▁end|>
urlpatterns = patterns('foo.views',
    # Listing URL
<|file_name|>objects.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-


def get_instance_children(obj, depth=0, sig=0):
    """
    Recursively retrieves the child relations of an object
    @depth: integer limiting the search depth for children, 0=unlimited
    """<|fim▁hole|>
        # Name of the accessor attribute
        cname = child.get_accessor_name()
        verbose_name = child.model._meta.verbose_name
        # Fetch all the objects of the relations
        for elem in getattr(obj, cname).all():
            followed = []
            # Recursive search for children
            if depth == 0 or sig < depth:
                followed = get_instance_children(elem, depth=depth, sig=sig+1)
            children.append(
                (verbose_name, unicode(elem), followed)
            )
    return children<|fim▁end|>
children = []
# For every child relation of the object
for child in obj._meta.get_all_related_objects():
<|file_name|>borrowck-loan-blocks-move-cc.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![feature(box_syntax)]

use std::thread::Thread;

fn borrow<F>(v: &isize, f: F) where F: FnOnce(&isize) {
    f(v);
}

fn box_imm() {
    let v = box 3is;
    let _w = &v;
    Thread::spawn(move|| {
        println!("v={}", *v);
        //~^ ERROR cannot move `v` into closure
    });
}

fn box_imm_explicit() {
    let v = box 3is;
    let _w = &v;
    Thread::spawn(move|| {<|fim▁hole|>
        println!("v={}", *v);
        //~^ ERROR cannot move
    });
}

fn main() { }<|fim▁end|>
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import platform
import shutil
import sys
import os

from spack import *


class Namd(MakefilePackage):
    """NAMD is a parallel molecular dynamics code designed for
    high-performance simulation of large biomolecular systems."""

    homepage = "http://www.ks.uiuc.edu/Research/namd/"
    url = "file://{0}/NAMD_2.12_Source.tar.gz".format(os.getcwd())

    version('2.12', '2a1191909b1ab03bf0205971ad4d8ee9')

    variant('fftw', default='3', values=('none', '2', '3', 'mkl'),
            description='Enable the use of FFTW/FFTW3/MKL FFT')

    variant('interface', default='none', values=('none', 'tcl', 'python'),
            description='Enables TCL and/or python interface')

    depends_on('charm')

    depends_on('fftw@:2.99', when="fftw=2")
    depends_on('fftw@3:', when="fftw=3")

    depends_on('intel-mkl', when="fftw=mkl")

    depends_on('tcl', when='interface=tcl')

    depends_on('tcl', when='interface=python')
    depends_on('python', when='interface=python')

    def _copy_arch_file(self, lib):
        config_filename = 'arch/{0}.{1}'.format(self.arch, lib)
        shutil.copy('arch/Linux-x86_64.{0}'.format(lib),
                    config_filename)
        if lib == 'tcl':
            filter_file(r'-ltcl8\.5',
                        '-ltcl{0}'.format(self.spec['tcl'].version.up_to(2)),
                        config_filename)

    def _append_option(self, opts, lib):
        if lib != 'python':
            self._copy_arch_file(lib)
        spec = self.spec
        opts.extend([
            '--with-{0}'.format(lib),
            '--{0}-prefix'.format(lib), spec[lib].prefix
        ])

    @property
    def arch(self):
        plat = sys.platform
        if plat.startswith("linux"):
            plat = "linux"
        march = platform.machine()
        return '{0}-{1}'.format(plat, march)

    @property
    def build_directory(self):
        return '{0}-spack'.format(self.arch)

    def edit(self, spec, prefix):
        with working_dir('arch'):
            with open('{0}.arch'.format(self.build_directory), 'w') as fh:
                # these options are taken from the default provided
                # configuration files
                optims_opts = {
                    'gcc': '-m64 -O3 -fexpensive-optimizations -ffast-math',
                    'intel': '-O2 -ip'
                }

                optim_opts = optims_opts[self.compiler.name] \
                    if self.compiler.name in optims_opts else ''

                fh.write('\n'.join([
                    'NAMD_ARCH = {0}'.format(self.arch),
                    'CHARMARCH = ',
                    'CXX = {0.cxx} {0.cxx11_flag}'.format(
                        self.compiler),
                    'CXXOPTS = {0}'.format(optim_opts),
                    'CC = {0}'.format(self.compiler.cc),
                    'COPTS = {0}'.format(optim_opts),
                    ''
                ]))

        self._copy_arch_file('base')

        opts = ['--charm-base',
                spec['charm'].prefix]

        fftw_version = spec.variants['fftw'].value
        if fftw_version == 'none':
            opts.append('--without-fftw')
        elif fftw_version == 'mkl':
            self._append_option(opts, 'mkl')
        else:
            _fftw = 'fftw{0}'.format('' if fftw_version == '2' else '3')

            self._copy_arch_file(_fftw)
            opts.extend(['--with-{0}'.format(_fftw),
                         '--fftw-prefix', spec['fftw'].prefix])

        interface_type = spec.variants['interface'].value
        if interface_type != 'none':<|fim▁hole|>
            self._append_option(opts, 'tcl')

            if interface_type == 'python':
                self._append_option(opts, 'python')
        else:
            opts.extend([
                '--without-tcl', '--without-python'
            ])

        config = Executable('./config')
        config(self.build_directory, *opts)

    def install(self, spec, prefix):
        with working_dir(self.build_directory):
            mkdirp(prefix.bin)
            install('namd2', prefix.bin)
            # I'm not sure this is a good idea or if an autoload of the charm
            # module would not be better.
            install('charmrun', prefix.bin)<|fim▁end|>
<|file_name|>test_settings.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import logging

# location where the per-instance configuration files are loaded from
INSTANCES_DIR = '/etc/jormungandr.d'

# Start the thread at startup, True in production, False for test environments
START_MONITORING_THREAD = False<|fim▁hole|>
SQLALCHEMY_DATABASE_URI = 'postgresql://navitia:navitia@localhost/jormun_test'

# disables authentication
PUBLIC = True

REDIS_HOST = 'localhost'
REDIS_PORT = 6379
# index of the redis database to use, an integer from 0 to 15 by default
REDIS_DB = 0
REDIS_PASSWORD = None

# Disables use of the cache, and therefore of redis
CACHE_DISABLED = False

# lifetime of the authentication info in the cache, in seconds
AUTH_CACHE_TTL = 300

ERROR_HANDLER_FILE = 'jormungandr.log'
ERROR_HANDLER_TYPE = 'rotating'  # can be timedrotating
ERROR_HANDLER_PARAMS = {'maxBytes': 20000000, 'backupCount': 5}
LOG_LEVEL = logging.DEBUG<|fim▁end|>
# postgresql connection string for the jormungandr database
<|file_name|>glutin.rs<|end_file_name|><|fim▁begin|>//! Window creation using glutin for gfx. //! //! # Examples //! //! The following code creates a `gfx::Surface` using glutin. //! //! ```no_run //! extern crate glutin; //! extern crate gfx_backend_gl; //! //! fn main() { //! use gfx_backend_gl::Surface; //! use glutin::{EventsLoop, WindowBuilder, ContextBuilder, GlWindow}; //! //! // First create a window using glutin. //! let mut events_loop = EventsLoop::new(); //! let wb = WindowBuilder::new(); //! let cb = ContextBuilder::new().with_vsync(true); //! let glutin_window = GlWindow::new(wb, cb, &events_loop).unwrap(); //! //! // Then use the glutin window to create a gfx surface. //! let surface = Surface::from_window(glutin_window); //! } //! ``` //! //! Headless initialization without a window. //! //! ```no_run //! extern crate glutin; //! extern crate gfx_backend_gl; //! extern crate gfx_hal; //! //! use gfx_hal::Instance; //! use gfx_backend_gl::Headless; //! use glutin::{HeadlessRendererBuilder}; //! //! fn main() { //! let context = HeadlessRendererBuilder::new(256, 256) //! .build() //! .expect("Failed to build headless context"); //! let headless = Headless(context); //! let _adapters = headless.enumerate_adapters(); //! } //! ``` use hal::{self, format as f, image}; use {native as n, Backend as B, PhysicalDevice, QueueFamily}; use glutin::{self, GlContext}; use std::rc::Rc; fn get_window_dimensions(window: &glutin::GlWindow) -> image::Dimensions { let (width, height) = window.get_inner_size().unwrap(); let aa = window.get_pixel_format().multisampling .unwrap_or(0) as image::NumSamples; ((width as f32 * window.hidpi_factor()) as image::Size, (height as f32 * window.hidpi_factor()) as image::Size, 1, aa.into()) } pub struct Swapchain { // Underlying window, required for presentation window: Rc<glutin::GlWindow>, } impl hal::Swapchain<B> for Swapchain { fn acquire_frame(&mut self, _sync: hal::FrameSync<B>) -> hal::Frame { // TODO: sync hal::Frame::new(0) } fn present<C>(&mut self, _: &mut hal::CommandQueue<B, C>, _: &[&n::Semaphore]) { self.window.swap_buffers().unwrap(); } } //TODO: if we make `Surface` a `WindowBuilder` instead of `GlWindow`, // we could spawn window + GL context when a swapchain is requested // and actually respect the swapchain configuration provided by the user. 
pub struct Surface { window: Rc<glutin::GlWindow>, } impl Surface { pub fn from_window(window: glutin::GlWindow) -> Self { Surface { window: Rc::new(window) } } fn swapchain_formats(&self) -> Vec<f::Format> { use hal::format::ChannelType::*; use hal::format::SurfaceType::*; let pixel_format = self.window.get_pixel_format(); let color_bits = pixel_format.color_bits; let alpha_bits = pixel_format.alpha_bits; let srgb = pixel_format.srgb; // TODO: expose more formats match (color_bits, alpha_bits, srgb) { (24, 8, true) => vec![ f::Format(R8_G8_B8_A8, Srgb), f::Format(B8_G8_R8_A8, Srgb), ], (24, 8, false) => vec![ f::Format(R8_G8_B8_A8, Unorm), f::Format(B8_G8_R8_A8, Unorm), ], _ => vec![], } } }<|fim▁hole|> hal::image::Kind::D2(w, h, a) } fn capabilities_and_formats(&self, _: &PhysicalDevice) -> (hal::SurfaceCapabilities, Vec<f::Format>) { let _formats = self.swapchain_formats(); unimplemented!() } fn supports_queue_family(&self, _: &QueueFamily) -> bool { true } fn build_swapchain<C>( &mut self, _config: hal::SwapchainConfig, _: &hal::CommandQueue<B, C>, ) -> (Swapchain, hal::Backbuffer<B>) { let swapchain = Swapchain { window: self.window.clone(), }; let backbuffer = hal::Backbuffer::Framebuffer(0); (swapchain, backbuffer) } } impl hal::Instance for Surface { type Backend = B; fn enumerate_adapters(&self) -> Vec<hal::Adapter<B>> { unsafe { self.window.make_current().unwrap() }; let adapter = PhysicalDevice::new_adapter(|s| self.window.get_proc_address(s) as *const _); vec![adapter] } } pub fn config_context( builder: glutin::ContextBuilder, color_format: f::Format, ds_format: Option<f::Format>, ) -> glutin::ContextBuilder { let color_bits = color_format.0.describe_bits(); let depth_bits = match ds_format { Some(fm) => fm.0.describe_bits(), None => f::BITS_ZERO, }; builder .with_depth_buffer(depth_bits.depth) .with_stencil_buffer(depth_bits.stencil) .with_pixel_format(color_bits.color, color_bits.alpha) .with_srgb(color_format.1 == f::ChannelType::Srgb) } pub struct Headless(pub glutin::HeadlessContext); impl hal::Instance for Headless { type Backend = B; fn enumerate_adapters(&self) -> Vec<hal::Adapter<B>> { unsafe { self.0.make_current().unwrap() }; let adapter = PhysicalDevice::new_adapter(|s| self.0.get_proc_address(s) as *const _); vec![adapter] } }<|fim▁end|>
impl hal::Surface<B> for Surface { fn get_kind(&self) -> hal::image::Kind { let (w, h, _, a) = get_window_dimensions(&self.window);
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from collections import OrderedDict

from nicepy.utils import ljust_all, pretty_repr


def get_failed_msg(compare_method, values, expected_values, names=None, expected_names=None):
    failed_list = []
    names = names or map(str, range(len(values)))
    expected_names = expected_names or [''] * len(names)
    for value, expected_value, name, expected_name in zip(values, expected_values, names, expected_names):
        #print value, expected_value, name, expected_name
        if not compare_method(expected_value, value):
            failed_list.append((pretty_repr(value), pretty_repr(expected_value), name, expected_name))
    return _get_failed_msg(failed_list)


def _get_failed_msg(failed_list):<|fim▁hole|>
    for value_repr, expected_value_repr, name, expected_name in sorted(failed_list):
        msg += '\n\t%s' % name
        if expected_name:
            msg += ' != %s' % expected_name
        msg += ': %s != %s' % (value_repr, expected_value_repr)
    return msg


def get_multi_failed_msg(assert_method, *lists):
    failed_msgs = OrderedDict()
    for index, args in enumerate(zip(*lists)):
        try:
            assert_method(*args)
        except AssertionError as e:
            failed_msgs[index] = e.message
    msg = None
    if failed_msgs:
        msg = 'Multi-assert failed:'
        for index, error_msg in sorted(failed_msgs.iteritems()):
            msg += '\nIndex %d: %s' % (index, error_msg)
    return msg<|fim▁end|>
if not failed_list:
    return None
msg = 'actual values != expected values:'
failed_list = zip(*map(ljust_all, zip(*failed_list)))
<|file_name|>pygeolib.py<|end_file_name|><|fim▁begin|>import sys import collections class GeocoderResult(collections.Iterator): """ A geocoder resultset to iterate through address results. Exemple: results = Geocoder.geocode('paris, us') for result in results: print(result.formatted_address, result.location) Provide shortcut to ease field retrieval, looking at 'types' in each 'address_components'. Example: result.country result.postal_code You can also choose a different property to display for each lookup type. Example: result.country__short_name By default, use 'long_name' property of lookup type, so: result.country and: result.country__long_name are equivalent. """ attribute_mapping = { "state": "administrative_area_level_1", "province": "administrative_area_level_1", "city": "locality", "county": "administrative_area_level_2", } def __init__(self, data): """ Creates instance of GeocoderResult from the provided JSON data array """ self.data = data self.len = len(self.data) self.current_index = 0 self.current_data = self.data[0] def __len__(self): return self.len def __iter__(self): return self def return_next(self): if self.current_index >= self.len: raise StopIteration self.current_data = self.data[self.current_index] self.current_index += 1 return self def __getitem__(self, key): """ Accessing GeocoderResult by index will return a GeocoderResult with just one data entry """ return GeocoderResult([self.data[key]]) def __unicode__(self): return self.formatted_address if sys.version_info[0] >= 3: # Python 3 def __str__(self): return self.__unicode__() def __next__(self): return self.return_next() else: # Python 2 def __str__(self): return self.__unicode__().encode('utf8') def next(self): return self.return_next() @property def count(self): return self.len @property def coordinates(self):<|fim▁hole|> return location['lat'], location['lng'] @property def latitude(self): return self.coordinates[0] @property def longitude(self): return self.coordinates[1] @property def raw(self): """ Returns the full result set in dictionary format """ return self.data @property def valid_address(self): """ Returns true if queried address is valid street address """ return self.current_data['types'] == ['street_address'] @property def formatted_address(self): return self.current_data['formatted_address'] def __getattr__(self, name): lookup = name.split('__') attribute = lookup[0] if (attribute in GeocoderResult.attribute_mapping): attribute = GeocoderResult.attribute_mapping[attribute] try: prop = lookup[1] except IndexError: prop = 'long_name' for elem in self.current_data['address_components']: if attribute in elem['types']: return elem[prop] class GeocoderError(Exception): """Base class for errors in the :mod:`pygeocoder` module. Methods of the :class:`Geocoder` raise this when something goes wrong. """ #: See http://code.google.com/apis/maps/documentation/geocoding/index.html#StatusCodes #: for information on the meaning of these status codes. G_GEO_OK = "OK" G_GEO_ZERO_RESULTS = "ZERO_RESULTS" G_GEO_OVER_QUERY_LIMIT = "OVER_QUERY_LIMIT" G_GEO_REQUEST_DENIED = "REQUEST_DENIED" G_GEO_MISSING_QUERY = "INVALID_REQUEST" def __init__(self, status, url=None, response=None): """Create an exception with a status and optional full response. :param status: Either a ``G_GEO_`` code or a string explaining the exception. :type status: int or string :param url: The query URL that resulted in the error, if any. :type url: string :param response: The actual response returned from Google, if any. 
:type response: dict """ Exception.__init__(self, status) # Exception is an old-school class self.status = status self.url = url self.response = response def __str__(self): """Return a string representation of this :exc:`GeocoderError`.""" return 'Error %s\nQuery: %s' % (self.status, self.url) def __unicode__(self): """Return a unicode representation of this :exc:`GeocoderError`.""" return unicode(self.__str__())<|fim▁end|>
""" Return a (latitude, longitude) coordinate pair of the current result """ location = self.current_data['geometry']['location']
<|file_name|>Log.java<|end_file_name|><|fim▁begin|>package de.vsis.groomba.communication; public class Log { /** * @author Hannes * * Helps to create different log levels for debugging purposes. * */ public final static int OFF = 0; public final static int ERROR = 1; public final static int INFO = 2; public final static int VERBOSE = 3; public final static int DEBUG = 4; private String _classname = ""; private boolean _verbose = false; private boolean _error = false; private boolean _info = false; private boolean _debug = false; public Log() { // TODO Auto-generated constructor stub setVerbose(); } public Log(int level) { set(level); } public Log(String classname) { _classname = classname; } public void debug(String msg){ if(_debug) { printLogMsg("debug", msg); } } public void log(String msg){ if (_verbose){ printLogMsg("verbose", msg); } } public void error(String msg){ if (_error){ printLogMsg("error", msg); } } public void info(String msg){ if (_info){ printLogMsg("info", msg); } } /** combine and print to console slave * * * @param kind * @param msg */ private void printLogMsg(String kind, String msg){ String fullmsg = ""; if (!_classname.equals("")){ fullmsg = "["+kind+" from "+_classname+"] --- " + msg; } else {<|fim▁hole|> fullmsg = "["+kind+"] --- " + msg; } System.out.println(fullmsg); } public void enableAll() { _verbose = true; } public void dissableAll() { _verbose = false; } public void setVerbose(){ _verbose = true; _error = true; _info = true; _debug = true; } public void set(int level){ //0 off, 1 error, 2 info, 3 verbose, 4 Debug(all) switch(level){ case 0: _verbose = false; _error = false; _info = false; _debug = false; break; case 1: _error = true; _verbose = false; _info = false; _debug = false; break; case 2: _error = true; _info = true; _debug = false; _verbose = false; break; case 3: _verbose = true; _error = true; _info = true; _debug = false; break; case 4: setVerbose(); default: _verbose = true; _error = true; _info = true; _debug = true; break; } } }<|fim▁end|>
<|file_name|>product_channel_exclusivity.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/ads/googleads/v0/enums/product_channel_exclusivity.proto package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v0/enums" import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package // Enum describing the availability of a product offer. type ProductChannelExclusivityEnum_ProductChannelExclusivity int32 const ( // Not specified. ProductChannelExclusivityEnum_UNSPECIFIED ProductChannelExclusivityEnum_ProductChannelExclusivity = 0 // Used for return value only. Represents value unknown in this version. ProductChannelExclusivityEnum_UNKNOWN ProductChannelExclusivityEnum_ProductChannelExclusivity = 1 // The item is sold through one channel only, either local stores or online // as indicated by its ProductChannel. ProductChannelExclusivityEnum_SINGLE_CHANNEL ProductChannelExclusivityEnum_ProductChannelExclusivity = 2 // The item is matched to its online or local stores counterpart, indicating // it is available for purchase in both ShoppingProductChannels. ProductChannelExclusivityEnum_MULTI_CHANNEL ProductChannelExclusivityEnum_ProductChannelExclusivity = 3 ) var ProductChannelExclusivityEnum_ProductChannelExclusivity_name = map[int32]string{ 0: "UNSPECIFIED", 1: "UNKNOWN", 2: "SINGLE_CHANNEL", 3: "MULTI_CHANNEL", } var ProductChannelExclusivityEnum_ProductChannelExclusivity_value = map[string]int32{ "UNSPECIFIED": 0, "UNKNOWN": 1, "SINGLE_CHANNEL": 2, "MULTI_CHANNEL": 3, } func (x ProductChannelExclusivityEnum_ProductChannelExclusivity) String() string { return proto.EnumName(ProductChannelExclusivityEnum_ProductChannelExclusivity_name, int32(x)) } func (ProductChannelExclusivityEnum_ProductChannelExclusivity) EnumDescriptor() ([]byte, []int) { return fileDescriptor_product_channel_exclusivity_e3a61808f67db1de, []int{0, 0} } // Availability of a product offer. 
type ProductChannelExclusivityEnum struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *ProductChannelExclusivityEnum) Reset() { *m = ProductChannelExclusivityEnum{} } func (m *ProductChannelExclusivityEnum) String() string { return proto.CompactTextString(m) } func (*ProductChannelExclusivityEnum) ProtoMessage() {} func (*ProductChannelExclusivityEnum) Descriptor() ([]byte, []int) { return fileDescriptor_product_channel_exclusivity_e3a61808f67db1de, []int{0} } func (m *ProductChannelExclusivityEnum) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ProductChannelExclusivityEnum.Unmarshal(m, b) } func (m *ProductChannelExclusivityEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_ProductChannelExclusivityEnum.Marshal(b, m, deterministic) } func (dst *ProductChannelExclusivityEnum) XXX_Merge(src proto.Message) { xxx_messageInfo_ProductChannelExclusivityEnum.Merge(dst, src) }<|fim▁hole|>func (m *ProductChannelExclusivityEnum) XXX_DiscardUnknown() { xxx_messageInfo_ProductChannelExclusivityEnum.DiscardUnknown(m) } var xxx_messageInfo_ProductChannelExclusivityEnum proto.InternalMessageInfo func init() { proto.RegisterType((*ProductChannelExclusivityEnum)(nil), "google.ads.googleads.v0.enums.ProductChannelExclusivityEnum") proto.RegisterEnum("google.ads.googleads.v0.enums.ProductChannelExclusivityEnum_ProductChannelExclusivity", ProductChannelExclusivityEnum_ProductChannelExclusivity_name, ProductChannelExclusivityEnum_ProductChannelExclusivity_value) } func init() { proto.RegisterFile("google/ads/googleads/v0/enums/product_channel_exclusivity.proto", fileDescriptor_product_channel_exclusivity_e3a61808f67db1de) } var fileDescriptor_product_channel_exclusivity_e3a61808f67db1de = []byte{ // 308 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4f, 0xcf, 0xcf, 0x4f, 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x03, 0xfd, 0xd4, 0xbc, 0xd2, 0xdc, 0x62, 0xfd, 0x82, 0xa2, 0xfc, 0x94, 0xd2, 0xe4, 0x92, 0xf8, 0xe4, 0x8c, 0xc4, 0xbc, 0xbc, 0xd4, 0x9c, 0xf8, 0xd4, 0x8a, 0xe4, 0x9c, 0xd2, 0xe2, 0xcc, 0xb2, 0xcc, 0x92, 0x4a, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x59, 0x88, 0x2e, 0xbd, 0xc4, 0x94, 0x62, 0x3d, 0xb8, 0x01, 0x7a, 0x65, 0x06, 0x7a, 0x60, 0x03, 0x94, 0x1a, 0x19, 0xb9, 0x64, 0x03, 0x20, 0x86, 0x38, 0x43, 0xcc, 0x70, 0x45, 0x18, 0xe1, 0x9a, 0x57, 0x9a, 0xab, 0x94, 0xc0, 0x25, 0x89, 0x53, 0x81, 0x10, 0x3f, 0x17, 0x77, 0xa8, 0x5f, 0x70, 0x80, 0xab, 0xb3, 0xa7, 0x9b, 0xa7, 0xab, 0x8b, 0x00, 0x83, 0x10, 0x37, 0x17, 0x7b, 0xa8, 0x9f, 0xb7, 0x9f, 0x7f, 0xb8, 0x9f, 0x00, 0xa3, 0x90, 0x10, 0x17, 0x5f, 0xb0, 0xa7, 0x9f, 0xbb, 0x8f, 0x6b, 0xbc, 0xb3, 0x87, 0xa3, 0x9f, 0x9f, 0xab, 0x8f, 0x00, 0x93, 0x90, 0x20, 0x17, 0xaf, 0x6f, 0xa8, 0x4f, 0x88, 0x27, 0x5c, 0x88, 0xd9, 0xe9, 0x33, 0x23, 0x97, 0x62, 0x72, 0x7e, 0xae, 0x1e, 0x5e, 0x97, 0x3a, 0xc9, 0xe1, 0x74, 0x45, 0x00, 0xc8, 0xa3, 0x01, 0x8c, 0x51, 0x4e, 0x50, 0x03, 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0xc1, 0x00, 0x0b, 0xbb, 0x82, 0xcc, 0x62, 0x1c, 0x41, 0x69, 0x0d, 0x26, 0x17, 0x31, 0x31, 0xbb, 0x3b, 0x3a, 0xae, 0x62, 0x92, 0x75, 0x87, 0x18, 0xe5, 0x98, 0x52, 0xac, 0x07, 0x61, 0x82, 0x58, 0x61, 0x06, 0x7a, 0xa0, 0x20, 0x29, 0x3e, 0x05, 0x93, 0x8f, 0x71, 0x4c, 0x29, 0x8e, 0x81, 0xcb, 0xc7, 0x84, 0x19, 0xc4, 0x80, 0xe5, 0x5f, 0x31, 0x29, 0x42, 0x04, 0xad, 0xac, 0x1c, 0x53, 0x8a, 0xad, 0xac, 0xe0, 
0x2a, 0xac, 0xac, 0xc2, 0x0c, 0xac, 0xac, 0xc0, 0x6a, 0x92, 0xd8, 0xc0, 0x0e, 0x33, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x37, 0xa6, 0x32, 0xb6, 0xe2, 0x01, 0x00, 0x00, }<|fim▁end|>
func (m *ProductChannelExclusivityEnum) XXX_Size() int {
	return xxx_messageInfo_ProductChannelExclusivityEnum.Size(m)
}
<|file_name|>px_mkfw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ############################################################################ # # Copyright (C) 2012, 2013 PX4 Development Team. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # 3. Neither the name PX4 nor the names of its contributors may be # used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS # OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED # AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ############################################################################ # # PX4 firmware image generator # # The PX4 firmware file is a JSON-encoded Python object, containing # metadata fields and a zlib-compressed base64-encoded firmware image. 
# import sys import argparse import json import base64 import zlib import time import subprocess # # Construct a basic firmware description # def mkdesc(): proto = {} proto['magic'] = "PX4FWv1" proto['board_id'] = 0 proto['board_revision'] = 0 proto['version'] = "" proto['summary'] = "" proto['description'] = "" proto['git_identity'] = "" proto['build_time'] = 0 proto['image'] = bytes() proto['image_size'] = 0 return proto # Parse commandline parser = argparse.ArgumentParser(description="Firmware generator for the PX autopilot system.") parser.add_argument("--prototype", action="store", help="read a prototype description from a file") parser.add_argument("--board_id", action="store", help="set the board ID required") parser.add_argument("--board_revision", action="store", help="set the board revision required") parser.add_argument("--version", action="store", help="set a version string") parser.add_argument("--summary", action="store", help="set a brief description") parser.add_argument("--description", action="store", help="set a longer description") parser.add_argument("--git_identity", action="store", help="the working directory to check for git identity") parser.add_argument("--parameter_xml", action="store", help="the parameters.xml file") parser.add_argument("--airframe_xml", action="store", help="the airframes.xml file") parser.add_argument("--image", action="store", help="the firmware image") args = parser.parse_args() # Fetch the firmware descriptor prototype if specified if args.prototype != None: f = open(args.prototype,"r") desc = json.load(f) f.close() else: desc = mkdesc() desc['build_time'] = int(time.time()) if args.board_id != None: desc['board_id'] = int(args.board_id) if args.board_revision != None: desc['board_revision'] = int(args.board_revision) if args.version != None: desc['version'] = str(args.version) if args.summary != None: desc['summary'] = str(args.summary) if args.description != None: desc['description'] = str(args.description) if args.git_identity != None: cmd = " ".join(["git", "--git-dir", args.git_identity + "/.git", "describe", "--always", "--dirty"]) p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout desc['git_identity'] = str(p.read().strip()) p.close() if args.parameter_xml != None: f = open(args.parameter_xml, "rb") bytes = f.read() desc['parameter_xml_size'] = len(bytes) desc['parameter_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8') if args.airframe_xml != None: f = open(args.airframe_xml, "rb") bytes = f.read() desc['airframe_xml_size'] = len(bytes) desc['airframe_xml'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8') if args.image != None: f = open(args.image, "rb")<|fim▁hole|> desc['image'] = base64.b64encode(zlib.compress(bytes,9)).decode('utf-8') print(json.dumps(desc, indent=4))<|fim▁end|>
bytes = f.read()
desc['image_size'] = len(bytes)
<|file_name|>WebPluginContainerTest.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2012 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "public/web/WebPluginContainer.h" #include "core/dom/Element.h" #include "core/events/KeyboardEvent.h" #include "core/frame/EventHandlerRegistry.h" #include "core/frame/FrameHost.h" #include "core/layout/LayoutObject.h" #include "core/page/Page.h" #include "platform/PlatformEvent.h" #include "platform/PlatformKeyboardEvent.h" #include "platform/graphics/GraphicsContext.h" #include "platform/graphics/paint/CullRect.h" #include "platform/graphics/paint/ForeignLayerDisplayItem.h" #include "platform/graphics/paint/PaintController.h" #include "platform/testing/URLTestHelpers.h" #include "platform/testing/UnitTestHelpers.h" #include "public/platform/Platform.h" #include "public/platform/WebClipboard.h" #include "public/platform/WebCompositorSupport.h" #include "public/platform/WebLayer.h" #include "public/platform/WebThread.h" #include "public/platform/WebURLLoaderMockFactory.h" #include "public/web/WebCache.h" #include "public/web/WebDocument.h" #include "public/web/WebElement.h" #include "public/web/WebFrame.h" #include "public/web/WebFrameClient.h" #include "public/web/WebPluginParams.h" #include "public/web/WebPrintParams.h" #include "public/web/WebSettings.h" #include "public/web/WebView.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/skia/include/core/SkPictureRecorder.h" #include "web/WebLocalFrameImpl.h" #include "web/WebPluginContainerImpl.h" #include "web/WebViewImpl.h" #include "web/tests/FakeWebPlugin.h" #include "web/tests/FrameTestHelpers.h" using blink::testing::runPendingTasks; namespace blink { class WebPluginContainerTest : public ::testing::Test { public: WebPluginContainerTest() : m_baseURL("http://www.test.com/") { } void TearDown() override { Platform::current()->getURLLoaderMockFactory()->unregisterAllURLs(); WebCache::clear(); } void calculateGeometry(WebPluginContainerImpl* pluginContainerImpl, IntRect& windowRect, IntRect& clipRect, IntRect& unobscuredRect, Vector<IntRect>& 
cutOutRects) { pluginContainerImpl->calculateGeometry(windowRect, clipRect, unobscuredRect, cutOutRects); } protected: std::string m_baseURL; }; namespace { template <typename T> class CustomPluginWebFrameClient : public FrameTestHelpers::TestWebFrameClient { public: WebPlugin* createPlugin(WebLocalFrame* frame, const WebPluginParams& params) override { return new T(frame, params); } }; class TestPluginWebFrameClient; // Subclass of FakeWebPlugin that has a selection of 'x' as plain text and 'y' as markup text. class TestPlugin : public FakeWebPlugin { public: TestPlugin(WebFrame* frame, const WebPluginParams& params, TestPluginWebFrameClient* testClient) : FakeWebPlugin(frame, params) { m_testClient = testClient; } bool hasSelection() const override { return true; } WebString selectionAsText() const override { return WebString("x"); } WebString selectionAsMarkup() const override { return WebString("y"); } bool supportsPaginatedPrint() override { return true; } int printBegin(const WebPrintParams& printParams) override { return 1; } void printPage(int pageNumber, WebCanvas*) override; private: TestPluginWebFrameClient* m_testClient; }; class TestPluginWebFrameClient : public FrameTestHelpers::TestWebFrameClient { WebPlugin* createPlugin(WebLocalFrame* frame, const WebPluginParams& params) override { if (params.mimeType == "application/x-webkit-test-webplugin" || params.mimeType == "application/pdf") return new TestPlugin(frame, params, this); return WebFrameClient::createPlugin(frame, params); } public: void onPrintPage() { m_printedPage = true; } bool printedAtLeastOnePage() { return m_printedPage; } private: bool m_printedPage = false; }; void TestPlugin::printPage(int pageNumber, WebCanvas* canvas) { DCHECK(m_testClient); m_testClient->onPrintPage(); } WebPluginContainer* getWebPluginContainer(WebView* webView, const WebString& id) { WebElement element = webView->mainFrame()->document().getElementById(id); return element.pluginContainer(); } } // namespace TEST_F(WebPluginContainerTest, WindowToLocalPointTest) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. 
FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebPluginContainer* pluginContainerOne = getWebPluginContainer(webView, WebString::fromUTF8("translated-plugin")); DCHECK(pluginContainerOne); WebPoint point1 = pluginContainerOne->rootFrameToLocalPoint(WebPoint(10, 10)); ASSERT_EQ(0, point1.x); ASSERT_EQ(0, point1.y); WebPoint point2 = pluginContainerOne->rootFrameToLocalPoint(WebPoint(100, 100)); ASSERT_EQ(90, point2.x); ASSERT_EQ(90, point2.y); WebPluginContainer* pluginContainerTwo = getWebPluginContainer(webView, WebString::fromUTF8("rotated-plugin")); DCHECK(pluginContainerTwo); WebPoint point3 = pluginContainerTwo->rootFrameToLocalPoint(WebPoint(0, 10)); ASSERT_EQ(10, point3.x); ASSERT_EQ(0, point3.y); WebPoint point4 = pluginContainerTwo->rootFrameToLocalPoint(WebPoint(-10, 10)); ASSERT_EQ(10, point4.x); ASSERT_EQ(10, point4.y); } TEST_F(WebPluginContainerTest, PluginDocumentPluginIsFocused) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("test.pdf"), WebString::fromUTF8("application/pdf")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "test.pdf", true, &pluginWebFrameClient); DCHECK(webView); webView->updateAllLifecyclePhases(); WebDocument document = webView->mainFrame()->document(); EXPECT_TRUE(document.isPluginDocument()); WebPluginContainer* pluginContainer = getWebPluginContainer(webView, "plugin"); EXPECT_EQ(document.focusedElement(), pluginContainer->element()); } TEST_F(WebPluginContainerTest, IFramePluginDocumentNotFocused) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("test.pdf"), WebString::fromUTF8("application/pdf")); URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("iframe_pdf.html"), WebString::fromUTF8("text/html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "iframe_pdf.html", true, &pluginWebFrameClient); DCHECK(webView); webView->updateAllLifecyclePhases(); WebDocument document = webView->mainFrame()->document(); WebFrame* iframe = webView->mainFrame()->firstChild(); EXPECT_TRUE(iframe->document().isPluginDocument()); WebPluginContainer* pluginContainer = iframe->document().getElementById("plugin").pluginContainer(); EXPECT_NE(document.focusedElement(), pluginContainer->element()); EXPECT_NE(iframe->document().focusedElement(), pluginContainer->element()); } TEST_F(WebPluginContainerTest, PrintOnePage) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("test.pdf"), WebString::fromUTF8("application/pdf")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. 
FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "test.pdf", true, &pluginWebFrameClient); DCHECK(webView); webView->updateAllLifecyclePhases(); runPendingTasks(); WebFrame* frame = webView->mainFrame(); WebPrintParams printParams; printParams.printContentArea.width = 500; printParams.printContentArea.height = 500; frame->printBegin(printParams); SkPictureRecorder recorder; frame->printPage(0, recorder.beginRecording(IntRect())); frame->printEnd(); DCHECK(pluginWebFrameClient.printedAtLeastOnePage()); } TEST_F(WebPluginContainerTest, PrintAllPages) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("test.pdf"), WebString::fromUTF8("application/pdf")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "test.pdf", true, &pluginWebFrameClient); DCHECK(webView); webView->updateAllLifecyclePhases(); runPendingTasks(); WebFrame* frame = webView->mainFrame(); WebPrintParams printParams; printParams.printContentArea.width = 500; printParams.printContentArea.height = 500; frame->printBegin(printParams); SkPictureRecorder recorder; frame->printPagesWithBoundaries(recorder.beginRecording(IntRect()), WebSize()); frame->printEnd(); DCHECK(pluginWebFrameClient.printedAtLeastOnePage()); } TEST_F(WebPluginContainerTest, LocalToWindowPointTest) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebPluginContainer* pluginContainerOne = getWebPluginContainer(webView, WebString::fromUTF8("translated-plugin")); DCHECK(pluginContainerOne); WebPoint point1 = pluginContainerOne->localToRootFramePoint(WebPoint(0, 0)); ASSERT_EQ(10, point1.x); ASSERT_EQ(10, point1.y); WebPoint point2 = pluginContainerOne->localToRootFramePoint(WebPoint(90, 90)); ASSERT_EQ(100, point2.x); ASSERT_EQ(100, point2.y); WebPluginContainer* pluginContainerTwo = getWebPluginContainer(webView, WebString::fromUTF8("rotated-plugin")); DCHECK(pluginContainerTwo); WebPoint point3 = pluginContainerTwo->localToRootFramePoint(WebPoint(10, 0)); ASSERT_EQ(0, point3.x); ASSERT_EQ(10, point3.y); WebPoint point4 = pluginContainerTwo->localToRootFramePoint(WebPoint(10, 10)); ASSERT_EQ(-10, point4.x); ASSERT_EQ(10, point4.y); } // Verifies executing the command 'Copy' results in copying to the clipboard. TEST_F(WebPluginContainerTest, Copy) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. 
FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginContainerOneElement = webView->mainFrame()->document().getElementById(WebString::fromUTF8("translated-plugin")); EXPECT_TRUE(webView->mainFrame()->executeCommand("Copy", pluginContainerOneElement)); EXPECT_EQ(WebString("x"), Platform::current()->clipboard()->readPlainText(WebClipboard::Buffer())); } // Verifies |Ctrl-C| and |Ctrl-Insert| keyboard events, results in copying to // the clipboard. TEST_F(WebPluginContainerTest, CopyInsertKeyboardEventsTest) { URLTestHelpers::registerMockedURLFromBaseURL( WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginContainerOneElement = webView->mainFrame()->document().getElementById(WebString::fromUTF8("translated-plugin")); PlatformEvent::Modifiers modifierKey = static_cast<PlatformEvent::Modifiers>(PlatformEvent::CtrlKey | PlatformEvent::NumLockOn | PlatformEvent::IsLeft); #if OS(MACOSX) modifierKey = static_cast<PlatformEvent::Modifiers>(PlatformEvent::MetaKey | PlatformEvent::NumLockOn | PlatformEvent::IsLeft); #endif PlatformKeyboardEvent platformKeyboardEventC(PlatformEvent::RawKeyDown, "", "", "67", "", "", 67, 0, false, modifierKey, 0.0); KeyboardEvent* keyEventC = KeyboardEvent::create(platformKeyboardEventC, 0); toWebPluginContainerImpl(pluginContainerOneElement.pluginContainer())->handleEvent(keyEventC); EXPECT_EQ(WebString("x"), Platform::current()->clipboard()->readPlainText(WebClipboard::Buffer())); // Clearing |Clipboard::Buffer()|. Platform::current()->clipboard()->writePlainText(WebString("")); EXPECT_EQ(WebString(""), Platform::current()->clipboard()->readPlainText(WebClipboard::Buffer())); PlatformKeyboardEvent platformKeyboardEventInsert(PlatformEvent::RawKeyDown, "", "", "45", "", "", 45, 0, false, modifierKey, 0.0); KeyboardEvent* keyEventInsert = KeyboardEvent::create(platformKeyboardEventInsert, 0); toWebPluginContainerImpl(pluginContainerOneElement.pluginContainer())->handleEvent(keyEventInsert); EXPECT_EQ(WebString("x"), Platform::current()->clipboard()->readPlainText(WebClipboard::Buffer())); } // A class to facilitate testing that events are correctly received by plugins. 
class EventTestPlugin : public FakeWebPlugin { public: EventTestPlugin(WebFrame* frame, const WebPluginParams& params) : FakeWebPlugin(frame, params) , m_lastEventType(WebInputEvent::Undefined) { } WebInputEventResult handleInputEvent(const WebInputEvent& event, WebCursorInfo&) override { m_lastEventType = event.type; return WebInputEventResult::HandledSystem; } WebInputEvent::Type getLastInputEventType() {return m_lastEventType; } private: WebInputEvent::Type m_lastEventType; }; TEST_F(WebPluginContainerTest, GestureLongPressReachesPlugin) { URLTestHelpers::registerMockedURLFromBaseURL( WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); CustomPluginWebFrameClient<EventTestPlugin> pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginContainerOneElement = webView->mainFrame()->document().getElementById(WebString::fromUTF8("translated-plugin")); WebPlugin* plugin = static_cast<WebPluginContainerImpl*>(pluginContainerOneElement.pluginContainer())->plugin(); EventTestPlugin* testPlugin = static_cast<EventTestPlugin*>(plugin); WebGestureEvent event; event.type = WebInputEvent::GestureLongPress; event.sourceDevice = WebGestureDeviceTouchscreen; // First, send an event that doesn't hit the plugin to verify that the // plugin doesn't receive it. event.x = 0; event.y = 0; webView->handleInputEvent(event); runPendingTasks(); EXPECT_EQ(WebInputEvent::Undefined, testPlugin->getLastInputEventType()); // Next, send an event that does hit the plugin, and verify it does receive it. WebRect rect = pluginContainerOneElement.boundsInViewport(); event.x = rect.x + rect.width / 2; event.y = rect.y + rect.height / 2; webView->handleInputEvent(event); runPendingTasks(); EXPECT_EQ(WebInputEvent::GestureLongPress, testPlugin->getLastInputEventType()); } // Verify that isRectTopmost returns false when the document is detached. TEST_F(WebPluginContainerTest, IsRectTopmostTest) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebPluginContainerImpl* pluginContainerImpl = toWebPluginContainerImpl(getWebPluginContainer(webView, WebString::fromUTF8("translated-plugin"))); pluginContainerImpl->setFrameRect(IntRect(0, 0, 300, 300)); WebRect rect = pluginContainerImpl->element().boundsInViewport(); EXPECT_TRUE(pluginContainerImpl->isRectTopmost(rect)); // Cause the plugin's frame to be detached. 
webViewHelper.reset(); EXPECT_FALSE(pluginContainerImpl->isRectTopmost(rect)); } #define EXPECT_RECT_EQ(expected, actual) \ do { \ const IntRect& actualRect = actual; \ EXPECT_EQ(expected.x(), actualRect.x()); \ EXPECT_EQ(expected.y(), actualRect.y()); \ EXPECT_EQ(expected.width(), actualRect.width()); \ EXPECT_EQ(expected.height(), actualRect.height()); \ } while (false) TEST_F(WebPluginContainerTest, ClippedRectsForIframedElement) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_containing_page.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_containing_page.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginElement = webView->mainFrame()->firstChild()->document().getElementById("translated-plugin"); WebPluginContainerImpl* pluginContainerImpl = toWebPluginContainerImpl(pluginElement.pluginContainer()); DCHECK(pluginContainerImpl); pluginContainerImpl->setFrameRect(IntRect(0, 0, 300, 300)); IntRect windowRect, clipRect, unobscuredRect; Vector<IntRect> cutOutRects; calculateGeometry(pluginContainerImpl, windowRect, clipRect, unobscuredRect, cutOutRects); EXPECT_RECT_EQ(IntRect(10, 210, 300, 300), windowRect); EXPECT_RECT_EQ(IntRect(0, 0, 240, 90), clipRect); EXPECT_RECT_EQ(IntRect(0, 0, 240, 160), unobscuredRect); // Cause the plugin's frame to be detached. webViewHelper.reset(); } TEST_F(WebPluginContainerTest, ClippedRectsForSubpixelPositionedPlugin) { URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginElement = webView->mainFrame()->document().getElementById("subpixel-positioned-plugin"); WebPluginContainerImpl* pluginContainerImpl = toWebPluginContainerImpl(pluginElement.pluginContainer()); DCHECK(pluginContainerImpl); IntRect windowRect, clipRect, unobscuredRect; Vector<IntRect> cutOutRects; calculateGeometry(pluginContainerImpl, windowRect, clipRect, unobscuredRect, cutOutRects); // TODO(chrishtr): these values should not be -1, they should be 0. They are -1 because WebPluginContainerImpl currently uses an IntRect for // frameRect() to determine the position of the plugin, which results in a loss of precision if it is actually subpixel positioned. EXPECT_RECT_EQ(IntRect(0, 0, 40, 40), windowRect); EXPECT_RECT_EQ(IntRect(-1, -1, 41, 41), clipRect); EXPECT_RECT_EQ(IntRect(-1, -1, 41, 41), unobscuredRect); // Cause the plugin's frame to be detached. webViewHelper.reset(); } TEST_F(WebPluginContainerTest, TopmostAfterDetachTest) { static WebRect topmostRect(10, 10, 40, 40); // Plugin that checks isRectTopmost in destroy(). 
class TopmostPlugin : public FakeWebPlugin { public: TopmostPlugin(WebFrame* frame, const WebPluginParams& params) : FakeWebPlugin(frame, params) {} bool isRectTopmost() { return container()->isRectTopmost(topmostRect); } void destroy() override { // In destroy, isRectTopmost is no longer valid. EXPECT_FALSE(container()->isRectTopmost(topmostRect)); FakeWebPlugin::destroy(); } }; URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); CustomPluginWebFrameClient<TopmostPlugin> pluginWebFrameClient; // Must outlive webViewHelper. FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebPluginContainerImpl* pluginContainerImpl = toWebPluginContainerImpl(getWebPluginContainer(webView, WebString::fromUTF8("translated-plugin"))); pluginContainerImpl->setFrameRect(IntRect(0, 0, 300, 300)); EXPECT_TRUE(pluginContainerImpl->isRectTopmost(topmostRect)); TopmostPlugin* testPlugin = static_cast<TopmostPlugin*>(pluginContainerImpl->plugin()); EXPECT_TRUE(testPlugin->isRectTopmost()); // Cause the plugin's frame to be detached. webViewHelper.reset(); EXPECT_FALSE(pluginContainerImpl->isRectTopmost(topmostRect)); } namespace { class CompositedPlugin : public FakeWebPlugin { public: CompositedPlugin(WebLocalFrame* frame, const WebPluginParams& params) : FakeWebPlugin(frame, params)<|fim▁hole|> { } WebLayer* getWebLayer() const { return m_layer.get(); } // WebPlugin bool initialize(WebPluginContainer* container) override { if (!FakeWebPlugin::initialize(container)) return false; container->setWebLayer(m_layer.get()); return true; } void destroy() override { container()->setWebLayer(nullptr); FakeWebPlugin::destroy(); } private: OwnPtr<WebLayer> m_layer; }; class ScopedSPv2 { public: ScopedSPv2() { RuntimeEnabledFeatures::setSlimmingPaintV2Enabled(true); } ~ScopedSPv2() { m_featuresBackup.restore(); } private: RuntimeEnabledFeatures::Backup m_featuresBackup; }; } // namespace TEST_F(WebPluginContainerTest, CompositedPluginSPv2) { ScopedSPv2 enableSPv2; URLTestHelpers::registerMockedURLFromBaseURL(WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin.html")); CustomPluginWebFrameClient<CompositedPlugin> webFrameClient; FrameTestHelpers::WebViewHelper webViewHelper; WebView* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin.html", true, &webFrameClient); ASSERT_TRUE(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(800, 600)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebPluginContainerImpl* container = static_cast<WebPluginContainerImpl*>(getWebPluginContainer(webView, WebString::fromUTF8("plugin"))); ASSERT_TRUE(container); Element* element = static_cast<Element*>(container->element()); const auto* plugin = static_cast<const CompositedPlugin*>(container->plugin()); OwnPtr<PaintController> paintController = PaintController::create(); GraphicsContext graphicsContext(*paintController); container->paint(graphicsContext, CullRect(IntRect(10, 10, 400, 300))); paintController->commitNewDisplayItems(); const auto& displayItems = paintController->paintArtifact().getDisplayItemList(); ASSERT_EQ(1u, displayItems.size()); EXPECT_EQ(element->layoutObject(), &displayItems[0].client()); 
ASSERT_EQ(DisplayItem::ForeignLayerPlugin, displayItems[0].getType()); const auto& foreignLayerDisplayItem = static_cast<const ForeignLayerDisplayItem&>(displayItems[0]); EXPECT_EQ(plugin->getWebLayer()->ccLayer(), foreignLayerDisplayItem.layer()); } TEST_F(WebPluginContainerTest, NeedsWheelEvents) { URLTestHelpers::registerMockedURLFromBaseURL( WebString::fromUTF8(m_baseURL.c_str()), WebString::fromUTF8("plugin_container.html")); TestPluginWebFrameClient pluginWebFrameClient; // Must outlive webViewHelper FrameTestHelpers::WebViewHelper webViewHelper; WebViewImpl* webView = webViewHelper.initializeAndLoad(m_baseURL + "plugin_container.html", true, &pluginWebFrameClient); DCHECK(webView); webView->settings()->setPluginsEnabled(true); webView->resize(WebSize(300, 300)); webView->updateAllLifecyclePhases(); runPendingTasks(); WebElement pluginContainerOneElement = webView->mainFrame()->document().getElementById(WebString::fromUTF8("translated-plugin")); pluginContainerOneElement.pluginContainer()->setWantsWheelEvents(true); runPendingTasks(); EXPECT_TRUE(webView->page()->frameHost().eventHandlerRegistry().hasEventHandlers(EventHandlerRegistry::WheelEventBlocking)); } } // namespace blink<|fim▁end|>
, m_layer(adoptPtr(Platform::current()->compositorSupport()->createLayer()))
<|file_name|>operator_pd_full_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from __future__ import absolute_import<|fim▁hole|>from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from tensorflow.contrib.distributions.python.ops import operator_pd_full class OperatorPDFullTest(tf.test.TestCase): # The only method needing checked (because it isn't part of the parent class) # is the check for symmetry. def setUp(self): self._rng = np.random.RandomState(42) def _random_positive_def_array(self, *shape): matrix = self._rng.rand(*shape) return tf.batch_matmul(matrix, matrix, adj_y=True).eval() def testPositiveDefiniteMatrixDoesntRaise(self): with self.test_session(): matrix = self._random_positive_def_array(2, 3, 3) operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True) operator.to_dense().eval() # Should not raise def testNegativeDefiniteMatrixRaises(self): with self.test_session(): matrix = -1 * self._random_positive_def_array(3, 2, 2) operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True) # Could fail inside Cholesky decomposition, or later when we test the # diag. with self.assertRaisesOpError("x > 0|LLT"): operator.to_dense().eval() def testNonSymmetricMatrixRaises(self): with self.test_session(): matrix = self._random_positive_def_array(3, 2, 2) matrix[0, 0, 1] += 0.001 operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True) with self.assertRaisesOpError("x == y"): operator.to_dense().eval() if __name__ == "__main__": tf.test.main()<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export { default as createShallow } from './createShallow'; export { default as createMount } from './createMount'; export { default as createRender } from './createRender'; export { default as findOutermostIntrinsic, wrapsIntrinsicElement } from './findOutermostIntrinsic'; export { default as getClasses } from './getClasses';<|fim▁hole|><|fim▁end|>
export { default as unwrap } from './unwrap';
<|file_name|>document_library_tags.py<|end_file_name|><|fim▁begin|>"""Templatetags for the ``document_library`` app.""" from django import template from ..models import Document register = template.Library() @register.assignment_tag def get_files_for_document(document): """ Returns the available files for all languages. In case the file is already present in another language, it does not re-add it again. """ files = [] for doc_trans in document.translations.all(): if doc_trans.filer_file is not None and \ doc_trans.filer_file not in files: doc_trans.filer_file.language = doc_trans.language_code files.append(doc_trans.filer_file) return files @register.assignment_tag(takes_context=True) def get_frontpage_documents(context): """Returns the library favs that should be shown on the front page.""" req = context.get('request') qs = Document.objects.published(req).filter(is_on_front_page=True) return qs @register.assignment_tag(takes_context=True) def get_latest_documents(context, count=5): """ Returns the latest documents.<|fim▁hole|> """ req = context.get('request') qs = Document.objects.published(req)[:count] return qs<|fim▁end|>
:param count: Number of documents to be returned. Defaults to 5.
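Both assignment tags above return querysets rather than rendered markup, so templates bind the result with `as` and iterate it. Because Django's tag decorators (assignment_tag, like simple_tag) hand back the original function, a tag can also be exercised directly from Python. A minimal sketch, assuming a configured Django environment with this app importable; the module path and the bare-dict context are assumptions:

# Hypothetical import path for the app's tag module; requires Django settings
# and INSTALLED_APPS to be configured before this import runs.
from document_library.templatetags.document_library_tags import get_latest_documents

# The tag only calls context.get('request'), so a plain dict stands in for a
# real template context here; a real request object would normally be passed.
fake_context = {'request': None}
latest = get_latest_documents(fake_context, count=3)
for document in latest:
    print(document)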
<|file_name|>remove_issue_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """remove_issue tests.""" import unittest <|fim▁hole|> from clusterfuzz._internal.datastore import data_types from clusterfuzz._internal.tests.test_libs import helpers as test_helpers from clusterfuzz._internal.tests.test_libs import test_utils from handlers.testcase_detail import remove_issue from libs import form @test_utils.with_cloud_emulators('datastore') class HandlerTest(unittest.TestCase): """Test Handler.""" def setUp(self): test_helpers.patch(self, [ 'handlers.testcase_detail.show.get_testcase_detail', 'libs.auth.get_current_user', 'libs.auth.is_current_user_admin', ]) self.mock.is_current_user_admin.return_value = True self.mock.get_testcase_detail.return_value = {'testcase': 'yes'} self.mock.get_current_user().email = '[email protected]' flaskapp = flask.Flask('testflask') flaskapp.add_url_rule('/', view_func=remove_issue.Handler.as_view('/')) self.app = webtest.TestApp(flaskapp) def test_succeed(self): """Remove issue from a testcase.""" testcase = data_types.Testcase() testcase.bug_information = '1234' testcase.put() resp = self.app.post_json('/', { 'testcaseId': testcase.key.id(), 'csrf_token': form.generate_csrf_token(), }) self.assertEqual(200, resp.status_int) self.assertEqual('yes', resp.json['testcase']) self.assertEqual('', testcase.key.get().bug_information)<|fim▁end|>
import flask import webtest
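The test above exercises a Flask view through webtest by registering the handler on a throwaway app and posting JSON to it; the same wiring works for any WSGI-compatible handler. A stripped-down, self-contained sketch of that pattern, with a hypothetical echo route in place of the real ClusterFuzz handler:

import flask
import webtest

app = flask.Flask('sketch')

@app.route('/echo', methods=['POST'])
def echo():
    # Echo the JSON body back, mirroring how the handlers return JSON dicts.
    return flask.jsonify(flask.request.get_json())

test_app = webtest.TestApp(app)
resp = test_app.post_json('/echo', {'testcaseId': 42})
assert resp.status_int == 200
assert resp.json['testcaseId'] == 42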
<|file_name|>Main.js<|end_file_name|><|fim▁begin|>o2.xApplication.ConfigDesigner.options = { "multitask": true, "executable": false }; o2.xDesktop.requireApp("ConfigDesigner", "Script", null, false); o2.require("o2.xDesktop.UserData", null, false); o2.xApplication.ConfigDesigner.Main = new Class({ Extends: o2.xApplication.Common.Main, Implements: [Options, Events], options: { "style": "default", "name": "ConfigDesigner", "icon": "icon.png", "title": o2.xApplication.ConfigDesigner.LP.title, "appTitle": o2.xApplication.ConfigDesigner.LP.title, "id": "node_127.0.0.1.json", "actions": null, "category": null, "portalData": null }, onQueryLoad: function(){ this.actions = o2.Actions.load("x_program_center"); this.lp = o2.xApplication.ConfigDesigner.LP; this.addEvent("queryClose", function(e){ if (this.explorer){ this.explorer.reload(); } }.bind(this)); }, loadApplication: function(callback){ this.createNode(); if (!this.options.isRefresh){ this.maxSize(function(){ this.openScript(); }.bind(this)); }else{ this.openScript(); } if (callback) callback(); }, createNode: function(){ this.content.setStyle("overflow", "hidden"); this.node = new Element("div", { "styles": {"width": "100%", "height": "100%", "overflow": "hidden"} }).inject(this.content); }, getApplication:function(callback){ if (callback) callback(); }, openScript: function(){ this.getApplication(function(){ this.loadNodes(); this.loadScriptListNodes(); this.loadContentNode(function(){ this.loadProperty(); // this.loadTools(); this.resizeNode(); this.addEvent("resize", this.resizeNode.bind(this)); this.loadScript(); if (this.toolbarContentNode){ this.setScrollBar(this.toolbarContentNode, null, { "V": {"x": 0, "y": 0}, "H": {"x": 0, "y": 0} }); this.setScrollBar(this.propertyDomArea, null, { "V": {"x": 0, "y": 0}, "H": {"x": 0, "y": 0} }); } }.bind(this)); }.bind(this)); }, loadNodes: function(){ this.scriptListNode = new Element("div", { "styles": this.css.scriptListNode }).inject(this.node); this.propertyNode = new Element("div", { "styles": this.css.propertyNode }).inject(this.node); this.contentNode = new Element("div", { "styles": this.css.contentNode }).inject(this.node); }, //loadScriptList------------------------------- loadScriptListNodes: function(){ this.scriptListTitleNode = new Element("div", { "styles": this.css.scriptListTitleNode, "text": o2.xApplication.ConfigDesigner.LP.scriptLibrary }).inject(this.scriptListNode); this.scriptListResizeNode = new Element("div", {"styles": this.css.scriptListResizeNode}).inject(this.scriptListNode); this.scriptListAreaSccrollNode = new Element("div", {"styles": this.css.scriptListAreaSccrollNode}).inject(this.scriptListNode); this.scriptListAreaNode = new Element("div", {"styles": this.css.scriptListAreaNode}).inject(this.scriptListAreaSccrollNode); this.loadScriptListResize(); this.loadScriptList(); }, setScroll: function(){ o2.require("o2.widget.ScrollBar", function(){ this.listScrollBar = new o2.widget.ScrollBar(this.scriptListAreaSccrollNode, { "style":"xDesktop_Message", "where": "before", "indent": false, "distance": 100, "friction": 6, "axis": {"x": false, "y": true} }); }.bind(this)); }, loadScriptListResize: function(){ // var size = this.propertyNode.getSize(); // var position = this.propertyResizeBar.getPosition(); this.scriptListResize = new Drag(this.scriptListResizeNode,{ "snap": 1, "onStart": function(el, e){ var x = (Browser.name=="firefox") ? e.event.clientX : e.event.x; var y = (Browser.name=="firefox") ? 
e.event.clientY : e.event.y; el.store("position", {"x": x, "y": y}); var size = this.scriptListAreaSccrollNode.getSize(); el.store("initialWidth", size.x); }.bind(this), "onDrag": function(el, e){ var x = (Browser.name=="firefox") ? e.event.clientX : e.event.x; // var y = e.event.y; var bodySize = this.content.getSize(); var position = el.retrieve("position"); var initialWidth = el.retrieve("initialWidth").toFloat(); var dx = x.toFloat() - position.x.toFloat(); var width = initialWidth+dx; if (width> bodySize.x/2) width = bodySize.x/2; if (width<40) width = 40; this.contentNode.setStyle("margin-left", width+1); this.scriptListNode.setStyle("width", width); }.bind(this) }); }, loadScriptList: function() { this.actions.ConfigAction.getList(function( json ){ data = json.data; var config = JSON.parse(data.config); this.config = config; for (var key in config) { if(key.indexOf("node_")>-1){ this.options.id = key; } this.createListScriptItem(key,config[key]); } this.setScroll(); }.bind(this), null, false); }, createListScriptItem: function(id, name){ var _self = this; var listScriptItem = new Element("div", {"styles": this.css.listScriptItem}).inject(this.scriptListAreaNode, "bottom"); var listScriptItemIcon = new Element("div", {"styles": this.css.listScriptItemIcon}).inject(listScriptItem); var listScriptItemText = new Element("div", {"styles": this.css.listScriptItemText, "text":id.replace(".json","")+" ("+name+")" }).inject(listScriptItem); listScriptItem.store("script", {id:id,name:name}); listScriptItem.addEvents({ "dblclick": function(e){_self.loadScriptByData(this, e);}, "mouseover": function(){if (_self.currentListScriptItem!=this) this.setStyles(_self.css.listScriptItem_over);}, "mouseout": function(){if (_self.currentListScriptItem!=this) this.setStyles(_self.css.listScriptItem);} }); this.listScriptItemMove(listScriptItem); }, createScriptListCopy: function(node){ var copyNode = node.clone().inject(this.node); copyNode.position({ "relativeTo": node, "position": "upperLeft", "edge": "upperLeft" }); var size = copyNode.getSize(); copyNode.setStyles({ "width": ""+size.x+"px", "height": ""+size.y+"px", "z-index": 50001, }); return copyNode; }, listDragEnter: function(dragging, inObj){ var markNode = inObj.retrieve("markNode"); if (!markNode){ var size = inObj.getSize(); markNode = new Element("div", {"styles": this.css.dragListItemMark}).inject(this.node); markNode.setStyles({ "width": ""+size.x+"px", "height": ""+size.y+"px", "position": "absolute", "background-color": "#666", "z-index": 50000, "opacity": 0.3 // "border": "2px solid #ffba00" }); markNode.position({ "relativeTo": inObj, "position": "upperLeft", "edge": "upperLeft" }); var y = markNode.getStyle("top").toFloat()-1; var x = markNode.getStyle("left").toFloat()-2; markNode.setStyles({ "left": ""+x+"px", "top": ""+y+"px", }); inObj.store("markNode", markNode); } }, listDragLeave: function(dragging, inObj){ var markNode = inObj.retrieve("markNode"); if (markNode) markNode.destroy(); inObj.eliminate("markNode"); }, listScriptItemMove: function(node){ var iconNode = node.getFirst(); iconNode.addEvent("mousedown", function(e){ var script = node.retrieve("script"); if (script.id!=this.scriptTab.showPage.script.data.id){ var copyNode = this.createScriptListCopy(node); var droppables = [this.designNode, this.propertyDomArea]; var listItemDrag = new Drag.Move(copyNode, { "droppables": droppables, "onEnter": function(dragging, inObj){ this.listDragEnter(dragging, inObj); }.bind(this), "onLeave": function(dragging, inObj){ 
this.listDragLeave(dragging, inObj); }.bind(this), "onDrag": function(e){ //nothing }.bind(this), "onDrop": function(dragging, inObj){ if (inObj){ this.addIncludeScript(script); this.listDragLeave(dragging, inObj); copyNode.destroy(); }else{ copyNode.destroy(); } }.bind(this), "onCancel": function(dragging){ copyNode.destroy(); }.bind(this) }); listItemDrag.start(e); } }.bind(this)); }, addIncludeScript: function(script){ var currentScript = this.scriptTab.showPage.script; if (currentScript.data.dependScriptList.indexOf(script.name)==-1){ currentScript.data.dependScriptList.push(script.name); this.addIncludeToList(script.name); } }, addIncludeToList: function(name){ this.actions.getScriptByName(name, this.application.id, function(json){ var script = json.data; var includeScriptItem = new Element("div", {"styles": this.css.includeScriptItem}).inject(this.propertyIncludeListArea); var includeScriptItemAction = new Element("div", {"styles": this.css.includeScriptItemAction}).inject(includeScriptItem); var includeScriptItemText = new Element("div", {"styles": this.css.includeScriptItemText}).inject(includeScriptItem); includeScriptItemText.set("text", script.name+" ("+script.alias+")"); includeScriptItem.store("script", script); var _self = this; includeScriptItemAction.addEvent("click", function(){ var node = this.getParent(); var script = node.retrieve("script"); if (script){ _self.scriptTab.showPage.script.data.dependScriptList.erase(script.name); } node.destroy(); }); }.bind(this), function(){ this.scriptTab.showPage.script.data.dependScriptList.erase(name); }.bind(this)); }, loadScriptByData: function(node, e){ var script = node.retrieve("script"); var scriptName = script.name; var openNew = true; for (var i = 0; i<this.scriptTab.pages.length; i++){ if (script.id==this.scriptTab.pages[i].script.data.id){ this.scriptTab.pages[i].showTabIm(); openNew = false; break; } } if (openNew){ this.loadScriptData(script.id, function(data){ data.name = scriptName; var script = new o2.xApplication.ConfigDesigner.Script(this, data); script.load(); }.bind(this), true); } }, //loadContentNode------------------------------ loadContentNode: function(toolbarCallback, contentCallback){ this.contentToolbarNode = new Element("div#contentToolbarNode", { "styles": this.css.contentToolbarNode }).inject(this.contentNode); this.loadContentToolbar(toolbarCallback); this.editContentNode = new Element("div", { "styles": this.css.editContentNode }).inject(this.contentNode); this.loadEditContent(function(){ // if (this.designDcoument) this.designDcoument.body.setStyles(this.css.designBody); if (this.designNode) this.designNode.setStyles(this.css.designNode); if (contentCallback) contentCallback(); }.bind(this)); }, loadContentToolbar: function(callback){ this.getFormToolbarHTML(function(toolbarNode){ var spans = toolbarNode.getElements("span"); spans.each(function(item, idx){ var img = item.get("MWFButtonImage"); if (img){ item.set("MWFButtonImage", this.path+""+this.options.style+"/toolbar/"+img); } }.bind(this)); $(toolbarNode).inject(this.contentToolbarNode); o2.require("o2.widget.Toolbar", function(){ this.toolbar = new o2.widget.Toolbar(toolbarNode, {"style": "ProcessCategory"}, this); this.toolbar.load(); var _self = this; //this.styleSelectNode = toolbarNode.getElement("select"); //this.styleSelectNode.addEvent("change", function(){ // _self.changeEditorStyle(this); //}); this.styleSelectNode = toolbarNode.getElement("select[MWFnodetype='theme']"); this.styleSelectNode.addEvent("change", function(){ 
_self.changeEditorStyle(this); }); this.fontsizeSelectNode = toolbarNode.getElement("select[MWFnodetype='fontSize']"); this.fontsizeSelectNode.addEvent("change", function(){ _self.changeFontSize(this); }); this.editorSelectNode = toolbarNode.getElement("select[MWFnodetype='editor']"); this.editorSelectNode.addEvent("change", function(){ _self.changeEditor(this); }); this.monacoStyleSelectNode = toolbarNode.getElement("select[MWFnodetype='monaco-theme']"); this.monacoStyleSelectNode.addEvent("change", function(){ _self.changeEditorStyle(this); }); if (callback) callback(); }.bind(this)); }.bind(this)); }, changeEditor: function(node){ var idx = node.selectedIndex; var value = node.options[idx].value; if (!o2.editorData){ o2.editorData = { "javascriptEditor": { "monaco_theme": "vs", "theme": "tomorrow", "fontSize" : "12px" } }; } o2.editorData.javascriptEditor["editor"] = value; o2.UD.putData("editor", o2.editorData); this.scriptTab.pages.each(function(page){ var editor = page.script.editor; if (editor) editor.changeEditor(value); }.bind(this)); if (value=="ace"){ this.monacoStyleSelectNode.hide(); this.styleSelectNode.show(); }else{ this.monacoStyleSelectNode.show(); this.styleSelectNode.hide(); } }, changeFontSize: function(node){ var idx = node.selectedIndex; var value = node.options[idx].value; //var editorData = null; this.scriptTab.pages.each(function(page){ //if (!editorData) editorData = page.invoke.editor.editorData; var editor = page.script.editor; if (editor) editor.setFontSize(value); }.bind(this)); //if (!editorData) editorData = o2.editorData; //editorData.javainvokeEditor.theme = value; if (!o2.editorData){ o2.editorData = { "javascriptEditor": { "monaco_theme": "vs", "theme": "tomorrow", "fontSize" : "12px" } }; } o2.editorData.javascriptEditor["fontSize"] = value; o2.UD.putData("editor", o2.editorData); }, changeEditorStyle: function(node){ var idx = node.selectedIndex; var value = node.options[idx].value; //var editorData = null; this.scriptTab.pages.each(function(page){ //if (!editorData) editorData = page.script.editor.editorData; var editor = page.script.editor; if (editor) editor.setTheme(value); }.bind(this)); //if (!editorData) editorData = o2.editorData; //editorData.javascriptEditor.theme = value; if (!o2.editorData){ o2.editorData = { "javascriptEditor": { "monaco_theme": "vs", "theme": "tomorrow", "fontSize" : "12px" } }; } if (o2.editorData.javascriptEditor.editor === "monaco"){ o2.editorData.javascriptEditor.monaco_theme = value; }else{ o2.editorData.javascriptEditor.theme = value; } o2.UD.putData("editor", o2.editorData); }, getFormToolbarHTML: function(callback){ var toolbarUrl = this.path+this.options.style+"/toolbars.html"; var r = new Request.HTML({ url: toolbarUrl, method: "get", onSuccess: function(responseTree, responseElements, responseHTML, responseJavaScript){ var toolbarNode = responseTree[0]; if (callback) callback(toolbarNode); }.bind(this), onFailure: function(xhr){ this.notice("request portalToolbars error: "+xhr.responseText, "error"); }.bind(this) });<|fim▁hole|> this.designNode.inject(this.node); this.designNode.setStyles({ "position": "absolute", "width": "100%", "height": "100%", "top": "0px", "margin": "0px", "left": "0px" }); this.scriptTab.pages.each(function(page){ page.script.setAreaNodeSize(); }); this.isMax = true; }else{ this.isMax = false; this.designNode.inject(this.editContentNode); this.designNode.setStyles(this.css.designNode); this.designNode.setStyles({ "position": "static" }); this.resizeNode(); 
this.scriptTab.pages.each(function(page){ page.script.setAreaNodeSize(); }); } }, loadEditContent: function(callback){ this.designNode = new Element("div", { "styles": this.css.designNode }).inject(this.editContentNode); o2.require("o2.widget.Tab", function(){ this.scriptTab = new o2.widget.Tab(this.designNode, {"style": "script"}); this.scriptTab.load(); }.bind(this), false); //o2.require("o2.widget.ScrollBar", function(){ // new o2.widget.ScrollBar(this.designNode, {"distance": 100}); //}.bind(this)); }, //loadProperty------------------------ loadProperty: function(){ this.propertyTitleNode = new Element("div", { "styles": this.css.propertyTitleNode, "text": o2.xApplication.ConfigDesigner.LP.property }).inject(this.propertyNode); this.propertyResizeBar = new Element("div", { "styles": this.css.propertyResizeBar }).inject(this.propertyNode); this.loadPropertyResize(); this.propertyContentNode = new Element("div", { "styles": this.css.propertyContentNode }).inject(this.propertyNode); this.propertyDomArea = new Element("div", { "styles": this.css.propertyDomArea }).inject(this.propertyContentNode); this.propertyDomPercent = 0.3; this.propertyContentResizeNode = new Element("div", { "styles": this.css.propertyContentResizeNode }).inject(this.propertyContentNode); this.propertyContentArea = new Element("div", { "styles": this.css.propertyContentArea }).inject(this.propertyContentNode); this.loadPropertyContentResize(); this.setPropertyContent(); this.setIncludeNode(); }, setIncludeNode: function(){ this.includeTitleNode = new Element("div", {"styles": this.css.includeTitleNode}).inject(this.propertyDomArea); this.includeTitleActionNode = new Element("div", {"styles": this.css.includeTitleActionNode}).inject(this.includeTitleNode); this.includeTitleTextNode = new Element("div", {"styles": this.css.includeTitleTextNode, "text": this.lp.include}).inject(this.includeTitleNode); this.includeTitleActionNode.addEvent("click", function(){ this.addInclude(); }.bind(this)); this.propertyIncludeListArea = new Element("div", { "styles": {"overflow": "hidden"} }).inject(this.propertyDomArea); }, addInclude: function(){ }, setPropertyContent: function(){ var node = new Element("div", {"styles": this.css.propertyItemTitleNode, "text": this.lp.id+":"}).inject(this.propertyContentArea); this.propertyIdNode = new Element("div", {"styles": this.css.propertyTextNode, "text": ""}).inject(this.propertyContentArea); node = new Element("div", {"styles": this.css.propertyItemTitleNode, "text": this.lp.name+":"}).inject(this.propertyContentArea); this.propertyNameNode = new Element("div", {"styles": this.css.propertyTextNode, "text": ""}).inject(this.propertyContentArea); node = new Element("div", {"styles": this.css.propertyItemTitleNode, "text": this.lp.node+":"}).inject(this.propertyContentArea); this.propertyServerNode = new Element("select", {"styles": this.css.propertyTextNode}).inject(this.propertyContentArea); o2.Actions.load("x_program_center").CommandAction.getNodeInfoList( function( json ){ var nodeList = json.data.nodeList; if(nodeList.length>1){ new Element("option", {"value": "*", "text": "*"}).inject(this.propertyServerNode); } nodeList.each(function (node) { new Element("option", { "value": node.node.nodeAgentPort, "text": node.nodeAddress }).inject(this.propertyServerNode); }.bind(this)); }.bind(this),null, false ); node = new Element("div", {"styles": this.css.propertyItemTitleNode, "text": this.lp.description+":"}).inject(this.propertyContentArea); this.propertyDescriptionNode = new Element("div", 
{"styles": this.css.propertyTextNode, "text": ""}).inject(this.propertyContentArea); }, loadPropertyResize: function(){ // var size = this.propertyNode.getSize(); // var position = this.propertyResizeBar.getPosition(); this.propertyResize = new Drag(this.propertyResizeBar,{ "snap": 1, "onStart": function(el, e){ var x = (Browser.name=="firefox") ? e.event.clientX : e.event.x; var y = (Browser.name=="firefox") ? e.event.clientY : e.event.y; el.store("position", {"x": x, "y": y}); var size = this.propertyNode.getSize(); el.store("initialWidth", size.x); }.bind(this), "onDrag": function(el, e){ var x = (Browser.name=="firefox") ? e.event.clientX : e.event.x; // var y = e.event.y; var bodySize = this.content.getSize(); var position = el.retrieve("position"); var initialWidth = el.retrieve("initialWidth").toFloat(); var dx = position.x.toFloat()-x.toFloat(); var width = initialWidth+dx; if (width> bodySize.x/2) width = bodySize.x/2; if (width<40) width = 40; this.contentNode.setStyle("margin-right", width+1); this.propertyNode.setStyle("width", width); }.bind(this) }); }, loadPropertyContentResize: function(){ this.propertyContentResize = new Drag(this.propertyContentResizeNode, { "snap": 1, "onStart": function(el, e){ var x = (Browser.name=="firefox") ? e.event.clientX : e.event.x; var y = (Browser.name=="firefox") ? e.event.clientY : e.event.y; el.store("position", {"x": x, "y": y}); var size = this.propertyDomArea.getSize(); el.store("initialHeight", size.y); }.bind(this), "onDrag": function(el, e){ var size = this.propertyContentNode.getSize(); // var x = e.event.x; var y = (Browser.name=="firefox") ? e.event.clientY : e.event.y; var position = el.retrieve("position"); var dy = y.toFloat()-position.y.toFloat(); var initialHeight = el.retrieve("initialHeight").toFloat(); var height = initialHeight+dy; if (height<40) height = 40; if (height> size.y-40) height = size.y-40; this.propertyDomPercent = height/size.y; this.setPropertyContentResize(); }.bind(this) }); }, setPropertyContentResize: function(){ var size = this.propertyContentNode.getSize(); var resizeNodeSize = this.propertyContentResizeNode.getSize(); var height = size.y-resizeNodeSize.y; var domHeight = this.propertyDomPercent*height; var contentHeight = height-domHeight; this.propertyDomArea.setStyle("height", ""+domHeight+"px"); this.propertyContentArea.setStyle("height", ""+contentHeight+"px"); }, //resizeNode------------------------------------------------ resizeNode: function(){ if (!this.isMax){ var nodeSize = this.node.getSize(); this.contentNode.setStyle("height", ""+nodeSize.y+"px"); this.propertyNode.setStyle("height", ""+nodeSize.y+"px"); var contentToolbarMarginTop = this.contentToolbarNode.getStyle("margin-top").toFloat(); var contentToolbarMarginBottom = this.contentToolbarNode.getStyle("margin-bottom").toFloat(); var allContentToolberSize = this.contentToolbarNode.getComputedSize(); var y = nodeSize.y - allContentToolberSize.totalHeight - contentToolbarMarginTop - contentToolbarMarginBottom; this.editContentNode.setStyle("height", ""+y+"px"); if (this.designNode){ var designMarginTop = this.designNode.getStyle("margin-top").toFloat(); var designMarginBottom = this.designNode.getStyle("margin-bottom").toFloat(); y = nodeSize.y - allContentToolberSize.totalHeight - contentToolbarMarginTop - contentToolbarMarginBottom - designMarginTop - designMarginBottom; this.designNode.setStyle("height", ""+y+"px"); } titleSize = this.propertyTitleNode.getSize(); titleMarginTop = 
this.propertyTitleNode.getStyle("margin-top").toFloat(); titleMarginBottom = this.propertyTitleNode.getStyle("margin-bottom").toFloat(); titlePaddingTop = this.propertyTitleNode.getStyle("padding-top").toFloat(); titlePaddingBottom = this.propertyTitleNode.getStyle("padding-bottom").toFloat(); y = titleSize.y+titleMarginTop+titleMarginBottom+titlePaddingTop+titlePaddingBottom; y = nodeSize.y-y; this.propertyContentNode.setStyle("height", ""+y+"px"); this.propertyResizeBar.setStyle("height", ""+y+"px"); this.setPropertyContentResize(); titleSize = this.scriptListTitleNode.getSize(); titleMarginTop = this.scriptListTitleNode.getStyle("margin-top").toFloat(); titleMarginBottom = this.scriptListTitleNode.getStyle("margin-bottom").toFloat(); titlePaddingTop = this.scriptListTitleNode.getStyle("padding-top").toFloat(); titlePaddingBottom = this.scriptListTitleNode.getStyle("padding-bottom").toFloat(); nodeMarginTop = this.scriptListAreaSccrollNode.getStyle("margin-top").toFloat(); nodeMarginBottom = this.scriptListAreaSccrollNode.getStyle("margin-bottom").toFloat(); y = titleSize.y+titleMarginTop+titleMarginBottom+titlePaddingTop+titlePaddingBottom+nodeMarginTop+nodeMarginBottom; y = nodeSize.y-y; this.scriptListAreaSccrollNode.setStyle("height", ""+y+"px"); this.scriptListResizeNode.setStyle("height", ""+y+"px"); } }, //loadForm------------------------------------------ loadScript: function(){ //this.scriptTab.addTab(node, title); this.getScriptData(this.options.id, function(data){ data.name = this.config[this.options.id]; this.script = new o2.xApplication.ConfigDesigner.Script(this, data); this.script.load(); }.bind(this)); }, getScriptData: function(id, callback){ this.loadScriptData(id, callback); }, loadScriptData: function(id, callback, notSetTile){ this.actions.ConfigAction.open({fileName:id}, function(json){ if (json){ var data = json.data; data.id = id; data.text = data.fileContent; if (callback) callback(data); } }.bind(this)); }, saveScript: function(){ if (this.scriptTab.showPage){ var script = this.scriptTab.showPage.script; script.save(function(){ if (script==this.script){ var name = script.data.name; this.setTitle(o2.xApplication.ConfigDesigner.LP.title + "-"+name); this.options.desktopReload = true; this.options.id = script.data.id; } }.bind(this)); } }, saveDictionaryAs: function(){ this.dictionary.saveAs(); }, dictionaryExplode: function(){ this.dictionary.explode(); }, dictionaryImplode: function(){ this.dictionary.implode(); } });<|fim▁end|>
r.send(); }, maxOrReturnEditor: function(){ if (!this.isMax){
<|file_name|>mc_send.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import socket import struct import sys message = 'very important data' multicast_group = ('224.3.29.71', 10000) # Create the datagram socket sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Set a timeout so the socket does not block indefinitely when trying<|fim▁hole|># local network segment. ttl = struct.pack('b', 1) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) try: # Send data to the multicast group print >>sys.stderr, 'sending "%s"' % message sent = sock.sendto(message, multicast_group) # Look for responses from all recipients while True: print >>sys.stderr, 'waiting to receive' try: data, server = sock.recvfrom(16) except socket.timeout: print >>sys.stderr, 'timed out, no more responses' break else: print >>sys.stderr, 'received "%s" from %s' % (data, server) finally: print >>sys.stderr, 'closing socket' sock.close()<|fim▁end|>
# to receive data. sock.settimeout(0.2) # Set the time-to-live for messages to 1 so they do not go past the
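The sender above blocks in recvfrom() until something in the group answers, so it only completes end to end with a receiver that joins the same group and replies. A minimal matching receiver, using the standard IP_ADD_MEMBERSHIP recipe with the same group and port as the sender:

import socket
import struct

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('', 10000))  # bind to the multicast port on all interfaces

# Join the multicast group so the kernel delivers its datagrams to us.
group = socket.inet_aton('224.3.29.71')
mreq = struct.pack('4sL', group, socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

while True:
    data, address = sock.recvfrom(1024)
    # Reply to the sender so its recvfrom() loop gets a response.
    sock.sendto(b'ack', address)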
<|file_name|>all_campaigns.py<|end_file_name|><|fim▁begin|>"""Module to access all emailing campaigns."""

import datetime
from typing import Any, Dict
from urllib import parse

from bob_emploi.frontend.api import job_pb2
from bob_emploi.frontend.api import project_pb2
from bob_emploi.frontend.api import user_pb2
from bob_emploi.frontend.server import auth
from bob_emploi.frontend.server import french
from bob_emploi.frontend.server import i18n
from bob_emploi.frontend.server import jobs
from bob_emploi.frontend.server import mongo
from bob_emploi.frontend.server import scoring
from bob_emploi.frontend.server.mail import campaign
# pylint: disable=unused-import
# Import all plugins: they register themselves when imported.
from bob_emploi.frontend.server.mail import deletion
from bob_emploi.frontend.server.mail import holiday
from bob_emploi.frontend.server.mail import imt
from bob_emploi.frontend.server.mail import improve_cv
from bob_emploi.frontend.server.mail import jobbing
from bob_emploi.frontend.server.mail import prepare_your_application
from bob_emploi.frontend.server.mail import network
from bob_emploi.frontend.server.mail import nps
from bob_emploi.frontend.server.mail import switch
from bob_emploi.frontend.server.mail import training
# pylint: enable=unused-import

_ONE_YEAR_AGO = datetime.datetime.now().replace(microsecond=0) - datetime.timedelta(365)
_SIX_MONTHS_AGO = datetime.datetime.now().replace(microsecond=0) - datetime.timedelta(180)
_ONE_MONTH_AGO = datetime.datetime.now().replace(microsecond=0) - datetime.timedelta(30)

_EXPERIENCE_AS_TEXT = {
    project_pb2.JUNIOR: 'quelques temps',
    project_pb2.INTERMEDIARY: 'plus de 2 ans',
    project_pb2.SENIOR: 'plus de 6 ans',
    project_pb2.EXPERT: 'plus de 10 ans',
}


def _get_spontaneous_vars(
        user: user_pb2.User, *, now: datetime.datetime,
        database: mongo.NoPiiMongoDatabase, **unused_kwargs: Any) -> Dict[str, str]:
    """Computes vars for a given user for the spontaneous email.

    Returns a dict with all vars required for the template.
""" project = user.projects[0] job_search_length = campaign.job_search_started_months_ago(project, now) if job_search_length < 0: raise campaign.DoNotSend('No info on user search duration') rome_id = project.target_job.job_group.rome_id if not rome_id: raise campaign.DoNotSend('User has no target job yet') job_group_info = jobs.get_group_proto(database, rome_id) if not job_group_info: raise scoring.NotEnoughDataException( 'Requires job group info to check if spontaneous application is a good channel.', fields={'projects.0.targetJob.jobGroup.romeId'}) application_modes = job_group_info.application_modes if not application_modes: raise scoring.NotEnoughDataException( 'Requires application modes to check if spontaneous application is a good channel.', fields={f'data.job_group_info.{rome_id}.application_modes'}) def _should_use_spontaneous(modes: job_pb2.RecruitingModesDistribution) -> bool: return any( mode.mode == job_pb2.SPONTANEOUS_APPLICATION and mode.percentage > 20 for mode in modes.modes) if not any(_should_use_spontaneous(modes) for modes in application_modes.values()): raise campaign.DoNotSend("Spontaneous isn't bigger than 20% of interesting channels.") contact_mode = job_group_info.preferred_application_medium if not contact_mode: raise scoring.NotEnoughDataException( 'Contact mode is required to push people to apply spontaneously', fields={f'data.job_group_info.{rome_id}.preferred_application_medium'}) in_a_workplace = job_group_info.in_a_workplace if not in_a_workplace and contact_mode != job_pb2.APPLY_BY_EMAIL: raise scoring.NotEnoughDataException( 'To apply in person, the %inAWorkplace template is required', fields={f'data.job_group_info.{rome_id}.in_a_workplace'}) like_your_workplace = job_group_info.like_your_workplace if in_a_workplace and not like_your_workplace: raise scoring.NotEnoughDataException( 'The template %likeYourWorkplace is required', fields={f'data.job_group_info.{rome_id}.like_your_workplace'}) to_the_workplace = job_group_info.to_the_workplace if not to_the_workplace: to_the_workplace = "à l'entreprise" some_companies = job_group_info.place_plural if not some_companies: some_companies = 'des entreprises' what_i_love_about = job_group_info.what_i_love_about if user.profile.gender == user_pb2.FEMININE: what_i_love_about_feminine = job_group_info.what_i_love_about_feminine if what_i_love_about_feminine: what_i_love_about = what_i_love_about_feminine if not what_i_love_about and contact_mode == job_pb2.APPLY_BY_EMAIL: raise scoring.NotEnoughDataException( 'An example about "What I love about" a company is required', fields={f'data.job_group_info.{rome_id}.what_i_love_about'}) why_specific_company = job_group_info.why_specific_company if not why_specific_company: raise scoring.NotEnoughDataException( 'An example about "Why this specific company" is required', fields={f'data.job_group_info.{rome_id}.why_specific_company'}) at_various_companies = job_group_info.at_various_companies if project.weekly_applications_estimate == project_pb2.SOME: weekly_applications_count = '5' elif project.weekly_applications_estimate > project_pb2.SOME: weekly_applications_count = '15' else: weekly_applications_count = '' if project.weekly_applications_estimate: weekly_applications_option = project_pb2.NumberOfferEstimateOption.Name( project.weekly_applications_estimate) else: weekly_applications_option = '' return dict(campaign.get_default_coaching_email_vars(user), **{ 'applicationComplexity': job_pb2.ApplicationProcessComplexity.Name(job_group_info.application_complexity), 
'atVariousCompanies': at_various_companies, 'contactMode': job_pb2.ApplicationMedium.Name(contact_mode).replace('APPLY_', ''), 'deepLinkLBB': f'https://labonneboite.pole-emploi.fr/entreprises/commune/{project.city.city_id}/rome/' f'{project.target_job.job_group.rome_id}?utm_medium=web&utm_source=bob&' 'utm_campaign=bob-email', 'emailInUrl': parse.quote(user.profile.email), 'experienceAsText': _EXPERIENCE_AS_TEXT.get(project.seniority, 'peu'), 'inWorkPlace': in_a_workplace, 'jobName': french.lower_first_letter(french.genderize_job( project.target_job, user.profile.gender)), 'lastName': user.profile.last_name, 'likeYourWorkplace': like_your_workplace, 'someCompanies': some_companies, 'toTheWorkplace': to_the_workplace, 'weeklyApplicationsCount': weekly_applications_count, 'weeklyApplicationsOption': weekly_applications_option, 'whatILoveAbout': what_i_love_about, 'whySpecificCompany': why_specific_company, }) def _get_self_development_vars( user: user_pb2.User, *, now: datetime.datetime, **unused_kwargs: Any) \ -> Dict[str, str]: """Computes vars for a given user for the self-development email. Returns a dict with all vars required for the template. """ project = user.projects[0] job_search_length = campaign.job_search_started_months_ago(project, now) if job_search_length < 0: raise campaign.DoNotSend('No info on user search duration.') if job_search_length > 12: raise campaign.DoNotSend(f'User has been searching for too long ({job_search_length:.2f}).') genderized_job_name = french.lower_first_letter(french.genderize_job( project.target_job, user.profile.gender)) age = datetime.date.today().year - user.profile.year_of_birth max_young = 30 min_old = 50 return dict(campaign.get_default_coaching_email_vars(user), **{ 'hasEnoughExperience': campaign.as_template_boolean( project.seniority > project_pb2.JUNIOR), 'isAdministrativeAssistant': campaign.as_template_boolean( project.target_job.job_group.name == 'Secrétariat'), 'isOld': campaign.as_template_boolean(age >= min_old), 'isOldNotWoman': campaign.as_template_boolean( age >= min_old and user.profile.gender != user_pb2.FEMININE), 'isYoung': campaign.as_template_boolean(age <= max_young), 'isYoungNotWoman': campaign.as_template_boolean( age <= max_young and user.profile.gender != user_pb2.FEMININE), 'jobName': genderized_job_name, }) def _body_language_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]: """Computes vars for a given user for the body language email. Returns a dict with all vars required for the template. """ worst_frustration = next( (user_pb2.Frustration.Name(frustration) for frustration in (user_pb2.SELF_CONFIDENCE, user_pb2.INTERVIEW, user_pb2.ATYPIC_PROFILE) if frustration in user.profile.frustrations), '') if not worst_frustration: raise campaign.DoNotSend('User has no frustration related to body language.') return dict(campaign.get_default_coaching_email_vars(user), **{ 'worstFrustration': worst_frustration, }) def _employment_vars( user: user_pb2.User, *, now: datetime.datetime, **unused_kwargs: Any) \ -> Dict[str, str]: """Computes vars for a given user for the employment survey. Returns a dict with all vars required for the template. """ registered_months_ago = campaign.get_french_months_ago(user.registered_at.ToDatetime(), now=now) if not registered_months_ago: if user.features_enabled.alpha: # Hack to be able to send the RER campaign to alpha users early. 
registered_months_ago = '0' else: raise campaign.DoNotSend( f'User registered only recently ({user.registered_at})') for status in user.employment_status: if status.created_at.ToDatetime() > _ONE_MONTH_AGO: raise campaign.DoNotSend( 'User has already updated their employment status less than one month ago.') survey_token = parse.quote(auth.create_token(user.user_id, role='employment-status')) redirect_url = parse.quote(f'{campaign.BASE_URL}/statut/') return dict(campaign.get_default_vars(user), **{ 'registeredMonthsAgo': registered_months_ago, 'seekingUrl': f'{campaign.BASE_URL}/api/employment-status?user={user.user_id}&token={survey_token}&' f'seeking=STILL_SEEKING&redirect={redirect_url}en-recherche', 'stopSeekingUrl': f'{campaign.BASE_URL}/api/employment-status?user={user.user_id}&token={survey_token}&' f'seeking=STOP_SEEKING&redirect={redirect_url}ne-recherche-plus', }) def _get_galita1_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]: if user_pb2.MOTIVATION not in user.profile.frustrations: raise campaign.DoNotSend('User is motivated enough.') if user.projects and user.projects[0].job_search_has_not_started: raise campaign.DoNotSend('User is not searching for a job yet.') return campaign.get_default_coaching_email_vars(user) def _get_galita2_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]: if not user.projects: raise scoring.NotEnoughDataException( 'Project is required for galita-2.', fields={'user.projects.0.kind'}) project = user.projects[0] if project.kind not in {project_pb2.FIND_A_FIRST_JOB, project_pb2.REORIENTATION} and \ project.previous_job_similarity != project_pb2.NEVER_DONE: raise campaign.DoNotSend('User is not searching a job in a profession new to them.') return dict(campaign.get_default_coaching_email_vars(user), **{ 'isReorienting': campaign.as_template_boolean(project.kind == project_pb2.REORIENTATION)}) def _get_galita3_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]: if user_pb2.NO_OFFER_ANSWERS not in user.profile.frustrations: raise campaign.DoNotSend('User is getting enough answers from recruiters.') # We set a string with a blank as this is the only way to exclude a section # on Passport except to check equality or inequality with a non-empty # string. 
    deep_link_to_follow_up_advice = ' '
    if user.projects:
        for project in user.projects:
            link = campaign.get_deep_link_advice(user.user_id, project, 'follow-up')
            if link:
                deep_link_to_follow_up_advice = link
    return dict(campaign.get_default_coaching_email_vars(user), **{
        'deepLinkToAdvice': deep_link_to_follow_up_advice,
    })


def _get_post_covid_vars(
        user: user_pb2.User, *, database: mongo.NoPiiMongoDatabase,
        **unused_kwargs: Any) -> Dict[str, str]:
    if not user.projects:
        raise scoring.NotEnoughDataException(
            'Project is required.', fields={'user.projects.0.advices'})
    project = user.projects[0]
    scoring_project = scoring.ScoringProject(project, user, database)
    if scoring_project.job_group_info().covid_risk != job_pb2.COVID_RISKY:
        raise campaign.DoNotSend("The user's project job is not covid risky.")
    try:
        network_advice_link = next(
            campaign.get_deep_link_advice(user.user_id, project, a.advice_id)
            for a in project.advices
            if a.advice_id.startswith('network-application'))
    except StopIteration:
        raise campaign.DoNotSend('No network-application advice found for the user.')\
            from None
    return dict(campaign.get_default_coaching_email_vars(user), **{
        'deepLinkAdviceUrl': network_advice_link,
        'ofJobName': scoring_project.populate_template('%ofJobName'),
    })


def _get_upskilling_user_research_vars(
        user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]:
    if not user.profile.highest_degree or user.profile.highest_degree > job_pb2.BAC_BACPRO:
        raise campaign.DoNotSend('User might have higher education.')
    if user.profile.coaching_email_frequency <= user_pb2.EMAIL_NONE:
        raise campaign.DoNotSend("User doesn't want any email.")
    return campaign.get_default_coaching_email_vars(user)


def _get_upskilling_undefined_project_vars(
        user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]:
    if user.profile.coaching_email_frequency <= user_pb2.EMAIL_NONE:
        raise campaign.DoNotSend("User doesn't want any email.")
    if not user.projects:
        raise scoring.NotEnoughDataException(
            'Project is required.', fields={'user.projects.0.diagnostic'})
    project = user.projects[0]
    if project.diagnostic.category_id != 'undefined-project':<|fim▁hole|>
        raise campaign.DoNotSend("Bob didn't give undefined-project main challenge to the user.")
    upskilling_params = parse.urlencode({
        'departement': project.city.departement_id,
        'gender': user_pb2.Gender.Name(user.profile.gender),
        'hl': user.profile.locale,
        'utm_medium': 'email',
        'utm_campaign': 'upskilling-beta',
    })
    return dict(
        campaign.get_default_coaching_email_vars(user),
        upskillingUrl=f'{campaign.BASE_URL}/orientation/accueil?{upskilling_params}',
        userId=user.user_id)


def _viral_sharing_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]:
    """Template variables for viral sharing emails."""

    if user.registered_at.ToDatetime() > _ONE_YEAR_AGO:
        raise campaign.DoNotSend('User registered less than one year ago.')
    return campaign.get_default_vars(user)


def _open_classrooms_vars(
        user: user_pb2.User, *, database: mongo.NoPiiMongoDatabase,
        **unused_kwargs: Any) -> Dict[str, str]:
    """Template variables for the OpenClassrooms email."""

    if user.registered_at.ToDatetime() < _SIX_MONTHS_AGO:
        raise campaign.DoNotSend('User registered more than 6 months ago.')

    age = datetime.date.today().year - user.profile.year_of_birth
    if age < 18:
        raise campaign.DoNotSend('User too young to subscribe to OpenClassrooms.')
    if age > 54:
        raise campaign.DoNotSend('User too old to subscribe to OpenClassrooms.')
    if user.profile.highest_degree > job_pb2.BAC_BACPRO:
        raise campaign.DoNotSend('User might have higher
education.') if user.employment_status and user.employment_status[-1].seeking != user_pb2.STILL_SEEKING: raise campaign.DoNotSend('User is no more seeking for a job.') if not (user.projects and user.projects[0]): raise scoring.NotEnoughDataException( 'Project is required.', fields={'user.projects.0.kind'}) project = user.projects[0] if project.kind != project_pb2.REORIENTATION and not ( project.kind == project_pb2.FIND_A_NEW_JOB and project.passionate_level == project_pb2.ALIMENTARY_JOB): raise campaign.DoNotSend( 'User is happy with their job (no reorientation and enthusiastic about their job).') has_children = user.profile.family_situation in { user_pb2.FAMILY_WITH_KIDS, user_pb2.SINGLE_PARENT_SITUATION, } job_group_info = jobs.get_group_proto(database, project.target_job.job_group.rome_id) if not job_group_info: raise scoring.NotEnoughDataException( 'Requires job group info for the difficulty of applying to this kind of job.') return dict(campaign.get_default_coaching_email_vars(user), **{ 'hasAtypicProfile': campaign.as_template_boolean( user_pb2.ATYPIC_PROFILE in user.profile.frustrations), 'hasFamilyAndManagementIssue': campaign.as_template_boolean( has_children and user_pb2.TIME_MANAGEMENT in user.profile.frustrations), 'hasSeniority': campaign.as_template_boolean( project.seniority > project_pb2.INTERMEDIARY), 'hasSimpleApplication': campaign.as_template_boolean( job_group_info.application_complexity == job_pb2.SIMPLE_APPLICATION_PROCESS), 'isReorienting': campaign.as_template_boolean( project.kind == project_pb2.REORIENTATION), 'isFrustratedOld': campaign.as_template_boolean( age >= 40 and user_pb2.AGE_DISCRIMINATION in user.profile.frustrations), 'ofFirstName': french.maybe_contract_prefix('de ', "d'", user.profile.name) }) # TODO(cyrille): Modularize. 
_CAMPAIGNS = [ campaign.Campaign( campaign_id='focus-spontaneous', mongo_filters={ 'projects': {'$elemMatch': { 'jobSearchHasNotStarted': {'$ne': True}, 'isIncomplete': {'$ne': True}, }}, }, get_vars=_get_spontaneous_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, is_big_focus=True, ), campaign.Campaign( campaign_id='focus-self-develop', mongo_filters={ 'projects': {'$elemMatch': { 'jobSearchHasNotStarted': {'$ne': True}, 'isIncomplete': {'$ne': True}, }} }, get_vars=_get_self_development_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, is_big_focus=True, ), campaign.Campaign( campaign_id='focus-body-language', mongo_filters={ 'projects': {'$elemMatch': { 'isIncomplete': {'$ne': True}, }}, 'profile.frustrations': {'$in': ['SELF_CONFIDENCE', 'INTERVIEW', 'ATYPIC_PROFILE']}, }, get_vars=_body_language_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, ), campaign.Campaign( campaign_id='employment-status', mongo_filters={ 'projects': {'$elemMatch': { 'jobSearchHasNotStarted': {'$ne': True}, 'isIncomplete': {'$ne': True}, }} }, get_vars=_employment_vars, sender_name=i18n.make_translatable_string('Florian de Bob'), sender_email='[email protected]', ), campaign.Campaign( campaign_id='handicap-week', mongo_filters={ 'profile.isNewsletterEnabled': True, }, get_vars=lambda u, **kw: {'firstName': u.profile.name}, sender_name='Bob', sender_email='[email protected]', ), campaign.Campaign( campaign_id='galita-1', mongo_filters={ 'profile.frustrations': 'MOTIVATION', 'projects.jobSearchHasNotStarted': {'$ne': True}, }, get_vars=_get_galita1_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, ), campaign.Campaign( campaign_id='galita-2', mongo_filters={'projects': {'$elemMatch': {'$or': [ {'previousJobSimilarity': 'NEVER_DONE'}, {'kind': {'$in': ['FIND_A_FIRST_JOB', 'REORIENTATION']}}, ]}}}, get_vars=_get_galita2_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, ), campaign.Campaign( campaign_id='galita-3', mongo_filters={ 'profile.frustrations': 'NO_OFFER_ANSWERS', 'projects.jobSearchHasNotStarted': {'$ne': True}, }, get_vars=_get_galita3_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, is_big_focus=True, ), campaign.Campaign( campaign_id='viral-sharing-1', mongo_filters={}, get_vars=_viral_sharing_vars, sender_name=i18n.make_translatable_string('Joanna de Bob'), sender_email='[email protected]', ), # TODO(sil): Make it a coaching email when the partnership is on again. 
campaign.Campaign( campaign_id='open-classrooms', mongo_filters={ '$or': [ {'employmentStatus': {'$exists': False}}, {'employmentStatus.seeking': 'STILL_SEEKING'} ], 'profile.highestDegree': { '$in': ['UNKNOWN_DEGREE', 'NO_DEGREE', 'CAP_BEP', 'BAC_BACPRO'] }, 'profile.yearOfBirth': { '$gt': datetime.datetime.now().year - 54, '$lt': datetime.datetime.now().year - 18, }, 'registeredAt': {'$gt': _SIX_MONTHS_AGO.isoformat() + 'Z'}, }, get_vars=_open_classrooms_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', ), campaign.Campaign( campaign_id='post-covid', mongo_filters={}, get_vars=_get_post_covid_vars, sender_name=i18n.make_translatable_string("Joanna et l'équipe de Bob"), sender_email='[email protected]', is_coaching=True, is_big_focus=True, ), campaign.Campaign( campaign_id='upskilling-user-research', mongo_filters={ 'profile.coachingEmailFrequency': {'$in': ['EMAIL_ONCE_A_MONTH', 'EMAIL_MAXIMUM']}, 'profile.highestDegree': {'$in': ['NO_DEGREE', 'CAP_BEP', 'BAC_BACPRO']}, }, get_vars=_get_upskilling_user_research_vars, sender_name=i18n.make_translatable_string("L'équipe de Bob"), sender_email='[email protected]', ), campaign.Campaign( campaign_id='upskilling-undefined-project', mongo_filters={ 'profile.coachingEmailFrequency': {'$in': ['EMAIL_ONCE_A_MONTH', 'EMAIL_MAXIMUM']}, 'projects.0.diagnostic.categoryId': 'undefined-project', }, get_vars=_get_upskilling_undefined_project_vars, sender_name=i18n.make_translatable_string("L'équipe de Bob"), sender_email='[email protected]', ), campaign.Campaign( campaign_id='upskilling-undefined-project-beta', mongo_filters={ 'profile.coachingEmailFrequency': {'$in': ['EMAIL_ONCE_A_MONTH', 'EMAIL_MAXIMUM']}, 'projects.0.diagnostic.categoryId': 'undefined-project', }, get_vars=_get_upskilling_undefined_project_vars, sender_name=i18n.make_translatable_string("L'équipe de Bob"), sender_email='[email protected]', ), ] for the_campaign in _CAMPAIGNS: campaign.register_campaign(the_campaign)<|fim▁end|>
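Every entry in _CAMPAIGNS follows the same contract: a mongo filter that pre-selects candidate users, and a vars function that either returns the template variables or raises DoNotSend to skip a user. A minimal sketch of one more registration, built only from constructs already used in this module; the campaign ID, filter, and skip message are hypothetical:

def _get_example_vars(user: user_pb2.User, **unused_kwargs: Any) -> Dict[str, str]:
    # Bail out early, like the vars functions above, when the user does not
    # qualify for this email.
    if not user.projects:
        raise campaign.DoNotSend('User has no project.')
    return campaign.get_default_vars(user)


campaign.register_campaign(campaign.Campaign(
    campaign_id='example-campaign',
    mongo_filters={'projects.0': {'$exists': True}},
    get_vars=_get_example_vars,
    sender_name=i18n.make_translatable_string("L'équipe de Bob"),
    sender_email='bob@bob-emploi.fr',
))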
<|file_name|>struct.go<|end_file_name|><|fim▁begin|>package xml2json

import (
	"strings"
)

// Node is a data element on a tree
type Node struct {
	Children              map[string]Nodes
	Data                  string
	ChildrenAlwaysAsArray bool
}

// Nodes is a list of nodes
type Nodes []*Node

// AddChild appends a node to the list of children
func (n *Node) AddChild(s string, c *Node) {
	// Lazily initialize the children map
	if n.Children == nil {
		n.Children = map[string]Nodes{}
	}
	n.Children[s] = append(n.Children[s], c)
}

// IsComplex returns whether it is a complex type (has children)
func (n *Node) IsComplex() bool {
	return len(n.Children) > 0
}

// GetChild returns child by path if exists. Path looks like "grandparent.parent.child.grandchild"
func (n *Node) GetChild(path string) *Node {
	result := n
	names := strings.Split(path, ".")
	for _, name := range names {
		children, exists := result.Children[name]
		if !exists {
			return nil<|fim▁hole|>
		if len(children) == 0 {
			return nil
		}
		result = children[0]
	}
	return result
}<|fim▁end|>
}
<|file_name|>benchmark_3_layers_arcpy.py<|end_file_name|><|fim▁begin|>import time

import arcpy
from arcpy import env
from arcpy.sa import *

# Set environment settings
env.workspace = ""  # set your workspace<|fim▁hole|>tic = time.clock()

a_file = "random_a.tif"
b_file = "random_b.tif"
c_file = "random_c.tif"
out_file = "output.tif"

a = Raster(a_file)
b = Raster(b_file)
c = Raster(c_file)

out = 3 * a + b * c
out.save(out_file)<|fim▁end|>
arcpy.env.overwriteOutput = True

# Check out the ArcGIS Spatial Analyst extension license
arcpy.CheckOutExtension("Spatial")
<|file_name|>arrays.js<|end_file_name|><|fim▁begin|>(function() { var _ = typeof require == 'function' ? require('..') : window._; QUnit.module('Arrays'); test('first', function() { equal(_.first([1, 2, 3]), 1, 'can pull out the first element of an array'); equal(_([1, 2, 3]).first(), 1, 'can perform OO-style "first()"'); deepEqual(_.first([1, 2, 3], 0), [], 'can pass an index to first'); deepEqual(_.first([1, 2, 3], 2), [1, 2], 'can pass an index to first'); deepEqual(_.first([1, 2, 3], 5), [1, 2, 3], 'can pass an index to first'); var result = (function(){ return _.first(arguments); }(4, 3, 2, 1)); equal(result, 4, 'works on an arguments object.'); result = _.map([[1, 2, 3], [1, 2, 3]], _.first); deepEqual(result, [1, 1], 'works well with _.map'); result = (function() { return _.first([1, 2, 3], 2); }()); deepEqual(result, [1, 2]); equal(_.first(null), undefined, 'handles nulls'); strictEqual(_.first([1, 2, 3], -1).length, 0); }); test('head', function() { strictEqual(_.first, _.head, 'alias for first'); }); test('take', function() { strictEqual(_.first, _.take, 'alias for first'); }); test('rest', function() { var numbers = [1, 2, 3, 4]; deepEqual(_.rest(numbers), [2, 3, 4], 'working rest()'); deepEqual(_.rest(numbers, 0), [1, 2, 3, 4], 'working rest(0)'); deepEqual(_.rest(numbers, 2), [3, 4], 'rest can take an index'); var result = (function(){ return _(arguments).rest(); }(1, 2, 3, 4)); deepEqual(result, [2, 3, 4], 'works on arguments object'); result = _.map([[1, 2, 3], [1, 2, 3]], _.rest); deepEqual(_.flatten(result), [2, 3, 2, 3], 'works well with _.map'); result = (function(){ return _(arguments).rest(); }(1, 2, 3, 4)); deepEqual(result, [2, 3, 4], 'works on arguments object'); }); test('tail', function() { strictEqual(_.rest, _.tail, 'alias for rest'); }); test('drop', function() { strictEqual(_.rest, _.drop, 'alias for rest'); }); test('initial', function() { deepEqual(_.initial([1, 2, 3, 4, 5]), [1, 2, 3, 4], 'working initial()'); deepEqual(_.initial([1, 2, 3, 4], 2), [1, 2], 'initial can take an index'); deepEqual(_.initial([1, 2, 3, 4], 6), [], 'initial can take a large index'); var result = (function(){ return _(arguments).initial(); }(1, 2, 3, 4)); deepEqual(result, [1, 2, 3], 'initial works on arguments object'); result = _.map([[1, 2, 3], [1, 2, 3]], _.initial); deepEqual(_.flatten(result), [1, 2, 1, 2], 'initial works with _.map'); }); test('last', function() { equal(_.last([1, 2, 3]), 3, 'can pull out the last element of an array'); deepEqual(_.last([1, 2, 3], 0), [], 'can pass an index to last'); deepEqual(_.last([1, 2, 3], 2), [2, 3], 'can pass an index to last'); deepEqual(_.last([1, 2, 3], 5), [1, 2, 3], 'can pass an index to last'); var result = (function(){ return _(arguments).last(); }(1, 2, 3, 4)); equal(result, 4, 'works on an arguments object'); result = _.map([[1, 2, 3], [1, 2, 3]], _.last); deepEqual(result, [3, 3], 'works well with _.map'); equal(_.last(null), undefined, 'handles nulls'); strictEqual(_.last([1, 2, 3], -1).length, 0); }); test('compact', function() { equal(_.compact([0, 1, false, 2, false, 3]).length, 3, 'can trim out all falsy values'); var result = (function(){ return _.compact(arguments).length; }(0, 1, false, 2, false, 3)); equal(result, 3, 'works on an arguments object'); }); test('flatten', function() { deepEqual(_.flatten(null), [], 'Flattens supports null'); deepEqual(_.flatten(void 0), [], 'Flattens supports undefined'); deepEqual(_.flatten([[], [[]], []]), [], 'Flattens empty arrays'); deepEqual(_.flatten([[], [[]], []], true), 
[[]], 'Flattens empty arrays'); var list = [1, [2], [3, [[[4]]]]]; deepEqual(_.flatten(list), [1, 2, 3, 4], 'can flatten nested arrays'); deepEqual(_.flatten(list, true), [1, 2, 3, [[[4]]]], 'can shallowly flatten nested arrays'); var result = (function(){ return _.flatten(arguments); }(1, [2], [3, [[[4]]]])); deepEqual(result, [1, 2, 3, 4], 'works on an arguments object'); list = [[1], [2], [3], [[4]]]; deepEqual(_.flatten(list, true), [1, 2, 3, [4]], 'can shallowly flatten arrays containing only other arrays'); equal(_.flatten([_.range(10), _.range(10), 5, 1, 3], true).length, 23); equal(_.flatten([_.range(10), _.range(10), 5, 1, 3]).length, 23); equal(_.flatten([new Array(1000000), _.range(56000), 5, 1, 3]).length, 1056003, 'Flatten can handle massive collections'); equal(_.flatten([new Array(1000000), _.range(56000), 5, 1, 3], true).length, 1056003, 'Flatten can handle massive collections'); }); test('without', function() { var list = [1, 2, 1, 0, 3, 1, 4]; deepEqual(_.without(list, 0, 1), [2, 3, 4], 'can remove all instances of an object'); var result = (function(){ return _.without(arguments, 0, 1); }(1, 2, 1, 0, 3, 1, 4)); deepEqual(result, [2, 3, 4], 'works on an arguments object'); list = [{one : 1}, {two : 2}]; equal(_.without(list, {one : 1}).length, 2, 'uses real object identity for comparisons.'); equal(_.without(list, list[0]).length, 1, 'ditto.'); }); test('sortedIndex', function() { var numbers = [10, 20, 30, 40, 50], num = 35; var indexForNum = _.sortedIndex(numbers, num); equal(indexForNum, 3, '35 should be inserted at index 3'); var indexFor30 = _.sortedIndex(numbers, 30); equal(indexFor30, 2, '30 should be inserted at index 2'); var objects = [{x: 10}, {x: 20}, {x: 30}, {x: 40}]; var iterator = function(obj){ return obj.x; }; strictEqual(_.sortedIndex(objects, {x: 25}, iterator), 2); strictEqual(_.sortedIndex(objects, {x: 35}, 'x'), 3); var context = {1: 2, 2: 3, 3: 4}; iterator = function(obj){ return this[obj]; }; strictEqual(_.sortedIndex([1, 3], 2, iterator, context), 1); var values = [0, 1, 3, 7, 15, 31, 63, 127, 255, 511, 1023, 2047, 4095, 8191, 16383, 32767, 65535, 131071, 262143, 524287, 1048575, 2097151, 4194303, 8388607, 16777215, 33554431, 67108863, 134217727, 268435455, 536870911, 1073741823, 2147483647]; var array = Array(Math.pow(2, 32) - 1); var length = values.length; while (length--) { array[values[length]] = values[length]; } equal(_.sortedIndex(array, 2147483648), 2147483648, 'should work with large indexes'); }); test('uniq', function() { var list = [1, 2, 1, 3, 1, 4]; deepEqual(_.uniq(list), [1, 2, 3, 4], 'can find the unique values of an unsorted array'); list = [1, 1, 1, 2, 2, 3]; deepEqual(_.uniq(list, true), [1, 2, 3], 'can find the unique values of a sorted array faster'); list = [{name: 'moe'}, {name: 'curly'}, {name: 'larry'}, {name: 'curly'}]; var iterator = function(value) { return value.name; }; deepEqual(_.map(_.uniq(list, false, iterator), iterator), ['moe', 'curly', 'larry'], 'can find the unique values of an array using a custom iterator'); deepEqual(_.map(_.uniq(list, iterator), iterator), ['moe', 'curly', 'larry'], 'can find the unique values of an array using a custom iterator without specifying whether array is sorted'); iterator = function(value) { return value + 1; }; list = [1, 2, 2, 3, 4, 4]; deepEqual(_.uniq(list, true, iterator), [1, 2, 3, 4], 'iterator works with sorted array'); var kittens = [ {kitten: 'Celery', cuteness: 8}, {kitten: 'Juniper', cuteness: 10}, {kitten: 'Spottis', cuteness: 10} ]; var expected = [ {kitten: 
'Celery', cuteness: 8}, {kitten: 'Juniper', cuteness: 10} ]; deepEqual(_.uniq(kittens, true, 'cuteness'), expected, 'string iterator works with sorted array'); var result = (function(){ return _.uniq(arguments); }(1, 2, 1, 3, 1, 4)); deepEqual(result, [1, 2, 3, 4], 'works on an arguments object'); var a = {}, b = {}, c = {}; deepEqual(_.uniq([a, b, a, b, c]), [a, b, c], 'works on values that can be tested for equivalency but not ordered'); deepEqual(_.uniq(null), []); var context = {}; list = [3]; _.uniq(list, function(value, index, array) { strictEqual(this, context); strictEqual(value, 3); strictEqual(index, 0); strictEqual(array, list); }, context); deepEqual(_.uniq([{a: 1, b: 1}, {a: 1, b: 2}, {a: 1, b: 3}, {a: 2, b: 1}], 'a'), [{a: 1, b: 1}, {a: 2, b: 1}], 'can use pluck like iterator'); deepEqual(_.uniq([{0: 1, b: 1}, {0: 1, b: 2}, {0: 1, b: 3}, {0: 2, b: 1}], 0), [{0: 1, b: 1}, {0: 2, b: 1}], 'can use falsey pluck like iterator'); }); test('unique', function() { strictEqual(_.uniq, _.unique, 'alias for uniq'); }); test('intersection', function() { var stooges = ['moe', 'curly', 'larry'], leaders = ['moe', 'groucho']; deepEqual(_.intersection(stooges, leaders), ['moe'], 'can take the set intersection of two arrays'); deepEqual(_(stooges).intersection(leaders), ['moe'], 'can perform an OO-style intersection'); var result = (function(){ return _.intersection(arguments, leaders); }('moe', 'curly', 'larry')); deepEqual(result, ['moe'], 'works on an arguments object'); var theSixStooges = ['moe', 'moe', 'curly', 'curly', 'larry', 'larry']; deepEqual(_.intersection(theSixStooges, leaders), ['moe'], 'returns a duplicate-free array'); result = _.intersection([2, 4, 3, 1], [1, 2, 3]); deepEqual(result, [2, 3, 1], 'preserves order of first array'); result = _.intersection(null, [1, 2, 3]); equal(Object.prototype.toString.call(result), '[object Array]', 'returns an empty array when passed null as first argument'); equal(result.length, 0, 'returns an empty array when passed null as first argument'); result = _.intersection([1, 2, 3], null); equal(Object.prototype.toString.call(result), '[object Array]', 'returns an empty array when passed null as argument beyond the first'); equal(result.length, 0, 'returns an empty array when passed null as argument beyond the first'); }); test('union', function() { var result = _.union([1, 2, 3], [2, 30, 1], [1, 40]); deepEqual(result, [1, 2, 3, 30, 40], 'takes the union of a list of arrays'); result = _.union([1, 2, 3], [2, 30, 1], [1, 40, [1]]); deepEqual(result, [1, 2, 3, 30, 40, [1]], 'takes the union of a list of nested arrays'); var args = null; (function(){ args = arguments; }(1, 2, 3)); result = _.union(args, [2, 30, 1], [1, 40]); deepEqual(result, [1, 2, 3, 30, 40], 'takes the union of a list of arrays'); result = _.union([1, 2, 3], 4); deepEqual(result, [1, 2, 3], 'restrict the union to arrays only'); }); test('difference', function() { var result = _.difference([1, 2, 3], [2, 30, 40]); deepEqual(result, [1, 3], 'takes the difference of two arrays'); result = _.difference([1, 2, 3, 4], [2, 30, 40], [1, 11, 111]); deepEqual(result, [3, 4], 'takes the difference of three arrays'); result = _.difference([1, 2, 3], 1); deepEqual(result, [1, 2, 3], 'restrict the difference to arrays only'); }); test('zip', function() { var names = ['moe', 'larry', 'curly'], ages = [30, 40, 50], leaders = [true]; deepEqual(_.zip(names, ages, leaders), [ ['moe', 30, true], ['larry', 40, undefined], ['curly', 50, undefined] ], 'zipped together arrays of different lengths'); 
var stooges = _.zip(['moe', 30, 'stooge 1'], ['larry', 40, 'stooge 2'], ['curly', 50, 'stooge 3']); deepEqual(stooges, [['moe', 'larry', 'curly'], [30, 40, 50], ['stooge 1', 'stooge 2', 'stooge 3']], 'zipped pairs'); // In the case of difference lengths of the tuples undefineds // should be used as placeholder stooges = _.zip(['moe', 30], ['larry', 40], ['curly', 50, 'extra data']); deepEqual(stooges, [['moe', 'larry', 'curly'], [30, 40, 50], [undefined, undefined, 'extra data']], 'zipped pairs with empties'); var empty = _.zip([]); deepEqual(empty, [], 'unzipped empty'); deepEqual(_.zip(null), [], 'handles null'); deepEqual(_.zip(), [], '_.zip() returns []'); }); test('unzip', function() { deepEqual(_.unzip(null), [], 'handles null'); deepEqual(_.unzip([['a', 'b'], [1, 2]]), [['a', 1], ['b', 2]]); // complements zip var zipped = _.zip(['fred', 'barney'], [30, 40], [true, false]); deepEqual(_.unzip(zipped), [['fred', 'barney'], [30, 40], [true, false]]); zipped = _.zip(['moe', 30], ['larry', 40], ['curly', 50, 'extra data']); deepEqual(_.unzip(zipped), [['moe', 30, void 0], ['larry', 40, void 0], ['curly', 50, 'extra data']], 'Uses length of largest array'); }); test('object', function() { var result = _.object(['moe', 'larry', 'curly'], [30, 40, 50]); var shouldBe = {moe: 30, larry: 40, curly: 50}; deepEqual(result, shouldBe, 'two arrays zipped together into an object'); result = _.object([['one', 1], ['two', 2], ['three', 3]]); shouldBe = {one: 1, two: 2, three: 3}; deepEqual(result, shouldBe, 'an array of pairs zipped together into an object'); var stooges = {moe: 30, larry: 40, curly: 50}; deepEqual(_.object(_.pairs(stooges)), stooges, 'an object converted to pairs and back to an object'); deepEqual(_.object(null), {}, 'handles nulls'); }); test('indexOf', function() { var numbers = [1, 2, 3]; equal(_.indexOf(numbers, 2), 1, 'can compute indexOf'); var result = (function(){ return _.indexOf(arguments, 2); }(1, 2, 3)); equal(result, 1, 'works on an arguments object'); _.each([null, void 0, [], false], function(val) { var msg = 'Handles: ' + (_.isArray(val) ? 
'[]' : val); equal(_.indexOf(val, 2), -1, msg); equal(_.indexOf(val, 2, -1), -1, msg); equal(_.indexOf(val, 2, -20), -1, msg); equal(_.indexOf(val, 2, 15), -1, msg); }); var num = 35; numbers = [10, 20, 30, 40, 50]; var index = _.indexOf(numbers, num, true); equal(index, -1, '35 is not in the list'); numbers = [10, 20, 30, 40, 50]; num = 40; index = _.indexOf(numbers, num, true); equal(index, 3, '40 is in the list'); numbers = [1, 40, 40, 40, 40, 40, 40, 40, 50, 60, 70]; num = 40; equal(_.indexOf(numbers, num, true), 1, '40 is in the list'); equal(_.indexOf(numbers, 6, true), -1, '6 isnt in the list'); equal(_.indexOf([1, 2, 5, 4, 6, 7], 5, true), -1, 'sorted indexOf doesn\'t uses binary search'); ok(_.every(['1', [], {}, null], function() { return _.indexOf(numbers, num, {}) === 1; }), 'non-nums as fromIndex make indexOf assume sorted'); numbers = [1, 2, 3, 1, 2, 3, 1, 2, 3]; index = _.indexOf(numbers, 2, 5); equal(index, 7, 'supports the fromIndex argument'); index = _.indexOf([,,,], undefined); equal(index, 0, 'treats sparse arrays as if they were dense'); var array = [1, 2, 3, 1, 2, 3]; strictEqual(_.indexOf(array, 1, -3), 3, 'neg `fromIndex` starts at the right index'); strictEqual(_.indexOf(array, 1, -2), -1, 'neg `fromIndex` starts at the right index'); strictEqual(_.indexOf(array, 2, -3), 4); _.each([-6, -8, -Infinity], function(fromIndex) { strictEqual(_.indexOf(array, 1, fromIndex), 0); }); strictEqual(_.indexOf([1, 2, 3], 1, true), 0); index = _.indexOf([], undefined, true); equal(index, -1, 'empty array with truthy `isSorted` returns -1'); }); test('indexOf with NaN', function() { strictEqual(_.indexOf([1, 2, NaN, NaN], NaN), 2, 'Expected [1, 2, NaN] to contain NaN'); strictEqual(_.indexOf([1, 2, Infinity], NaN), -1, 'Expected [1, 2, NaN] to contain NaN'); strictEqual(_.indexOf([1, 2, NaN, NaN], NaN, 1), 2, 'startIndex does not affect result'); strictEqual(_.indexOf([1, 2, NaN, NaN], NaN, -2), 2, 'startIndex does not affect result'); (function() { strictEqual(_.indexOf(arguments, NaN), 2, 'Expected arguments [1, 2, NaN] to contain NaN'); }(1, 2, NaN, NaN)); }); test('indexOf with +- 0', function() { _.each([-0, +0], function(val) { strictEqual(_.indexOf([1, 2, val, val], val), 2); strictEqual(_.indexOf([1, 2, val, val], -val), 2); }); }); test('lastIndexOf', function() { var numbers = [1, 0, 1]; var falsey = [void 0, '', 0, false, NaN, null, undefined]; equal(_.lastIndexOf(numbers, 1), 2); numbers = [1, 0, 1, 0, 0, 1, 0, 0, 0]; numbers.lastIndexOf = null; equal(_.lastIndexOf(numbers, 1), 5, 'can compute lastIndexOf, even without the native function'); equal(_.lastIndexOf(numbers, 0), 8, 'lastIndexOf the other element'); var result = (function(){ return _.lastIndexOf(arguments, 1); }(1, 0, 1, 0, 0, 1, 0, 0, 0)); equal(result, 5, 'works on an arguments object'); _.each([null, void 0, [], false], function(val) { var msg = 'Handles: ' + (_.isArray(val) ? 
'[]' : val); equal(_.lastIndexOf(val, 2), -1, msg); equal(_.lastIndexOf(val, 2, -1), -1, msg); equal(_.lastIndexOf(val, 2, -20), -1, msg); equal(_.lastIndexOf(val, 2, 15), -1, msg); }); numbers = [1, 2, 3, 1, 2, 3, 1, 2, 3]; var index = _.lastIndexOf(numbers, 2, 2); equal(index, 1, 'supports the fromIndex argument'); var array = [1, 2, 3, 1, 2, 3]; strictEqual(_.lastIndexOf(array, 1, 0), 0, 'starts at the correct from idx'); strictEqual(_.lastIndexOf(array, 3), 5, 'should return the index of the last matched value'); strictEqual(_.lastIndexOf(array, 4), -1, 'should return `-1` for an unmatched value'); strictEqual(_.lastIndexOf(array, 1, 2), 0, 'should work with a positive `fromIndex`'); _.each([6, 8, Math.pow(2, 32), Infinity], function(fromIndex) { strictEqual(_.lastIndexOf(array, undefined, fromIndex), -1); strictEqual(_.lastIndexOf(array, 1, fromIndex), 3); strictEqual(_.lastIndexOf(array, '', fromIndex), -1); }); var expected = _.map(falsey, function(value) { return typeof value == 'number' ? -1 : 5; }); var actual = _.map(falsey, function(fromIndex) { return _.lastIndexOf(array, 3, fromIndex); }); deepEqual(actual, expected, 'should treat falsey `fromIndex` values, except `0` and `NaN`, as `array.length`'); strictEqual(_.lastIndexOf(array, 3, '1'), 5, 'should treat non-number `fromIndex` values as `array.length`'); strictEqual(_.lastIndexOf(array, 3, true), 5, 'should treat non-number `fromIndex` values as `array.length`'); strictEqual(_.lastIndexOf(array, 2, -3), 1, 'should work with a negative `fromIndex`'); strictEqual(_.lastIndexOf(array, 1, -3), 3, 'neg `fromIndex` starts at the right index'); deepEqual(_.map([-6, -8, -Infinity], function(fromIndex) { return _.lastIndexOf(array, 1, fromIndex); }), [0, -1, -1]); }); test('lastIndexOf with NaN', function() { strictEqual(_.lastIndexOf([1, 2, NaN, NaN], NaN), 3, 'Expected [1, 2, NaN] to contain NaN'); strictEqual(_.lastIndexOf([1, 2, Infinity], NaN), -1, 'Expected [1, 2, NaN] to contain NaN'); strictEqual(_.lastIndexOf([1, 2, NaN, NaN], NaN, 2), 2, 'fromIndex does not affect result'); strictEqual(_.lastIndexOf([1, 2, NaN, NaN], NaN, -2), 2, 'fromIndex does not affect result'); (function() { strictEqual(_.lastIndexOf(arguments, NaN), 3, 'Expected arguments [1, 2, NaN] to contain NaN'); }(1, 2, NaN, NaN)); }); test('lastIndexOf with +- 0', function() { _.each([-0, +0], function(val) { strictEqual(_.lastIndexOf([1, 2, val, val], val), 3); strictEqual(_.lastIndexOf([1, 2, val, val], -val), 3); strictEqual(_.lastIndexOf([-1, 1, 2], -val), -1); }); }); test('findIndex', function() { var objects = [ {'a': 0, 'b': 0}, {'a': 1, 'b': 1}, {'a': 2, 'b': 2}, {'a': 0, 'b': 0} ]; equal(_.findIndex(objects, function(obj) { return obj.a === 0; }), 0); equal(_.findIndex(objects, function(obj) { return obj.b * obj.a === 4; }), 2); equal(_.findIndex(objects, 'a'), 1, 'Uses lookupIterator'); equal(_.findIndex(objects, function(obj) { return obj.b * obj.a === 5; }), -1); equal(_.findIndex(null, _.noop), -1); strictEqual(_.findIndex(objects, function(a) { return a.foo === null; }), -1);<|fim▁hole|> }, objects); var sparse = []; sparse[20] = {'a': 2, 'b': 2}; equal(_.findIndex(sparse, function(obj) { return obj && obj.b * obj.a === 4; }), 20, 'Works with sparse arrays'); var array = [1, 2, 3, 4]; array.match = 55; strictEqual(_.findIndex(array, function(x) { return x === 55; }), -1, 'doesn\'t match array-likes keys'); }); test('findLastIndex', function() { var objects = [ {'a': 0, 'b': 0}, {'a': 1, 'b': 1}, {'a': 2, 'b': 2}, {'a': 0, 'b': 0} ]; 
equal(_.findLastIndex(objects, function(obj) { return obj.a === 0; }), 3); equal(_.findLastIndex(objects, function(obj) { return obj.b * obj.a === 4; }), 2); equal(_.findLastIndex(objects, 'a'), 2, 'Uses lookupIterator'); equal(_.findLastIndex(objects, function(obj) { return obj.b * obj.a === 5; }), -1); equal(_.findLastIndex(null, _.noop), -1); strictEqual(_.findLastIndex(objects, function(a) { return a.foo === null; }), -1); _.findLastIndex([{a: 1}], function(a, key, obj) { equal(key, 0); deepEqual(obj, [{a: 1}]); strictEqual(this, objects, 'called with context'); }, objects); var sparse = []; sparse[20] = {'a': 2, 'b': 2}; equal(_.findLastIndex(sparse, function(obj) { return obj && obj.b * obj.a === 4; }), 20, 'Works with sparse arrays'); var array = [1, 2, 3, 4]; array.match = 55; strictEqual(_.findLastIndex(array, function(x) { return x === 55; }), -1, 'doesn\'t match array-likes keys'); }); test('range', function() { deepEqual(_.range(0), [], 'range with 0 as a first argument generates an empty array'); deepEqual(_.range(4), [0, 1, 2, 3], 'range with a single positive argument generates an array of elements 0,1,2,...,n-1'); deepEqual(_.range(5, 8), [5, 6, 7], 'range with two arguments a &amp; b, a&lt;b generates an array of elements a,a+1,a+2,...,b-2,b-1'); deepEqual(_.range(8, 5), [], 'range with two arguments a &amp; b, b&lt;a generates an empty array'); deepEqual(_.range(3, 10, 3), [3, 6, 9], 'range with three arguments a &amp; b &amp; c, c &lt; b-a, a &lt; b generates an array of elements a,a+c,a+2c,...,b - (multiplier of a) &lt; c'); deepEqual(_.range(3, 10, 15), [3], 'range with three arguments a &amp; b &amp; c, c &gt; b-a, a &lt; b generates an array with a single element, equal to a'); deepEqual(_.range(12, 7, -2), [12, 10, 8], 'range with three arguments a &amp; b &amp; c, a &gt; b, c &lt; 0 generates an array of elements a,a-c,a-2c and ends with the number not less than b'); deepEqual(_.range(0, -10, -1), [0, -1, -2, -3, -4, -5, -6, -7, -8, -9], 'final example in the Python docs'); }); }());<|fim▁end|>
    _.findIndex([{a: 1}], function(a, key, obj) {
      equal(key, 0);
      deepEqual(obj, [{a: 1}]);
      strictEqual(this, objects, 'called with context');
<|file_name|>ScrewTurnPageFile.js<|end_file_name|><|fim▁begin|>var path = require('path')

ScrewTurnPageFile.LATEST = -1
ScrewTurnPageFile.compare = compare

ScrewTurnPageFile.prototype.isLatest = isLatest
ScrewTurnPageFile.prototype.compareTo = compareTo

function ScrewTurnPageFile(filename) {
  var revision = getRevision(filename)
    , title = getTitle(filename)

  this.__defineGetter__('filename', function() {
    return filename
  })

  this.__defineGetter__('title', function() {
    return title
  })

  this.__defineGetter__('revision', function() {
    return revision
  })
}

function getRevision(filename) {
  var basename = path.basename(filename, '.cs')
    , offset = basename.indexOf('.')
    , revision = offset >= 0 ? parseInt(basename.substr(offset + 1), 10) : ScrewTurnPageFile.LATEST

  return revision
}

function getTitle(filename) {
  var basename = path.basename(filename, '.cs')
    , offset = basename.indexOf('.')
    , title = offset >= 0 ? basename.substr(0, offset) : basename<|fim▁hole|>
  return title
}

function isLatest() {
  return this.revision === ScrewTurnPageFile.LATEST
}

function compareTo(item) {
  return compare(this, item)
}

function compare(a, b) {
  if(a.title < b.title) return -1
  else if(a.title > b.title) return 1
  else if(a.revision === ScrewTurnPageFile.LATEST) return 1
  else if(b.revision === ScrewTurnPageFile.LATEST) return -1

  return a.revision - b.revision
}

module.exports = ScrewTurnPageFile<|fim▁end|>
<|file_name|>ActuExpendedAdapter.java<|end_file_name|><|fim▁begin|>package com.krealid.starter.adapters; import android.content.Context; import android.support.v7.widget.RecyclerView; import android.util.TypedValue; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.webkit.WebChromeClient; import android.webkit.WebSettings; import android.webkit.WebView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import com.krealid.starter.R; import butterknife.ButterKnife; /** * Created by Maxime on 26/08/2015. */ public class ActuExpendedAdapter extends RecyclerView.Adapter<ActuExpendedAdapter.ViewHolder> { private String text; private Context context; private View view; public ActuExpendedAdapter(String text, Context context){ this.text = text; this.context = context; } @Override public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { view = LayoutInflater .from(parent.getContext()) .inflate(R.layout.actu_text, parent,false); return new ViewHolder(view); } @Override public void onBindViewHolder(ViewHolder holder, int position) { String iframeLink; if(this.text.contains("<iframe")){ String iframeStart = "<iframe src=\""; String iframeEnd = "\" width="; int indexToStartIframe = this.text.indexOf(iframeStart); int indexToEndIframe = (this.text.substring(indexToStartIframe)).indexOf(iframeEnd); String iframeHeight = "height=\""; int indexToStartHeightIframe= this.text.indexOf(iframeHeight); String iframeHeightValue = this.text.substring(indexToStartHeightIframe + iframeHeight.length(), this.text.indexOf('"', indexToStartHeightIframe + iframeHeight.length())); iframeLink = this.text.substring(indexToStartIframe + iframeStart.length(), indexToStartIframe + indexToEndIframe); String articleText = this.text.substring(0, indexToStartIframe); holder.text.loadData("<font style=\"text-align:justify;text-justify:inter-word;\">" + articleText + "</font>", "text/html; charset=UTF-8", null); final RelativeLayout layout = new RelativeLayout(this.context); RelativeLayout.LayoutParams lprams = new RelativeLayout.LayoutParams( RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.MATCH_PARENT); layout.setLayoutParams(lprams); WebView web1 = new WebView(this.context); web1.setWebChromeClient(new WebChromeClient()); web1.getSettings().setJavaScriptCanOpenWindowsAutomatically(true); web1.getSettings().setJavaScriptEnabled(true); web1.getSettings().setPluginState(WebSettings.PluginState.ON); web1.loadUrl(iframeLink); web1.setId(R.id.myWebView); int height = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, Integer.parseInt(iframeHeightValue), this.context.getResources().getDisplayMetrics()); final RelativeLayout.LayoutParams webViewParams = new RelativeLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, height); layout.addView(web1, webViewParams); holder.articleContainer.addView(layout); } else { holder.text.loadData("<font style=\"text-align:justify;text-justify:inter-word;\">" + this.text + "</font>", "text/html; charset=UTF-8", null); } } @Override public int getItemCount() { return 1; } public void stopVideo(){<|fim▁hole|> WebView mWebView = (WebView) holder.articleContainer.findViewById(R.id.myWebView); if(mWebView != null) mWebView.loadUrl("about:blank"); } public static class ViewHolder extends RecyclerView.ViewHolder { public WebView text; public LinearLayout articleContainer; public ViewHolder(View itemView) { super(itemView); text = ButterKnife.findById(itemView, 
R.id.articleContent); articleContainer = ButterKnife.findById(itemView, R.id.article_container); } } }<|fim▁end|>
ViewHolder holder = new ViewHolder(view);
<|file_name|>test_is.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.test import SimpleTestCase from localflavor.is_.forms import (ISIdNumberField, ISPhoneNumberField, ISPostalCodeSelect) class ISLocalFlavorTests(SimpleTestCase): def test_ISPostalCodeSelect(self): f = ISPostalCodeSelect()<|fim▁hole|><option value="101">101 Reykjav\xedk</option> <option value="103">103 Reykjav\xedk</option> <option value="104">104 Reykjav\xedk</option> <option value="105">105 Reykjav\xedk</option> <option value="107">107 Reykjav\xedk</option> <option value="108">108 Reykjav\xedk</option> <option value="109">109 Reykjav\xedk</option> <option value="110">110 Reykjav\xedk</option> <option value="111">111 Reykjav\xedk</option> <option value="112">112 Reykjav\xedk</option> <option value="113">113 Reykjav\xedk</option> <option value="116">116 Kjalarnes</option> <option value="121">121 Reykjav\xedk</option> <option value="123">123 Reykjav\xedk</option> <option value="124">124 Reykjav\xedk</option> <option value="125">125 Reykjav\xedk</option> <option value="127">127 Reykjav\xedk</option> <option value="128">128 Reykjav\xedk</option> <option value="129">129 Reykjav\xedk</option> <option value="130">130 Reykjav\xedk</option> <option value="132">132 Reykjav\xedk</option> <option value="150">150 Reykjav\xedk</option> <option value="155">155 Reykjav\xedk</option> <option value="170">170 Seltjarnarnes</option> <option value="172">172 Seltjarnarnes</option> <option value="190">190 Vogar</option> <option value="200">200 K\xf3pavogur</option> <option value="201">201 K\xf3pavogur</option> <option value="202">202 K\xf3pavogur</option> <option value="203">203 K\xf3pavogur</option> <option value="210">210 Gar\xf0ab\xe6r</option> <option value="212">212 Gar\xf0ab\xe6r</option> <option value="220">220 Hafnarfj\xf6r\xf0ur</option> <option value="221">221 Hafnarfj\xf6r\xf0ur</option> <option value="222">222 Hafnarfj\xf6r\xf0ur</option> <option value="225">225 \xc1lftanes</option> <option value="230">230 Reykjanesb\xe6r</option> <option value="232">232 Reykjanesb\xe6r</option> <option value="233">233 Reykjanesb\xe6r</option> <option value="235">235 Keflav\xedkurflugv\xf6llur</option> <option value="240">240 Grindav\xedk</option> <option value="245">245 Sandger\xf0i</option> <option value="250">250 Gar\xf0ur</option> <option value="260">260 Reykjanesb\xe6r</option> <option value="270">270 Mosfellsb\xe6r</option> <option value="271">271 Mosfellsb\xe6r</option> <option value="276">276 Mosfellsb\xe6r</option> <option value="300">300 Akranes</option> <option value="301">301 Akranes</option> <option value="302">302 Akranes</option> <option value="310">310 Borgarnes</option> <option value="311">311 Borgarnes</option> <option value="320">320 Reykholt \xed Borgarfir\xf0i</option> <option value="340">340 Stykkish\xf3lmur</option> <option value="345">345 Flatey \xe1 Brei\xf0afir\xf0i</option> <option value="350">350 Grundarfj\xf6r\xf0ur</option> <option value="355">355 \xd3lafsv\xedk</option> <option value="356">356 Sn\xe6fellsb\xe6r</option> <option value="360">360 Hellissandur</option> <option value="370">370 B\xfa\xf0ardalur</option> <option value="371">371 B\xfa\xf0ardalur</option> <option value="380">380 Reykh\xf3lahreppur</option> <option value="400">400 \xcdsafj\xf6r\xf0ur</option> <option value="401">401 \xcdsafj\xf6r\xf0ur</option> <option value="410">410 Hn\xedfsdalur</option> <option value="415">415 Bolungarv\xedk</option> <option value="420">420 S\xfa\xf0av\xedk</option> <option value="425">425 
Flateyri</option> <option value="430">430 Su\xf0ureyri</option> <option value="450">450 Patreksfj\xf6r\xf0ur</option> <option value="451">451 Patreksfj\xf6r\xf0ur</option> <option value="460">460 T\xe1lknafj\xf6r\xf0ur</option> <option value="465">465 B\xedldudalur</option> <option value="470">470 \xdeingeyri</option> <option value="471">471 \xdeingeyri</option> <option value="500">500 Sta\xf0ur</option> <option value="510">510 H\xf3lmav\xedk</option> <option value="512">512 H\xf3lmav\xedk</option> <option value="520">520 Drangsnes</option> <option value="522">522 Kj\xf6rvogur</option> <option value="523">523 B\xe6r</option> <option value="524">524 Nor\xf0urfj\xf6r\xf0ur</option> <option value="530">530 Hvammstangi</option> <option value="531">531 Hvammstangi</option> <option value="540">540 Bl\xf6ndu\xf3s</option> <option value="541">541 Bl\xf6ndu\xf3s</option> <option value="545">545 Skagastr\xf6nd</option> <option value="550">550 Sau\xf0\xe1rkr\xf3kur</option> <option value="551">551 Sau\xf0\xe1rkr\xf3kur</option> <option value="560">560 Varmahl\xed\xf0</option> <option value="565">565 Hofs\xf3s</option> <option value="566">566 Hofs\xf3s</option> <option value="570">570 Flj\xf3t</option> <option value="580">580 Siglufj\xf6r\xf0ur</option> <option value="600">600 Akureyri</option> <option value="601">601 Akureyri</option> <option value="602">602 Akureyri</option> <option value="603">603 Akureyri</option> <option value="610">610 Greniv\xedk</option> <option value="611">611 Gr\xedmsey</option> <option value="620">620 Dalv\xedk</option> <option value="621">621 Dalv\xedk</option> <option value="625">625 \xd3lafsfj\xf6r\xf0ur</option> <option value="630">630 Hr\xedsey</option> <option value="640">640 H\xfasav\xedk</option> <option value="641">641 H\xfasav\xedk</option> <option value="645">645 Fossh\xf3ll</option> <option value="650">650 Laugar</option> <option value="660">660 M\xfdvatn</option> <option value="670">670 K\xf3pasker</option> <option value="671">671 K\xf3pasker</option> <option value="675">675 Raufarh\xf6fn</option> <option value="680">680 \xde\xf3rsh\xf6fn</option> <option value="681">681 \xde\xf3rsh\xf6fn</option> <option value="685">685 Bakkafj\xf6r\xf0ur</option> <option value="690">690 Vopnafj\xf6r\xf0ur</option> <option value="700">700 Egilssta\xf0ir</option> <option value="701">701 Egilssta\xf0ir</option> <option value="710">710 Sey\xf0isfj\xf6r\xf0ur</option> <option value="715">715 Mj\xf3ifj\xf6r\xf0ur</option> <option value="720">720 Borgarfj\xf6r\xf0ur eystri</option> <option value="730">730 Rey\xf0arfj\xf6r\xf0ur</option> <option value="735">735 Eskifj\xf6r\xf0ur</option> <option value="740">740 Neskaupsta\xf0ur</option> <option value="750">750 F\xe1skr\xfa\xf0sfj\xf6r\xf0ur</option> <option value="755">755 St\xf6\xf0varfj\xf6r\xf0ur</option> <option value="760">760 Brei\xf0dalsv\xedk</option> <option value="765">765 Dj\xfapivogur</option> <option value="780">780 H\xf6fn \xed Hornafir\xf0i</option> <option value="781">781 H\xf6fn \xed Hornafir\xf0i</option> <option value="785">785 \xd6r\xe6fi</option> <option value="800">800 Selfoss</option> <option value="801">801 Selfoss</option> <option value="802">802 Selfoss</option> <option value="810">810 Hverager\xf0i</option> <option value="815">815 \xdeorl\xe1ksh\xf6fn</option> <option value="816">816 \xd6lfus</option> <option value="820">820 Eyrarbakki</option> <option value="825">825 Stokkseyri</option> <option value="840">840 Laugarvatn</option> <option value="845">845 Fl\xfa\xf0ir</option> <option value="850">850 
Hella</option> <option value="851">851 Hella</option> <option value="860">860 Hvolsv\xf6llur</option> <option value="861">861 Hvolsv\xf6llur</option> <option value="870">870 V\xedk</option> <option value="871">871 V\xedk</option> <option value="880">880 Kirkjub\xe6jarklaustur</option> <option value="900">900 Vestmannaeyjar</option> <option value="902">902 Vestmannaeyjar</option> </select>''' self.assertHTMLEqual(f.render('foo', 'bar'), out) def test_ISIdNumberField(self): error_atleast = ['Ensure this value has at least 10 characters (it has 9).'] error_invalid = ['Enter a valid Icelandic identification number. The format is XXXXXX-XXXX.'] error_atmost = ['Ensure this value has at most 11 characters (it has 12).'] error_notvalid = ['The Icelandic identification number is not valid.'] valid = { '2308803449': '230880-3449', '230880-3449': '230880-3449', '230880 3449': '230880-3449', '2308803440': '230880-3440', } invalid = { '230880343': error_atleast + error_invalid, '230880343234': error_atmost + error_invalid, 'abcdefghijk': error_invalid, '2308803439': error_notvalid, } self.assertFieldOutput(ISIdNumberField, valid, invalid) def test_ISPhoneNumberField(self): error_invalid = ['Enter a valid value.'] error_atleast = ['Ensure this value has at least 7 characters (it has 6).'] error_atmost = ['Ensure this value has at most 8 characters (it has 9).'] valid = { '1234567': '1234567', '123 4567': '1234567', '123-4567': '1234567', } invalid = { '123-456': error_invalid, '123456': error_atleast + error_invalid, '123456555': error_atmost + error_invalid, 'abcdefg': error_invalid, ' 1234567 ': error_atmost + error_invalid, ' 12367 ': error_invalid } self.assertFieldOutput(ISPhoneNumberField, valid, invalid)<|fim▁end|>
out = '''<select name="foo">
<|file_name|>AlfMenuGroup.js<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2005-2016 Alfresco Software Limited. * * This file is part of Alfresco * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. */ /** * This widget can be used to group menu items within a popup menu such as an [AlfMenuBarPopup]{@link module:alfresco/menus/AlfMenuBarPopup} * or a [AlfCascadingMenu]{@link module:alfresco/menus/AlfCascadingMenu}. When an item is added to any [AlfMenuGroups]{@link module:alfresco/menus/AlfMenuGroups} * popup such as in those widgets then a new instance will automatically be wrapped any child widget that is not in a group. * * @module alfresco/menus/AlfMenuGroup * @extends module:alfresco/menus/AlfDropDownMenu * @mixes module:alfresco/core/Core * @mixes module:alfresco/core/CoreRwd * @author Dave Draper */ define(["dojo/_base/declare", "dojo/text!./templates/AlfMenuGroup.html", "alfresco/core/Core", "alfresco/menus/AlfDropDownMenu", "alfresco/core/CoreRwd", "dojo/_base/event", "dojo/dom-style", "dojo/dom-class", "dojo/keys", "dijit/popup", "dojo/string"], function(declare, template, AlfCore, AlfDropDownMenu, CoreRwd, event, domStyle, domClass, keys, popup, string) { return declare([AlfDropDownMenu, AlfCore, CoreRwd], { // TODO: There's an argument that this should actually extend (rather than wrap) the DropDownMenu to avoid needing to delegate the functions /** * The HTML template to use for the widget. * @instance * @type {string} */ templateString: template, /** * An array of the CSS files to use with this widget. * * @instance * @type {object[]} * @default [{cssFile:"./css/AlfMenuGroup.css"}] */ cssRequirements: [{cssFile:"./css/AlfMenuGroup.css"}], /** * The label for the group. If this is left as the empty string then the group label node will be * hidden completely. The value assigned to label can either be an i18n property key or a value * but an attempt will be made to look up the assigned value in the available i18n keys. * * @instance * @type {string} * @default */ label: "", /** * @instance */ constructor: function alfresco_menus_AlfMenuGroup__constructor(/*jshint unused:false*/args) { this.templateString = string.substitute(template, { ddmTemplateString: AlfDropDownMenu.prototype.templateString}); }, /** * Sets the group label and creates a new alfresco/menus/AlfDropDownMenu to contain the items * in the group. * * @instance */ postCreate: function alfresco_menus_AlfMenuGroup__postCreate() { if (this.label === "") { // If there is no label for the title then hide the title node entirely... domStyle.set(this._groupTitleNode, "display", "none"); } else { // Make sure that an attempt is made to get the localized label... this.label = this.message(this.label); this._groupTitleNode.innerHTML = this.encodeHTML(this.label); } if(this.additionalCssClasses) { domClass.add(this._containerNode, this.additionalCssClasses); } // Setup the Drop down menu as normal... 
this.inherited(arguments); }, /** * * @instance */ isFocusable: function alfresco_menus_AlfMenuGroup__isFocusable() { return this.hasChildren(); }, /** * Overrides the inherited function in order to address the additional Alfesco object * placed in the chain between the Dojo menu objects. * * @instance * @param {object} evt */ _onRightArrow: function(evt){ if(this.focusedChild && this.focusedChild.popup && !this.focusedChild.disabled) { // This first block is identical to that of the inherited function... this.alfLog("log", "Open cascading menu"); this._moveToPopup(evt); } else { // Find the top menu and focus next in it... this.alfLog("log", "Try and find a menu bar in the stack and move to next"); var menuBarAncestor = this.findMenuBarAncestor(this.getParent()); if (menuBarAncestor) { var next = menuBarAncestor._getNextFocusableChild(menuBarAncestor.focusedChild, 1); if (next) { this.alfLog("log", "Go to next item in menu bar"); menuBarAncestor.focusChild(next); }<|fim▁hole|> /** * Overrides the inherited function in order to address the additional Alfesco object * placed in the chain between the Dojo menu objects. * * @instance * @param {object} evt */ _onLeftArrow: function(evt) { if(this.getParent().parentMenu && !this.getParent().parentMenu._isMenuBar) { this.alfLog("log", "Close cascading menu"); this.getParent().parentMenu.focusChild(this.getParent().parentMenu.focusedChild); popup.close(this.getParent()); } else { var menuBarAncestor = this.findMenuBarAncestor(this.getParent()); if (menuBarAncestor) { var prev = menuBarAncestor._getNextFocusableChild(menuBarAncestor.focusedChild, -1); if (prev) { this.alfLog("log", "Focus previous item in menu bar"); menuBarAncestor.focusChild(prev, true); } } else { evt.stopPropagation(); evt.preventDefault(); } } }, /** * This function will work up the stack of menus to find the first menu bar in the stack. This * is required because of the additional grouping capabilities that have been added to the basic * Dojo menu widgets. In the core Dojo code the "parentMenu" attribute is used to work up the stack * but not all widgets in the Alfresco menu stack have this attribute (and it was not possible to * set it correctly during the widget processing phase). * * @instance * @return Either null if a menu bar cannot be found or a menu bar widget. */ findMenuBarAncestor: function alfresco_menus_AlfMenuGroup__findMenuBarAncestor(currentMenu) { var reachedMenuTop = false; while (!reachedMenuTop && !currentMenu._isMenuBar) { if (currentMenu.parentMenu) { // The current menu item has a parent menu item - assign it as the current menu... currentMenu = currentMenu.parentMenu; } else { // Go up the widget stack until we either run out of ancestors or find another parent menu... var parent = currentMenu.getParent(); while (parent && !parent.parentMenu) { parent = parent.getParent(); } if (parent && parent.parentMenu) { currentMenu = parent.parentMenu; } reachedMenuTop = (parent == null); } } var menuBar = (currentMenu._isMenuBar) ? currentMenu : null; return menuBar; }, /** * Added to support use in context menus * * @instance * @param {boolean} bool */ _setSelected: function alfresco_menus_AlfMenuGroup___setSelected(/*jshint unused:false*/bool) { this._selected = true; } }); });<|fim▁end|>
} } },
<|file_name|>views_query.py<|end_file_name|><|fim▁begin|># coding:utf-8
import json

from django.http import HttpResponse
from django.shortcuts import render

from aircraft_config import AC_WQAR_CONFIG
from list2string_and_echarts_function import Echarts_option
from list2string_and_echarts_function import LIST_to_STR
from influxdb_function import influxDB_interface
from main_web.models import Stencil
from arrow_time import today_date_for_influxd_sql
from arrow_time import ten_day_ago_for_influxd_sql
from arrow_time import three_day_ago_for_influxd_sql


def all_childtable_index_list(request):
    if request.method == 'POST':
        post_data = request.POST
        date_range = post_data["date_range"]
        date_start = date_range.split(' to ')[0]
        date_end = date_range.split(' to ')[1]
    else:
        date_start = today_date_for_influxd_sql()
        date_end = today_date_for_influxd_sql()
    where_str = " WHERE time > " + "'" + date_start + "'" + " - 8h" + " AND time < " + "'" + date_end + "'" + " + 16h"
    infdb_if = influxDB_interface()
    sector_index = infdb_if.inf_query("DB_sector_index", "*", "index", where_str)
    if sector_index != {}:
        df = sector_index['index']
        result_json = df.to_json(orient="records")
        return render(request, 'all_childtable_index_list.html',
                      {'result_json': result_json, 'date_start': date_start, 'date_end': date_end})
    else:
        return render(request, 'all_childtable_index_list.html',
                      {'date_start': date_start, 'date_end': date_end + " no data"})


# This view is unused.
def query_index(request):
    if request.method == 'POST':
        post_data = request.POST
        if post_data["date_start"] == '' or post_data["date_end"] == '':
            date_start = ten_day_ago_for_influxd_sql()
            date_end = today_date_for_influxd_sql()
        else:
            date_start = post_data["date_start"]
            date_end = post_data["date_end"]
        AC_id = post_data["AC_id"]
        where_str = " WHERE time > " + "'" + date_start + "'" + " AND time < " + "'" + date_end + "'" + " + 1d" + " AND AC=" + "'" + AC_id + "'"
        infdb_if = influxDB_interface()
        sector_index = infdb_if.inf_query("DB_sector_index", "*", "index", where_str)
        if sector_index != {}:
            df = sector_index['index']
            result_json = df.to_json(orient="records")
            return render(request, 'single_plane_query.html',
                          {'result_json': result_json, 'date_start': date_start,
                           'date_end': date_end, 'AC_id': AC_id})
        else:
            return render(request, 'single_plane_query.html',
                          {'date_start': date_start, 'date_end': date_end + " no data",
                           'AC_id': AC_id})
    else:
        return render(request, 'single_plane_query.html')


def runup_list(request):
    if request.method == 'POST':
        post_data = request.POST
        date_range = post_data["date_range"]
        date_start = date_range.split(' to ')[0]
        date_end = date_range.split(' to ')[1]
    else:
        date_start = ten_day_ago_for_influxd_sql()
        date_end = today_date_for_influxd_sql()
    where_str = " WHERE time > " + "'" + date_start + "'" + " - 8h" + " AND time < " + "'" + date_end + "'" + " + 16h"
    where_str = where_str + " AND FLT_status='GROUND'"
    infdb_if = influxDB_interface()
    sector_index = infdb_if.inf_query("DB_sector_index", "*", "index", where_str)
    df = sector_index['index']
    result_json = df.to_json(orient="records")
    return render(request, 'all_childtable_index_list.html', {'result_json': result_json})


def tendency_total(request):
    if request.method == 'POST':
        post_data = request.POST
        date_range = post_data["date_range"]
        date_start = date_range.split(' to ')[0]
        date_end = date_range.split(' to ')[1]
        tendency_type = post_data["tendency_type"]
        where_str = " WHERE time > " + "'" + date_start + "'" + " AND time < " + "'" + date_end + "'" + " + 1d"
        infdb_if = influxDB_interface()
        sector_index = infdb_if.inf_query("tendency", "*", tendency_type, where_str)
        if sector_index != {}:
            df = sector_index[tendency_type]
            result_json = df.to_json(orient="records")
            return render(request, 'tendency_total.html',
                          {'result_json': result_json, 'date_start': date_start,
                           'date_end': date_end})
        else:
            return render(request, 'tendency_total.html',
                          {'date_start': date_start, 'date_end': date_end + " no data"})
    else:
        date_start = ten_day_ago_for_influxd_sql()
        date_end = today_date_for_influxd_sql()
        where_str = " WHERE time > " + "'" + date_start + "'" + " AND time < " + "'" + date_end + "'" + " + 1d"
        infdb_if = influxDB_interface()
        sector_index = infdb_if.inf_query("tendency", "*", "tendency_total", where_str)
        df = sector_index['tendency_total']
        result_json = df.to_json(orient="records")
        return render(request, 'tendency_total.html',
                      {'result_json': result_json, 'date_start': date_start, 'date_end': date_end})


def tendency_single(request):
    all_aircraft_list = json.dumps(AC_WQAR_CONFIG().all_aircraft())
    if request.method == 'POST':
        post_data = request.POST
        date_range = post_data["date_range"]
        date_start = date_range.split(' to ')[0]
        date_end = date_range.split(' to ')[1]
        para_name = post_data["para_name"]
        para_name_sed = "\"" + para_name + "\""
        exclude_list = ["AC", "AC_sector", "FLT_number", "FLT_status", "update_date", "update_time"]
        exclude_list = map(lambda x: "\"" + x + "\"", exclude_list)
        exclude_list.append(para_name_sed)
        query_para = ",".join(exclude_list)
        print query_para
        AC_id = post_data["AC_id"]
        AC_id_sed = "\'" + AC_id + "\'"
        qar_conf = AC_WQAR_CONFIG().juge_config(AC_id)
        if qar_conf == "737_7":
            tendency_type = "tendency_737_7"
        elif qar_conf == "737_3C":
            tendency_type = "tendency_737_3C"
        where_str = " WHERE AC = " + AC_id_sed + " AND " + "time > " + "'" + date_start + "'" + " AND time < " + "'" + date_end + "'" + " + 1d"
        infdb_if = influxDB_interface()
        sector_index = infdb_if.inf_query("tendency", query_para, tendency_type, where_str)
        if sector_index != {}:
            df = sector_index[tendency_type]
            result_json = df.to_json(orient="records")
            return render(request, 'tendency_single.html',
                          {'all_ac': all_aircraft_list, 'result_json': result_json,
                           'date_start': date_start, 'date_end': date_end})
        else:
            return render(request, 'tendency_single.html',
                          {'all_ac': all_aircraft_list, 'date_start': date_start,
                           'date_end': date_end + " no data"})
    else:
        return render(request, 'tendency_single.html', {'all_ac': all_aircraft_list, 'result_json': {}})


def tendency_single_para_list(request):
    AC_id = request.GET.get('AC_id', None)
    qar_conf = AC_WQAR_CONFIG().juge_config(AC_id)
    if qar_conf == "737_7":
        mes = "tendency_737_7"
    elif qar_conf == "737_3C":
        mes = "tendency_737_3C"
    para_object = influxDB_interface().show_fields("tendency", mes)
    para_list = []
    exclude_list = ["AC", "AC_sector", "FLT_number", "FLT_status", "update_date", "update_time"]
    for item in para_object:
        para_name = item['fieldKey']
        if para_name not in exclude_list:
            para_list.append(para_name)
    para_json = json.dumps(para_list)
    return HttpResponse(para_json)


def childtable(request, sector_id):
    result_list = []
    query_stencil = Stencil.objects.all()
    for item in query_stencil:
        dic_index = {
            'NAME': item.NAME
        }
        result_list.append(dic_index)
    return render(request, 'childtable.html', {'sector_id': sector_id, 'stencil_option': result_list})

<|fim▁hole|>
    post_flight_id = request.GET.get('flight_id', None)
    print post_flight_id
    aircraft_id = post_flight_id[0:6]
    # Fetch the list of parameters defined in the template
    stencil_object = Stencil.objects.get(NAME=post_NAME)
    list_3C, list_7 = LIST_to_STR().make_para_id_list()
    list_units_3C, list_units_7 = LIST_to_STR().make_para_units_list()
    list_WQAR256 = list_str.str_to_int(stencil_object.WQAR_737_3C)
    list_WQAR512 = list_str.str_to_int(stencil_object.WQAR_737_7)
    ac_wqar_config = AC_WQAR_CONFIG()
    echarts_option_256 = stencil_object.echarts_737_3C
    echarts_option_512 = stencil_object.echarts_737_7
    dic_units = {}
    list_para_name = []
    if aircraft_id in ac_wqar_config.WQAR_7_SERISE_list:
        model = list_WQAR512
        list_units = list_units_7
        ac_conf = '737_7'
        for item in model:
            list_para_name.append(list_7[int(item)])
            dic_units[list_7[int(item)]] = list_units[int(item)]
        str_echarts_option = echarts_option_512
    elif aircraft_id in ac_wqar_config.WQAR_3C_SERISE_list:
        model = list_WQAR256
        list_units = list_units_3C
        ac_conf = '737_3C'
        for item in model:
            list_para_name.append(list_3C[int(item)])
            dic_units[list_3C[int(item)]] = list_units[int(item)]
        str_echarts_option = echarts_option_256
    else:
        return HttpResponse("No such aircraft tail number")
    print list_para_name
    query_result = influxDB_interface().list_query(
        "CKG_QAR", list_para_name, ac_conf, post_flight_id)
    query_result.index = range(1, (len(query_result.index) + 1), 1)
    new_df = query_result.fillna('-')
    list_c1_c2 = new_df.to_dict(orient="records")
    para_name_dic = {}
    for key in list_c1_c2[0]:
        para_name_dic[key] = key
    list_c1_c2.insert(0, para_name_dic)
    list_c1_c2.append(dic_units)  # Units are left out for now; still to be filled in
    # Pass the ECharts configuration along
    ec_op = Echarts_option()
    echarts_config_option = ec_op.str_to_obj(str_echarts_option)
    # Build the list of logical-value parameters found in echarts_option
    list_index_of_logic_echarts = ec_op.judge_logic_echart(echarts_config_option)
    result_json = json.dumps([list_c1_c2, echarts_config_option, list_index_of_logic_echarts])
    return HttpResponse(result_json)


def eFlow_total(request):
    date_start = three_day_ago_for_influxd_sql()
    date_end = today_date_for_influxd_sql()
    where_str = " WHERE time > " + "'" + date_start + "'" + " AND time < " + "'" + date_end + "'" + " + 1d"
    infdb_if = influxDB_interface()
    sector_index = infdb_if.inf_query("tendency", "*", "tendency_737_7", where_str)
    df = sector_index['tendency_737_7']
    eflow_ac_list = ["B-7181", "B-7892", "B-7595", "B-7596", "B-7597", "B-7598", "B-7890",
                     "B-7893", "B-7895", "B-7896", "B-7891", "B-7897", "B-1417", "B-1416"]
    df_eflow = df[df['AC'].isin(eflow_ac_list)]
    result_json = df_eflow.to_json(orient="records")
    return render(request, 'eFlow_total.html',
                  {'result_json': result_json, 'date_start': date_start, 'date_end': date_end})<|fim▁end|>
def ajax_some_para(request):
    list_str = LIST_to_STR()
    post_NAME = request.GET.get('value_conf', None)
    print post_NAME
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse
from django.core.mail import send_mail
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from .models import ActivateCode
import uuid
import os
from django.http import HttpResponse
import datetime


def register(request):
    error = ""
    if request.method == "GET":
        return render(request, "user_register.html")
    else:
        username = request.POST['username'].strip()
        email = request.POST['email'].strip()
        password = request.POST['password'].strip()
        re_password = request.POST['re_password'].strip()
        if not username or not password or not email:
            error = "No field can be left empty"
        if password != re_password:
            error = "The two passwords do not match"
        if User.objects.filter(username=username).count() > 0:
            error = "This user already exists"
        if User.objects.filter(email=email).count() > 0:
            error = "This email address is already registered"
        if not error:
            user = User.objects.create_user(username=username, email=email, password=password)
            user.is_active = False
            user.save()
            new_code = str(uuid.uuid4()).replace("-", "")
            expire_time = datetime.datetime.now() + datetime.timedelta(days=2)
            code_record = ActivateCode(owner=user, code=new_code,<|fim▁hole|>
                                       "user_activate", args=[new_code]))
            send_mail('[python forum] Activation email',
                      'Your activation link is: %s' % activate_link,
                      '[email protected]',
                      [email], fail_silently=False)
        else:
            return render(request, "user_register.html", {"error": error})
        return HttpResponse("Please check your email to activate your account!")


def activate(request, code):
    query = ActivateCode.objects.filter(code=code,
                                        expire_timestamp__gte=datetime.datetime.now())
    if query.count() > 0:
        code_record = query[0]
        code_record.owner.is_active = True
        code_record.owner.save()
        return HttpResponse("Activation successful")
    else:
        return HttpResponse("Activation failed")


@login_required
def upload_avatar(request):
    if request.method == "GET":
        return render(request, "upload_avatar.html")
    else:
        profile = request.user.userprofile
        avatar_file = request.FILES.get("avatar", None)
        if not avatar_file:
            return HttpResponse("No file selected")
        file_name = request.user.username + avatar_file.name
        if avatar_file.size > 50000:
            return HttpResponse("Image size cannot exceed 500KB")
        file_path = os.path.join("/usr/share/userres/avatar/", file_name)
        with open(file_path, 'wb+') as destination:
            for chunk in avatar_file.chunks():
                destination.write(chunk)
        url = "http://res.myforum.com/avatar/%s" % file_name
        profile.avatar = url
        profile.save()
        return redirect("/")<|fim▁end|>
<|file_name|>blog_post.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe, re from frappe.website.website_generator import WebsiteGenerator from frappe.website.render import clear_cache from frappe.utils import today, cint, global_date_format, get_fullname from frappe.website.utils import find_first_image, get_comment_list from frappe.templates.pages.blog import get_children class BlogPost(WebsiteGenerator): condition_field = "published" template = "templates/generators/blog_post.html" save_versions = True order_by = "published_on desc" parent_website_route_field = "blog_category" page_title_field = "title" def validate(self): super(BlogPost, self).validate() if not self.blog_intro: self.blog_intro = self.content[:140]<|fim▁hole|> if self.blog_intro: self.blog_intro = self.blog_intro[:140] if self.published and not self.published_on: self.published_on = today() # update posts frappe.db.sql("""update tabBlogger set posts=(select count(*) from `tabBlog Post` where ifnull(blogger,'')=tabBlogger.name) where name=%s""", (self.blogger,)) def on_update(self): WebsiteGenerator.on_update(self) clear_cache("writers") def get_context(self, context): # this is for double precaution. usually it wont reach this code if not published if not cint(self.published): raise Exception, "This blog has not been published yet!" # temp fields context.full_name = get_fullname(self.owner) context.updated = global_date_format(self.published_on) if self.blogger: context.blogger_info = frappe.get_doc("Blogger", self.blogger).as_dict() context.description = self.blog_intro or self.content[:140] context.metatags = { "name": self.title, "description": context.description, } image = find_first_image(self.content) if image: context.metatags["image"] = image context.categories = frappe.db.sql_list("""select name from `tabBlog Category` order by name""") context.comment_list = get_comment_list(self.doctype, self.name) context.children = get_children() return context def clear_blog_cache(): for blog in frappe.db.sql_list("""select page_name from `tabBlog Post` where ifnull(published,0)=1"""): clear_cache(blog) clear_cache("writers") @frappe.whitelist(allow_guest=True) def get_blog_list(start=0, by=None, category=None): condition = "" if by: condition = " and t1.blogger='%s'" % by.replace("'", "\'") if category: condition += " and t1.blog_category='%s'" % category.replace("'", "\'") query = """\ select t1.title, t1.name, concat(t1.parent_website_route, "/", t1.page_name) as page_name, t1.published_on as creation, day(t1.published_on) as day, monthname(t1.published_on) as month, year(t1.published_on) as year, ifnull(t1.blog_intro, t1.content) as content, t2.full_name, t2.avatar, t1.blogger, (select count(name) from `tabComment` where comment_doctype='Blog Post' and comment_docname=t1.name) as comments from `tabBlog Post` t1, `tabBlogger` t2 where ifnull(t1.published,0)=1 and t1.blogger = t2.name %(condition)s order by published_on desc, name asc limit %(start)s, 20""" % {"start": start, "condition": condition} result = frappe.db.sql(query, as_dict=1) # strip html tags from content for res in result: res['published'] = global_date_format(res['creation']) res['content'] = res['content'][:140] return result<|fim▁end|>
			self.blog_intro = re.sub(r"<[^>]*>", "", self.blog_intro)
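# A quick illustration, not a dataset row, of what the completion above does:
# stripping markup with <[^>]*> leaves plain text for the 140-character blog
# intro. This is the usual naive tag regex, fine for generated intros but not
# a general-purpose HTML sanitizer.
import re

intro = "<p>Hello <b>world</b></p>"
print(re.sub(r"<[^>]*>", "", intro))  # prints: Hello world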
<|file_name|>lambda_function.py<|end_file_name|><|fim▁begin|>import logging from ask import alexa import car_accidents import expected_population logger = logging.getLogger() logger.setLevel(logging.INFO)<|fim▁hole|>def lambda_handler(request_obj, context=None): return alexa.route_request(request_obj) @alexa.default def default_handler(request): logger.info('default_handler') return alexa.respond("Sorry, I don't understand.", end_session=True) @alexa.request("LaunchRequest") def launch_request_handler(request): logger.info('launch_request_handler') return alexa.respond('Ask me about any public data about Sweden.', end_session=True) @alexa.request("SessionEndedRequest") def session_ended_request_handler(request): logger.info('session_ended_request_handler') return alexa.respond('Goodbye.', end_session=True) @alexa.intent('AMAZON.CancelIntent') def cancel_intent_handler(request): logger.info('cancel_intent_handler') return alexa.respond('Okay.', end_session=True) @alexa.intent('AMAZON.HelpIntent') def help_intent_handler(request): logger.info('help_intent_handler') return alexa.respond('You can ask me about car accidents.', end_session=True) @alexa.intent('AMAZON.StopIntent') def stop_intent_handler(request): logger.info('stop_intent_handler') return alexa.respond('Okay.', end_session=True) @alexa.intent('CarAccidents') def car_accidents_intent_handler(request): logger.info('car_accidents_intent_handler') logger.info(request.get_slot_map()) city = request.get_slot_value('city') year = request.get_slot_value('year') if not city: return alexa.respond('Sorry, which city?') num_card_acc = car_accidents.get_num_accidents(year=int(year), city=city) logger.info('%s accidents in %s in %s', num_card_acc, city, year) return alexa.respond( ''' <speak> There were <say-as interpret-as="cardinal">%s</say-as> car accidents in %s in <say-as interpret-as="date" format="y">%s</say-as>, </speak> ''' % (num_card_acc, city, year), end_session=True, is_ssml=True) @alexa.intent('PopulationSweden') def population_intent_handler(request): logger.info('population_sweden_intent_handler') logger.info(request.get_slot_map()) year = request.get_slot_value('year') return alexa.respond( ''' <speak> in <say-as interpret-as="date" format="y">%s</say-as>, The expected population of Sweden is going to be <say-as interpret-as="cardinal">%s</say-as> </speak> ''' % (year, expected_population.get_expected_population(year)), end_session=True, is_ssml=True) @alexa.intent('WaterUsage') def water_usage_stockholm(request): year = request.get_slot_value('year') logger.info('water_usage_stockholm') logger.info(request.get_slot_map()) return alexa.respond( ''' <speak> the water consumption in Stockholm in <say-as interpret-as="date" format="y">%s</say-as>, is <say-as interpret-as="cardinal">%s</say-as> </speak> ''' % (year, car_accidents.get_water_usage_stockholm(year)), end_session=True, is_ssml=True) @alexa.intent('Apartments') def housing_numbers(request): year = request.get_slot_value('year') logger.info('apartments') logger.info(request.get_slot_map()) return alexa.respond( ''' <speak> the number of apartments built during that year in Stockholm, is <say-as interpret-as="cardinal">%s</say-as> </speak> ''' % (car_accidents.get_num_apartments_stockholm(year)), )<|fim▁end|>
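# The intent handlers above all repeat one SSML shape: a <speak> wrapper
# around a sentence with a <say-as> element substituted in. A small helper
# like this sketch (hypothetical, not part of the sample) captures that
# pattern; the interpret-as value is one the handlers themselves use.
def ssml_cardinal(sentence_template, value):
    say_as = '<say-as interpret-as="cardinal">%s</say-as>' % value
    return "<speak>%s</speak>" % (sentence_template % say_as)

# ssml_cardinal("there were %s car accidents in Stockholm", 42) returns
# '<speak>there were <say-as interpret-as="cardinal">42</say-as> car accidents in Stockholm</speak>'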
<|file_name|>layout_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The layout thread. Performs layout on the DOM, builds display lists and sends them to be //! painted. #![allow(unsafe_code)] use animation; use app_units::Au; use azure::azure::AzColor; use construct::ConstructionResult; use context::{LayoutContext, SharedLayoutContext, heap_size_of_local_context}; use display_list_builder::ToGfxColor; use euclid::Matrix4D; use euclid::point::Point2D; use euclid::rect::Rect; use euclid::scale_factor::ScaleFactor; use euclid::size::Size2D; use flow::{self, Flow, ImmutableFlowUtils, MutableOwnedFlowUtils}; use flow_ref::{self, FlowRef}; use fnv::FnvHasher; use gfx::display_list::{ClippingRegion, DisplayItemMetadata, DisplayList, LayerInfo, OpaqueNode}; use gfx::display_list::{ScrollOffsetMap, StackingContext, StackingContextType, WebRenderImageInfo}; use gfx::font; use gfx::font_cache_thread::FontCacheThread; use gfx::font_context; use gfx::paint_thread::LayoutToPaintMsg; use gfx_traits::{color, Epoch, FragmentType, LayerId, ScrollPolicy, StackingContextId}; use heapsize::HeapSizeOf; use incremental::LayoutDamageComputation; use incremental::{REPAINT, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW, REFLOW, REFLOW_ENTIRE_DOCUMENT}; use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use ipc_channel::router::ROUTER; use layout_debug; use layout_traits::LayoutThreadFactory; use log; use msg::constellation_msg::{PanicMsg, PipelineId}; use net_traits::image_cache_thread::UsePlaceholder; use net_traits::image_cache_thread::{ImageCacheChan, ImageCacheResult, ImageCacheThread}; use parallel; use profile_traits::mem::{self, Report, ReportKind, ReportsChan}; use profile_traits::time::{TimerMetadataFrameType, TimerMetadataReflowType}; use profile_traits::time::{self, TimerMetadata, profile}; use query::process_offset_parent_query; use query::{LayoutRPCImpl, process_content_box_request, process_content_boxes_request}; use query::{process_node_geometry_request, process_node_layer_id_request, process_node_scroll_area_request}; use query::{process_node_overflow_request, process_resolved_style_request, process_margin_style_query}; use script::layout_interface::OpaqueStyleAndLayoutData; use script::layout_interface::{LayoutRPC, OffsetParentResponse, NodeOverflowResponse, MarginStyleResponse}; use script::layout_interface::{Msg, NewLayoutThreadInfo, Reflow, ReflowQueryType, ScriptReflow}; use script::reporter::CSSErrorReporter; use script_traits::{ConstellationControlMsg, LayoutControlMsg, LayoutMsg as ConstellationMsg}; use script_traits::{StackingContextScrollState, UntrustedNodeAddress}; use sequential; use serde_json; use std::borrow::ToOwned; use std::collections::HashMap; use std::hash::BuildHasherDefault; use std::ops::{Deref, DerefMut}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::mpsc::{channel, Sender, Receiver}; use std::sync::{Arc, Mutex, MutexGuard, RwLock}; use style::animation::Animation; use style::computed_values::{filter, mix_blend_mode}; use style::context::ReflowGoal; use style::dom::{TDocument, TElement, TNode}; use style::error_reporting::ParseErrorReporter; use style::logical_geometry::LogicalPoint; use style::media_queries::{Device, MediaType}; use style::parallel::WorkQueueData; use style::properties::ComputedValues; use style::selector_matching::USER_OR_USER_AGENT_STYLESHEETS; use 
style::servo::{SharedStyleContext, Stylesheet, Stylist}; use style::stylesheets::CSSRuleIteratorExt; use traversal::RecalcStyleAndConstructFlows; use url::Url; use util::geometry::MAX_RECT; use util::ipc::OptionalIpcSender; use util::opts; use util::thread; use util::thread_state; use util::workqueue::WorkQueue; use webrender_helpers::{WebRenderDisplayListConverter, WebRenderFrameBuilder}; use webrender_traits; use wrapper::{LayoutNode, NonOpaqueStyleAndLayoutData, ServoLayoutNode}; /// The number of screens of data we're allowed to generate display lists for in each direction. pub const DISPLAY_PORT_SIZE_FACTOR: i32 = 8; /// The number of screens we have to traverse before we decide to generate new display lists. const DISPLAY_PORT_THRESHOLD_SIZE_FACTOR: i32 = 4; /// Mutable data belonging to the LayoutThread. /// /// This needs to be protected by a mutex so we can do fast RPCs. pub struct LayoutThreadData { /// The channel on which messages can be sent to the constellation. pub constellation_chan: IpcSender<ConstellationMsg>, /// The root stacking context. pub display_list: Option<Arc<DisplayList>>, /// Performs CSS selector matching and style resolution. pub stylist: Arc<Stylist>, /// A queued response for the union of the content boxes of a node. pub content_box_response: Rect<Au>, /// A queued response for the content boxes of a node. pub content_boxes_response: Vec<Rect<Au>>, /// A queued response for the client {top, left, width, height} of a node in pixels. pub client_rect_response: Rect<i32>, pub layer_id_response: Option<LayerId>, /// A queued response for the node at a given point pub hit_test_response: (Option<DisplayItemMetadata>, bool), /// A pair of overflow property in x and y pub overflow_response: NodeOverflowResponse, /// A queued response for the scroll {top, left, width, height} of a node in pixels. pub scroll_area_response: Rect<i32>, /// A queued response for the resolved style property of an element. pub resolved_style_response: Option<String>, /// A queued response for the offset parent/rect of a node. pub offset_parent_response: OffsetParentResponse, /// A queued response for the offset parent/rect of a node. pub margin_style_response: MarginStyleResponse, /// Scroll offsets of stacking contexts. This will only be populated if WebRender is in use. pub stacking_context_scroll_offsets: ScrollOffsetMap, } /// Information needed by the layout thread. pub struct LayoutThread { /// The ID of the pipeline that we belong to. id: PipelineId, /// The URL of the pipeline that we belong to. url: Url, /// Is the current reflow of an iframe, as opposed to a root window? is_iframe: bool, /// The port on which we receive messages from the script thread. port: Receiver<Msg>, /// The port on which we receive messages from the constellation. pipeline_port: Receiver<LayoutControlMsg>, /// The port on which we receive messages from the image cache image_cache_receiver: Receiver<ImageCacheResult>, /// The channel on which the image cache can send messages to ourself. image_cache_sender: ImageCacheChan, /// The port on which we receive messages from the font cache thread. font_cache_receiver: Receiver<()>, /// The channel on which the font cache can send messages to us. font_cache_sender: IpcSender<()>, /// The channel on which messages can be sent to the constellation. constellation_chan: IpcSender<ConstellationMsg>, /// The channel on which messages can be sent to the script thread. 
script_chan: IpcSender<ConstellationControlMsg>, /// The channel on which messages can be sent to the painting thread. paint_chan: OptionalIpcSender<LayoutToPaintMsg>, /// The channel on which messages can be sent to the time profiler. time_profiler_chan: time::ProfilerChan, /// The channel on which messages can be sent to the memory profiler. mem_profiler_chan: mem::ProfilerChan, /// The channel on which messages can be sent to the image cache. image_cache_thread: ImageCacheThread, /// Public interface to the font cache thread. font_cache_thread: FontCacheThread, /// Is this the first reflow in this LayoutThread? first_reflow: bool, /// The workers that we use for parallel operation. parallel_traversal: Option<WorkQueue<SharedLayoutContext, WorkQueueData>>, /// Starts at zero, and increased by one every time a layout completes. /// This can be used to easily check for invalid stale data. generation: u32, /// A channel on which new animations that have been triggered by style recalculation can be /// sent. new_animations_sender: Sender<Animation>, /// Receives newly-discovered animations. new_animations_receiver: Receiver<Animation>, /// The number of Web fonts that have been requested but not yet loaded. outstanding_web_fonts: Arc<AtomicUsize>, /// The root of the flow tree. root_flow: Option<FlowRef>, /// The position and size of the visible rect for each layer. We do not build display lists /// for any areas more than `DISPLAY_PORT_SIZE_FACTOR` screens away from this area. visible_rects: Arc<HashMap<LayerId, Rect<Au>, BuildHasherDefault<FnvHasher>>>, /// The list of currently-running animations. running_animations: Arc<RwLock<HashMap<OpaqueNode, Vec<Animation>>>>, /// The list of animations that have expired since the last style recalculation. expired_animations: Arc<RwLock<HashMap<OpaqueNode, Vec<Animation>>>>, /// A counter for epoch messages epoch: Epoch, /// The size of the viewport. This may be different from the size of the screen due to viewport /// constraints. viewport_size: Size2D<Au>, /// A mutex to allow for fast, read-only RPC of layout's internal data /// structures, while still letting the LayoutThread modify them. /// /// All the other elements of this struct are read-only. rw_data: Arc<Mutex<LayoutThreadData>>, /// The CSS error reporter for all CSS loaded in this layout thread error_reporter: CSSErrorReporter, <|fim▁hole|> webrender_image_cache: Arc<RwLock<HashMap<(Url, UsePlaceholder), WebRenderImageInfo, BuildHasherDefault<FnvHasher>>>>, // Webrender interface, if enabled. webrender_api: Option<webrender_traits::RenderApi>, } impl LayoutThreadFactory for LayoutThread { type Message = Msg; /// Spawns a new layout thread. 
fn create(id: PipelineId, url: Url, is_iframe: bool, chan: (Sender<Msg>, Receiver<Msg>), pipeline_port: IpcReceiver<LayoutControlMsg>, constellation_chan: IpcSender<ConstellationMsg>, panic_chan: IpcSender<PanicMsg>, script_chan: IpcSender<ConstellationControlMsg>, paint_chan: OptionalIpcSender<LayoutToPaintMsg>, image_cache_thread: ImageCacheThread, font_cache_thread: FontCacheThread, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: mem::ProfilerChan, content_process_shutdown_chan: IpcSender<()>, webrender_api_sender: Option<webrender_traits::RenderApiSender>) { thread::spawn_named_with_send_on_panic(format!("LayoutThread {:?}", id), thread_state::LAYOUT, move || { { // Ensures layout thread is destroyed before we send shutdown message let sender = chan.0; let layout = LayoutThread::new(id, url, is_iframe, chan.1, pipeline_port, constellation_chan, script_chan, paint_chan, image_cache_thread, font_cache_thread, time_profiler_chan, mem_profiler_chan.clone(), webrender_api_sender); let reporter_name = format!("layout-reporter-{}", id); mem_profiler_chan.run_with_memory_reporting(|| { layout.start(); }, reporter_name, sender, Msg::CollectReports); } let _ = content_process_shutdown_chan.send(()); }, Some(id), panic_chan); } } /// The `LayoutThread` `rw_data` lock must remain locked until the first reflow, /// as RPC calls don't make sense until then. Use this in combination with /// `LayoutThread::lock_rw_data` and `LayoutThread::return_rw_data`. pub enum RWGuard<'a> { /// If the lock was previously held, from when the thread started. Held(MutexGuard<'a, LayoutThreadData>), /// If the lock was just used, and has been returned since there has been /// a reflow already. Used(MutexGuard<'a, LayoutThreadData>), } impl<'a> Deref for RWGuard<'a> { type Target = LayoutThreadData; fn deref(&self) -> &LayoutThreadData { match *self { RWGuard::Held(ref x) => &**x, RWGuard::Used(ref x) => &**x, } } } impl<'a> DerefMut for RWGuard<'a> { fn deref_mut(&mut self) -> &mut LayoutThreadData { match *self { RWGuard::Held(ref mut x) => &mut **x, RWGuard::Used(ref mut x) => &mut **x, } } } struct RwData<'a, 'b: 'a> { rw_data: &'b Arc<Mutex<LayoutThreadData>>, possibly_locked_rw_data: &'a mut Option<MutexGuard<'b, LayoutThreadData>>, } impl<'a, 'b: 'a> RwData<'a, 'b> { /// If no reflow has happened yet, this will just return the lock in /// `possibly_locked_rw_data`. Otherwise, it will acquire the `rw_data` lock. /// /// If you do not wish RPCs to remain blocked, just drop the `RWGuard` /// returned from this function. If you _do_ wish for them to remain blocked, /// use `block`. fn lock(&mut self) -> RWGuard<'b> { match self.possibly_locked_rw_data.take() { None => RWGuard::Used(self.rw_data.lock().unwrap()), Some(x) => RWGuard::Held(x), } } /// If no reflow has ever been triggered, this will keep the lock, locked /// (and saved in `possibly_locked_rw_data`). If it has been, the lock will /// be unlocked. 
fn block(&mut self, rw_data: RWGuard<'b>) { match rw_data { RWGuard::Used(x) => drop(x), RWGuard::Held(x) => *self.possibly_locked_rw_data = Some(x), } } } fn add_font_face_rules(stylesheet: &Stylesheet, device: &Device, font_cache_thread: &FontCacheThread, font_cache_sender: &IpcSender<()>, outstanding_web_fonts_counter: &Arc<AtomicUsize>) { if opts::get().load_webfonts_synchronously { let (sender, receiver) = ipc::channel().unwrap(); for font_face in stylesheet.effective_rules(&device).font_face() { let effective_sources = font_face.effective_sources(); font_cache_thread.add_web_font(font_face.family.clone(), effective_sources, sender.clone()); receiver.recv().unwrap(); } } else { for font_face in stylesheet.effective_rules(&device).font_face() { let effective_sources = font_face.effective_sources(); outstanding_web_fonts_counter.fetch_add(1, Ordering::SeqCst); font_cache_thread.add_web_font(font_face.family.clone(), effective_sources, (*font_cache_sender).clone()); } } } impl LayoutThread { /// Creates a new `LayoutThread` structure. fn new(id: PipelineId, url: Url, is_iframe: bool, port: Receiver<Msg>, pipeline_port: IpcReceiver<LayoutControlMsg>, constellation_chan: IpcSender<ConstellationMsg>, script_chan: IpcSender<ConstellationControlMsg>, paint_chan: OptionalIpcSender<LayoutToPaintMsg>, image_cache_thread: ImageCacheThread, font_cache_thread: FontCacheThread, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: mem::ProfilerChan, webrender_api_sender: Option<webrender_traits::RenderApiSender>) -> LayoutThread { let device = Device::new( MediaType::Screen, opts::get().initial_window_size.as_f32() * ScaleFactor::new(1.0)); let parallel_traversal = if opts::get().layout_threads != 1 { Some(WorkQueue::new("LayoutWorker", thread_state::LAYOUT, opts::get().layout_threads)) } else { None }; // Create the channel on which new animations can be sent. let (new_animations_sender, new_animations_receiver) = channel(); // Proxy IPC messages from the pipeline to the layout thread. let pipeline_receiver = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(pipeline_port); // Ask the router to proxy IPC messages from the image cache thread to the layout thread. let (ipc_image_cache_sender, ipc_image_cache_receiver) = ipc::channel().unwrap(); let image_cache_receiver = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(ipc_image_cache_receiver); // Ask the router to proxy IPC messages from the font cache thread to the layout thread. 
let (ipc_font_cache_sender, ipc_font_cache_receiver) = ipc::channel().unwrap(); let font_cache_receiver = ROUTER.route_ipc_receiver_to_new_mpsc_receiver(ipc_font_cache_receiver); let stylist = Arc::new(Stylist::new(device)); let outstanding_web_fonts_counter = Arc::new(AtomicUsize::new(0)); for stylesheet in &*USER_OR_USER_AGENT_STYLESHEETS { add_font_face_rules(stylesheet, &stylist.device, &font_cache_thread, &ipc_font_cache_sender, &outstanding_web_fonts_counter); } LayoutThread { id: id, url: url, is_iframe: is_iframe, port: port, pipeline_port: pipeline_receiver, script_chan: script_chan.clone(), constellation_chan: constellation_chan.clone(), paint_chan: paint_chan, time_profiler_chan: time_profiler_chan, mem_profiler_chan: mem_profiler_chan, image_cache_thread: image_cache_thread, font_cache_thread: font_cache_thread, first_reflow: true, image_cache_receiver: image_cache_receiver, image_cache_sender: ImageCacheChan(ipc_image_cache_sender), font_cache_receiver: font_cache_receiver, font_cache_sender: ipc_font_cache_sender, parallel_traversal: parallel_traversal, generation: 0, new_animations_sender: new_animations_sender, new_animations_receiver: new_animations_receiver, outstanding_web_fonts: outstanding_web_fonts_counter, root_flow: None, visible_rects: Arc::new(HashMap::with_hasher(Default::default())), running_animations: Arc::new(RwLock::new(HashMap::new())), expired_animations: Arc::new(RwLock::new(HashMap::new())), epoch: Epoch(0), viewport_size: Size2D::new(Au(0), Au(0)), webrender_api: webrender_api_sender.map(|wr| wr.create_api()), rw_data: Arc::new(Mutex::new( LayoutThreadData { constellation_chan: constellation_chan, display_list: None, stylist: stylist, content_box_response: Rect::zero(), content_boxes_response: Vec::new(), client_rect_response: Rect::zero(), layer_id_response: None, hit_test_response: (None, false), scroll_area_response: Rect::zero(), overflow_response: NodeOverflowResponse(None), resolved_style_response: None, offset_parent_response: OffsetParentResponse::empty(), margin_style_response: MarginStyleResponse::empty(), stacking_context_scroll_offsets: HashMap::new(), })), error_reporter: CSSErrorReporter { pipelineid: id, script_chan: Arc::new(Mutex::new(script_chan)), }, webrender_image_cache: Arc::new(RwLock::new(HashMap::with_hasher(Default::default()))), } } /// Starts listening on the port. fn start(mut self) { let rw_data = self.rw_data.clone(); let mut possibly_locked_rw_data = Some(rw_data.lock().unwrap()); let mut rw_data = RwData { rw_data: &rw_data, possibly_locked_rw_data: &mut possibly_locked_rw_data, }; while self.handle_request(&mut rw_data) { // Loop indefinitely. } } // Create a layout context for use in building display lists, hit testing, &c. 
fn build_shared_layout_context(&self, rw_data: &LayoutThreadData, screen_size_changed: bool, goal: ReflowGoal) -> SharedLayoutContext { SharedLayoutContext { style_context: SharedStyleContext { viewport_size: self.viewport_size.clone(), screen_size_changed: screen_size_changed, stylist: rw_data.stylist.clone(), generation: self.generation, goal: goal, new_animations_sender: Mutex::new(self.new_animations_sender.clone()), running_animations: self.running_animations.clone(), expired_animations: self.expired_animations.clone(), error_reporter: self.error_reporter.clone(), }, image_cache_thread: self.image_cache_thread.clone(), image_cache_sender: Mutex::new(self.image_cache_sender.clone()), font_cache_thread: Mutex::new(self.font_cache_thread.clone()), visible_rects: self.visible_rects.clone(), webrender_image_cache: self.webrender_image_cache.clone(), } } /// Receives and dispatches messages from the script and constellation threads fn handle_request<'a, 'b>(&mut self, possibly_locked_rw_data: &mut RwData<'a, 'b>) -> bool { enum Request { FromPipeline(LayoutControlMsg), FromScript(Msg), FromImageCache, FromFontCache, } let request = { let port_from_script = &self.port; let port_from_pipeline = &self.pipeline_port; let port_from_image_cache = &self.image_cache_receiver; let port_from_font_cache = &self.font_cache_receiver; select! { msg = port_from_pipeline.recv() => { Request::FromPipeline(msg.unwrap()) }, msg = port_from_script.recv() => { Request::FromScript(msg.unwrap()) }, msg = port_from_image_cache.recv() => { msg.unwrap(); Request::FromImageCache }, msg = port_from_font_cache.recv() => { msg.unwrap(); Request::FromFontCache } } }; match request { Request::FromPipeline(LayoutControlMsg::SetVisibleRects(new_visible_rects)) => { self.handle_request_helper(Msg::SetVisibleRects(new_visible_rects), possibly_locked_rw_data) }, Request::FromPipeline(LayoutControlMsg::SetStackingContextScrollStates( new_scroll_states)) => { self.handle_request_helper(Msg::SetStackingContextScrollStates(new_scroll_states), possibly_locked_rw_data) }, Request::FromPipeline(LayoutControlMsg::TickAnimations) => { self.handle_request_helper(Msg::TickAnimations, possibly_locked_rw_data) }, Request::FromPipeline(LayoutControlMsg::GetCurrentEpoch(sender)) => { self.handle_request_helper(Msg::GetCurrentEpoch(sender), possibly_locked_rw_data) }, Request::FromPipeline(LayoutControlMsg::GetWebFontLoadState(sender)) => { self.handle_request_helper(Msg::GetWebFontLoadState(sender), possibly_locked_rw_data) }, Request::FromPipeline(LayoutControlMsg::ExitNow) => { self.handle_request_helper(Msg::ExitNow, possibly_locked_rw_data) }, Request::FromScript(msg) => { self.handle_request_helper(msg, possibly_locked_rw_data) }, Request::FromImageCache => { self.repaint(possibly_locked_rw_data) }, Request::FromFontCache => { let _rw_data = possibly_locked_rw_data.lock(); self.outstanding_web_fonts.fetch_sub(1, Ordering::SeqCst); font_context::invalidate_font_caches(); self.script_chan.send(ConstellationControlMsg::WebFontLoaded(self.id)).unwrap(); true }, } } /// Repaint the scene, without performing style matching. This is typically /// used when an image arrives asynchronously and triggers a relayout and /// repaint. /// TODO: In the future we could detect if the image size hasn't changed /// since last time and avoid performing a complete layout pass. 
fn repaint<'a, 'b>(&mut self, possibly_locked_rw_data: &mut RwData<'a, 'b>) -> bool { let mut rw_data = possibly_locked_rw_data.lock(); if let Some(mut root_flow) = self.root_flow.clone() { let flow = flow::mut_base(flow_ref::deref_mut(&mut root_flow)); flow.restyle_damage.insert(REPAINT); } let reflow_info = Reflow { goal: ReflowGoal::ForDisplay, page_clip_rect: MAX_RECT, }; let mut layout_context = self.build_shared_layout_context(&*rw_data, false, reflow_info.goal); self.perform_post_style_recalc_layout_passes(&reflow_info, &mut *rw_data, &mut layout_context); true } /// Receives and dispatches messages from other threads. fn handle_request_helper<'a, 'b>(&mut self, request: Msg, possibly_locked_rw_data: &mut RwData<'a, 'b>) -> bool { match request { Msg::AddStylesheet(style_info) => { self.handle_add_stylesheet(style_info, possibly_locked_rw_data) } Msg::SetQuirksMode => self.handle_set_quirks_mode(possibly_locked_rw_data), Msg::GetRPC(response_chan) => { response_chan.send(box LayoutRPCImpl(self.rw_data.clone()) as Box<LayoutRPC + Send>).unwrap(); }, Msg::Reflow(data) => { profile(time::ProfilerCategory::LayoutPerform, self.profiler_metadata(), self.time_profiler_chan.clone(), || self.handle_reflow(&data, possibly_locked_rw_data)); }, Msg::TickAnimations => self.tick_all_animations(possibly_locked_rw_data), Msg::ReflowWithNewlyLoadedWebFont => { self.reflow_with_newly_loaded_web_font(possibly_locked_rw_data) } Msg::SetVisibleRects(new_visible_rects) => { self.set_visible_rects(new_visible_rects, possibly_locked_rw_data); } Msg::SetStackingContextScrollStates(new_scroll_states) => { self.set_stacking_context_scroll_states(new_scroll_states, possibly_locked_rw_data); } Msg::ReapStyleAndLayoutData(dead_data) => { unsafe { self.handle_reap_style_and_layout_data(dead_data) } } Msg::CollectReports(reports_chan) => { self.collect_reports(reports_chan, possibly_locked_rw_data); }, Msg::GetCurrentEpoch(sender) => { let _rw_data = possibly_locked_rw_data.lock(); sender.send(self.epoch).unwrap(); }, Msg::GetWebFontLoadState(sender) => { let _rw_data = possibly_locked_rw_data.lock(); let outstanding_web_fonts = self.outstanding_web_fonts.load(Ordering::SeqCst); sender.send(outstanding_web_fonts != 0).unwrap(); }, Msg::CreateLayoutThread(info) => { self.create_layout_thread(info) } Msg::SetFinalUrl(final_url) => { self.url = final_url; }, Msg::PrepareToExit(response_chan) => { self.prepare_to_exit(response_chan); return false }, Msg::ExitNow => { debug!("layout: ExitNow received"); self.exit_now(); return false } } true } fn collect_reports<'a, 'b>(&self, reports_chan: ReportsChan, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let mut reports = vec![]; // FIXME(njn): Just measuring the display tree for now. let rw_data = possibly_locked_rw_data.lock(); let display_list = rw_data.display_list.as_ref(); let formatted_url = &format!("url({})", self.url); reports.push(Report { path: path![formatted_url, "layout-thread", "display-list"], kind: ReportKind::ExplicitJemallocHeapSize, size: display_list.map_or(0, |sc| sc.heap_size_of_children()), }); let stylist = rw_data.stylist.as_ref(); reports.push(Report { path: path![formatted_url, "layout-thread", "stylist"], kind: ReportKind::ExplicitJemallocHeapSize, size: stylist.heap_size_of_children(), }); // The LayoutThread has a context in TLS... reports.push(Report { path: path![formatted_url, "layout-thread", "local-context"], kind: ReportKind::ExplicitJemallocHeapSize, size: heap_size_of_local_context(), }); // ... 
as do each of the LayoutWorkers, if present. if let Some(ref traversal) = self.parallel_traversal { let sizes = traversal.heap_size_of_tls(heap_size_of_local_context); for (i, size) in sizes.iter().enumerate() { reports.push(Report { path: path![formatted_url, format!("layout-worker-{}-local-context", i)], kind: ReportKind::ExplicitJemallocHeapSize, size: *size, }); } } reports_chan.send(reports); } fn create_layout_thread(&self, info: NewLayoutThreadInfo) { LayoutThread::create(info.id, info.url.clone(), info.is_parent, info.layout_pair, info.pipeline_port, info.constellation_chan, info.panic_chan, info.script_chan.clone(), info.paint_chan.to::<LayoutToPaintMsg>(), self.image_cache_thread.clone(), self.font_cache_thread.clone(), self.time_profiler_chan.clone(), self.mem_profiler_chan.clone(), info.content_process_shutdown_chan, self.webrender_api.as_ref().map(|wr| wr.clone_sender())); } /// Enters a quiescent state in which no new messages will be processed until an `ExitNow` is /// received. A pong is immediately sent on the given response channel. fn prepare_to_exit(&mut self, response_chan: Sender<()>) { response_chan.send(()).unwrap(); loop { match self.port.recv().unwrap() { Msg::ReapStyleAndLayoutData(dead_data) => { unsafe { self.handle_reap_style_and_layout_data(dead_data) } } Msg::ExitNow => { debug!("layout thread is exiting..."); self.exit_now(); break } Msg::CollectReports(_) => { // Just ignore these messages at this point. } _ => { panic!("layout: unexpected message received after `PrepareToExitMsg`") } } } } /// Shuts down the layout thread now. If there are any DOM nodes left, layout will now (safely) /// crash. fn exit_now(&mut self) { if let Some(ref mut traversal) = self.parallel_traversal { traversal.shutdown() } self.paint_chan.send(LayoutToPaintMsg::Exit).unwrap(); } fn handle_add_stylesheet<'a, 'b>(&self, stylesheet: Arc<Stylesheet>, possibly_locked_rw_data: &mut RwData<'a, 'b>) { // Find all font-face rules and notify the font cache of them. // GWTODO: Need to handle unloading web fonts. let rw_data = possibly_locked_rw_data.lock(); if stylesheet.is_effective_for_device(&rw_data.stylist.device) { add_font_face_rules(&*stylesheet, &rw_data.stylist.device, &self.font_cache_thread, &self.font_cache_sender, &self.outstanding_web_fonts); } possibly_locked_rw_data.block(rw_data); } /// Sets quirks mode for the document, causing the quirks mode stylesheet to be used. fn handle_set_quirks_mode<'a, 'b>(&self, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let mut rw_data = possibly_locked_rw_data.lock(); Arc::get_mut(&mut rw_data.stylist).unwrap().set_quirks_mode(true); possibly_locked_rw_data.block(rw_data); } fn try_get_layout_root<N: LayoutNode>(&self, node: N) -> Option<FlowRef> { let mut data = match node.mutate_layout_data() { Some(x) => x, None => return None, }; let result = data.flow_construction_result.swap_out(); let mut flow = match result { ConstructionResult::Flow(mut flow, abs_descendants) => { // Note: Assuming that the root has display 'static' (as per // CSS Section 9.3.1). Otherwise, if it were absolutely // positioned, it would return a reference to itself in // `abs_descendants` and would lead to a circular reference. // Set Root as CB for any remaining absolute descendants. flow.set_absolute_descendants(abs_descendants); flow } _ => return None, }; flow_ref::deref_mut(&mut flow).mark_as_root(); Some(flow) } /// Performs layout constraint solving. 
/// /// This corresponds to `Reflow()` in Gecko and `layout()` in WebKit/Blink and should be /// benchmarked against those two. It is marked `#[inline(never)]` to aid profiling. #[inline(never)] fn solve_constraints(layout_root: &mut FlowRef, shared_layout_context: &SharedLayoutContext) { let _scope = layout_debug_scope!("solve_constraints"); sequential::traverse_flow_tree_preorder(layout_root, shared_layout_context); } /// Performs layout constraint solving in parallel. /// /// This corresponds to `Reflow()` in Gecko and `layout()` in WebKit/Blink and should be /// benchmarked against those two. It is marked `#[inline(never)]` to aid profiling. #[inline(never)] fn solve_constraints_parallel(traversal: &mut WorkQueue<SharedLayoutContext, WorkQueueData>, layout_root: &mut FlowRef, profiler_metadata: Option<TimerMetadata>, time_profiler_chan: time::ProfilerChan, shared_layout_context: &SharedLayoutContext) { let _scope = layout_debug_scope!("solve_constraints_parallel"); // NOTE: this currently computes borders, so any pruning should separate that // operation out. parallel::traverse_flow_tree_preorder(layout_root, profiler_metadata, time_profiler_chan, shared_layout_context, traversal); } fn compute_abs_pos_and_build_display_list(&mut self, data: &Reflow, layout_root: &mut FlowRef, shared_layout_context: &mut SharedLayoutContext, rw_data: &mut LayoutThreadData) { let writing_mode = flow::base(&**layout_root).writing_mode; let (metadata, sender) = (self.profiler_metadata(), self.time_profiler_chan.clone()); profile(time::ProfilerCategory::LayoutDispListBuild, metadata.clone(), sender.clone(), || { flow::mut_base(flow_ref::deref_mut(layout_root)).stacking_relative_position = LogicalPoint::zero(writing_mode).to_physical(writing_mode, self.viewport_size); flow::mut_base(flow_ref::deref_mut(layout_root)).clip = ClippingRegion::from_rect(&data.page_clip_rect); if flow::base(&**layout_root).restyle_damage.contains(REPAINT) || rw_data.display_list.is_none() { let mut root_stacking_context = StackingContext::new(StackingContextId::new(0), StackingContextType::Real, &Rect::zero(), &Rect::zero(), 0, filter::T::new(Vec::new()), mix_blend_mode::T::normal, Matrix4D::identity(), Matrix4D::identity(), true, false, None); let display_list_entries = sequential::build_display_list_for_subtree(layout_root, &mut root_stacking_context, shared_layout_context); debug!("Done building display list."); let root_background_color = get_root_flow_background_color( flow_ref::deref_mut(layout_root)); let root_size = { let root_flow = flow::base(&**layout_root); if rw_data.stylist.viewport_constraints().is_some() { root_flow.position.size.to_physical(root_flow.writing_mode) } else { root_flow.overflow.scroll.size } }; let origin = Rect::new(Point2D::new(Au(0), Au(0)), root_size); root_stacking_context.bounds = origin; root_stacking_context.overflow = origin; root_stacking_context.layer_info = Some(LayerInfo::new(layout_root.layer_id(), ScrollPolicy::Scrollable, None, root_background_color)); rw_data.display_list = Some(Arc::new(DisplayList::new(root_stacking_context, &mut Some(display_list_entries)))) } if data.goal == ReflowGoal::ForDisplay { let display_list = (*rw_data.display_list.as_ref().unwrap()).clone(); if opts::get().dump_display_list { display_list.print(); } if opts::get().dump_display_list_json { println!("{}", serde_json::to_string_pretty(&display_list).unwrap()); } debug!("Layout done!"); self.epoch.next(); if opts::get().use_webrender { // TODO: Avoid the temporary conversion and build webrender sc/dl 
directly! let Epoch(epoch_number) = self.epoch; let epoch = webrender_traits::Epoch(epoch_number); let pipeline_id = self.id.to_webrender(); // TODO(gw) For now only create a root scrolling layer! let mut frame_builder = WebRenderFrameBuilder::new(pipeline_id); let root_scroll_layer_id = frame_builder.next_scroll_layer_id(); let sc_id = rw_data.display_list.as_ref().unwrap().convert_to_webrender( &mut self.webrender_api.as_mut().unwrap(), pipeline_id, epoch, Some(root_scroll_layer_id), &mut frame_builder); let root_background_color = get_root_flow_background_color( flow_ref::deref_mut(layout_root)); let root_background_color = webrender_traits::ColorF::new(root_background_color.r, root_background_color.g, root_background_color.b, root_background_color.a); let viewport_size = Size2D::new(self.viewport_size.width.to_f32_px(), self.viewport_size.height.to_f32_px()); let api = self.webrender_api.as_ref().unwrap(); api.set_root_stacking_context(sc_id, root_background_color, epoch, pipeline_id, viewport_size, frame_builder.stacking_contexts, frame_builder.display_lists, frame_builder.auxiliary_lists_builder .finalize()); } else { self.paint_chan .send(LayoutToPaintMsg::PaintInit(self.epoch, display_list)) .unwrap(); } } }); } /// The high-level routine that performs layout threads. fn handle_reflow<'a, 'b>(&mut self, data: &ScriptReflow, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let document = unsafe { ServoLayoutNode::new(&data.document) }; let document = document.as_document().unwrap(); debug!("layout: received layout request for: {}", self.url); let mut rw_data = possibly_locked_rw_data.lock(); let node: ServoLayoutNode = match document.root_node() { None => { // Since we cannot compute anything, give spec-required placeholders. debug!("layout: No root node: bailing"); match data.query_type { ReflowQueryType::ContentBoxQuery(_) => { rw_data.content_box_response = Rect::zero(); }, ReflowQueryType::ContentBoxesQuery(_) => { rw_data.content_boxes_response = Vec::new(); }, ReflowQueryType::HitTestQuery(_, _) => { rw_data.hit_test_response = (None, false); }, ReflowQueryType::NodeGeometryQuery(_) => { rw_data.client_rect_response = Rect::zero(); }, ReflowQueryType::NodeLayerIdQuery(_) => { rw_data.layer_id_response = None; }, ReflowQueryType::NodeScrollGeometryQuery(_) => { rw_data.scroll_area_response = Rect::zero(); }, ReflowQueryType::NodeOverflowQuery(_) => { rw_data.overflow_response = NodeOverflowResponse(None); }, ReflowQueryType::ResolvedStyleQuery(_, _, _) => { rw_data.resolved_style_response = None; }, ReflowQueryType::OffsetParentQuery(_) => { rw_data.offset_parent_response = OffsetParentResponse::empty(); }, ReflowQueryType::MarginStyleQuery(_) => { rw_data.margin_style_response = MarginStyleResponse::empty(); }, ReflowQueryType::NoQuery => {} } return; }, Some(x) => x, }; debug!("layout: received layout request for: {}", self.url); if log_enabled!(log::LogLevel::Debug) { node.dump(); } let initial_viewport = data.window_size.initial_viewport; let old_viewport_size = self.viewport_size; let current_screen_size = Size2D::new(Au::from_f32_px(initial_viewport.width.get()), Au::from_f32_px(initial_viewport.height.get())); // Calculate the actual viewport as per DEVICE-ADAPT § 6 let device = Device::new(MediaType::Screen, initial_viewport); Arc::get_mut(&mut rw_data.stylist).unwrap().set_device(device, &data.document_stylesheets); let constraints = rw_data.stylist.viewport_constraints().clone(); self.viewport_size = match constraints { Some(ref constraints) => { debug!("Viewport 
constraints: {:?}", constraints); // other rules are evaluated against the actual viewport Size2D::new(Au::from_f32_px(constraints.size.width.get()), Au::from_f32_px(constraints.size.height.get())) } None => current_screen_size, }; // Handle conditions where the entire flow tree is invalid. let mut needs_dirtying = false; let viewport_size_changed = self.viewport_size != old_viewport_size; if viewport_size_changed { if let Some(constraints) = constraints { // let the constellation know about the viewport constraints rw_data.constellation_chan .send(ConstellationMsg::ViewportConstrained(self.id, constraints)) .unwrap(); } // FIXME (#10104): Only dirty nodes affected by vh/vw/vmin/vmax styles. if data.document_stylesheets.iter().any(|sheet| sheet.dirty_on_viewport_size_change) { needs_dirtying = true; } } // If the entire flow tree is invalid, then it will be reflowed anyhow. needs_dirtying |= Arc::get_mut(&mut rw_data.stylist).unwrap().update(&data.document_stylesheets, data.stylesheets_changed); let needs_reflow = viewport_size_changed && !needs_dirtying; unsafe { if needs_dirtying { LayoutThread::dirty_all_nodes(node); } } if needs_reflow { if let Some(mut flow) = self.try_get_layout_root(node) { LayoutThread::reflow_all_nodes(flow_ref::deref_mut(&mut flow)); } } let modified_elements = document.drain_modified_elements(); if !needs_dirtying { for (el, snapshot) in modified_elements { let hint = rw_data.stylist.compute_restyle_hint(&el, &snapshot, el.get_state()); el.note_restyle_hint(hint); } } // Create a layout context for use throughout the following passes. let mut shared_layout_context = self.build_shared_layout_context(&*rw_data, viewport_size_changed, data.reflow_info.goal); if node.is_dirty() || node.has_dirty_descendants() { // Recalculate CSS styles and rebuild flows and fragments. profile(time::ProfilerCategory::LayoutStyleRecalc, self.profiler_metadata(), self.time_profiler_chan.clone(), || { // Perform CSS selector matching and flow construction. match self.parallel_traversal { None => { sequential::traverse_dom::<ServoLayoutNode, RecalcStyleAndConstructFlows>( node, &shared_layout_context); } Some(ref mut traversal) => { parallel::traverse_dom::<ServoLayoutNode, RecalcStyleAndConstructFlows>( node, &shared_layout_context, traversal); } } }); // TODO(pcwalton): Measure energy usage of text shaping, perhaps? let text_shaping_time = (font::get_and_reset_text_shaping_performance_counter() as u64) / (opts::get().layout_threads as u64); time::send_profile_data(time::ProfilerCategory::LayoutTextShaping, self.profiler_metadata(), self.time_profiler_chan.clone(), 0, text_shaping_time, 0, 0); // Retrieve the (possibly rebuilt) root flow. self.root_flow = self.try_get_layout_root(node); } // Perform post-style recalculation layout passes. 
self.perform_post_style_recalc_layout_passes(&data.reflow_info, &mut rw_data, &mut shared_layout_context); if let Some(mut root_flow) = self.root_flow.clone() { match data.query_type { ReflowQueryType::ContentBoxQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.content_box_response = process_content_box_request(node, &mut root_flow); }, ReflowQueryType::ContentBoxesQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.content_boxes_response = process_content_boxes_request(node, &mut root_flow); }, ReflowQueryType::HitTestQuery(point, update_cursor) => { let point = Point2D::new(Au::from_f32_px(point.x), Au::from_f32_px(point.y)); let result = match rw_data.display_list { None => panic!("Tried to hit test with no display list"), Some(ref display_list) => { display_list.hit_test(&point, &rw_data.stacking_context_scroll_offsets) } }; rw_data.hit_test_response = if result.len() > 0 { (Some(result[0]), update_cursor) } else { (None, update_cursor) }; }, ReflowQueryType::NodeGeometryQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.client_rect_response = process_node_geometry_request(node, &mut root_flow); }, ReflowQueryType::NodeScrollGeometryQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.scroll_area_response = process_node_scroll_area_request(node, &mut root_flow); }, ReflowQueryType::NodeOverflowQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.overflow_response = process_node_overflow_request(node); }, ReflowQueryType::NodeLayerIdQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.layer_id_response = Some(process_node_layer_id_request(node)); }, ReflowQueryType::ResolvedStyleQuery(node, ref pseudo, ref property) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.resolved_style_response = process_resolved_style_request(node, pseudo, property, &mut root_flow); }, ReflowQueryType::OffsetParentQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.offset_parent_response = process_offset_parent_query(node, &mut root_flow); }, ReflowQueryType::MarginStyleQuery(node) => { let node = unsafe { ServoLayoutNode::new(&node) }; rw_data.margin_style_response = process_margin_style_query(node); }, ReflowQueryType::NoQuery => {} } } } fn set_visible_rects<'a, 'b>(&mut self, new_visible_rects: Vec<(LayerId, Rect<Au>)>, possibly_locked_rw_data: &mut RwData<'a, 'b>) -> bool { let mut rw_data = possibly_locked_rw_data.lock(); // First, determine if we need to regenerate the display lists. This will happen if the // layers have moved more than `DISPLAY_PORT_THRESHOLD_SIZE_FACTOR` away from their last // positions. let mut must_regenerate_display_lists = false; let mut old_visible_rects = HashMap::with_hasher(Default::default()); let inflation_amount = Size2D::new(self.viewport_size.width * DISPLAY_PORT_THRESHOLD_SIZE_FACTOR, self.viewport_size.height * DISPLAY_PORT_THRESHOLD_SIZE_FACTOR); for &(ref layer_id, ref new_visible_rect) in &new_visible_rects { match self.visible_rects.get(layer_id) { None => { old_visible_rects.insert(*layer_id, *new_visible_rect); } Some(old_visible_rect) => { old_visible_rects.insert(*layer_id, *old_visible_rect); if !old_visible_rect.inflate(inflation_amount.width, inflation_amount.height) .intersects(new_visible_rect) { must_regenerate_display_lists = true; } } } } if !must_regenerate_display_lists { // Update `visible_rects` in case there are new layers that were discovered. 
self.visible_rects = Arc::new(old_visible_rects); return true } debug!("regenerating display lists!"); for &(ref layer_id, ref new_visible_rect) in &new_visible_rects { old_visible_rects.insert(*layer_id, *new_visible_rect); } self.visible_rects = Arc::new(old_visible_rects); // Regenerate the display lists. let reflow_info = Reflow { goal: ReflowGoal::ForDisplay, page_clip_rect: MAX_RECT, }; let mut layout_context = self.build_shared_layout_context(&*rw_data, false, reflow_info.goal); self.perform_post_main_layout_passes(&reflow_info, &mut *rw_data, &mut layout_context); true } fn set_stacking_context_scroll_states<'a, 'b>( &mut self, new_scroll_states: Vec<StackingContextScrollState>, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let mut rw_data = possibly_locked_rw_data.lock(); let mut script_scroll_states = vec![]; let mut layout_scroll_states = HashMap::new(); for new_scroll_state in &new_scroll_states { let offset = new_scroll_state.scroll_offset; layout_scroll_states.insert(new_scroll_state.stacking_context_id, offset); if new_scroll_state.stacking_context_id == StackingContextId::root() { script_scroll_states.push((UntrustedNodeAddress::from_id(0), offset)) } else if !new_scroll_state.stacking_context_id.is_special() && new_scroll_state.stacking_context_id.fragment_type() == FragmentType::FragmentBody { let id = new_scroll_state.stacking_context_id.id(); script_scroll_states.push((UntrustedNodeAddress::from_id(id), offset)) } } let _ = self.script_chan .send(ConstellationControlMsg::SetScrollState(self.id, script_scroll_states)); rw_data.stacking_context_scroll_offsets = layout_scroll_states } fn tick_all_animations<'a, 'b>(&mut self, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let mut rw_data = possibly_locked_rw_data.lock(); self.tick_animations(&mut rw_data); } pub fn tick_animations(&mut self, rw_data: &mut LayoutThreadData) { let reflow_info = Reflow { goal: ReflowGoal::ForDisplay, page_clip_rect: MAX_RECT, }; let mut layout_context = self.build_shared_layout_context(&*rw_data, false, reflow_info.goal); if let Some(mut root_flow) = self.root_flow.clone() { // Perform an abbreviated style recalc that operates without access to the DOM. let animations = self.running_animations.read().unwrap(); profile(time::ProfilerCategory::LayoutStyleRecalc, self.profiler_metadata(), self.time_profiler_chan.clone(), || { animation::recalc_style_for_animations(flow_ref::deref_mut(&mut root_flow), &*animations) }); } self.perform_post_style_recalc_layout_passes(&reflow_info, &mut *rw_data, &mut layout_context); } fn reflow_with_newly_loaded_web_font<'a, 'b>(&mut self, possibly_locked_rw_data: &mut RwData<'a, 'b>) { let mut rw_data = possibly_locked_rw_data.lock(); font_context::invalidate_font_caches(); let reflow_info = Reflow { goal: ReflowGoal::ForDisplay, page_clip_rect: MAX_RECT, }; let mut layout_context = self.build_shared_layout_context(&*rw_data, false, reflow_info.goal); // No need to do a style recalc here. if self.root_flow.is_none() { return } self.perform_post_style_recalc_layout_passes(&reflow_info, &mut *rw_data, &mut layout_context); } fn perform_post_style_recalc_layout_passes(&mut self, data: &Reflow, rw_data: &mut LayoutThreadData, layout_context: &mut SharedLayoutContext) { if let Some(mut root_flow) = self.root_flow.clone() { // Kick off animations if any were triggered, expire completed ones. 
animation::update_animation_state(&self.constellation_chan, &mut *self.running_animations.write().unwrap(), &mut *self.expired_animations.write().unwrap(), &self.new_animations_receiver, self.id); profile(time::ProfilerCategory::LayoutRestyleDamagePropagation, self.profiler_metadata(), self.time_profiler_chan.clone(), || { // Call `compute_layout_damage` even in non-incremental mode, because it sets flags // that are needed in both incremental and non-incremental traversals. let damage = flow_ref::deref_mut(&mut root_flow).compute_layout_damage(); if opts::get().nonincremental_layout || damage.contains(REFLOW_ENTIRE_DOCUMENT) { flow_ref::deref_mut(&mut root_flow).reflow_entire_document() } }); if opts::get().trace_layout { layout_debug::begin_trace(root_flow.clone()); } // Resolve generated content. profile(time::ProfilerCategory::LayoutGeneratedContent, self.profiler_metadata(), self.time_profiler_chan.clone(), || sequential::resolve_generated_content(&mut root_flow, &layout_context)); // Guess float placement. profile(time::ProfilerCategory::LayoutFloatPlacementSpeculation, self.profiler_metadata(), self.time_profiler_chan.clone(), || sequential::guess_float_placement(flow_ref::deref_mut(&mut root_flow))); // Perform the primary layout passes over the flow tree to compute the locations of all // the boxes. if flow::base(&*root_flow).restyle_damage.intersects(REFLOW | REFLOW_OUT_OF_FLOW) { profile(time::ProfilerCategory::LayoutMain, self.profiler_metadata(), self.time_profiler_chan.clone(), || { let profiler_metadata = self.profiler_metadata(); match self.parallel_traversal { None => { // Sequential mode. LayoutThread::solve_constraints(&mut root_flow, &layout_context) } Some(ref mut parallel) => { // Parallel mode. LayoutThread::solve_constraints_parallel(parallel, &mut root_flow, profiler_metadata, self.time_profiler_chan.clone(), &*layout_context); } } }); } profile(time::ProfilerCategory::LayoutStoreOverflow, self.profiler_metadata(), self.time_profiler_chan.clone(), || { let layout_context = LayoutContext::new(&*layout_context); sequential::store_overflow(&layout_context, flow_ref::deref_mut(&mut root_flow) as &mut Flow); }); self.perform_post_main_layout_passes(data, rw_data, layout_context); } } fn perform_post_main_layout_passes(&mut self, data: &Reflow, rw_data: &mut LayoutThreadData, layout_context: &mut SharedLayoutContext) { // Build the display list if necessary, and send it to the painter. if let Some(mut root_flow) = self.root_flow.clone() { self.compute_abs_pos_and_build_display_list(data, &mut root_flow, &mut *layout_context, rw_data); self.first_reflow = false; if opts::get().trace_layout { layout_debug::end_trace(); } if opts::get().dump_flow_tree { root_flow.print("Post layout flow tree".to_owned()); } self.generation += 1; } } unsafe fn dirty_all_nodes<N: LayoutNode>(node: N) { for node in node.traverse_preorder() { // TODO(cgaebel): mark nodes which are sensitive to media queries as // "changed": // > node.set_changed(true); node.set_dirty(true); node.set_dirty_descendants(true); } } fn reflow_all_nodes(flow: &mut Flow) { debug!("reflowing all nodes!"); flow::mut_base(flow).restyle_damage.insert(REPAINT | STORE_OVERFLOW | REFLOW); for child in flow::child_iter_mut(flow) { LayoutThread::reflow_all_nodes(child); } } /// Handles a message to destroy layout data. Layout data must be destroyed on *this* thread /// because the struct type is transmuted to a different type on the script side. 
unsafe fn handle_reap_style_and_layout_data(&self, data: OpaqueStyleAndLayoutData) { let ptr: *mut () = *data.ptr; let non_opaque: NonOpaqueStyleAndLayoutData = ptr as *mut _; let _ = Box::from_raw(non_opaque); } /// Returns profiling information which is passed to the time profiler. fn profiler_metadata(&self) -> Option<TimerMetadata> { Some(TimerMetadata { url: self.url.to_string(), iframe: if self.is_iframe { TimerMetadataFrameType::IFrame } else { TimerMetadataFrameType::RootWindow }, incremental: if self.first_reflow { TimerMetadataReflowType::FirstReflow } else { TimerMetadataReflowType::Incremental }, }) } } // The default computed value for background-color is transparent (see // http://dev.w3.org/csswg/css-backgrounds/#background-color). However, we // need to propagate the background color from the root HTML/Body // element (http://dev.w3.org/csswg/css-backgrounds/#special-backgrounds) if // it is non-transparent. The phrase in the spec "If the canvas background // is not opaque, what shows through is UA-dependent." is handled by rust-layers // clearing the frame buffer to white. This ensures that setting a background // color on an iframe element, while the iframe content itself has a default // transparent background color is handled correctly. fn get_root_flow_background_color(flow: &mut Flow) -> AzColor { if !flow.is_block_like() { return color::transparent() } let block_flow = flow.as_mut_block(); let kid = match block_flow.base.children.iter_mut().next() { None => return color::transparent(), Some(kid) => kid, }; if !kid.is_block_like() { return color::transparent() } let kid_block_flow = kid.as_block(); kid_block_flow.fragment .style .resolve_color(kid_block_flow.fragment.style.get_background().background_color) .to_gfx_color() }<|fim▁end|>
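# Some rows in this dump, like the layout_thread.rs one ending above, appear
# to carry an empty completion. A hedged sketch for sanity-checking
# (prompt, completion) pairs before using them; the token spellings are
# copied from the rows themselves.
def check_row(prompt, completion):
    special = ("<|file_name|>", "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>")
    return (prompt.startswith("<|file_name|>")
            and prompt.count("<|fim▁hole|>") == 1
            and prompt.endswith("<|fim▁end|>")
            and not any(token in completion for token in special))

def keep_valid(rows):
    return [(p, c) for p, c in rows if check_row(p, c)]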
<|file_name|>ValuesSelectedEvent.java<|end_file_name|><|fim▁begin|>package com.smartgwt.mobile.client.internal.widgets.events;

import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HasHandlers;<|fim▁hole|>

public class ValuesSelectedEvent extends GwtEvent<ValuesSelectedHandler> {

    // Event type, created lazily by getType() so unused event kinds cost nothing.
    private static Type<ValuesSelectedHandler> TYPE = null;

    public static Type<ValuesSelectedHandler> getType() {
        if (TYPE == null) TYPE = new Type<ValuesSelectedHandler>();
        return TYPE;
    }

    // Fires only when getType() has run at least once, i.e. when a handler
    // could have been registered; otherwise the event is silently skipped.
    public static <S extends HasValuesSelectedHandlers & HasHandlers> void fire(S source, Object[] values) {
        if (TYPE != null) {
            final ValuesSelectedEvent event = new ValuesSelectedEvent(values);
            source.fireEvent(event);
        }
    }

    private Object[] values;

    private ValuesSelectedEvent(Object[] values) {
        this.values = values;
    }

    public final Object[] getValues() {
        return values;
    }

    @Override
    public final Type<ValuesSelectedHandler> getAssociatedType() {
        return TYPE;
    }

    // Invoked by the event bus; forwards to the handler's internal callback.
    @Override
    protected void dispatch(ValuesSelectedHandler handler) {
        handler._onValuesSelected(this);
    }
}<|fim▁end|>
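# The Java row above keeps its event Type null until getType() is first
# called (typically when a handler registers), which makes fire() a no-op
# before that. A rough Python analogue of the same guard, illustrative only
# (the class and method names here are invented, not part of any sample):
class ValuesSelectedBus:
    _handlers = None  # stays None until the first subscription

    @classmethod
    def subscribe(cls, handler):
        if cls._handlers is None:
            cls._handlers = []
        cls._handlers.append(handler)

    @classmethod
    def fire(cls, values):
        if cls._handlers is None:  # nobody ever subscribed: skip, as the TYPE guard does
            return
        for handler in cls._handlers:
            handler(values)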
<|file_name|>array.ts<|end_file_name|><|fim▁begin|>/*----------------------------------------------------------------------------- | Copyright (c) 2014-2017, PhosphorJS Contributors | | Distributed under the terms of the BSD 3-Clause License. | | The full license is in the file LICENSE, distributed with this software. |----------------------------------------------------------------------------*/ /** * The namespace for array-specific algorithms. */ export namespace ArrayExt { /** * Find the index of the first occurrence of a value in an array. * * @param array - The array-like object to search. * * @param value - The value to locate in the array. Values are * compared using strict `===` equality. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the first occurrence of the value, or `-1` * if the value is not found. * * #### Notes * If `stop < start` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = ['one', 'two', 'three', 'four', 'one']; * ArrayExt.firstIndexOf(data, 'red'); // -1 * ArrayExt.firstIndexOf(data, 'one'); // 0 * ArrayExt.firstIndexOf(data, 'one', 1); // 4 * ArrayExt.firstIndexOf(data, 'two', 2); // -1 * ArrayExt.firstIndexOf(data, 'two', 2, 1); // 1 * ``` */ export function firstIndexOf<T>(array: ArrayLike<T>, value: T, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return -1; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let span: number; if (stop < start) { span = (stop + 1) + (n - start); } else { span = stop - start + 1; } for (let i = 0; i < span; ++i) { let j = (start + i) % n; if (array[j] === value) { return j; } } return -1; } /** * Find the index of the last occurrence of a value in an array. * * @param array - The array-like object to search. * * @param value - The value to locate in the array. Values are * compared using strict `===` equality. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the last occurrence of the value, or `-1` * if the value is not found. * * #### Notes * If `start < stop` the search will wrap at the front of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = ['one', 'two', 'three', 'four', 'one']; * ArrayExt.lastIndexOf(data, 'red'); // -1 * ArrayExt.lastIndexOf(data, 'one'); // 4 * ArrayExt.lastIndexOf(data, 'one', 1); // 0 * ArrayExt.lastIndexOf(data, 'two', 0); // -1 * ArrayExt.lastIndexOf(data, 'two', 0, 1); // 1 * ``` */ export function lastIndexOf<T>(array: ArrayLike<T>, value: T, start = -1, stop = 0): number { let n = array.length; if (n === 0) { return -1; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let span: number; if (start < stop) { span = (start + 1) + (n - stop); } else { span = start - stop + 1; } for (let i = 0; i < span; ++i) { let j = (start - i + n) % n; if (array[j] === value) { return j; } } return -1; } /** * Find the index of the first value which matches a predicate. * * @param array - The array-like object to search. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the first matching value, or `-1` if no * matching value is found. * * #### Notes * If `stop < start` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [1, 2, 3, 4, 3, 2, 1]; * ArrayExt.findFirstIndex(data, isEven); // 1 * ArrayExt.findFirstIndex(data, isEven, 4); // 5 * ArrayExt.findFirstIndex(data, isEven, 6); // -1 * ArrayExt.findFirstIndex(data, isEven, 6, 5); // 1 * ``` */ export function findFirstIndex<T>(array: ArrayLike<T>, fn: (value: T, index: number) => boolean, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return -1; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let span: number; if (stop < start) { span = (stop + 1) + (n - start); } else { span = stop - start + 1; } for (let i = 0; i < span; ++i) { let j = (start + i) % n; if (fn(array[j], j)) { return j; } } return -1; } /** * Find the index of the last value which matches a predicate. * * @param object - The array-like object to search. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the last matching value, or `-1` if no * matching value is found. * * #### Notes * If `start < stop` the search will wrap at the front of the array. * * #### Complexity * Linear. 
* * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [1, 2, 3, 4, 3, 2, 1]; * ArrayExt.findLastIndex(data, isEven); // 5 * ArrayExt.findLastIndex(data, isEven, 4); // 3 * ArrayExt.findLastIndex(data, isEven, 0); // -1 * ArrayExt.findLastIndex(data, isEven, 0, 1); // 5 * ``` */ export function findLastIndex<T>(array: ArrayLike<T>, fn: (value: T, index: number) => boolean, start = -1, stop = 0): number { let n = array.length; if (n === 0) { return -1; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let d: number; if (start < stop) { d = (start + 1) + (n - stop); } else { d = start - stop + 1; } for (let i = 0; i < d; ++i) { let j = (start - i + n) % n; if (fn(array[j], j)) { return j; } } return -1; } /** * Find the first value which matches a predicate. * * @param array - The array-like object to search. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The first matching value, or `undefined` if no matching * value is found. * * #### Notes * If `stop < start` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [1, 2, 3, 4, 3, 2, 1]; * ArrayExt.findFirstValue(data, isEven); // 2 * ArrayExt.findFirstValue(data, isEven, 2); // 4 * ArrayExt.findFirstValue(data, isEven, 6); // undefined * ArrayExt.findFirstValue(data, isEven, 6, 5); // 2 * ``` */ export function findFirstValue<T>(array: ArrayLike<T>, fn: (value: T, index: number) => boolean, start = 0, stop = -1): T | undefined { let index = findFirstIndex(array, fn, start, stop); return index !== -1 ? array[index] : undefined; } /** * Find the last value which matches a predicate. * * @param object - The array-like object to search. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @returns The last matching value, or `undefined` if no matching * value is found. * * #### Notes * If `start < stop` the search will wrap at the front of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [1, 2, 3, 4, 3, 2, 1]; * ArrayExt.findLastValue(data, isEven); // 2 * ArrayExt.findLastValue(data, isEven, 4); // 4 * ArrayExt.findLastValue(data, isEven, 0); // undefined * ArrayExt.findLastValue(data, isEven, 0, 1); // 2 * ``` */ export function findLastValue<T>(array: ArrayLike<T>, fn: (value: T, index: number) => boolean, start = -1, stop = 0): T | undefined { let index = findLastIndex(array, fn, start, stop); return index !== -1 ? array[index] : undefined; } /** * Find the index of the first element which compares `>=` to a value. * * @param array - The sorted array-like object to search. * * @param value - The value to locate in the array. * * @param fn - The 3-way comparison function to apply to the values. * It should return `< 0` if an element is less than a value, `0` if * an element is equal to a value, or `> 0` if an element is greater * than a value. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the first element which compares `>=` to the * value, or `length` if there is no such element. If the computed * index for `stop` is less than `start`, then the computed index * for `start` is returned. * * #### Notes * The array must already be sorted in ascending order according to * the comparison function. * * #### Complexity * Logarithmic. * * #### Undefined Behavior * Searching a range which is not sorted in ascending order. * * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function numberCmp(a: number, b: number): number { * return a - b; * } * * let data = [0, 3, 4, 7, 7, 9]; * ArrayExt.lowerBound(data, 0, numberCmp); // 0 * ArrayExt.lowerBound(data, 6, numberCmp); // 3 * ArrayExt.lowerBound(data, 7, numberCmp); // 3 * ArrayExt.lowerBound(data, -1, numberCmp); // 0 * ArrayExt.lowerBound(data, 10, numberCmp); // 6 * ``` */ export function lowerBound<T, U>(array: ArrayLike<T>, value: U, fn: (element: T, value: U) => number, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return 0; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let begin = start; let span = stop - start + 1; while (span > 0) { let half = span >> 1; let middle = begin + half; if (fn(array[middle], value) < 0) { begin = middle + 1; span -= half + 1; } else { span = half; } } return begin; } /** * Find the index of the first element which compares `>` than a value. * * @param array - The sorted array-like object to search. * * @param value - The value to locate in the array. * * @param fn - The 3-way comparison function to apply to the values. * It should return `< 0` if an element is less than a value, `0` if * an element is equal to a value, or `> 0` if an element is greater * than a value. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. 
Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the first element which compares `>` than the * value, or `length` if there is no such element. If the computed * index for `stop` is less than `start`, then the computed index * for `start` is returned. * * #### Notes * The array must already be sorted in ascending order according to * the comparison function. * * #### Complexity * Logarithmic. * * #### Undefined Behavior * Searching a range which is not sorted in ascending order. * * A `start` or `stop` which is non-integral. * * Modifying the length of the array while searching. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function numberCmp(a: number, b: number): number { * return a - b; * } * * let data = [0, 3, 4, 7, 7, 9]; * ArrayExt.upperBound(data, 0, numberCmp); // 1 * ArrayExt.upperBound(data, 6, numberCmp); // 3 * ArrayExt.upperBound(data, 7, numberCmp); // 5 * ArrayExt.upperBound(data, -1, numberCmp); // 0 * ArrayExt.upperBound(data, 10, numberCmp); // 6 * ``` */ export function upperBound<T, U>(array: ArrayLike<T>, value: U, fn: (element: T, value: U) => number, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return 0; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let begin = start; let span = stop - start + 1; while (span > 0) { let half = span >> 1; let middle = begin + half; if (fn(array[middle], value) > 0) { span = half; } else { begin = middle + 1; span -= half + 1; } } return begin; } /** * Test whether two arrays are shallowly equal. * * @param a - The first array-like object to compare. * * @param b - The second array-like object to compare. * * @param fn - The comparison function to apply to the elements. It * should return `true` if the elements are "equal". The default * compares elements using strict `===` equality. * * @returns Whether the two arrays are shallowly equal. * * #### Complexity * Linear. * * #### Undefined Behavior * Modifying the length of the arrays while comparing. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let d1 = [0, 3, 4, 7, 7, 9]; * let d2 = [0, 3, 4, 7, 7, 9]; * let d3 = [42]; * ArrayExt.shallowEqual(d1, d2); // true * ArrayExt.shallowEqual(d2, d3); // false * ``` */ export function shallowEqual<T>(a: ArrayLike<T>, b: ArrayLike<T>, fn?: (a: T, b: T) => boolean): boolean { // Check for object identity first. if (a === b) { return true; } // Bail early if the lengths are different. if (a.length !== b.length) { return false; } // Compare each element for equality. for (let i = 0, n = a.length; i < n; ++i) { if (fn ? !fn(a[i], b[i]) : a[i] !== b[i]) { return false; } } // The array are shallowly equal. return true; } /** * Create a slice of an array subject to an optional step. * * @param array - The array-like object of interest. * * @param options - The options for configuring the slice. * * @returns A new array with the specified values. * * @throws An exception if the slice `step` is `0`. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start`, `stop`, or `step` which is non-integral. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 3, 4, 7, 7, 9]; * ArrayExt.slice(data); // [0, 3, 4, 7, 7, 9] * ArrayExt.slice(data, { start: 2 }); // [4, 7, 7, 9] * ArrayExt.slice(data, { start: 0, stop: 4 }); // [0, 3, 4, 7] * ArrayExt.slice(data, { step: 2 }); // [0, 4, 7] * ArrayExt.slice(data, { step: -1 }); // [9, 7, 7, 4, 3, 0] * ``` */ export function slice<T>(array: ArrayLike<T>, options: slice.IOptions = {}): T[] { // Extract the options. let { start, stop, step } = options; // Set up the `step` value. if (step === undefined) { step = 1; } // Validate the step size. if (step === 0) { throw new Error('Slice `step` cannot be zero.'); } // Look up the length of the array. let n = array.length; // Set up the `start` value. if (start === undefined) { start = step < 0 ? n - 1 : 0; } else if (start < 0) { start = Math.max(start + n, step < 0 ? -1 : 0); } else if (start >= n) { start = step < 0 ? n - 1 : n; } // Set up the `stop` value. if (stop === undefined) { stop = step < 0 ? -1 : n; } else if (stop < 0) { stop = Math.max(stop + n, step < 0 ? -1 : 0); } else if (stop >= n) { stop = step < 0 ? n - 1 : n; } // Compute the slice length. let length; if ((step < 0 && stop >= start) || (step > 0 && start >= stop)) { length = 0; } else if (step < 0) { length = Math.floor((stop - start + 1) / step + 1); } else { length = Math.floor((stop - start - 1) / step + 1); } // Compute the sliced result. let result: T[] = []; for (let i = 0; i < length; ++i) { result[i] = array[start + i * step]; } // Return the result. return result; } /** * The namespace for the `slice` function statics. */ export namespace slice { /** * The options for the `slice` function. */ export interface IOptions { /** * The starting index of the slice, inclusive. * * Negative values are taken as an offset from the end * of the array. * * The default is `0` if `step > 0` else `n - 1`. */ start?: number; /** * The stopping index of the slice, exclusive. * * Negative values are taken as an offset from the end * of the array. * * The default is `n` if `step > 0` else `-n - 1`. */ stop?: number; /** * The step value for the slice. * * This must not be `0`. * * The default is `1`. */ step?: number; } } /** * An array-like object which supports item assignment. */ export type MutableArrayLike<T> = { readonly length: number; [index: number]: T; }; /** * Move an element in an array from one index to another. * * @param array - The mutable array-like object of interest. * * @param fromIndex - The index of the element to move. Negative * values are taken as an offset from the end of the array. * * @param toIndex - The target index of the element. Negative * values are taken as an offset from the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `fromIndex` or `toIndex` which is non-integral. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 1, 2, 3, 4]; * ArrayExt.move(data, 1, 2); // [0, 2, 1, 3, 4] * ArrayExt.move(data, 4, 2); // [0, 2, 4, 1, 3] * ``` */ export function move<T>(array: MutableArrayLike<T>, fromIndex: number, toIndex: number): void { let n = array.length; if (n <= 1) { return; } if (fromIndex < 0) { fromIndex = Math.max(0, fromIndex + n); } else { fromIndex = Math.min(fromIndex, n - 1); } if (toIndex < 0) { toIndex = Math.max(0, toIndex + n); } else { toIndex = Math.min(toIndex, n - 1); } if (fromIndex === toIndex) { return; } let value = array[fromIndex]; let d = fromIndex < toIndex ? 1 : -1; for (let i = fromIndex; i !== toIndex; i += d) { array[i] = array[i + d]; } array[toIndex] = value; } /** * Reverse an array in-place. * * @param array - The mutable array-like object of interest. * * @param start - The index of the first element in the range to be * reversed, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * reversed, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` index which is non-integral. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 1, 2, 3, 4]; * ArrayExt.reverse(data, 1, 3); // [0, 3, 2, 1, 4] * ArrayExt.reverse(data, 3); // [0, 3, 2, 4, 1] * ArrayExt.reverse(data); // [1, 4, 2, 3, 0] * ``` */ export function reverse<T>(array: MutableArrayLike<T>, start = 0, stop = -1): void { let n = array.length; if (n <= 1) { return; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } while (start < stop) { let a = array[start]; let b = array[stop]; array[start++] = b; array[stop--] = a; } } /** * Rotate the elements of an array in-place. * * @param array - The mutable array-like object of interest. * * @param delta - The amount of rotation to apply to the elements. A * positive value will rotate the elements to the left. A negative * value will rotate the elements to the right. * * @param start - The index of the first element in the range to be * rotated, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * rotated, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * #### Complexity * Linear. 
*<|fim▁hole|> * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 1, 2, 3, 4]; * ArrayExt.rotate(data, 2); // [2, 3, 4, 0, 1] * ArrayExt.rotate(data, -2); // [0, 1, 2, 3, 4] * ArrayExt.rotate(data, 10); // [0, 1, 2, 3, 4] * ArrayExt.rotate(data, 9); // [4, 0, 1, 2, 3] * ArrayExt.rotate(data, 2, 1, 3); // [4, 2, 0, 1, 3] * ``` */ export function rotate<T>(array: MutableArrayLike<T>, delta: number, start = 0, stop = -1): void { let n = array.length; if (n <= 1) { return; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } if (start >= stop) { return; } let length = stop - start + 1; if (delta > 0) { delta = delta % length; } else if (delta < 0) { delta = ((delta % length) + length) % length; } if (delta === 0) { return; } let pivot = start + delta; reverse(array, start, pivot - 1); reverse(array, pivot, stop); reverse(array, start, stop); } /** * Fill an array with a static value. * * @param array - The mutable array-like object to fill. * * @param value - The static value to use to fill the array. * * @param start - The index of the first element in the range to be * filled, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * filled, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * #### Notes * If `stop < start` the fill will wrap at the end of the array. * * #### Complexity * Linear. * * #### Undefined Behavior * A `start` or `stop` which is non-integral. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = ['one', 'two', 'three', 'four']; * ArrayExt.fill(data, 'r'); // ['r', 'r', 'r', 'r'] * ArrayExt.fill(data, 'g', 1); // ['r', 'g', 'g', 'g'] * ArrayExt.fill(data, 'b', 2, 3); // ['r', 'g', 'b', 'b'] * ArrayExt.fill(data, 'z', 3, 1); // ['z', 'z', 'b', 'z'] * ``` */ export function fill<T>(array: MutableArrayLike<T>, value: T, start = 0, stop = -1): void { let n = array.length; if (n === 0) { return; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let span: number; if (stop < start) { span = (stop + 1) + (n - start); } else { span = stop - start + 1; } for (let i = 0; i < span; ++i) { array[(start + i) % n] = value; } } /** * Insert a value into an array at a specific index. * * @param array - The array of interest. * * @param index - The index at which to insert the value. Negative * values are taken as an offset from the end of the array. * * @param value - The value to set at the specified index. * * #### Complexity * Linear. * * #### Undefined Behavior * An `index` which is non-integral. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 1, 2]; * ArrayExt.insert(data, 0, -1); // [-1, 0, 1, 2] * ArrayExt.insert(data, 2, 12); // [-1, 0, 12, 1, 2] * ArrayExt.insert(data, -1, 7); // [-1, 0, 12, 1, 7, 2] * ArrayExt.insert(data, 6, 19); // [-1, 0, 12, 1, 7, 2, 19] * ``` */ export function insert<T>(array: Array<T>, index: number, value: T): void { let n = array.length; if (index < 0) { index = Math.max(0, index + n); } else { index = Math.min(index, n); } for (let i = n; i > index; --i) { array[i] = array[i - 1]; } array[index] = value; } /** * Remove and return a value at a specific index in an array. * * @param array - The array of interest. * * @param index - The index of the value to remove. Negative values * are taken as an offset from the end of the array. * * @returns The value at the specified index, or `undefined` if the * index is out of range. * * #### Complexity * Linear. * * #### Undefined Behavior * An `index` which is non-integral. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 12, 23, 39, 14, 12, 75]; * ArrayExt.removeAt(data, 2); // 23 * ArrayExt.removeAt(data, -2); // 12 * ArrayExt.removeAt(data, 10); // undefined; * ``` */ export function removeAt<T>(array: Array<T>, index: number): T | undefined { let n = array.length; if (index < 0) { index += n; } if (index < 0 || index >= n) { return undefined; } let value = array[index]; for (let i = index + 1; i < n; ++i) { array[i - 1] = array[i]; } array.length = n - 1; return value; } /** * Remove the first occurrence of a value from an array. * * @param array - The array of interest. * * @param value - The value to remove from the array. Values are * compared using strict `===` equality. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The index of the removed value, or `-1` if the value * is not contained in the array. * * #### Notes * If `stop < start` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 12, 23, 39, 14, 12, 75]; * ArrayExt.removeFirstOf(data, 12); // 1 * ArrayExt.removeFirstOf(data, 17); // -1 * ArrayExt.removeFirstOf(data, 39, 3); // -1 * ArrayExt.removeFirstOf(data, 39, 3, 2); // 2 * ``` */ export function removeFirstOf<T>(array: Array<T>, value: T, start = 0, stop = -1): number { let index = firstIndexOf(array, value, start, stop); if (index !== -1) { removeAt(array, index); } return index; } /** * Remove the last occurrence of a value from an array. * * @param array - The array of interest. * * @param value - The value to remove from the array. Values are * compared using strict `===` equality. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. 
* * @returns The index of the removed value, or `-1` if the value * is not contained in the array. * * #### Notes * If `start < stop` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [0, 12, 23, 39, 14, 12, 75]; * ArrayExt.removeLastOf(data, 12); // 5 * ArrayExt.removeLastOf(data, 17); // -1 * ArrayExt.removeLastOf(data, 39, 2); // -1 * ArrayExt.removeLastOf(data, 39, 2, 3); // 3 * ``` */ export function removeLastOf<T>(array: Array<T>, value: T, start = -1, stop = 0): number { let index = lastIndexOf(array, value, start, stop); if (index !== -1) { removeAt(array, index); } return index; } /** * Remove all occurrences of a value from an array. * * @param array - The array of interest. * * @param value - The value to remove from the array. Values are * compared using strict `===` equality. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The number of elements removed from the array. * * #### Notes * If `stop < start` the search will conceptually wrap at the end of * the array, however the array will be traversed front-to-back. * * #### Complexity * Linear. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * let data = [14, 12, 23, 39, 14, 12, 19, 14]; * ArrayExt.removeAllOf(data, 12); // 2 * ArrayExt.removeAllOf(data, 17); // 0 * ArrayExt.removeAllOf(data, 14, 1, 4); // 1 * ``` */ export function removeAllOf<T>(array: Array<T>, value: T, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return 0; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let count = 0; for (let i = 0; i < n; ++i) { if (start <= stop && (i >= start && i <= stop) && array[i] === value) { count++; } else if (stop < start && (i <= stop || i >= start) && array[i] === value) { count++; } else if (count > 0) { array[i - count] = array[i]; } } if (count > 0) { array.length = n - count; } return count; } /** * Remove the first occurrence of a value which matches a predicate. * * @param array - The array of interest. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The removed `{ index, value }`, which will be `-1` and * `undefined` if the value is not contained in the array. * * #### Notes * If `stop < start` the search will wrap at the end of the array. * * #### Complexity * Linear. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [0, 12, 23, 39, 14, 12, 75]; * ArrayExt.removeFirstWhere(data, isEven); // { index: 0, value: 0 } * ArrayExt.removeFirstWhere(data, isEven, 2); // { index: 3, value: 14 } * ArrayExt.removeFirstWhere(data, isEven, 4); // { index: -1, value: undefined } * ``` */ export function removeFirstWhere<T>(array: Array<T>, fn: (value: T, index: number) => boolean, start = 0, stop = -1): { index: number, value: T | undefined } { let value: T | undefined; let index = findFirstIndex(array, fn, start, stop); if (index !== -1) { value = removeAt(array, index); } return { index, value }; } /** * Remove the last occurrence of a value which matches a predicate. * * @param array - The array of interest. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @returns The removed `{ index, value }`, which will be `-1` and * `undefined` if the value is not contained in the array. * * #### Notes * If `start < stop` the search will wrap at the end of the array. * * #### Complexity * Linear. * * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * let data = [0, 12, 23, 39, 14, 12, 75]; * ArrayExt.removeLastWhere(data, isEven); // { index: 5, value: 12 } * ArrayExt.removeLastWhere(data, isEven, 2); // { index: 1, value: 12 } * ArrayExt.removeLastWhere(data, isEven, 2, 1); // { index: -1, value: undefined } * ``` */ export function removeLastWhere<T>(array: Array<T>, fn: (value: T, index: number) => boolean, start = -1, stop = 0): { index: number, value: T | undefined } { let value: T | undefined; let index = findLastIndex(array, fn, start, stop); if (index !== -1) { value = removeAt(array, index); } return { index, value }; } /** * Remove all occurrences of values which match a predicate. * * @param array - The array of interest. * * @param fn - The predicate function to apply to the values. * * @param start - The index of the first element in the range to be * searched, inclusive. The default value is `0`. Negative values * are taken as an offset from the end of the array. * * @param stop - The index of the last element in the range to be * searched, inclusive. The default value is `-1`. Negative values * are taken as an offset from the end of the array. * * @returns The number of elements removed from the array. * * #### Notes * If `stop < start` the search will conceptually wrap at the end of * the array, however the array will be traversed front-to-back. * * #### Complexity * Linear. 
* * #### Example * ```typescript * import { ArrayExt } from '@phosphor/algorithm'; * * function isEven(value: number): boolean { * return value % 2 === 0; * } * * function isNegative(value: number): boolean { * return value < 0; * } * * let data = [0, 12, -13, -9, 23, 39, 14, -15, 12, 75]; * ArrayExt.removeAllWhere(data, isEven); // 4 * ArrayExt.removeAllWhere(data, isNegative, 0, 3); // 2 * ``` */ export function removeAllWhere<T>(array: Array<T>, fn: (value: T, index: number) => boolean, start = 0, stop = -1): number { let n = array.length; if (n === 0) { return 0; } if (start < 0) { start = Math.max(0, start + n); } else { start = Math.min(start, n - 1); } if (stop < 0) { stop = Math.max(0, stop + n); } else { stop = Math.min(stop, n - 1); } let count = 0; for (let i = 0; i < n; ++i) { if (start <= stop && (i >= start && i <= stop) && fn(array[i], i)) { count++; } else if (stop < start && (i <= stop || i >= start) && fn(array[i], i)) { count++; } else if (count > 0) { array[i - count] = array[i]; } } if (count > 0) { array.length = n - count; } return count; } }<|fim▁end|>
* #### Undefined Behavior * A `delta`, `start`, or `stop` which is non-integral. * * #### Example
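The `lowerBound`/`upperBound` pair in the row above does binary search by halving a span counter instead of tracking lo/hi indices. A hedged Python transcription of `lowerBound` (same clamping and halving logic; `fn` is the 3-way comparator), handy for checking the documented examples:

```python
def lower_bound(array, value, fn, start=0, stop=-1):
    # Mirrors ArrayExt.lowerBound: clamp the inclusive [start, stop] range,
    # then shrink the active span until it is empty.
    n = len(array)
    if n == 0:
        return 0
    start = max(0, start + n) if start < 0 else min(start, n - 1)
    stop = max(0, stop + n) if stop < 0 else min(stop, n - 1)
    begin, span = start, stop - start + 1
    while span > 0:
        half = span >> 1
        middle = begin + half
        if fn(array[middle], value) < 0:
            begin = middle + 1   # discard the left half plus the middle
            span -= half + 1
        else:
            span = half          # keep only the left half
    return begin

data = [0, 3, 4, 7, 7, 9]
assert lower_bound(data, 7, lambda a, b: a - b) == 3
assert lower_bound(data, 10, lambda a, b: a - b) == 6
```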
<|file_name|>run_model.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf8 -*- import os import argparse import tensorflow as tf from gym import wrappers from yarll.environment.registration import make class ModelRunner(object): """ Run an already learned model. Currently only supports one variation of an environment. """ def __init__(self, env, model_directory: str, save_directory: str, **usercfg) -> None: super(ModelRunner, self).__init__() self.env = env self.model_directory = model_directory self.save_directory = save_directory self.config = dict( episode_max_length=self.env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps'), repeat_n_actions=1 ) self.config.update(usercfg) self.session = tf.Session() self.saver = tf.train.import_meta_graph(os.path.join(self.model_directory, "model.meta")) self.saver.restore(self.session, os.path.join(self.model_directory, "model")) self.action = tf.get_collection("action")[0] self.states = tf.get_collection("states")[0] def choose_action(self, state): """Choose an action.""" return self.session.run([self.action], feed_dict={self.states: [state]})[0] def get_trajectory(self, render: bool = False): """ Run the agent-environment loop for one whole episode (trajectory), purely for its side effects. """ state = self.env.reset() for _ in range(self.config["episode_max_length"]): action = self.choose_action(state) for _ in range(self.config["repeat_n_actions"]): state, _, done, _ = self.env.step(action) if done: # Don't continue if episode has already ended break if done: break if render: self.env.render() return def run(self): for _ in range(self.config["n_iter"]): self.get_trajectory() parser = argparse.ArgumentParser() parser.add_argument("environment", metavar="env", type=str, help="Gym environment to execute the model on.") parser.add_argument("model_directory", type=str, help="Directory from where model files are loaded.") parser.add_argument("save_directory", type=str, help="Directory where results of running the model are saved")<|fim▁hole|> def main(): args = parser.parse_args() env = make(args.environment) runner = ModelRunner(env, args.model_directory, args.save_directory, n_iter=args.iterations) try: runner.env = wrappers.Monitor(runner.env, args.save_directory, video_callable=False, force=True) runner.run() except KeyboardInterrupt: pass if __name__ == "__main__": main()<|fim▁end|>
parser.add_argument("--iterations", default=100, type=int, help="Number of iterations to run the algorithm.")
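`ModelRunner.__init__` above uses a defaults-then-overrides config merge, which is how `n_iter` from `main()` reaches `run()`. A small sketch of that pattern in isolation (names mirror the row above; values are illustrative):

```python
def make_config(defaults, **usercfg):
    # Same pattern as ModelRunner.__init__: start from the defaults,
    # then merge caller-supplied overrides on top.
    config = dict(defaults)
    config.update(usercfg)
    return config

cfg = make_config({"episode_max_length": 200, "repeat_n_actions": 1}, n_iter=100)
assert cfg["n_iter"] == 100 and cfg["repeat_n_actions"] == 1
```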
<|file_name|>test-helpers.js<|end_file_name|><|fim▁begin|>var assert = require('assert'); var Helpers = require('../../lib/Helpers'); var common = require('../common'); <|fim▁hole|> Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches'"), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches'" ); assert.equal( Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE ?", ['peaches']), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches'" ); assert.equal( Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE ? AND `number` > ?", ['peaches', 12]), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches' AND `number` > 12" ); assert.equal( Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE ? AND `number` == ?", ['peaches']), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches' AND `number` == NULL" ); assert.equal( Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.??) LIKE ? AND abc.?? > ?", ['stuff', 'peaches', 'number', 12]), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches' AND abc.`number` > 12" ); assert.equal( Helpers.escapeQuery(Dialect, "SELECT * FROM abc WHERE LOWER(abc.??) LIKE ? AND ?? == ?", ['stuff', 'peaches', 'number']), "SELECT * FROM abc WHERE LOWER(abc.`stuff`) LIKE 'peaches' AND `number` == NULL" ); // Should match at most 2 '?' at a time assert.equal( Helpers.escapeQuery(Dialect, "?????", ['a', 'b', 'c']), "`a``b`'c'" ); // Should not modify provided array var arr = ['a', 'b', 'c']; Helpers.escapeQuery(Dialect, "?? ?? ?", arr); // exercise the function so the check below is meaningful assert.equal( arr.join(','), 'a,b,c' )<|fim▁end|>
var Dialect = common.getDialect('mysql'); assert.equal(
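The assertions above pin down `escapeQuery`'s placeholder rules: `??` becomes a backtick-quoted identifier, `?` a SQL-escaped value, missing arguments render as `NULL`, and at most two `?` characters are consumed per match. A hedged Python re-implementation of that observed behavior (a sketch of the contract the tests describe, not node-orm's actual `Helpers.escapeQuery`):

```python
import re

def escape_query(query, args=()):
    # Copy so the caller's argument list is not consumed in place,
    # matching the "should not modify provided array" test above.
    args = list(args)

    def sub(match):
        if not args:
            return "NULL"  # exhausted arguments render as NULL
        val = args.pop(0)
        if match.group(0) == "??":
            return "`%s`" % val  # identifier placeholder
        # value placeholder: numbers bare, everything else quoted
        return str(val) if isinstance(val, (int, float)) else "'%s'" % val

    # Alternation tries "??" before "?", i.e. at most 2 '?' per match.
    return re.sub(r"\?\?|\?", sub, query)

assert escape_query("a.?? > ?", ["n", 12]) == "a.`n` > 12"
assert escape_query("x == ?") == "x == NULL"
assert escape_query("?????", ["a", "b", "c"]) == "`a``b`'c'"
```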
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from networkx.algorithms.chordal.chordal_alg import *<|fim▁end|>
<|file_name|>TopoVolumeTypeItemProvider.java<|end_file_name|><|fim▁begin|>/** * <copyright> * </copyright> * * $Id$ */ package net.opengis.gml.provider; import java.util.Collection; import java.util.List; import net.opengis.gml.GmlFactory; import net.opengis.gml.GmlPackage; import net.opengis.gml.TopoVolumeType; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.util.FeatureMap; import org.eclipse.emf.ecore.util.FeatureMapUtil; import org.eclipse.emf.edit.provider.IEditingDomainItemProvider; import org.eclipse.emf.edit.provider.IItemLabelProvider; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor; import org.eclipse.emf.edit.provider.IItemPropertySource; import org.eclipse.emf.edit.provider.IStructuredItemContentProvider; import org.eclipse.emf.edit.provider.ITreeItemContentProvider; import org.eclipse.emf.edit.provider.ViewerNotification; /** * This is the item provider adapter for a {@link net.opengis.gml.TopoVolumeType} object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class TopoVolumeTypeItemProvider extends AbstractTopologyTypeItemProvider implements IEditingDomainItemProvider, IStructuredItemContentProvider, ITreeItemContentProvider, IItemLabelProvider, IItemPropertySource { /** * This constructs an instance from a factory and a notifier. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TopoVolumeTypeItemProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /** * This returns the property descriptors for the adapted class. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) { if (itemPropertyDescriptors == null) { super.getPropertyDescriptors(object); } return itemPropertyDescriptors; } /** * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) { if (childrenFeatures == null) { super.getChildrenFeatures(object); childrenFeatures.add(GmlPackage.eINSTANCE.getTopoVolumeType_DirectedTopoSolid()); } return childrenFeatures; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EStructuralFeature getChildFeature(Object object, Object child) { // Check the type of the specified child object and return the proper feature to use for // adding (see {@link AddCommand}) it as a child. return super.getChildFeature(object, child); } /** * This returns TopoVolumeType.gif. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object getImage(Object object) { return overlayImage(object, getResourceLocator().getImage("full/obj16/TopoVolumeType")); } /** * This returns the label text for the adapted class. * <!-- begin-user-doc --> * <!-- end-user-doc --><|fim▁hole|> @Override public String getText(Object object) { String label = ((TopoVolumeType)object).getId(); return label == null || label.length() == 0 ? 
getString("_UI_TopoVolumeType_type") : getString("_UI_TopoVolumeType_type") + " " + label; } /** * This handles model notifications by calling {@link #updateChildren} to update any cached * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void notifyChanged(Notification notification) { updateChildren(notification); switch (notification.getFeatureID(TopoVolumeType.class)) { case GmlPackage.TOPO_VOLUME_TYPE__DIRECTED_TOPO_SOLID: fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false)); return; } super.notifyChanged(notification); } /** * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children * that can be created under this object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) { super.collectNewChildDescriptors(newChildDescriptors, object); newChildDescriptors.add (createChildParameter (GmlPackage.eINSTANCE.getTopoVolumeType_DirectedTopoSolid(), GmlFactory.eINSTANCE.createDirectedTopoSolidPropertyType())); } /** * This returns the label text for {@link org.eclipse.emf.edit.command.CreateChildCommand}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String getCreateChildText(Object owner, Object feature, Object child, Collection<?> selection) { Object childFeature = feature; Object childObject = child; if (childFeature instanceof EStructuralFeature && FeatureMapUtil.isFeatureMap((EStructuralFeature)childFeature)) { FeatureMap.Entry entry = (FeatureMap.Entry)childObject; childFeature = entry.getEStructuralFeature(); childObject = entry.getValue(); } boolean qualify = childFeature == GmlPackage.eINSTANCE.getAbstractGMLType_Name() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CoordinateOperationName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_CsName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_DatumName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_EllipsoidName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_GroupName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MeridianName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_MethodName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_ParameterName() || childFeature == GmlPackage.eINSTANCE.getDocumentRoot_SrsName(); if (qualify) { return getString ("_UI_CreateChild_text2", new Object[] { getTypeText(childObject), getFeatureText(childFeature), getTypeText(owner) }); } return super.getCreateChildText(owner, feature, child, selection); } }<|fim▁end|>
* @generated */
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate mml; fn main() {<|fim▁hole|> // let _ = mml::src2both("src", dest.replace("-", "_").as_str()); }<|fim▁end|>
// let dest: String = concat!("target/doc/", env!("CARGO_PKG_NAME")).to_string();
<|file_name|>new_executable.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "os" "github.com/remeh/mehtadata/db" "github.com/remeh/mehtadata/model" ) func NewExecutable(flags Flags) (int64, bool, error) { // mandatory name := os.Getenv("NAME") filepath := os.Getenv("FILEPATH") platformName := os.Getenv("PLATFORM_NAME") // not mandatory description := os.Getenv("DESCRIPTION") genres := os.Getenv("GENRES") publisher := os.Getenv("PUBLISHER") developer := os.Getenv("DEVELOPER") releaseDate := os.Getenv("RELEASE_DATE")<|fim▁hole|> if StringsHasContent(platformName, name, filepath) { ok = true } if !ok { fmt.Println(`Can't create a new executable. Mandatory infos: NAME : name of the executable to create FILEPATH : filepath to the executable to start PLATFORM_NAME : name of the platform containing this executable Not mandatory: DESCRIPTION : description of the executable GENRES : genres of the executable PUBLISHER : publisher of the executable DEVELOPER : developer of the executable RELEASE_DATE : release date of the executable PLAYERS : players of the executable RATING : rating of the executable `) return -1, false, fmt.Errorf("Missing fields.") } exec := model.Executable{ Name: name, Filepath: filepath, Description: description, Genres: genres, Publisher: publisher, Developer: developer, ReleaseDate: releaseDate, Players: players, Rating: rating, } return db.CreateExecutable(flags.DestSqlite, platformName, exec) }<|fim▁end|>
players := os.Getenv("PLAYERS") rating := os.Getenv("RATING") ok := false
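The Go row above splits environment variables into mandatory and optional sets and aborts with a usage message when a mandatory one is empty. The same validation pattern sketched in Python (variable names copied from the row; the returned dict shape is an assumption for illustration):

```python
import os

MANDATORY = ("NAME", "FILEPATH", "PLATFORM_NAME")
OPTIONAL = ("DESCRIPTION", "GENRES", "PUBLISHER", "DEVELOPER",
            "RELEASE_DATE", "PLAYERS", "RATING")

def read_executable_env():
    # Fail fast on missing mandatory fields, mirroring the ok flag
    # plus usage-message check in NewExecutable.
    missing = [k for k in MANDATORY if not os.getenv(k)]
    if missing:
        raise ValueError("Missing fields: %s" % ", ".join(missing))
    # Optional fields default to empty strings, as in the Go version.
    return {k.lower(): os.getenv(k, "") for k in MANDATORY + OPTIONAL}
```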
<|file_name|>Activator.java<|end_file_name|><|fim▁begin|>package com.gilecode.langlocker; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.ui.plugin.AbstractUIPlugin; import org.osgi.framework.BundleContext; /** * The activator class controls the plug-in life cycle */ public class Activator extends AbstractUIPlugin {<|fim▁hole|> // The shared instance private static Activator plugin; /** * The constructor */ public Activator() { super(); } /* * (non-Javadoc) * * @see * org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext * ) */ public void start(BundleContext context) throws Exception { super.start(context); plugin = this; } /* * (non-Javadoc) * * @see * org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext * ) */ public void stop(BundleContext context) throws Exception { plugin = null; super.stop(context); } /** * Returns the shared instance * * @return the shared instance */ public static Activator getDefault() { return plugin; } /** * Returns an image descriptor for the image file at the given plug-in * relative path * * @param path * the path * @return the image descriptor */ public static ImageDescriptor getImageDescriptor(String path) { return imageDescriptorFromPlugin(PLUGIN_ID, path); } }<|fim▁end|>
// The plug-in ID public static final String PLUGIN_ID = "com.gilecode.langlocker"; //$NON-NLS-1$
<|file_name|>synthetic_rope.py<|end_file_name|><|fim▁begin|>import numpy as np import cv2 from scipy import interpolate from random import randint import IPython from alan.rgbd.basic_imaging import cos,sin from alan.synthetic.synthetic_util import rand_sign from alan.core.points import Point """ generates rope using non-holonomic car model dynamics (moves with turn radius) generates labels at ends of rope parameters: h, w of image matrix l, w of rope returns: image matrix with rope drawn [left label, right label] """ def get_rope_car(h = 420, w = 420, rope_l_pixels = 800 , rope_w_pixels = 8, pix_per_step = 10, steps_per_curve = 10, lo_turn_delta = 5, hi_turn_delta = 10): #randomize start init_pos = np.array([randint(0, w - 1), randint(0, h - 1), randint(0, 360)]) all_positions = np.array([init_pos]) #dependent parameter (use float division) num_curves = int(rope_l_pixels/(steps_per_curve * pix_per_step * 1.0)) #point generation for c in range(num_curves): turn_delta = rand_sign() * randint(lo_turn_delta, hi_turn_delta) for s in range(steps_per_curve): curr_pos = all_positions[-1] delta_pos = np.array([pix_per_step * cos(curr_pos[2]), pix_per_step * sin(curr_pos[2]), turn_delta]) all_positions = np.append(all_positions, [curr_pos + delta_pos], axis = 0) #center the points (avoid leaving image bounds) mid_x_points = (min(all_positions[:,0]) + max(all_positions[:,0]))/2.0 mid_y_points = (min(all_positions[:,1]) + max(all_positions[:,1]))/2.0 for pos in all_positions: pos[0] -= (mid_x_points - w/2.0) pos[1] -= (mid_y_points - h/2.0) #draw rope image = np.zeros((h, w)) prev_pos = all_positions[0] for curr_pos in all_positions[1:]: cv2.line(image, (int(prev_pos[0]), int(prev_pos[1])), (int(curr_pos[0]), int(curr_pos[1])), 255, rope_w_pixels) prev_pos = curr_pos #get endpoint labels, sorted by x labels = [all_positions[0], all_positions[-1]] if labels[0][0] > labels[1][0]: labels = [labels[1], labels[0]] #labels = [[l[0], l[1], l[2] + 90] for l in labels] #Ignoring Rotation for Now labels = [[l[0], l[1], 0] for l in labels] #rejection sampling for num_label in range(2): c_label = labels[num_label] #case 1- endpoints not in image if check_bounds(c_label, [w, h]) == -1: return image, labels, -1 #case 2- endpoint on top of other rope segment if check_overlap(c_label, [w, h], image, rope_w_pixels) == -1: return image, labels, -1 return image, labels, 1 def check_bounds(label, bounds): bound_tolerance = 5 for dim in range(2): if label[dim] < bound_tolerance or label[dim] > (bounds[dim] - 1 - bound_tolerance): return -1 return 0 def check_overlap(label, bounds, image, rope_w_pixels): lb = [] ub = [] for dim in range(2): lb.append(int(max(0, label[dim] - rope_w_pixels))) ub.append(int(min(bounds[dim] - 1, label[dim] + rope_w_pixels))) pixel_sum = 0 for x in range(lb[0], ub[0]): for y in range(lb[1], ub[1]):<|fim▁hole|> expected_sum = 0.6 * (ub[1] - lb[1]) * (ub[0] - lb[0]) if pixel_sum > expected_sum: return -1 return 0<|fim▁end|>
pixel_sum += (image[y][x]/255.0) #if more than 60% of adjacent (2 * rope_w x 2 * rope_w) pixels are white, endpoint is probably lying on rope
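`get_rope_car` above uses rejection sampling: the third return value is -1 whenever an endpoint leaves the image bounds or overlaps another rope segment, so callers are expected to retry. A minimal retry wrapper, assuming `get_rope_car` is importable from the module above:

```python
def sample_valid_rope(max_tries=100, **kwargs):
    # Loop until the status flag is 1; kwargs pass through to
    # get_rope_car (h, w, rope_l_pixels, rope_w_pixels, ...).
    for _ in range(max_tries):
        image, labels, status = get_rope_car(**kwargs)
        if status == 1:
            return image, labels
    raise RuntimeError("no valid rope sample in %d tries" % max_tries)
```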