<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.contrib.auth.decorators import user_passes_test
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.http import Http404, HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.encoding import smart_unicode, iri_to_uri
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.cache import never_cache
from upy.contrib.rosetta.conf import settings as rosetta_settings
from upy.contrib.rosetta.polib import pofile
from upy.contrib.rosetta.poutil import find_pos, pagination_range
from upy.contrib.rosetta.signals import entry_changed, post_save
from upy.contrib.rosetta.storage import get_storage
import re
from upy.contrib import rosetta
import datetime
import unicodedata
import hashlib
import os
def home(request):
"""
Displays a list of messages to be translated
"""
def fix_nls(in_, out_):
"""Fixes submitted translations by filtering carriage returns and pairing
newlines at the beginning and end of the translated string with the original
"""
if 0 == len(in_) or 0 == len(out_):
return out_
if "\r" in out_ and "\r" not in in_:
out_ = out_.replace("\r", '')
if "\n" == in_[0] and "\n" != out_[0]:
out_ = "\n" + out_
elif "\n" != in_[0] and "\n" == out_[0]:
out_ = out_.lstrip()
if "\n" == in_[-1] and "\n" != out_[-1]:
out_ = out_ + "\n"
elif "\n" != in_[-1] and "\n" == out_[-1]:
out_ = out_.rstrip()
return out_
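# Illustrative behaviour (hypothetical values): fix_nls("\nfoo\n", "bar")
# returns "\nbar\n" -- the leading/trailing newlines of the original msgid
# are paired onto the submitted translation, and stray "\r" characters that
# the original lacked are dropped.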
storage = get_storage(request)
version = rosetta.get_version(True)
if storage.has('rosetta_i18n_fn'):
rosetta_i18n_fn = storage.get('rosetta_i18n_fn')
rosetta_i18n_app = get_app_name(rosetta_i18n_fn)
rosetta_i18n_lang_code = storage.get('rosetta_i18n_lang_code')
rosetta_i18n_lang_bidi = rosetta_i18n_lang_code.split('-')[0] in settings.LANGUAGES_BIDI
rosetta_i18n_write = storage.get('rosetta_i18n_write', True)
if rosetta_i18n_write:
rosetta_i18n_pofile = pofile(rosetta_i18n_fn, wrapwidth=rosetta_settings.POFILE_WRAP_WIDTH)
for entry in rosetta_i18n_pofile:
entry.md5hash = hashlib.md5(
entry.msgid.encode("utf8") +
entry.msgstr.encode("utf8") +
(entry.msgctxt and entry.msgctxt.encode("utf8") or "")
).hexdigest()
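# The md5 of msgid + msgstr (+ msgctxt) acts as a stable form-field key:
# submitted fields come back as m_<hash> (singular) or m_<hash>_<index>
# (plural), matching the rx / rx_plural patterns used in the POST handler.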
else:
rosetta_i18n_pofile = storage.get('rosetta_i18n_pofile')
if 'filter' in request.GET:
if request.GET.get('filter') in ('untranslated', 'translated', 'fuzzy', 'all'):
filter_ = request.GET.get('filter')
storage.set('rosetta_i18n_filter', filter_)
return HttpResponseRedirect(reverse('rosetta-home'))
rosetta_i18n_filter = storage.get('rosetta_i18n_filter', 'all')
if '_next' in request.POST:<|fim▁hole|> for key, value in request.POST.items():
md5hash = None
plural_id = None
if rx_plural.match(key):
md5hash = str(rx_plural.match(key).groups()[0])
# polib parses .po files into unicode strings, but
# doesn't bother to convert plural indexes to int,
# so we need unicode here.
plural_id = unicode(rx_plural.match(key).groups()[1])
elif rx.match(key):
md5hash = str(rx.match(key).groups()[0])
if md5hash is not None:
entry = rosetta_i18n_pofile.find(md5hash, 'md5hash')
# If someone ran makemessages, some entries might
# have been removed, so we need to check.
if entry:
old_msgstr = entry.msgstr
if plural_id is not None:
#plural_string = fix_nls(entry.msgstr_plural[plural_id], value)
plural_string = fix_nls(entry.msgid_plural, value)
entry.msgstr_plural[plural_id] = plural_string
else:
entry.msgstr = fix_nls(entry.msgid, value)
is_fuzzy = bool(request.POST.get('f_%s' % md5hash, False))
old_fuzzy = 'fuzzy' in entry.flags
if old_fuzzy and not is_fuzzy:
entry.flags.remove('fuzzy')
elif not old_fuzzy and is_fuzzy:
entry.flags.append('fuzzy')
file_change = True
if old_msgstr != value or old_fuzzy != is_fuzzy:
entry_changed.send(sender=entry,
user=request.user,
old_msgstr=old_msgstr,
old_fuzzy=old_fuzzy,
pofile=rosetta_i18n_fn,
language_code=rosetta_i18n_lang_code,
)
else:
storage.set('rosetta_last_save_error', True)
if file_change and rosetta_i18n_write:
try:
# Provide defaults in case authorization is not required.
request.user.first_name = getattr(request.user, 'first_name', 'Anonymous')
request.user.last_name = getattr(request.user, 'last_name', 'User')
request.user.email = getattr(request.user, 'email', '[email protected]')
rosetta_i18n_pofile.metadata['Last-Translator'] = unicodedata.normalize('NFKD', u"%s %s <%s>" % (request.user.first_name, request.user.last_name, request.user.email)).encode('ascii', 'ignore')
rosetta_i18n_pofile.metadata['X-Translated-Using'] = u"django-rosetta %s" % rosetta.get_version(False)
rosetta_i18n_pofile.metadata['PO-Revision-Date'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M%z')
except UnicodeDecodeError:
pass
try:
rosetta_i18n_pofile.save()
po_filepath, ext = os.path.splitext(rosetta_i18n_fn)
save_as_mo_filepath = po_filepath + '.mo'
rosetta_i18n_pofile.save_as_mofile(save_as_mo_filepath)
post_save.send(sender=None, language_code=rosetta_i18n_lang_code, request=request)
# Try auto-reloading via the WSGI daemon mode reload mechanism
if rosetta_settings.WSGI_AUTO_RELOAD and \
'mod_wsgi.process_group' in request.environ and \
request.environ.get('mod_wsgi.process_group', None) and \
'SCRIPT_FILENAME' in request.environ and \
int(request.environ.get('mod_wsgi.script_reloading', '0')):
try:
os.utime(request.environ.get('SCRIPT_FILENAME'), None)
except OSError:
pass
# Try auto-reloading via uwsgi daemon reload mechanism
if rosetta_settings.UWSGI_AUTO_RELOAD:
try:
import uwsgi
# pretty easy right?
uwsgi.reload()
except:
# we may not be running under uwsgi :P
pass
except:
storage.set('rosetta_i18n_write', False)
storage.set('rosetta_i18n_pofile', rosetta_i18n_pofile)
# Retain query arguments
query_arg = '?_next=1'
if 'query' in request.GET or 'query' in request.POST:
query_arg += '&query=%s' % request.REQUEST.get('query')
if 'page' in request.GET:
query_arg += '&page=%d&_next=1' % int(request.GET.get('page'))
return HttpResponseRedirect(reverse('rosetta-home') + iri_to_uri(query_arg))
rosetta_i18n_lang_name = _(storage.get('rosetta_i18n_lang_name'))
rosetta_i18n_lang_code = storage.get('rosetta_i18n_lang_code')
if 'query' in request.REQUEST and request.REQUEST.get('query', '').strip():
query = request.REQUEST.get('query').strip()
rx = re.compile(re.escape(query), re.IGNORECASE)
paginator = Paginator([e for e in rosetta_i18n_pofile if not e.obsolete and rx.search(smart_unicode(e.msgstr) + smart_unicode(e.msgid) + u''.join([o[0] for o in e.occurrences]))], rosetta_settings.MESSAGES_PER_PAGE)
else:
if rosetta_i18n_filter == 'untranslated':
paginator = Paginator(rosetta_i18n_pofile.untranslated_entries(), rosetta_settings.MESSAGES_PER_PAGE)
elif rosetta_i18n_filter == 'translated':
paginator = Paginator(rosetta_i18n_pofile.translated_entries(), rosetta_settings.MESSAGES_PER_PAGE)
elif rosetta_i18n_filter == 'fuzzy':
paginator = Paginator([e for e in rosetta_i18n_pofile.fuzzy_entries() if not e.obsolete], rosetta_settings.MESSAGES_PER_PAGE)
else:
paginator = Paginator([e for e in rosetta_i18n_pofile if not e.obsolete], rosetta_settings.MESSAGES_PER_PAGE)
if 'page' in request.GET and int(request.GET.get('page')) <= paginator.num_pages and int(request.GET.get('page')) > 0:
page = int(request.GET.get('page'))
else:
page = 1
if '_next' in request.GET or '_next' in request.POST:
page += 1
if page > paginator.num_pages:
page = 1
query_arg = '?page=%d' % page
return HttpResponseRedirect(reverse('rosetta-home') + iri_to_uri(query_arg))
rosetta_messages = paginator.page(page).object_list
if rosetta_settings.MAIN_LANGUAGE and rosetta_settings.MAIN_LANGUAGE != rosetta_i18n_lang_code:
main_language = None
for language in settings.LANGUAGES:
if language[0] == rosetta_settings.MAIN_LANGUAGE:
main_language = _(language[1])
break
fl = ("/%s/" % rosetta_settings.MAIN_LANGUAGE).join(rosetta_i18n_fn.split("/%s/" % rosetta_i18n_lang_code))
po = pofile(fl)
main_messages = []
for message in rosetta_messages:
message.main_lang = po.find(message.msgid).msgstr
needs_pagination = paginator.num_pages > 1
if needs_pagination:
if paginator.num_pages >= 10:
page_range = pagination_range(1, paginator.num_pages, page)
else:
page_range = range(1, 1 + paginator.num_pages)
try:
ADMIN_MEDIA_PREFIX = settings.ADMIN_MEDIA_PREFIX
ADMIN_IMAGE_DIR = ADMIN_MEDIA_PREFIX + 'img/admin/'
except AttributeError:
ADMIN_MEDIA_PREFIX = settings.STATIC_URL + 'admin/'
ADMIN_IMAGE_DIR = ADMIN_MEDIA_PREFIX + 'img/'
ENABLE_TRANSLATION_SUGGESTIONS = rosetta_settings.BING_APP_ID and rosetta_settings.ENABLE_TRANSLATION_SUGGESTIONS
BING_APP_ID = rosetta_settings.BING_APP_ID
MESSAGES_SOURCE_LANGUAGE_NAME = rosetta_settings.MESSAGES_SOURCE_LANGUAGE_NAME
MESSAGES_SOURCE_LANGUAGE_CODE = rosetta_settings.MESSAGES_SOURCE_LANGUAGE_CODE
if storage.has('rosetta_last_save_error'):
storage.delete('rosetta_last_save_error')
rosetta_last_save_error = True
return render_to_response('pofile.html', locals(), context_instance=RequestContext(request))
else:
return list_languages(request, do_session_warn=True)
home = never_cache(home)
home = user_passes_test(lambda user: can_translate(user), settings.LOGIN_URL)(home)
def download_file(request):
import zipfile
from StringIO import StringIO
storage = get_storage(request)
# original filename
rosetta_i18n_fn = storage.get('rosetta_i18n_fn', None)
# in-session modified catalog
rosetta_i18n_pofile = storage.get('rosetta_i18n_pofile', None)
# language code
rosetta_i18n_lang_code = storage.get('rosetta_i18n_lang_code', None)
if not rosetta_i18n_lang_code or not rosetta_i18n_pofile or not rosetta_i18n_fn:
return HttpResponseRedirect(reverse('rosetta-home'))
try:
if len(rosetta_i18n_fn.split('/')) >= 5:
offered_fn = '_'.join(rosetta_i18n_fn.split('/')[-5:])
else:
offered_fn = rosetta_i18n_fn.split('/')[-1]
po_fn = str(rosetta_i18n_fn.split('/')[-1])
mo_fn = str(po_fn.replace('.po', '.mo')) # not so smart, huh
zipdata = StringIO()
zipf = zipfile.ZipFile(zipdata, mode="w")
zipf.writestr(po_fn, unicode(rosetta_i18n_pofile).encode("utf8"))
zipf.writestr(mo_fn, rosetta_i18n_pofile.to_binary())
zipf.close()
zipdata.seek(0)
response = HttpResponse(zipdata.read())
response['Content-Disposition'] = 'attachment; filename=%s.%s.zip' % (offered_fn, rosetta_i18n_lang_code)
response['Content-Type'] = 'application/x-zip'
return response
except Exception:
return HttpResponseRedirect(reverse('rosetta-home'))
download_file = never_cache(download_file)
download_file = user_passes_test(lambda user: can_translate(user), settings.LOGIN_URL)(download_file)
def list_languages(request, do_session_warn=False):
"""
Lists the languages for the current project, the gettext catalog files
that can be translated and their translation progress
"""
storage = get_storage(request)
languages = []
if 'filter' in request.GET:
if request.GET.get('filter') in ('project', 'third-party', 'django', 'all'):
filter_ = request.GET.get('filter')
storage.set('rosetta_i18n_catalog_filter', filter_)
return HttpResponseRedirect(reverse('rosetta-pick-file'))
rosetta_i18n_catalog_filter = storage.get('rosetta_i18n_catalog_filter', 'project')
third_party_apps = rosetta_i18n_catalog_filter in ('all', 'third-party')
django_apps = rosetta_i18n_catalog_filter in ('all', 'django')
project_apps = rosetta_i18n_catalog_filter in ('all', 'project')
has_pos = False
for language in settings.LANGUAGES:
pos = find_pos(language[0], project_apps=project_apps, django_apps=django_apps, third_party_apps=third_party_apps)
has_pos = has_pos or len(pos)
languages.append(
(language[0],
_(language[1]),
[(get_app_name(l), os.path.realpath(l), pofile(l)) for l in pos],
)
)
try:
ADMIN_MEDIA_PREFIX = settings.ADMIN_MEDIA_PREFIX
except AttributeError:
ADMIN_MEDIA_PREFIX = settings.STATIC_URL + 'admin/'
version = rosetta.get_version(True)
do_session_warn = do_session_warn and 'SessionRosettaStorage' in rosetta_settings.STORAGE_CLASS and 'signed_cookies' in settings.SESSION_ENGINE
return render_to_response('rosetta_languages.html', locals(), context_instance=RequestContext(request))
list_languages = never_cache(list_languages)
list_languages = user_passes_test(lambda user: can_translate(user), settings.LOGIN_URL)(list_languages)
def get_app_name(path):
app = path.split("/locale")[0].split("/")[-1]
return app
def lang_sel(request, langid, idx):
"""
Selects a file to be translated
"""
storage = get_storage(request)
if langid not in [l[0] for l in settings.LANGUAGES]:
raise Http404
else:
rosetta_i18n_catalog_filter = storage.get('rosetta_i18n_catalog_filter', 'project')
third_party_apps = rosetta_i18n_catalog_filter in ('all', 'third-party')
django_apps = rosetta_i18n_catalog_filter in ('all', 'django')
project_apps = rosetta_i18n_catalog_filter in ('all', 'project')
file_ = find_pos(langid, project_apps=project_apps, django_apps=django_apps, third_party_apps=third_party_apps)[int(idx)]
storage.set('rosetta_i18n_lang_code', langid)
storage.set('rosetta_i18n_lang_name', unicode([l[1] for l in settings.LANGUAGES if l[0] == langid][0]))
storage.set('rosetta_i18n_fn', file_)
po = pofile(file_)
for entry in po:
entry.md5hash = hashlib.md5(
entry.msgid.encode("utf8") +
entry.msgstr.encode("utf8") +
(entry.msgctxt and entry.msgctxt.encode("utf8") or "")
).hexdigest()
storage.set('rosetta_i18n_pofile', po)
try:
os.utime(file_, None)
storage.set('rosetta_i18n_write', True)
except OSError:
storage.set('rosetta_i18n_write', False)
return HttpResponseRedirect(reverse('rosetta-home'))
lang_sel = never_cache(lang_sel)
lang_sel = user_passes_test(lambda user: can_translate(user), settings.LOGIN_URL)(lang_sel)
def can_translate(user):
if not getattr(settings, 'ROSETTA_REQUIRES_AUTH', True):
return True
if not user.is_authenticated():
return False
elif user.is_superuser:
return True
elif getattr(settings, 'ALLOW_STAFF_TO_ROSETTA') and user.is_staff:
return True
else:
try:
from django.contrib.auth.models import Group
translators = Group.objects.get(name='translators')
return translators in user.groups.all()
except Group.DoesNotExist:
return False<|fim▁end|>
|
rx = re.compile(r'^m_([0-9a-f]+)')
rx_plural = re.compile(r'^m_([0-9a-f]+)_([0-9]+)')
file_change = False
|
<|file_name|>types.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package authorization
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// +genclient=true
// +nonNamespaced=true
// +noMethods=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// SubjectAccessReview checks whether or not a user or group can perform an action. Not filling in a
// spec.namespace means "in all namespaces".
type SubjectAccessReview struct {
metav1.TypeMeta
metav1.ObjectMeta
// Spec holds information about the request being evaluated
Spec SubjectAccessReviewSpec
// Status is filled in by the server and indicates whether the request is allowed or not
Status SubjectAccessReviewStatus
}
// +genclient=true
// +nonNamespaced=true
// +noMethods=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// SelfSubjectAccessReview checks whether or not the current user can perform an action. Not filling in a
// spec.namespace means "in all namespaces". Self is a special case, because users should always be able
// to check whether they can perform an action.
type SelfSubjectAccessReview struct {
metav1.TypeMeta
metav1.ObjectMeta
// Spec holds information about the request being evaluated.
Spec SelfSubjectAccessReviewSpec
// Status is filled in by the server and indicates whether the request is allowed or not
Status SubjectAccessReviewStatus
}
// +genclient=true
// +noMethods=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// LocalSubjectAccessReview checks whether or not a user or group can perform an action in a given namespace.
// Having a namespace scoped resource makes it much easier to grant namespace scoped policy that includes permissions
// checking.
type LocalSubjectAccessReview struct {
metav1.TypeMeta
metav1.ObjectMeta
// Spec holds information about the request being evaluated. spec.namespace must be equal to the namespace
// you made the request against. If empty, it is defaulted.
Spec SubjectAccessReviewSpec
// Status is filled in by the server and indicates whether the request is allowed or not
Status SubjectAccessReviewStatus
}
// ResourceAttributes includes the authorization attributes available for resource requests to the Authorizer interface
type ResourceAttributes struct {
// Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces
// "" (empty) is defaulted for LocalSubjectAccessReviews
// "" (empty) is empty for cluster-scoped resources
// "" (empty) means "all" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
Namespace string
// Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. "*" means all.
Verb string
// Group is the API Group of the Resource. "*" means all.
Group string
// Version is the API Version of the Resource. "*" means all.
Version string
// Resource is one of the existing resource types. "*" means all.
Resource string
// Subresource is one of the existing resource types. "" means none.
Subresource string
// Name is the name of the resource being requested for a "get" or deleted for a "delete". "" (empty) means all.
Name string
}
<|fim▁hole|> Path string
// Verb is the standard HTTP verb
Verb string
}
// SubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAttributes
// and NonResourceAttributes must be set
type SubjectAccessReviewSpec struct {
// ResourceAttributes describes information for a resource access request
ResourceAttributes *ResourceAttributes
// NonResourceAttributes describes information for a non-resource access request
NonResourceAttributes *NonResourceAttributes
// User is the user you're testing for.
// If you specify "User" but not "Group", then is it interpreted as "What if User were not a member of any groups
User string
// Groups is the groups you're testing for.
Groups []string
// Extra corresponds to the user.Info.GetExtra() method from the authenticator. Since that is input to the authorizer
// it needs a reflection here.
Extra map[string]ExtraValue
}
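// Illustrative only (not part of the API definition): a spec asking whether
// user "jane" may list pods in the "dev" namespace could be built as
//
//	spec := SubjectAccessReviewSpec{
//		ResourceAttributes: &ResourceAttributes{Namespace: "dev", Verb: "list", Resource: "pods"},
//		User:               "jane",
//	}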
// ExtraValue masks the value so protobuf can generate
// +protobuf.nullable=true
type ExtraValue []string
// SelfSubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAttributes
// and NonResourceAttributes must be set
type SelfSubjectAccessReviewSpec struct {
// ResourceAttributes describes information for a resource access request
ResourceAttributes *ResourceAttributes
// NonResourceAttributes describes information for a non-resource access request
NonResourceAttributes *NonResourceAttributes
}
// SubjectAccessReviewStatus holds the result of an access review request.
type SubjectAccessReviewStatus struct {
// Allowed is required. True if the action would be allowed, false otherwise.
Allowed bool
// Reason is optional. It indicates why a request was allowed or denied.
Reason string
// EvaluationError is an indication that some error occurred during the authorization check.
// It is entirely possible to get an error and be able to continue to determine authorization status in spite of it.
// For instance, RBAC can be missing a role, but enough roles are still present and bound to reason about the request.
EvaluationError string
}<|fim▁end|>
|
// NonResourceAttributes includes the authorization attributes available for non-resource requests to the Authorizer interface
type NonResourceAttributes struct {
// Path is the URL path of the request
|
<|file_name|>UIVisibilityBase.hpp<|end_file_name|><|fim▁begin|>#ifndef UIVISIBILITYBASE_HPP
#define UIVISIBILITYBASE_HPP
/*
This file is part of AlgAudio.
AlgAudio, Copyright (C) 2015 CeTA - Audiovisual Technology Center
AlgAudio is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the<|fim▁hole|>AlgAudio is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with AlgAudio. If not, see <http://www.gnu.org/licenses/>.
*/
namespace AlgAudio{
class UIVisibilityBase{
public:
enum class DisplayMode{
Visible, /**< A Visible widget is drawn just normally. */
EmptySpace, /**< An EmptySpace widget is not drawn, but it takes as much space as it would normally take. */
Invisible, /**< An Invisible widget is not drawn, and it takes zero area. */
};
/** Sets widget display mode. \see DisplayModes */
void SetDisplayMode(DisplayMode m){
if(m == display_mode) return;
display_mode = m;
on_display_mode_changed.Happen();
}
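/** Illustrative usage (assuming a widget type inheriting this base):
 *    widget->SetDisplayMode(DisplayMode::EmptySpace);
 *    widget->IsDrawn();     // false -- not drawn, but still occupies space
 *    widget->IsInvisible(); // false -- EmptySpace is distinct from Invisible
 */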
/** Returns true if the contents of the widget are supposed to be drawn, i.e.
* whether display mode is 'visible'. When implementing a custom widget,
* you do not need to test for being drawn in CustomDraw; if a widget is not
* supposed to be drawn, CustomDraw will never be called. */
inline bool IsDrawn() const {return display_mode == DisplayMode::Visible;}
/** Returns true if this widget is marked as invisible. */
inline bool IsInvisible() const {return display_mode == DisplayMode::Invisible; }
/** Triggered when visibility changes. */
Signal<> on_display_mode_changed;
protected:
UIVisibilityBase() {} // Only constructible when inherited
DisplayMode display_mode = DisplayMode::Visible;
};
} // namespace AlgAudio
#endif // UIVISIBILITYBASE_HPP<|fim▁end|>
|
License, or (at your option) any later version.
|
<|file_name|>0072_auto_20180502_0834.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-02 08:34
from __future__ import unicode_literals
import dirtyfields.dirtyfields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
('ninetofiver', '0071_apikey_name'),
]
operations = [
migrations.CreateModel(
name='WhereaboutDate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('starts_at', models.DateTimeField()),
('ends_at', models.DateTimeField()),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_ninetofiver.whereaboutdate_set+', to='contenttypes.ContentType')),
('timesheet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='ninetofiver.Timesheet')),
],
options={
'ordering': ['id'],
'abstract': False,
'base_manager_name': 'base_objects',
},
bases=(dirtyfields.dirtyfields.DirtyFieldsMixin, models.Model),
managers=[
('objects', django.db.models.manager.Manager()),
('base_objects', django.db.models.manager.Manager()),
],
),
migrations.RemoveField(
model_name='whereabout',
name='day',
),
migrations.RemoveField(
model_name='whereabout',
name='timesheet',
),
migrations.AddField(
model_name='whereabout',
name='description',
field=models.TextField(blank=True, max_length=255, null=True),<|fim▁hole|> name='user',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
migrations.AlterField(
model_name='whereabout',
name='location',
field=models.CharField(choices=[('home', 'Home'), ('office', 'Office'), ('out_of_office', 'Out of office'), ('other', 'Other')], max_length=32),
),
migrations.AddField(
model_name='whereaboutdate',
name='whereabout',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ninetofiver.Whereabout'),
),
]<|fim▁end|>
|
),
migrations.AddField(
model_name='whereabout',
|
<|file_name|>LocatorUtilTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client;
import junit.framework.TestCase;
import org.junit.Assert;
import com.vaadin.client.componentlocator.LocatorUtil;
/*
* Test LocatorUtil.isUIElement() & isNotificationElement() methods
*/
public class LocatorUtilTest extends TestCase {
public void testIsUI1() {
boolean isUI = LocatorUtil.isUIElement("com.vaadin.ui.UI");
Assert.assertTrue(isUI);
}
public void testIsUI2() {
boolean isUI = LocatorUtil.isUIElement("/com.vaadin.ui.UI");
Assert.assertTrue(isUI);
}
<|fim▁hole|> .isUIElement("//com.vaadin.ui.UI[RandomString");
Assert.assertTrue(isUI);
}
public void testIsUI4() {
boolean isUI = LocatorUtil.isUIElement("//com.vaadin.ui.UI[0]");
Assert.assertTrue(isUI);
}
public void testIsNotification1() {
boolean isUI = LocatorUtil
.isNotificationElement("com.vaadin.ui.VNotification");
Assert.assertTrue(isUI);
}
public void testIsNotification2() {
boolean isUI = LocatorUtil
.isNotificationElement("com.vaadin.ui.Notification");
Assert.assertTrue(isUI);
}
public void testIsNotification3() {
boolean isUI = LocatorUtil
.isNotificationElement("/com.vaadin.ui.VNotification[");
Assert.assertTrue(isUI);
}
public void testIsNotification4() {
boolean isUI = LocatorUtil
.isNotificationElement("//com.vaadin.ui.VNotification[0]");
Assert.assertTrue(isUI);
}
}<|fim▁end|>
|
public void testIsUI3() {
boolean isUI = LocatorUtil
|
<|file_name|>NSMachPort.py<|end_file_name|><|fim▁begin|>"""
LLDB AppKit formatters
part of The LLVM Compiler Infrastructure
This file is distributed under the University of Illinois Open Source
License. See LICENSE.TXT for details.
"""
# example summary provider for NSMachPort
# the real summary is now C++ code built into LLDB
import lldb
import ctypes
import lldb.runtime.objc.objc_runtime
import lldb.formatters.metrics<|fim▁hole|>statistics = lldb.formatters.metrics.Metrics()
statistics.add_metric('invalid_isa')
statistics.add_metric('invalid_pointer')
statistics.add_metric('unknown_class')
statistics.add_metric('code_notrun')
# despite the similarity to synthetic children providers, these classes are not
# trying to provide anything but the port number of an NSMachPort, so they need not
# obey the interface specification for synthetic children providers
class NSMachPortKnown_SummaryProvider:
def adjust_for_architecture(self):
pass
def __init__(self, valobj, params):
logger = lldb.formatters.Logger.Logger()
self.valobj = valobj;
self.sys_params = params
if not(self.sys_params.types_cache.NSUInteger):
if self.sys_params.is_64_bit:
self.sys_params.types_cache.NSUInteger = self.valobj.GetType().GetBasicType(lldb.eBasicTypeUnsignedLong)
else:
self.sys_params.types_cache.NSUInteger = self.valobj.GetType().GetBasicType(lldb.eBasicTypeUnsignedInt)
self.update();
def update(self):
logger = lldb.formatters.Logger.Logger()
self.adjust_for_architecture();
# one pointer is the ISA
# then we have one other internal pointer, plus
# 4 bytes worth of flags. hence, these values
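# (assumed layout: 64-bit -> 8 (isa) + 8 (pointer) + 4 (flags) = 20 bytes;
# 32-bit -> 4 + 4 + 4 = 12 bytes, matching the offsets returned below)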
def offset(self):
logger = lldb.formatters.Logger.Logger()
if self.sys_params.is_64_bit:
return 20
else:
return 12
def port(self):
logger = lldb.formatters.Logger.Logger()
vport = self.valobj.CreateChildAtOffset("port",
self.offset(),
self.sys_params.types_cache.NSUInteger)
return vport.GetValueAsUnsigned(0)
class NSMachPortUnknown_SummaryProvider:
def adjust_for_architecture(self):
pass
def __init__(self, valobj, params):
logger = lldb.formatters.Logger.Logger()
self.valobj = valobj;
self.sys_params = params
self.update();
def update(self):
logger = lldb.formatters.Logger.Logger()
self.adjust_for_architecture();
def port(self):
logger = lldb.formatters.Logger.Logger()
stream = lldb.SBStream()
self.valobj.GetExpressionPath(stream)
num_children_vo = self.valobj.CreateValueFromExpression("port","(int)[" + stream.GetData() + " machPort]")
if num_children_vo.IsValid():
return num_children_vo.GetValueAsUnsigned(0)
return '<variable is not NSMachPort>'
def GetSummary_Impl(valobj):
logger = lldb.formatters.Logger.Logger()
global statistics
class_data,wrapper =lldb.runtime.objc.objc_runtime.Utilities.prepare_class_detection(valobj,statistics)
if wrapper:
return wrapper
name_string = class_data.class_name()
logger >> "class name is: " + str(name_string)
if name_string == 'NSMachPort':
wrapper = NSMachPortKnown_SummaryProvider(valobj, class_data.sys_params)
statistics.metric_hit('code_notrun',valobj)
else:
wrapper = NSMachPortUnknown_SummaryProvider(valobj, class_data.sys_params)
statistics.metric_hit('unknown_class',valobj.GetName() + " seen as " + name_string)
return wrapper;
def NSMachPort_SummaryProvider (valobj,dict):
logger = lldb.formatters.Logger.Logger()
provider = GetSummary_Impl(valobj);
if provider != None:
if isinstance(provider,lldb.runtime.objc.objc_runtime.SpecialSituation_Description):
return provider.message()
try:
summary = provider.port();
except:
summary = None
logger >> "got summary " + str(summary)
if summary == None:
summary = '<variable is not NSMachPort>'
if isinstance(summary, basestring):
return summay
return 'mach port: ' + str(summary)
return 'Summary Unavailable'
def __lldb_init_module(debugger,dict):
debugger.HandleCommand("type summary add -F NSMachPort.NSMachPort_SummaryProvider NSMachPort")<|fim▁end|>
|
import lldb.formatters.Logger
|
<|file_name|>kimsufi_eu.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.10.0
var api, baseOVHKonnector, connector, name, slug;
baseOVHKonnector = require('../lib/base_ovh_konnector');
name = 'Kimsufi EU';
slug = 'kimsufi_eu';
api = {
endpoint: 'kimsufi-eu',<|fim▁hole|> appSecret: ''
};
connector = module.exports = baseOVHKonnector.createNew(api, name, slug);<|fim▁end|>
|
appKey: '',
|
<|file_name|>input_helper.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <[email protected]>
# Tomáš Hozza <[email protected]>
# Nikola Forró <[email protected]>
# František Nečas <[email protected]>
<|fim▁hole|>
from rebasehelper.logger import CustomLogger
logger: CustomLogger = cast(CustomLogger, logging.getLogger(__name__))
class InputHelper:
"""Class for command line interaction with the user."""
@staticmethod
def strtobool(message):
"""Converts a user message to a corresponding truth value.
This method is a replacement for the deprecated strtobool from distutils;
its behaviour remains the same.
Args:
message (str): Message to evaluate.
Returns:
bool: True on 'y', 'yes', 't', 'true', 'on' and '1'.
False on 'n', 'no', 'f', 'false', 'off' and '0'.
Raises:
ValueError: On any other value.
"""
message = message.lower()
if message in ('y', 'yes', 't', 'true', 'on', '1'):
return True
elif message in ('n', 'no', 'f', 'false', 'off', '0'):
return False
raise ValueError('No conversion to truth value for "{}"'.format(message))
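# Illustrative behaviour:
#   InputHelper.strtobool('Yes')   -> True
#   InputHelper.strtobool('off')   -> False
#   InputHelper.strtobool('maybe') -> raises ValueError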
@classmethod
def get_message(cls, message, default_yes=True, any_input=False):
"""Prompts a user with yes/no message and gets the response.
Args:
message (str): Prompt string.
default_yes (bool): If the default value should be YES.
any_input (bool): Whether to return default value regardless of input.
Returns:
bool: True or False, based on user's input.
"""
if default_yes:
choice = '[Y/n]'
else:
choice = '[y/N]'
if any_input:
msg = '{0} '.format(message)
else:
msg = '{0} {1}? '.format(message, choice)
while True:
user_input = input(msg).lower()
if not user_input or any_input:
return True if default_yes else False
try:
user_input = cls.strtobool(user_input)
except ValueError:
logger.error('You have to type y(es) or n(o).')
continue
if any_input:
return True
else:
return bool(user_input)<|fim▁end|>
|
import logging
from typing import cast
|
<|file_name|>Suppressions.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for<|fim▁hole|> *
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.advisor.v2017_04_19;
import com.microsoft.azure.arm.collection.SupportsCreating;
import rx.Completable;
import rx.Observable;
import com.microsoft.azure.management.advisor.v2017_04_19.implementation.SuppressionsInner;
import com.microsoft.azure.arm.model.HasInner;
/**
* Type representing Suppressions.
*/
public interface Suppressions extends SupportsCreating<SuppressionContract.DefinitionStages.Blank>, HasInner<SuppressionsInner> {
/**
* Retrieves the list of snoozed or dismissed suppressions for a subscription. The snoozed or dismissed attribute of a recommendation is referred to as a suppression.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
Observable<SuppressionContract> listAsync();
/**
* Obtains the details of a suppression.
*
* @param resourceUri The fully qualified Azure Resource Manager identifier of the resource to which the recommendation applies.
* @param recommendationId The recommendation ID.
* @param name The name of the suppression.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
Observable<SuppressionContract> getAsync(String resourceUri, String recommendationId, String name);
/**
* Enables the activation of a snoozed or dismissed recommendation. The snoozed or dismissed attribute of a recommendation is referred to as a suppression.
*
* @param resourceUri The fully qualified Azure Resource Manager identifier of the resource to which the recommendation applies.
* @param recommendationId The recommendation ID.
* @param name The name of the suppression.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable for the request
*/
Completable deleteAsync(String resourceUri, String recommendationId, String name);
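// Illustrative usage (identifiers assumed, not prescribed by this interface):
//   suppressions.getAsync(resourceUri, recommendationId, "mySuppression")
//       .subscribe(System.out::println);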
}<|fim▁end|>
|
* license information.
|
<|file_name|>373.js<|end_file_name|><|fim▁begin|>require("./46.js");<|fim▁hole|>require("./93.js");
require("./186.js");
require("./372.js");
module.exports = 373;<|fim▁end|>
| |
<|file_name|>GuiAchievements.java<|end_file_name|><|fim▁begin|>// Decompiled by Jad v1.5.8g. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.kpdus.com/jad.html
// Decompiler options: packimports(3) braces deadcode fieldsfirst
package net.minecraft.src;
import java.util.List;
import java.util.Random;
import net.minecraft.client.Minecraft;
import org.lwjgl.input.Mouse;
import org.lwjgl.opengl.GL11;
// Referenced classes of package net.minecraft.src:
// GuiScreen, AchievementList, Achievement, GuiSmallButton,
// StatCollector, GuiButton, GameSettings, KeyBinding,
// FontRenderer, MathHelper, RenderEngine, Block,
// StatFileWriter, RenderItem, RenderHelper
public class GuiAchievements extends GuiScreen
{
private static final int field_27126_s;
private static final int field_27125_t;
private static final int field_27124_u;
private static final int field_27123_v;
protected int field_27121_a;
protected int field_27119_i;
protected int field_27118_j;
protected int field_27117_l;
protected double field_27116_m;
protected double field_27115_n;
protected double field_27114_o;
protected double field_27113_p;
protected double field_27112_q;
protected double field_27111_r;
private int field_27122_w;
private StatFileWriter field_27120_x;
public GuiAchievements(StatFileWriter p_i575_1_)
{
field_27121_a = 256;
field_27119_i = 202;
field_27118_j = 0;
field_27117_l = 0;
field_27122_w = 0;
field_27120_x = p_i575_1_;
char c = '\215';
char c1 = '\215';
field_27116_m = field_27114_o = field_27112_q = AchievementList.field_25195_b.field_25075_a * 24 - c / 2 - 12;
field_27115_n = field_27113_p = field_27111_r = AchievementList.field_25195_b.field_25074_b * 24 - c1 / 2;
}
public void func_6448_a()
{
field_949_e.clear();
field_949_e.add(new GuiSmallButton(1, field_951_c / 2 + 24, field_950_d / 2 + 74, 80, 20, StatCollector.func_25200_a("gui.done")));
}
protected void func_572_a(GuiButton p_572_1_)
{
if(p_572_1_.field_938_f == 1)
{
field_945_b.func_6272_a(null);
field_945_b.func_6259_e();
}
super.func_572_a(p_572_1_);
}
protected void func_580_a(char p_580_1_, int p_580_2_)
{
if(p_580_2_ == field_945_b.field_6304_y.field_1570_o.field_1370_b)
{
field_945_b.func_6272_a(null);
field_945_b.func_6259_e();
} else
{
super.func_580_a(p_580_1_, p_580_2_);
}
}
public void func_571_a(int p_571_1_, int p_571_2_, float p_571_3_)
{
if(Mouse.isButtonDown(0))
{
int i = (field_951_c - field_27121_a) / 2;
int j = (field_950_d - field_27119_i) / 2;
int k = i + 8;
int l = j + 17;
if((field_27122_w == 0 || field_27122_w == 1) && p_571_1_ >= k && p_571_1_ < k + 224 && p_571_2_ >= l && p_571_2_ < l + 155)
{
if(field_27122_w == 0)
{
field_27122_w = 1;
} else
{
field_27114_o -= p_571_1_ - field_27118_j;
field_27113_p -= p_571_2_ - field_27117_l;
field_27112_q = field_27116_m = field_27114_o;
field_27111_r = field_27115_n = field_27113_p;
}
field_27118_j = p_571_1_;
field_27117_l = p_571_2_;
}
if(field_27112_q < (double)field_27126_s)
{
field_27112_q = field_27126_s;
}
if(field_27111_r < (double)field_27125_t)
{
field_27111_r = field_27125_t;
}
if(field_27112_q >= (double)field_27124_u)
{
field_27112_q = field_27124_u - 1;
}
if(field_27111_r >= (double)field_27123_v)
{
field_27111_r = field_27123_v - 1;
}
} else
{
field_27122_w = 0;
}
func_578_i();
func_27109_b(p_571_1_, p_571_2_, p_571_3_);
GL11.glDisable(2896);
GL11.glDisable(2929);
func_27110_k();
GL11.glEnable(2896);
GL11.glEnable(2929);
}
public void func_570_g()
{
field_27116_m = field_27114_o;
field_27115_n = field_27113_p;
double d = field_27112_q - field_27114_o;
double d1 = field_27111_r - field_27113_p;
if(d * d + d1 * d1 < 4D)
{
field_27114_o += d;
field_27113_p += d1;
} else
{
field_27114_o += d * 0.84999999999999998D;
field_27113_p += d1 * 0.84999999999999998D;
}
}
protected void func_27110_k()
{
int i = (field_951_c - field_27121_a) / 2;
int j = (field_950_d - field_27119_i) / 2;
field_6451_g.func_873_b("Achievements", i + 15, j + 5, 0x404040);
}
protected void func_27109_b(int p_27109_1_, int p_27109_2_, float p_27109_3_)
{
int i = MathHelper.func_1108_b(field_27116_m + (field_27114_o - field_27116_m) * (double)p_27109_3_);
int j = MathHelper.func_1108_b(field_27115_n + (field_27113_p - field_27115_n) * (double)p_27109_3_);
if(i < field_27126_s)
{
i = field_27126_s;
}
if(j < field_27125_t)
{
j = field_27125_t;
}
if(i >= field_27124_u)
{
i = field_27124_u - 1;
}
if(j >= field_27123_v)
{
j = field_27123_v - 1;
}
int k = field_945_b.field_6315_n.func_1070_a("/terrain.png");
int l = field_945_b.field_6315_n.func_1070_a("/achievement/bg.png");
int i1 = (field_951_c - field_27121_a) / 2;
int j1 = (field_950_d - field_27119_i) / 2;
int k1 = i1 + 16;
int l1 = j1 + 17;
field_923_k = 0.0F;
GL11.glDepthFunc(518);
GL11.glPushMatrix();
GL11.glTranslatef(0.0F, 0.0F, -200F);
GL11.glEnable(3553);
GL11.glDisable(2896);
GL11.glEnable(32826);
GL11.glEnable(2903);
field_945_b.field_6315_n.func_1076_b(k);
int i2 = i + 288 >> 4;
int j2 = j + 288 >> 4;
int k2 = (i + 288) % 16;
int l2 = (j + 288) % 16;
Random random = new Random();
for(int i3 = 0; i3 * 16 - l2 < 155; i3++)
{
float f = 0.6F - ((float)(j2 + i3) / 25F) * 0.3F;
GL11.glColor4f(f, f, f, 1.0F);
for(int k3 = 0; k3 * 16 - k2 < 224; k3++)
{
random.setSeed(1234 + i2 + k3);
random.nextInt();
int j4 = random.nextInt(1 + j2 + i3) + (j2 + i3) / 2;
int l4 = Block.field_393_F.field_378_bb;
if(j4 > 37 || j2 + i3 == 35)
{
l4 = Block.field_403_A.field_378_bb;
} else
if(j4 == 22)
{
if(random.nextInt(2) == 0)
{
l4 = Block.field_391_ax.field_378_bb;
} else
{
l4 = Block.field_433_aO.field_378_bb;
}
} else
if(j4 == 10)
{
l4 = Block.field_388_I.field_378_bb;<|fim▁hole|> if(j4 == 8)
{
l4 = Block.field_386_J.field_378_bb;
} else
if(j4 > 4)
{
l4 = Block.field_338_u.field_378_bb;
} else
if(j4 > 0)
{
l4 = Block.field_336_w.field_378_bb;
}
func_550_b((k1 + k3 * 16) - k2, (l1 + i3 * 16) - l2, l4 % 16 << 4, (l4 >> 4) << 4, 16, 16);
}
}
GL11.glEnable(2929);
GL11.glDepthFunc(515);
GL11.glDisable(3553);
for(int j3 = 0; j3 < AchievementList.field_27388_e.size(); j3++)
{
Achievement achievement1 = (Achievement)AchievementList.field_27388_e.get(j3);
if(achievement1.field_25076_c == null)
{
continue;
}
int l3 = (achievement1.field_25075_a * 24 - i) + 11 + k1;
int k4 = (achievement1.field_25074_b * 24 - j) + 11 + l1;
int i5 = (achievement1.field_25076_c.field_25075_a * 24 - i) + 11 + k1;
int l5 = (achievement1.field_25076_c.field_25074_b * 24 - j) + 11 + l1;
boolean flag = field_27120_x.func_27183_a(achievement1);
boolean flag1 = field_27120_x.func_27181_b(achievement1);
char c = Math.sin(((double)(System.currentTimeMillis() % 600L) / 600D) * 3.1415926535897931D * 2D) <= 0.59999999999999998D ? '\202' : '\377';
int i8 = 0xff000000;
if(flag)
{
i8 = 0xff707070;
} else
if(flag1)
{
i8 = 65280 + (c << 24);
}
func_27100_a(l3, i5, k4, i8);
func_27099_b(i5, k4, l5, i8);
}
Achievement achievement = null;
RenderItem renderitem = new RenderItem();
RenderHelper.func_41089_c();
GL11.glDisable(2896);
GL11.glEnable(32826);
GL11.glEnable(2903);
for(int i4 = 0; i4 < AchievementList.field_27388_e.size(); i4++)
{
Achievement achievement2 = (Achievement)AchievementList.field_27388_e.get(i4);
int j5 = achievement2.field_25075_a * 24 - i;
int i6 = achievement2.field_25074_b * 24 - j;
if(j5 < -24 || i6 < -24 || j5 > 224 || i6 > 155)
{
continue;
}
if(field_27120_x.func_27183_a(achievement2))
{
float f1 = 1.0F;
GL11.glColor4f(f1, f1, f1, 1.0F);
} else
if(field_27120_x.func_27181_b(achievement2))
{
float f2 = Math.sin(((double)(System.currentTimeMillis() % 600L) / 600D) * 3.1415926535897931D * 2D) >= 0.59999999999999998D ? 0.8F : 0.6F;
GL11.glColor4f(f2, f2, f2, 1.0F);
} else
{
float f3 = 0.3F;
GL11.glColor4f(f3, f3, f3, 1.0F);
}
field_945_b.field_6315_n.func_1076_b(l);
int k6 = k1 + j5;
int j7 = l1 + i6;
if(achievement2.func_27093_f())
{
func_550_b(k6 - 2, j7 - 2, 26, 202, 26, 26);
} else
{
func_550_b(k6 - 2, j7 - 2, 0, 202, 26, 26);
}
if(!field_27120_x.func_27181_b(achievement2))
{
float f4 = 0.1F;
GL11.glColor4f(f4, f4, f4, 1.0F);
renderitem.field_27004_a = false;
}
GL11.glEnable(2896);
GL11.glEnable(2884);
renderitem.func_161_a(field_945_b.field_6314_o, field_945_b.field_6315_n, achievement2.field_27097_d, k6 + 3, j7 + 3);
GL11.glDisable(2896);
if(!field_27120_x.func_27181_b(achievement2))
{
renderitem.field_27004_a = true;
}
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
if(p_27109_1_ >= k1 && p_27109_2_ >= l1 && p_27109_1_ < k1 + 224 && p_27109_2_ < l1 + 155 && p_27109_1_ >= k6 && p_27109_1_ <= k6 + 22 && p_27109_2_ >= j7 && p_27109_2_ <= j7 + 22)
{
achievement = achievement2;
}
}
GL11.glDisable(2929);
GL11.glEnable(3042);
GL11.glColor4f(1.0F, 1.0F, 1.0F, 1.0F);
field_945_b.field_6315_n.func_1076_b(l);
func_550_b(i1, j1, 0, 0, field_27121_a, field_27119_i);
GL11.glPopMatrix();
field_923_k = 0.0F;
GL11.glDepthFunc(515);
GL11.glDisable(2929);
GL11.glEnable(3553);
super.func_571_a(p_27109_1_, p_27109_2_, p_27109_3_);
if(achievement != null)
{
String s = StatCollector.func_25200_a(achievement.func_44020_i());
String s1 = achievement.func_27090_e();
int k5 = p_27109_1_ + 12;
int j6 = p_27109_2_ - 4;
if(field_27120_x.func_27181_b(achievement))
{
int l6 = Math.max(field_6451_g.func_871_a(s), 120);
int k7 = field_6451_g.func_27277_a(s1, l6);
if(field_27120_x.func_27183_a(achievement))
{
k7 += 12;
}
func_549_a(k5 - 3, j6 - 3, k5 + l6 + 3, j6 + k7 + 3 + 12, 0xc0000000, 0xc0000000);
field_6451_g.func_27278_a(s1, k5, j6 + 12, l6, 0xffa0a0a0);
if(field_27120_x.func_27183_a(achievement))
{
field_6451_g.func_50103_a(StatCollector.func_25200_a("achievement.taken"), k5, j6 + k7 + 4, 0xff9090ff);
}
} else
{
int i7 = Math.max(field_6451_g.func_871_a(s), 120);
String s2 = StatCollector.func_25199_a("achievement.requires", new Object[] {
StatCollector.func_25200_a(achievement.field_25076_c.func_44020_i())
});
int l7 = field_6451_g.func_27277_a(s2, i7);
func_549_a(k5 - 3, j6 - 3, k5 + i7 + 3, j6 + l7 + 12 + 3, 0xc0000000, 0xc0000000);
field_6451_g.func_27278_a(s2, k5, j6 + 12, i7, 0xff705050);
}
field_6451_g.func_50103_a(s, k5, j6, field_27120_x.func_27181_b(achievement) ? achievement.func_27093_f() ? -128 : -1 : achievement.func_27093_f() ? 0xff808040 : 0xff808080);
}
GL11.glEnable(2929);
GL11.glEnable(2896);
RenderHelper.func_1159_a();
}
public boolean func_6450_b()
{
return true;
}
static
{
field_27126_s = AchievementList.field_27392_a * 24 - 112;
field_27125_t = AchievementList.field_27391_b * 24 - 112;
field_27124_u = AchievementList.field_27390_c * 24 - 77;
field_27123_v = AchievementList.field_27389_d * 24 - 77;
}
}<|fim▁end|>
|
} else
|
<|file_name|>01nn_otto.py<|end_file_name|><|fim▁begin|>__author__ = 'alexs'
import theano.tensor as T
import theano
import numpy as np
import cPickle
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
import numpy as np
import random
import json
def getReferenceLabels():
referenceLabels = dict()
for i in range(0, 9):
reference_out = [0.0 for x in range(0, 9)]
reference_out[i] = 0.99
referenceLabels[i] = reference_out
return referenceLabels
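# Each of the 9 Otto classes gets a near-one-hot reference target, e.g. class 2
# maps to [0.0, 0.0, 0.99, 0.0, ..., 0.0]; compare() below classifies outputs
# by the nearest such reference vector (L1 distance).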
def compare(result_label, given_label, reference_labels):
givenKey = 0
resultedKey = 0
refGivenScore = 1000
refResultedScore = 1000
for key in reference_labels.keys():
score1 = np.sum(np.abs(np.array(given_label) - np.array(reference_labels[key])))
score2 = np.sum(np.abs(result_label - np.array(reference_labels[key])))
if score1 < refGivenScore:
refGivenScore = score1
givenKey = key
if score2 < refResultedScore:
refResultedScore = score2
resultedKey = key
if resultedKey == givenKey:
return True
return False
def makeW(rows, columns, start=-2, end=2):
w = np.random.uniform(start, end, (rows, columns))
return w
def updates_weights_function(weights, memories, cost_function, learning_rate=0.01, momentum_learning_rate=0.005):
gradients = T.grad(cost_function, weights) # keep in mind len(gradients) == len(weights)
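# Classic momentum SGD: change_t = lr * grad_t + mu * change_{t-1} and
# w_t = w_{t-1} - change_t, with the previous change held in `memories`.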
update_lists = []
for i in range(0, len(weights)):
weight = weights[i]
gradient = gradients[i]
memory = memories[i]
change = learning_rate * gradient + momentum_learning_rate * memory
new_val = weight - change
update_lists.append((weight, new_val))
update_lists.append((memory, change))
return update_lists
class NN():
def __init__(self):
self.layers = []
self.weights = []
self.weights_memory = []
self.cost = None
self.train = None
self.updates = None
self.activate = None
self.predict = None
self.output = None
def build(self, givenWeights=None):
# first: init or build the in-between weight matrixes
for i in range(0, len(self.layers) - 1):
n = self.layers[i].size
m = self.layers[i + 1].size
if givenWeights:
w_values = givenWeights[i]
else:
w_values = makeW(n, m)
w_memory_values = np.zeros((n, m))
w = theano.shared(value=w_values, name="w_" + str(i) + "_" + str(i + 1))
w_memory = theano.shared(value=w_memory_values, name="w_memory_" + str(i) + "_" + str(i + 1))
self.weights.append(w)
self.weights_memory.append(w_memory)
# now build the model
inputVector = T.matrix("inputVector")
labels = T.matrix("labels")
out = None
net = None
workingV = inputVector
l2 = 0.0
l1 = 0.0
for i in range(0, len(self.weights)):
w = self.weights[i]
l2 += T.sum(w * w)
l1 += T.sum(T.abs_(w))
out = T.dot(workingV, w)
net = T.maximum(0, out)
workingV = net
self.cost = T.mean(T.pow(labels - out, 2)) + 0.005 * l2 + 0.005 * l1
self.output = net
self.updates = updates_weights_function(self.weights, self.weights_memory, self.cost)
self.train = theano.function([inputVector, labels], outputs=self.cost, updates=self.updates)
self.activate = theano.function([inputVector, labels], outputs=self.cost)
self.predict = theano.function([inputVector], outputs=self.output)
def addLayer(self, layer):
self.layers.append(layer)
def snapshotWeigths(self, experimentId):
with open(str(experimentId) + ".dat", "w") as f:
for w in self.weights:
numeric_value = w.get_value().tolist()
f.write(json.dumps(numeric_value) + "\n")
def resume(self, experimentId="default"):
ww = []
with open(str(experimentId) + ".dat", "r") as f:
for line in f.readlines():
w = np.array(json.loads(line))
ww.append(w)
self.build(ww)
def trainData(self, train_set_input, train_set_labels,
valid_set_input, valid_set_labels,
test_set_input, test_set_labels,
nrOfEpochs=10000, batch_size=1000, experimentId="default"):
reference_labels = getReferenceLabels()
for ep in range(0, nrOfEpochs):
# random.shuffle(train_data)
overallError = 0.0
for j in range(0, len(train_set_input), batch_size):
endInterval = j + batch_size
if j + batch_size > len(train_set_input):
endInterval = len(train_set_input) - 1
i = train_set_input[j:endInterval]
r = train_set_labels[j:endInterval]
self.train(i, r)
for j in range(0, len(train_set_input), batch_size):
endInterval = j + batch_size
if j + batch_size > len(train_set_input):
endInterval = len(train_set_input) - 1
i = train_set_input[j:endInterval]
r = train_set_labels[j:endInterval]
overallError += self.activate(i, r)
posItems = 0.0
failedItems = 0.0
for valid_in, given_label in zip(valid_set_input, valid_set_labels):
result_label = self.predict([valid_in])
ok = compare(result_label, given_label, reference_labels)
if ok:
posItems += 1.0
else:
failedItems += 1.0
precision = posItems / (posItems + failedItems)
print(
"[{epoch}] error: {error} precision: {precision}".format(epoch=ep, error=overallError,
precision=precision))
# running tests
self.snapshotWeigths(experimentId)
if test_set_input and test_set_labels:
print("=================== TESTS ==================")
posItems = 0.0
failedItems = 0.0
for valid_in, given_label in zip(test_set_input, test_set_labels):
result_label = self.predict([valid_in])
ok = compare(result_label, given_label, reference_labels)
if ok:
posItems += 1.0
else:
failedItems += 1.0
precision = posItems / (posItems + failedItems)
print("Accuracy on {nrOfTests} tests is {precision}".format(nrOfTests=str(len(test_set_input)),
precision=str(precision)))
print("============================================")<|fim▁hole|> self.size = size
class SigmoidLayer(Layer):
def __init__(self, size):
self.size = size
class StandardOutputWithSigmoid(Layer):
def __init__(self, size):
self.size = size
class InverseOutputLayerWithSigmoid(Layer):
def __init__(self, size):
self.size = size
def transformInput(inputList):
res = []
for input in inputList:
res.append(np.array(input, dtype="float32"))
return res
def transformOutput(outputList, size):
res = []
for out in outputList:
reference_out = [0.1 for x in range(0, size)]
reference_out[out] = 0.88
res.append(np.array(reference_out, dtype="float32"))
return res
def retrieve_training_set():
all_collections = []
df = pd.read_csv("/Users/alexs/work_phd/otto_group_challenge/train.csv")
X = df.values.copy()
np.random.shuffle(X)
X, labels = X[:, 1:-1].astype(np.float32), X[:, -1]
print labels
encoder = LabelEncoder()
encoded_labels = encoder.fit_transform(labels).astype(np.int32)
all_labels = []
scaler = StandardScaler()
Z = scaler.fit_transform(X)
for encoded_label in encoded_labels:
l = [0.0 for x in range(0, 9)]
l[encoded_label] = 0.99
all_labels.append(l)
return [Z, all_labels]
def retrieveTrainValidationSet(train_set, percentage=20.0):
train = train_set[0]
label = train_set[1]
all = []
for i in range(0, len(train)):
all.append((train[i], label[i]))
random.shuffle(all)
offset = int(len(train) * (percentage / 100.0))
validation_final = []
validation_input = []
validation_label = []
for i in range(0, offset):
(vi, vl) = all.pop(0)
validation_input.append(vi)
validation_label.append(vl)
validation_final.append(validation_input)
validation_final.append(validation_label)
training_final = []
training_in = []
training_label = []
for (ti, tl) in all:
training_in.append(ti)
training_label.append(tl)
training_final.append(training_in)
training_final.append(training_label)
return training_final, validation_final
def retrieve_test_set():
df = pd.read_csv("/Users/alexs/work_phd/otto_group_challenge/test.csv")
X = df.values.copy()
np.random.shuffle(X)
X = X[:, 1:].astype(np.float32)
scaler = StandardScaler()
Z = scaler.fit_transform(X)
return Z
def getClosest(out, reference):
refGivenScore = 1000
givenKey = 0
for key in reference.keys():
score1 = np.sum(np.abs(np.array(out) - np.array(reference[key])))
if score1 < refGivenScore:
refGivenScore = score1
givenKey = key
# p = [0 for i in range(0,9)]
# p[key]=1
cleaned_p = []
for p in reference[givenKey]:
if p < 0.4:
cleaned_p.append(0)
elif p > 0.8:
cleaned_p.append(0.95)
else:
cleaned_p.append(p)
return [str(p) for p in cleaned_p]
def main():
nn = NN()
nn.addLayer(SigmoidLayer(93))
nn.addLayer(SigmoidLayer(30))
nn.addLayer(InverseOutputLayerWithSigmoid(9))
nn.build()
# nn.resume()
original_data = retrieve_training_set()
test_data = retrieve_test_set()
batch_size = 2000
for i in range(0, 100):
print("BIG ITERATION: " + str(i))
training_set, validation_set = retrieveTrainValidationSet(original_data, percentage=30)
train_set_input, train_set_labels = training_set[0], training_set[1]
valid_set_input, valid_set_labels = validation_set[0], validation_set[1]
nn.trainData(train_set_input, train_set_labels,
valid_set_input, valid_set_labels,
None, None,
nrOfEpochs=10, batch_size=batch_size)
batch_size = batch_size - 50
if (batch_size < 100):
batch_size = 100
print("RUNNING THE TESTS")
referenceLabels = getReferenceLabels()
with open("submission.dat", "w") as w:
w.write("id,Class_1,Class_2,Class_3,Class_4,Class_5,Class_6,Class_7,Class_8,Class_9\n")
counter = 1
for test in test_data:
resultedLabel = nn.predict([test])
out = getClosest(resultedLabel, referenceLabels)
#w.write(str(counter) + "," + ",".join(out) + "\n")
a = [str(p) for p in resultedLabel[0]]
w.write(str(counter) + "," + ",".join(a) + "\n")
counter += 1
if __name__ == '__main__':
main()<|fim▁end|>
|
class Layer():
def __init__(self, size):
|
<|file_name|>util.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2013 Google Inc. All rights reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Utilities for asynchronous operations.
*/
goog.provide('e2e.async.util');
goog.require('e2e.async.Result');
goog.require('goog.async.Deferred');
/**
* Wraps a function within a port.
* @param {function(...*):*} callback The callback to use.
* @return {!MessagePort} The port that wraps the callback.
*/
e2e.async.util.wrapFunction = function(callback) {
var mc = new MessageChannel();
mc.port1.onmessage = function(event) {
var args = [];
for (var i = 0; i < event.data.arguments.length; i++) {
var arg = event.data.arguments[i];
if (goog.isObject(arg) && typeof arg.__port__ === 'number') {
args.push(e2e.async.util.unwrapFunction(event.ports[arg.__port__]));
} else {
args.push(arg);
}
}
try {
var returnValue = callback.apply(null, args);
if (goog.async.Deferred && returnValue instanceof goog.async.Deferred) {
returnValue.addCallback(function(ret) {
e2e.async.util.return_(event.target, ret, '');
}).addErrback(function(err) {
e2e.async.util.return_(event.target, undefined, String(err));
});
} else {
e2e.async.util.return_(event.target, returnValue, '');
}
} catch (e) {
if (e instanceof Error) {
e2e.async.util.return_(event.target, undefined, String(e.message));
} else {
e2e.async.util.return_(event.target, undefined, 'Unknown error');
}
}
};
return mc.port2;
};
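// Illustrative round trip (assumes both ports live in the same page; names
// below are examples, not part of this file):
//   var port = e2e.async.util.wrapFunction(function(a, b) { return a + b; });
//   var add = e2e.async.util.unwrapFunction(port);
//   add(1, 2).addCallback(function(sum) { /* sum === 3 */ });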
/**
* Sends a return message to the port.
* @param {MessagePort} port The port to respond to.
* @param {*} returnValue The return value of the function.
* @param {string} error The error to send.
* @private
*/
e2e.async.util.return_ = function(port, returnValue, error) {
port.postMessage({<|fim▁hole|>
/**
* Unwraps a function from a port.
* @param {MessagePort} port The port that is wrapping the function.
* @return {function(...*):!e2e.async.Result} A function that calls the wrapped
* function and returns a deferred result object.
*/
e2e.async.util.unwrapFunction = function(port) {
return function() {
var result = new e2e.async.Result();
port.onmessage = function(event) {
if (event.data.error) {
result.errback(event.data.error);
} else {
result.callback(event.data.returnValue);
}
};
var args = [];
var ports = [];
for (var i = 0; i < arguments.length; i++) {
if (typeof arguments[i] == 'function') {
var wrappedPort = e2e.async.util.wrapFunction(arguments[i]);
ports.push(wrappedPort);
args.push({
'__port__': ports.length - 1
});
} else {
args.push(arguments[i]);
}
}
port.postMessage({
'arguments': args
}, ports);
return result;
};
};<|fim▁end|>
|
'returnValue': returnValue,
'error': error
});
};
|
<|file_name|>doc2vector.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
import sys
sys.path.append("./")
import pandas as pd
import gensim
from utility.mongodb import MongoDBManager
from utility.sentence import segment, sent2vec
class Doc2Vector(object):
"""
    Convert documents to vectors
"""
def __init__(self):
"""
        Initialize the MongoDB connection.
"""
self.mongo_db = MongoDBManager()
def doc2vect(self):
"""
        Convert every document to a vector and store it in the database
:return:
"""
model = gensim.models.Doc2Vec.load('./models/doc2vec_v1.model')
df_data = pd.read_excel("./data/new_prd.xlsx", names=["SysNo", "Title", "Content"])
content = []
title = []
for idx, row in df_data.iterrows():
seg_title = segment(row.Title)
seg_content = segment(row.Content)
            # Convert the segmented title and content to vectors
content_vect = sent2vec(model, ' '.join(seg_content))
title_vect = sent2vec(model, ' '.join(seg_title))
content_vect = map(str, content_vect.tolist())
title_vect = map(str, title_vect.tolist())
content.append({"_id": int(idx) + 1, "data": list(content_vect)})
title.append({"_id": int(idx) + 1, "data": list(title_vect)})
self.mongo_db.insert("content_vector", content)<|fim▁hole|>
print("finished")
if __name__ == '__main__':
doc2vect = Doc2Vector()
doc2vect.doc2vect()<|fim▁end|>
|
self.mongo_db.insert("title_vector", title)
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>export { default } from './GridListTile';<|fim▁hole|>export * from './GridListTile';<|fim▁end|>
| |
<|file_name|>blog.js<|end_file_name|><|fim▁begin|>const express = require('express');
const feedr = require('feedr').create();
const router = express.Router();
router.get('/', (req, res) => {
feedr.readFeed('https://blog.schul-cloud.org/rss', {
requestOptions: { timeout: 2000 },
}, (err, data) => {
let blogFeed;
try {
blogFeed = data.rss.channel[0].item
.filter((item) => (item['media:content'] || []).length && (item.link || []).length)
.slice(0, 3)
.map((e) => {<|fim▁hole|> const locale = 'en-us';
const month = date.toLocaleString(locale, { month: 'long' });
e.pubDate = `${date.getDate()}. ${month}`;
e.description = e.description.join(' ');
e.url = e.link[0];
e.img = {
src: e['media:content'][0].$.url,
alt: e.title,
};
return e;
});
} catch (e) {
blogFeed = [];
}
res.send({
blogFeed,
});
});
});
module.exports = router;<|fim▁end|>
|
const date = new Date(e.pubDate);
|
<|file_name|>kill.rs<|end_file_name|><|fim▁begin|>use log::{error, info, warn};
use pueue_lib::network::message::{Signal, TaskSelection};
use pueue_lib::state::GroupStatus;
use pueue_lib::task::TaskStatus;
use crate::ok_or_shutdown;
use crate::platform::process_helper::*;
use crate::state_helper::save_state;
use crate::task_handler::{Shutdown, TaskHandler};
impl TaskHandler {
/// Kill specific tasks or groups.
///
/// By default, this kills tasks with Rust's subprocess handling "kill" logic.
/// However, the user can decide to send unix signals to the processes as well.
///
/// `kill_children` Kill all direct child processes as well
/// `pause_groups` If `group` or `all` is given, the groups should be paused under some
/// circumstances. This is mostly to prevent any further task execution during an emergency
/// `signal` Don't kill the task as usual, but rather send a unix process signal.
pub fn kill(
&mut self,
tasks: TaskSelection,
kill_children: bool,
pause_groups: bool,
signal: Option<Signal>,
) {
let cloned_state_mutex = self.state.clone();
let mut state = cloned_state_mutex.lock().unwrap();
// Get the keys of all tasks that should be resumed
let task_ids = match tasks {
TaskSelection::TaskIds(task_ids) => task_ids,
TaskSelection::Group(group_name) => {
// Ensure that a given group exists. (Might not happen due to concurrency)
let group = match state.groups.get_mut(&group_name) {
Some(group) => group,
None => return,
};
// Pause this specific group.
if pause_groups {<|fim▁hole|> let (matching, _) = state.filter_tasks_of_group(
|task| matches!(task.status, TaskStatus::Running | TaskStatus::Paused),
&group_name,
);
matching
}
TaskSelection::All => {
// Pause all running tasks
if pause_groups {
state.set_status_for_all_groups(GroupStatus::Paused);
}
info!("Killing all running tasks");
self.children.all_task_ids()
}
};
for task_id in task_ids {
if let Some(signal) = signal.clone() {
self.send_internal_signal(task_id, signal, kill_children);
} else {
self.kill_task(task_id, kill_children);
}
}
ok_or_shutdown!(self, save_state(&state));
}
/// Send a signal to a specific child process.
/// This is a wrapper around [send_internal_signal_to_child], which does a little bit of
/// additional error handling.
pub fn send_internal_signal(&mut self, task_id: usize, signal: Signal, send_to_children: bool) {
let child = match self.children.get_child_mut(task_id) {
Some(child) => child,
None => {
warn!("Tried to kill non-existing child: {task_id}");
return;
}
};
if let Err(err) = send_internal_signal_to_child(child, signal, send_to_children) {
warn!("Failed to send signal to task {task_id} with error: {err}");
};
}
/// Kill a specific task and handle it accordingly.
/// Triggered on `reset` and `kill`.
pub fn kill_task(&mut self, task_id: usize, kill_children: bool) {
if let Some(child) = self.children.get_child_mut(task_id) {
kill_child(task_id, child, kill_children);
} else {
warn!("Tried to kill non-existing child: {task_id}");
}
}
}<|fim▁end|>
|
group.status = GroupStatus::Paused;
}
info!("Killing tasks of group {group_name}");
|
<|file_name|>template.rs<|end_file_name|><|fim▁begin|>use handlebars::Handlebars;
use std::path::{Path, PathBuf};
use glob::glob;
use std::sync::Mutex;
use itertools::Itertools;
use std::error::Error;
use std::borrow::Cow;
use serde_json::{Value, to_value};
use rocket::response::{Responder, Response};
use rocket::request::Request;
use rocket::http::ContentType;
use rocket::http::Status;
use serde::ser::Serialize;
use std::io::Cursor;
#[derive(Debug)]
pub struct Template {
name: Cow<'static, str>,
value: Option<Value>,
}
lazy_static! {
static ref HANDLEBARS: Mutex<Handlebars> = Mutex::new(Handlebars::new());
}
pub fn init_handlebars(f: fn(&mut Handlebars)) {
let mut hb = HANDLEBARS.lock().unwrap();
f(&mut hb)
}
pub fn add_templates<P>(root: P) -> Result<(), Box<Error>>
where P: Into<PathBuf>
{
let mut hb = HANDLEBARS.lock().unwrap();
let root_buf = root.into();
let mut mask_buf = root_buf.clone();
mask_buf.push("**");
mask_buf.push("*.hbs");
let mask = mask_buf.to_str().ok_or("read error")?;
let add_template = &mut |entry: &Path| -> Result<(), Box<Error>> {
let stripped = entry.strip_prefix(&root_buf)?.with_extension(""); // strip prefix and .hbs
//let ext = stripped.extension().ok_or("no type extension")?; // skip if no .html or smth else
let name: String = stripped
.with_extension("")
.to_str()
.ok_or("can't convert path to string")?
.chars()
.filter_map(|c| Some(if c == '\\' { '/' } else { c }))
.collect();
println!("{}", &name);
if let Err(e) = hb.register_template_file(&name, &entry) {
            // TODO: make correct error logging
println!("{} {}", &name, &e);
error!("Error in Handlebars template {}", &name);
info!("{}", e);
info!("Template path: '{}'", entry.to_string_lossy());
}
Ok(())
};
glob(mask)
.unwrap()
.filter_map(Result::ok)
.foreach(|entry| { let _ = add_template(&entry); });
Result::Ok(())
}
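// Illustrative usage (the directory and template names are assumptions):
// register every `*.hbs` under `templates/` once at startup, then render by
// the path-derived name, e.g. `templates/users/profile.hbs` -> "users/profile".
//
//     add_templates("templates").unwrap();
//     let page = Template::render("users/profile", &context);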
impl Template {
pub fn render<S, C>(name: S, context: C) -> Template
where S: Into<Cow<'static, str>>,
C: Serialize
{
Template {
name: name.into(),
value: to_value(context).ok(),
}
}
}
impl Responder<'static> for Template {
fn respond_to(self, _: &Request) -> Result<Response<'static>, Status> {
let hb = HANDLEBARS.lock().unwrap();
let render = hb.render(&self.name, &self.value).unwrap_or_else(|e| e.to_string());
Response::build()
.header(ContentType::HTML)
.sized_body(Cursor::new(render))
.ok()
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>treeUtils.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
<|fim▁hole|>
export function isEqualOrParent(tree: _.ITree, element: any, candidateParent: any): boolean {
const nav = tree.getNavigator(element);
do {
if (element === candidateParent) {
return true;
}
} while (element = nav.parent());
return false;
}<|fim▁end|>
|
import * as _ from 'vs/base/parts/tree/browser/tree';
|
<|file_name|>config.in.rs<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2015, Alex Frappier Lachapelle
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
use std::collections::BTreeMap;
use std::error::Error;
use std::fs::{File, OpenOptions};
use std::io::{Read, Write};
use std::path::PathBuf;
extern crate term;
use serde_json::Value;
////////////////////////////////////////////////////////////
// Macros //
////////////////////////////////////////////////////////////
include!("utils_macros.rs");
////////////////////////////////////////////////////////////
// Structs //
////////////////////////////////////////////////////////////
#[derive(Clone)]
pub struct TrelloBSTConfig {
pub key_val_map: BTreeMap<String, String>,
pub config_mode: Option<PathBuf>
}
////////////////////////////////////////////////////////////
// Impls //
////////////////////////////////////////////////////////////
impl TrelloBSTConfig {
pub fn new() -> TrelloBSTConfig {
TrelloBSTConfig {
key_val_map: BTreeMap::new(),
config_mode: Option::None
}
}
pub fn load(&mut self, config_mode: Option<PathBuf>) -> Result<(), &'static str> {
self.config_mode = config_mode;
//Parse if we're using a config file, silently skip if were not
if self.config_mode.is_some() {
//Load file
let mut file = match File::open(self.clone().config_mode.unwrap().as_path()) {
Ok(file) => file,
Err(_) =>{
self.config_mode = Option::None;
return Err("Error: Failed to open the configuration file for parsing, TrelloBST will continue without saving inputted values into the configuration file.");
}
};
//Get config file metadata.
let metadata = match file.metadata() {
Ok(metadata) => metadata,
Err(_) => {
self.config_mode = Option::None;
return Err("Error: Failed to gather metadata of the configuration file, TrelloBST will continue without saving inputted values into the configuration file.")
}
};
//Parse config file
let file_length: usize = metadata.len() as usize;
if file_length == 0 {
self.key_val_map = BTreeMap::new();
} else {
//Read file
let mut file_data: String = String::with_capacity(file_length + 1);
match file.read_to_string(&mut file_data) {
Ok(_) => (),
Err(_) => {
self.config_mode = Option::None;
return Err("Error: Failed to read the configuration file, TrelloBST will continue without saving inputted values into the configuration file.")
}
}
//Parse
let json_data: Value = match serde_json::from_str(&file_data){
Ok(json_data) => json_data,
Err(_) => {
self.config_mode = Option::None;
return Err("Error: Failed to parse the JSON data in the configuration file, TrelloBST will continue without saving inputted values into the configuration file.")
}
};
//Extract data
//Get JSON object
let json_object = match json_data.as_object().ok_or("Error: JSON data in the configuration file does not describe a JSON object, TrelloBST will continue without saving inputted values into the configuration file.") {
Ok(object) => object.clone(),
Err(err) => {
self.config_mode = Option::None;
return Err(err);
}
};
//Iterate through object
for (key, val) in &json_object {
if val.is_string() {
self.key_val_map.insert(key.clone(), val.as_str().unwrap().to_string());
} else {
println!("Value of the \"{}\" field in the configuration file is not a string, this value will not be considered.", key);
}
}
}
}
Ok(())
}
//Save config
pub fn save(&mut self) -> Result<(), String> {
if self.config_mode.is_some() {
<|fim▁hole|> let mut json_map: BTreeMap<String, Value> = BTreeMap::new();
for (key, val) in &self.key_val_map {
json_map.insert(key.clone(), Value::String(val.clone()));
}
let value = Value::Object(json_map);
let json_map_string = match serde_json::to_string(&value) {
Ok(map) => map,
Err(err) => {
return Err(err.description().to_string());
}
};
//Open file, overwrite config with what we have
let mut file: File;
match OpenOptions::new().write(true).truncate(true).open(self.config_mode.clone().unwrap().as_path()) {
Ok(_file) => {
file = _file;
match file.write_all(json_map_string.as_bytes()) {
Ok(()) => (),
Err(_) => {
self.config_mode = Option::None;
return Err("Error: Failed to write data to the configuration file, TrelloBST will continue without saving inputted values into the configuration file.".to_string());
}
}
}
Err(_) => {
self.config_mode = Option::None;
return Err("Error: Failed to open the configuration file for saving, TrelloBST will continue without saving inputted values into the configuration file.".to_string());
}
}
}
Ok(())
}
//Sets a config key-value pair
pub fn set(&mut self, key: &str, val: &str) {
self.key_val_map.insert(key.to_string(), val.to_string());
}
    //Gets a config value for a key, returns "" if the key doesn't exist and creates the key
pub fn get(&mut self, key: &str) -> String {
if self.key_val_map.contains_key(&key.to_string()) {
return self.key_val_map.get(&key.to_string()).unwrap().clone();
} else {
self.key_val_map.insert(key.to_string(), String::new());
return String::new();
}
}
}<|fim▁end|>
| |
<|file_name|>gat-in-trait-path.rs<|end_file_name|><|fim▁begin|>#![feature(generic_associated_types)]
#![feature(associated_type_defaults)]
trait Foo {
type A<'a> where Self: 'a;
}
struct Fooy;
impl Foo for Fooy {
type A<'a> = &'a ();
}
#[derive(Clone)]
struct Fooer<T>(T);<|fim▁hole|> type A<'x> where T: 'x = &'x ();
}
fn f(_arg : Box<dyn for<'a> Foo<A<'a> = &'a ()>>) {}
//~^ the trait `Foo` cannot be made into an object
fn main() {
let foo = Fooer(5);
f(Box::new(foo));
}<|fim▁end|>
|
impl<T> Foo for Fooer<T> {
|
<|file_name|>DataTx.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
<|fim▁hole|>====================
DataTx
====================
The DataTx packetises the data. It adds a packet header to the data.
How it works?
---------------------
The DataTx adds a header to the data received on its inboxes "keyIn" and
"inbox". The packet header contains packet type and packet length.
It is necessary to distinguish between encrypted data to be sent and
encrypted session keys because the client needs to be able to
distinguish between the two.
"""
import Axon
import struct
class DataTx(Axon.Component.component):
"""\ DataTx() -> new DataTx component
Handles packetizing
Keyword arguments: None
"""
Inboxes = {"inbox" : "encrypted data",
"keyIn" : "encrypted session key",
"control" : "receive shutdown messages"}
Outboxes = {"outbox" : "add header and send encrypted key and data packets",
"signal" : "pass shutdown messages"}
def __init__(self):
super(DataTx,self).__init__()
def main(self):
KEY = 0x20
DATA = 0x30
while 1:
#add header - packet type=4 bytes and packet length = 4 bytes
while self.dataReady("keyIn"):
data = self.recv("keyIn")
header = struct.pack("!2L", KEY, len(data))
packet = header + data
self.send(packet, "outbox")
yield 1
if self.dataReady("inbox"):
data = self.recv("inbox")
header = struct.pack("!2L", DATA, len(data))
packet = header + data
self.send(packet, "outbox")
yield 1<|fim▁end|>
|
#
"""
|
<|file_name|>health_test.go<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package integrationtest_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/apache/servicecomb-service-center/integration"
"github.com/widuu/gojson"
"io/ioutil"
"net/http"
"testing"
)
var _ = Describe("Basic Api Test", func() {
Context("Testing Basic Health Functions", func() {
By("Call Health API", func() {
It("health test", func() {
req, _ := http.NewRequest(GET, SCURL+HEALTH, nil)
req.Header.Set("X-Domain-Name", "default")
resp, err := scclient.Do(req)
Expect(err).To(BeNil())<|fim▁hole|>
By("Call Version API", func() {
It("version test", func() {
req, _ := http.NewRequest(GET, SCURL+VERSION, nil)
req.Header.Set("X-Domain-Name", "default")
resp, err := scclient.Do(req)
Expect(err).To(BeNil())
defer resp.Body.Close()
Expect(resp.StatusCode).To(Equal(http.StatusOK))
respbody, _ := ioutil.ReadAll(resp.Body)
Expect(gojson.Json(string(respbody)).Get("apiVersion").Tostring()).To(Equal("4.0.0"))
})
})
})
})
func BenchmarkHealthTest(b *testing.B) {
for i := 0; i < b.N; i++ {
req, _ := http.NewRequest(GET, SCURL+HEALTH, nil)
req.Header.Set("X-Domain-Name", "default")
resp, err := scclient.Do(req)
Expect(err).To(BeNil())
defer resp.Body.Close()
Expect(resp.StatusCode).To(Equal(http.StatusOK))
}
}
func BenchmarkVersionTest(b *testing.B) {
for i := 0; i < b.N; i++ {
req, _ := http.NewRequest(GET, SCURL+VERSION, nil)
req.Header.Set("X-Domain-Name", "default")
resp, err := scclient.Do(req)
Expect(err).To(BeNil())
		Expect(resp.StatusCode).To(Equal(http.StatusOK))
		resp.Body.Close() // close inside the loop rather than deferring
}
}<|fim▁end|>
|
defer resp.Body.Close()
Expect(resp.StatusCode).To(Equal(http.StatusOK))
})
})
|
<|file_name|>repeated_letters.py<|end_file_name|><|fim▁begin|>from pprint import pprint
from Conundrum.utils import sanitize
def decrypt(msg: str, repeated_letter: str) -> str:
"""
Extract every letter after an occurrence of the repeated letter
"""
msg = sanitize(msg)
result = []
remove_next = False
for letter in msg:
take_this = remove_next
remove_next = letter == repeated_letter
if take_this:
result += letter
return ''.join(result)
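# Illustrative (assuming sanitize merely lowercases and strips punctuation):
# decrypt("abacad", "a") -> "bcd", because b, c and d each follow an "a".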
def decrypt_try_all(msg: str) -> [str]:
msg = sanitize(msg)
letters_to_try = sorted({letter for letter in msg})
return {letter: decrypt(msg, letter) for letter in letters_to_try}
<|fim▁hole|> pprint(decrypt_try_all(encrypted_msg))<|fim▁end|>
|
if __name__ == '__main__':
# Used in Movies 4
encrypted_msg = 'i bet pews or leisure chains can seem to stink of effort, george, under no illusions of vanity'
|
<|file_name|>winprocess.py<|end_file_name|><|fim▁begin|># A module to expose various thread/process/job related structures and
# methods from kernel32
#
# The MIT License
#
# Copyright (c) 2006 the Mozilla Foundation <http://www.mozilla.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from ctypes import c_void_p, POINTER, sizeof, Structure, windll, WinError, WINFUNCTYPE
from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPWSTR, UINT, WORD
LPVOID = c_void_p
LPBYTE = POINTER(BYTE)
LPDWORD = POINTER(DWORD)
SW_HIDE = 0
def ErrCheckBool(result, func, args):
"""errcheck function for Windows functions that return a BOOL True
on success"""
if not result:
raise WinError()
return args
# CloseHandle()
CloseHandleProto = WINFUNCTYPE(BOOL, HANDLE)
CloseHandle = CloseHandleProto(("CloseHandle", windll.kernel32))
CloseHandle.errcheck = ErrCheckBool
# AutoHANDLE
class AutoHANDLE(HANDLE):
"""Subclass of HANDLE which will call CloseHandle() on deletion."""
def Close(self):
if self.value:
CloseHandle(self)
self.value = 0
def __del__(self):
self.Close()
def __int__(self):
return self.value
def ErrCheckHandle(result, func, args):
"""errcheck function for Windows functions that return a HANDLE."""
if not result:
raise WinError()
return AutoHANDLE(result)
# PROCESS_INFORMATION structure
class PROCESS_INFORMATION(Structure):
_fields_ = [("hProcess", HANDLE),
("hThread", HANDLE),
("dwProcessID", DWORD),
("dwThreadID", DWORD)]
def __init__(self):
Structure.__init__(self)
self.cb = sizeof(self)
<|fim▁hole|>
class STARTUPINFO(Structure):
_fields_ = [("cb", DWORD),
("lpReserved", LPWSTR),
("lpDesktop", LPWSTR),
("lpTitle", LPWSTR),
("dwX", DWORD),
("dwY", DWORD),
("dwXSize", DWORD),
("dwYSize", DWORD),
("dwXCountChars", DWORD),
("dwYCountChars", DWORD),
("dwFillAttribute", DWORD),
("dwFlags", DWORD),
("wShowWindow", WORD),
("cbReserved2", WORD),
("lpReserved2", LPBYTE),
("hStdInput", HANDLE),
("hStdOutput", HANDLE),
("hStdError", HANDLE)
]
LPSTARTUPINFO = POINTER(STARTUPINFO)
STARTF_USESHOWWINDOW = 0x01
STARTF_USESIZE = 0x02
STARTF_USEPOSITION = 0x04
STARTF_USECOUNTCHARS = 0x08
STARTF_USEFILLATTRIBUTE = 0x10
STARTF_RUNFULLSCREEN = 0x20
STARTF_FORCEONFEEDBACK = 0x40
STARTF_FORCEOFFFEEDBACK = 0x80
STARTF_USESTDHANDLES = 0x100
# EnvironmentBlock
class EnvironmentBlock:
"""An object which can be passed as the lpEnv parameter of CreateProcess.
It is initialized with a dictionary."""
def __init__(self, dict):
if not dict:
self._as_parameter_ = None
else:
values = ["%s=%s" % (key, value)
for (key, value) in dict.iteritems()]
values.append("")
self._as_parameter_ = LPCWSTR("\0".join(values))
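# Illustrative: EnvironmentBlock({"PATH": r"C:\Windows"}) yields the wide
# string "PATH=C:\Windows\0" (LPCWSTR appends the final NUL), i.e. the
# double-NUL-terminated block CreateProcess expects for lpEnvironment when
# CREATE_UNICODE_ENVIRONMENT is set.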
# CreateProcess()
CreateProcessProto = WINFUNCTYPE(BOOL, # Return type
LPCWSTR, # lpApplicationName
LPWSTR, # lpCommandLine
LPVOID, # lpProcessAttributes
LPVOID, # lpThreadAttributes
BOOL, # bInheritHandles
DWORD, # dwCreationFlags
LPVOID, # lpEnvironment
LPCWSTR, # lpCurrentDirectory
LPSTARTUPINFO, # lpStartupInfo
LPPROCESS_INFORMATION # lpProcessInformation
)
CreateProcessFlags = ((1, "lpApplicationName", None),
(1, "lpCommandLine"),
(1, "lpProcessAttributes", None),
(1, "lpThreadAttributes", None),
(1, "bInheritHandles", True),
(1, "dwCreationFlags", 0),
(1, "lpEnvironment", None),
(1, "lpCurrentDirectory", None),
(1, "lpStartupInfo"),
(2, "lpProcessInformation"))
def ErrCheckCreateProcess(result, func, args):
ErrCheckBool(result, func, args)
# return a tuple (hProcess, hThread, dwProcessID, dwThreadID)
pi = args[9]
return AutoHANDLE(pi.hProcess), AutoHANDLE(pi.hThread), pi.dwProcessID, pi.dwThreadID
CreateProcess = CreateProcessProto(("CreateProcessW", windll.kernel32),
CreateProcessFlags)
CreateProcess.errcheck = ErrCheckCreateProcess
CREATE_BREAKAWAY_FROM_JOB = 0x01000000
CREATE_DEFAULT_ERROR_MODE = 0x04000000
CREATE_NEW_CONSOLE = 0x00000010
CREATE_NEW_PROCESS_GROUP = 0x00000200
CREATE_NO_WINDOW = 0x08000000
CREATE_SUSPENDED = 0x00000004
CREATE_UNICODE_ENVIRONMENT = 0x00000400
DEBUG_ONLY_THIS_PROCESS = 0x00000002
DEBUG_PROCESS = 0x00000001
DETACHED_PROCESS = 0x00000008
# CreateJobObject()
CreateJobObjectProto = WINFUNCTYPE(HANDLE, # Return type
LPVOID, # lpJobAttributes
LPCWSTR # lpName
)
CreateJobObjectFlags = ((1, "lpJobAttributes", None),
(1, "lpName", None))
CreateJobObject = CreateJobObjectProto(("CreateJobObjectW", windll.kernel32),
CreateJobObjectFlags)
CreateJobObject.errcheck = ErrCheckHandle
# AssignProcessToJobObject()
AssignProcessToJobObjectProto = WINFUNCTYPE(BOOL, # Return type
HANDLE, # hJob
HANDLE # hProcess
)
AssignProcessToJobObjectFlags = ((1, "hJob"),
(1, "hProcess"))
AssignProcessToJobObject = AssignProcessToJobObjectProto(
("AssignProcessToJobObject", windll.kernel32),
AssignProcessToJobObjectFlags)
AssignProcessToJobObject.errcheck = ErrCheckBool
# ResumeThread()
def ErrCheckResumeThread(result, func, args):
if result == -1:
raise WinError()
return args
ResumeThreadProto = WINFUNCTYPE(DWORD, # Return type
HANDLE # hThread
)
ResumeThreadFlags = ((1, "hThread"),)
ResumeThread = ResumeThreadProto(("ResumeThread", windll.kernel32),
ResumeThreadFlags)
ResumeThread.errcheck = ErrCheckResumeThread
# TerminateJobObject()
TerminateJobObjectProto = WINFUNCTYPE(BOOL, # Return type
HANDLE, # hJob
UINT # uExitCode
)
TerminateJobObjectFlags = ((1, "hJob"),
(1, "uExitCode", 127))
TerminateJobObject = TerminateJobObjectProto(
("TerminateJobObject", windll.kernel32),
TerminateJobObjectFlags)
TerminateJobObject.errcheck = ErrCheckBool
# WaitForSingleObject()
WaitForSingleObjectProto = WINFUNCTYPE(DWORD, # Return type
HANDLE, # hHandle
DWORD, # dwMilliseconds
)
WaitForSingleObjectFlags = ((1, "hHandle"),
(1, "dwMilliseconds", -1))
WaitForSingleObject = WaitForSingleObjectProto(
("WaitForSingleObject", windll.kernel32),
WaitForSingleObjectFlags)
INFINITE = -1
WAIT_TIMEOUT = 0x0102
WAIT_OBJECT_0 = 0x0
WAIT_ABANDONED = 0x0080
# GetExitCodeProcess()
GetExitCodeProcessProto = WINFUNCTYPE(BOOL, # Return type
HANDLE, # hProcess
LPDWORD, # lpExitCode
)
GetExitCodeProcessFlags = ((1, "hProcess"),
(2, "lpExitCode"))
GetExitCodeProcess = GetExitCodeProcessProto(
("GetExitCodeProcess", windll.kernel32),
GetExitCodeProcessFlags)
GetExitCodeProcess.errcheck = ErrCheckBool<|fim▁end|>
|
LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
# STARTUPINFO structure
|
<|file_name|>and_gate01.py<|end_file_name|><|fim▁begin|><|fim▁hole|> tmp = x1 * w1 + x2 * w2
if tmp <= theta:
print(0)
elif tmp > theta:
print(1)
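# The calls below should print 0, 0, 0 and 1 - the AND truth table.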
AND(0, 0)
AND(1, 0)
AND(0, 1)
AND(1, 1)<|fim▁end|>
|
def AND(x1, x2):
w1, w2, theta = 0.5, 0.5, 0.7
|
<|file_name|>comment_controller.js<|end_file_name|><|fim▁begin|>var models=require('../models/models.js');
// Autoload the comment for :commentId
exports.load=function (req,res,next,commentId) {
models.Comment.find({
where:{
id:Number(commentId)
}
}).then(function (comment) {
if(comment){
req.comment=comment;
next();
}else{
            next(new Error('No comment exists with commentId=' + commentId))
}
}).catch(function (error) {
next(error);
});
};
//GET /quizes/:quizId/comments/new
exports.new=function (req,res) {
res.render('comments/new.ejs',{quizid:req.params.quizId, errors:[]});
};
//POST /quizes/:quizId/comments
exports.create=function (req,res,next) {
var comment =models.Comment.build(
{
texto:req.body.comment.texto,
QuizId:req.params.quizId,
});
comment
.validate()
.then(
function (err) {
if(err){<|fim▁hole|> {comment:comment,quizid:req.params.quizId,errors:err.errors});
}else{
                comment //save: persists the texto field to the DB
.save()
.then(function () {
res.redirect('/quizes/'+req.params.quizId);
});
}
}
).catch(function (error) {
next(error);
});
};
//GET /quizes/:quizId/comments/:commentId/publish
exports.publish=function (req,res) {
req.comment.publicado=true;
req.comment.save({fields:["publicado"]}).then(
function () {
res.redirect('/quizes/'+req.params.quizId);
}).catch(function (error) {
next(error);
});
};<|fim▁end|>
|
res.render('comments/new.ejs',
|
<|file_name|>etw_results.py<|end_file_name|><|fim▁begin|>"""
Parses the results found for the ETW started on a machine,
downloads the results and stops the ETW.
All credit to pauldotcom-
http://pauldotcom.com/2012/07/post-exploitation-recon-with-e.html
Module built by @harmj0y
"""
import settings
from lib import command_methods
from lib import helpers
from lib import smb
class Module:
def __init__(self, targets=None, creds=None, args=None):
self.name = "ETW Data Download"
self.description = "Download data results from ETW and clean everything up."
# internal list() that holds one or more targets
self.targets = targets
# internal list() that holds one or more cred tuples
# [ (username, pw), (username2, pw2), ...]
self.creds = creds
# a state output file that will be written out by pillage.py
# ex- if you're querying domain users
self.output = ""
# user interaction for- format is {Option : [Value, Description]]}
self.required_options = { "trigger_method" : ["wmis", "[wmis] or [winexe] for triggering"],
"flag" : ["cookies", "search for [cookies] or [post] parameters"]}
def run(self):
# assume single set of credentials
username, password = self.creds[0]
triggerMethod = self.required_options["trigger_method"][0]
flag = self.required_options["flag"][0]
for target in self.targets:
# stop the ETW
stopCMD = "logman stop Status32 -ets"
command_methods.executeCommand(target, username, password, stopCMD, triggerMethod)
            # search for cookies or POST parameters
if flag.lower() == "post":
flag = "POST"
moduleFile = "post_params.txt"
else:
flag = "cookie added"
moduleFile = "cookies.txt"
# check the ETW results for the specified flag, and delete the dump file
parseCmd = "wevtutil qe C:\\Windows\\Temp\\status32.etl /lf:true /f:Text | find /i \""+flag+"\""
# wait 20 seconds for everything to parse...if errors happen, increase this
parseResult = command_methods.executeResult(target, username, password, parseCmd, triggerMethod, pause=20)
# delete the trace file
delCmd = "del C:\\Windows\\Temp\\status32.etl"
command_methods.executeCommand(target, username, password, delCmd, triggerMethod)
if parseResult == "":
self.output += "[!] No ETW results for "+flag+" using creds '"+username+":"+password+"' on : " + target + "\n"<|fim▁hole|> # save the file off to the appropriate location
saveFile = helpers.saveModuleFile(self, target, moduleFile, parseResult)
self.output += "[*] ETW results for "+flag+" using creds '"+username+":"+password+"' on " + target + " stored at "+saveFile+"\n"<|fim▁end|>
|
else:
|
<|file_name|>nodemailer-mailgun-transport-tests.ts<|end_file_name|><|fim▁begin|>import mailgunTransport = require('nodemailer-mailgun-transport');
import nodemailer = require('nodemailer');
const opts: mailgunTransport.Options = {
auth: {
api_key: "harry"<|fim▁hole|>const optsWithDomain: mailgunTransport.Options = {
auth: {
api_key: "harry",
domain: "http://www.foo.com"
}
};
const transport: nodemailer.Transporter = nodemailer.createTransport(mailgunTransport(optsWithDomain));
// setup e-mail data with unicode symbols
const mailOptions: nodemailer.SendMailOptions = {
from: 'Fred Foo ✔ <[email protected]>', // sender address
to: '[email protected], [email protected]', // list of receivers
subject: 'Hello ✔', // Subject line
text: 'Hello world ✔', // plaintext body
html: '<b>Hello world ✔</b>' // html body
};
transport.sendMail(mailOptions, (error: Error, info: nodemailer.SentMessageInfo): void => {
// nothing
});<|fim▁end|>
|
}
};
|
<|file_name|>atomics.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Core atomic primitives
use intrinsics;
use std::kinds::marker;
use ty::Unsafe;
/// An atomic boolean type.
pub struct AtomicBool {
v: Unsafe<uint>,
nocopy: marker::NoCopy
}
/// A signed atomic integer type, supporting basic atomic arithmetic operations
pub struct AtomicInt {
v: Unsafe<int>,
nocopy: marker::NoCopy
}
/// An unsigned atomic integer type, supporting basic atomic arithmetic operations
pub struct AtomicUint {
v: Unsafe<uint>,
nocopy: marker::NoCopy
}
/// An unsafe atomic pointer. Only supports basic atomic operations
pub struct AtomicPtr<T> {
p: Unsafe<uint>,
nocopy: marker::NoCopy
}
/// Atomic memory orderings
///
/// Memory orderings limit the ways that both the compiler and CPU may reorder
/// instructions around atomic operations. At its most restrictive,
/// "sequentially consistent" atomics allow neither reads nor writes
/// to be moved either before or after the atomic operation; on the other end
/// "relaxed" atomics allow all reorderings.
///
/// Rust's memory orderings are the same as in C++[1].
///
/// 1: http://gcc.gnu.org/wiki/Atomic/GCCMM/AtomicSync
pub enum Ordering {
/// No ordering constraints, only atomic operations
Relaxed,
/// When coupled with a store, all previous writes become visible
/// to another thread that performs a load with `Acquire` ordering
/// on the same value
Release,
/// When coupled with a load, all subsequent loads will see data
/// written before a store with `Release` ordering on the same value
/// in another thread
Acquire,
/// When coupled with a load, uses `Acquire` ordering, and with a store
/// `Release` ordering
AcqRel,
/// Like `AcqRel` with the additional guarantee that all threads see all
/// sequentially consistent operations in the same order.
SeqCst
}<|fim▁hole|>
/// An `AtomicBool` initialized to `false`
pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: Unsafe{value: 0,
marker1: marker::InvariantType},
nocopy: marker::NoCopy };
/// An `AtomicInt` initialized to `0`
pub static INIT_ATOMIC_INT : AtomicInt = AtomicInt { v: Unsafe{value: 0,
marker1: marker::InvariantType},
nocopy: marker::NoCopy };
/// An `AtomicUint` initialized to `0`
pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: Unsafe{value: 0,
marker1: marker::InvariantType},
nocopy: marker::NoCopy };
// NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
static UINT_TRUE: uint = -1;
impl AtomicBool {
/// Create a new `AtomicBool`
pub fn new(v: bool) -> AtomicBool {
let val = if v { UINT_TRUE } else { 0 };
AtomicBool { v: Unsafe::new(val), nocopy: marker::NoCopy }
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> bool {
unsafe { atomic_load(self.v.get() as *uint, order) > 0 }
}
/// Store the value
#[inline]
pub fn store(&self, val: bool, order: Ordering) {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_swap(self.v.get(), val, order) > 0 }
}
/// If the current value is the same as expected, store a new value
///
/// Compare the current value with `old`; if they are the same then
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
///
/// # Examples
///
/// ```ignore
/// # // FIXME: Needs PR #12430
/// extern crate sync;
///
/// use sync::Arc;
/// use std::sync::atomics::{AtomicBool, SeqCst};
///
/// fn main() {
/// let spinlock = Arc::new(AtomicBool::new(false));
    /// let spinlock_clone = spinlock.clone();
///
/// spawn(proc() {
/// with_lock(&spinlock, || println!("task 1 in lock"));
/// });
///
/// spawn(proc() {
/// with_lock(&spinlock_clone, || println!("task 2 in lock"));
/// });
/// }
///
/// fn with_lock(spinlock: &Arc<AtomicBool>, f: || -> ()) {
/// // CAS loop until we are able to replace `false` with `true`
/// while spinlock.compare_and_swap(false, true, SeqCst) == false {
/// // Since tasks may not be preemptive (if they are green threads)
/// // yield to the scheduler to let the other task run. Low level
/// // concurrent code needs to take into account Rust's two threading
/// // models.
/// deschedule();
/// }
///
/// // Now we have the spinlock
/// f();
///
/// // Release the lock
    /// spinlock.store(false, SeqCst);
/// }
/// ```
#[inline]
pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
let old = if old { UINT_TRUE } else { 0 };
let new = if new { UINT_TRUE } else { 0 };
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) > 0 }
}
/// A logical "and" operation
///
/// Performs a logical "and" operation on the current value and the
/// argument `val`, and sets the new value to the result.
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicBool, SeqCst};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_and(false, SeqCst));
/// assert_eq!(false, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_and(true, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_and(false, SeqCst));
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_and(self.v.get(), val, order) > 0 }
}
/// A logical "nand" operation
///
/// Performs a logical "nand" operation on the current value and the
/// argument `val`, and sets the new value to the result.
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicBool, SeqCst};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_nand(false, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_nand(true, SeqCst));
/// assert_eq!(0, foo.load(SeqCst) as int);
/// assert_eq!(false, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_nand(false, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_nand(self.v.get(), val, order) > 0 }
}
/// A logical "or" operation
///
/// Performs a logical "or" operation on the current value and the
/// argument `val`, and sets the new value to the result.
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicBool, SeqCst};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_or(false, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_or(true, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_or(false, SeqCst));
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_or(self.v.get(), val, order) > 0 }
}
/// A logical "xor" operation
///
/// Performs a logical "xor" operation on the current value and the
/// argument `val`, and sets the new value to the result.
/// Returns the previous value.
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicBool, SeqCst};
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_xor(false, SeqCst));
/// assert_eq!(true, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(true);
/// assert_eq!(true, foo.fetch_xor(true, SeqCst));
/// assert_eq!(false, foo.load(SeqCst));
///
/// let foo = AtomicBool::new(false);
/// assert_eq!(false, foo.fetch_xor(false, SeqCst));
/// assert_eq!(false, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
let val = if val { UINT_TRUE } else { 0 };
unsafe { atomic_xor(self.v.get(), val, order) > 0 }
}
}
impl AtomicInt {
/// Create a new `AtomicInt`
pub fn new(v: int) -> AtomicInt {
AtomicInt {v: Unsafe::new(v), nocopy: marker::NoCopy}
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> int {
unsafe { atomic_load(self.v.get() as *int, order) }
}
/// Store the value
#[inline]
pub fn store(&self, val: int, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, val: int, order: Ordering) -> int {
unsafe { atomic_swap(self.v.get(), val, order) }
}
/// If the current value is the same as expected, store a new value
///
/// Compare the current value with `old`; if they are the same then
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add to the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicInt, SeqCst};
///
/// let foo = AtomicInt::new(0);
/// assert_eq!(0, foo.fetch_add(10, SeqCst));
/// assert_eq!(10, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_add(&self, val: int, order: Ordering) -> int {
unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract from the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicInt, SeqCst};
///
/// let foo = AtomicInt::new(0);
/// assert_eq!(0, foo.fetch_sub(10, SeqCst));
/// assert_eq!(-10, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
unsafe { atomic_sub(self.v.get(), val, order) }
}
/// Bitwise and with the current value, returning the previous
///
/// # Examples
///
/// ```
    /// use std::sync::atomics::{AtomicInt, SeqCst};
    ///
    /// let foo = AtomicInt::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
    /// assert_eq!(0b100001, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_and(&self, val: int, order: Ordering) -> int {
unsafe { atomic_and(self.v.get(), val, order) }
}
/// Bitwise or with the current value, returning the previous
///
/// # Examples
///
/// ```
    /// use std::sync::atomics::{AtomicInt, SeqCst};
    ///
    /// let foo = AtomicInt::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
    /// assert_eq!(0b111111, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_or(&self, val: int, order: Ordering) -> int {
unsafe { atomic_or(self.v.get(), val, order) }
}
/// Bitwise xor with the current value, returning the previous
///
/// # Examples
///
/// ```
    /// use std::sync::atomics::{AtomicInt, SeqCst};
    ///
    /// let foo = AtomicInt::new(0b101101);
    /// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
    /// assert_eq!(0b011110, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_xor(&self, val: int, order: Ordering) -> int {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
impl AtomicUint {
/// Create a new `AtomicUint`
pub fn new(v: uint) -> AtomicUint {
AtomicUint { v: Unsafe::new(v), nocopy: marker::NoCopy }
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> uint {
unsafe { atomic_load(self.v.get() as *uint, order) }
}
/// Store the value
#[inline]
pub fn store(&self, val: uint, order: Ordering) {
unsafe { atomic_store(self.v.get(), val, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_swap(self.v.get(), val, order) }
}
/// If the current value is the same as expected, store a new value
///
/// Compare the current value with `old`; if they are the same then
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
}
/// Add to the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicUint, SeqCst};
///
/// let foo = AtomicUint::new(0);
/// assert_eq!(0, foo.fetch_add(10, SeqCst));
/// assert_eq!(10, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_add(self.v.get(), val, order) }
}
/// Subtract from the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicUint, SeqCst};
///
/// let foo = AtomicUint::new(10);
/// assert_eq!(10, foo.fetch_sub(10, SeqCst));
/// assert_eq!(0, foo.load(SeqCst));
/// ```
#[inline]
pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_sub(self.v.get(), val, order) }
}
/// Bitwise and with the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicUint, SeqCst};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
    /// assert_eq!(0b100001, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_and(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_and(self.v.get(), val, order) }
}
/// Bitwise or with the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicUint, SeqCst};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
    /// assert_eq!(0b111111, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_or(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_or(self.v.get(), val, order) }
}
/// Bitwise xor with the current value, returning the previous
///
/// # Examples
///
/// ```
/// use std::sync::atomics::{AtomicUint, SeqCst};
///
/// let foo = AtomicUint::new(0b101101);
/// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
    /// assert_eq!(0b011110, foo.load(SeqCst));
    /// ```
#[inline]
pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint {
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
impl<T> AtomicPtr<T> {
/// Create a new `AtomicPtr`
pub fn new(p: *mut T) -> AtomicPtr<T> {
AtomicPtr { p: Unsafe::new(p as uint), nocopy: marker::NoCopy }
}
/// Load the value
#[inline]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
atomic_load(self.p.get() as **mut T, order) as *mut T
}
}
/// Store the value
#[inline]
pub fn store(&self, ptr: *mut T, order: Ordering) {
unsafe { atomic_store(self.p.get(), ptr as uint, order); }
}
/// Store a value, returning the old value
#[inline]
pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
}
/// If the current value is the same as expected, store a new value
///
/// Compare the current value with `old`; if they are the same then
/// replace the current value with `new`. Return the previous value.
/// If the return value is equal to `old` then the value was updated.
#[inline]
pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
unsafe {
atomic_compare_and_swap(self.p.get(), old as uint,
new as uint, order) as *mut T
}
}
}
#[inline]
unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
match order {
Release => intrinsics::atomic_store_rel(dst, val),
Relaxed => intrinsics::atomic_store_relaxed(dst, val),
_ => intrinsics::atomic_store(dst, val)
}
}
#[inline]
unsafe fn atomic_load<T>(dst: *T, order:Ordering) -> T {
match order {
Acquire => intrinsics::atomic_load_acq(dst),
Relaxed => intrinsics::atomic_load_relaxed(dst),
_ => intrinsics::atomic_load(dst)
}
}
#[inline]
unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xchg_acq(dst, val),
Release => intrinsics::atomic_xchg_rel(dst, val),
AcqRel => intrinsics::atomic_xchg_acqrel(dst, val),
Relaxed => intrinsics::atomic_xchg_relaxed(dst, val),
_ => intrinsics::atomic_xchg(dst, val)
}
}
/// Returns the old value (like __sync_fetch_and_add).
#[inline]
unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xadd_acq(dst, val),
Release => intrinsics::atomic_xadd_rel(dst, val),
AcqRel => intrinsics::atomic_xadd_acqrel(dst, val),
Relaxed => intrinsics::atomic_xadd_relaxed(dst, val),
_ => intrinsics::atomic_xadd(dst, val)
}
}
/// Returns the old value (like __sync_fetch_and_sub).
#[inline]
unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xsub_acq(dst, val),
Release => intrinsics::atomic_xsub_rel(dst, val),
AcqRel => intrinsics::atomic_xsub_acqrel(dst, val),
Relaxed => intrinsics::atomic_xsub_relaxed(dst, val),
_ => intrinsics::atomic_xsub(dst, val)
}
}
#[inline]
unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
Release => intrinsics::atomic_cxchg_rel(dst, old, new),
AcqRel => intrinsics::atomic_cxchg_acqrel(dst, old, new),
Relaxed => intrinsics::atomic_cxchg_relaxed(dst, old, new),
_ => intrinsics::atomic_cxchg(dst, old, new),
}
}
#[inline]
unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_and_acq(dst, val),
Release => intrinsics::atomic_and_rel(dst, val),
AcqRel => intrinsics::atomic_and_acqrel(dst, val),
Relaxed => intrinsics::atomic_and_relaxed(dst, val),
_ => intrinsics::atomic_and(dst, val)
}
}
#[inline]
unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_nand_acq(dst, val),
Release => intrinsics::atomic_nand_rel(dst, val),
AcqRel => intrinsics::atomic_nand_acqrel(dst, val),
Relaxed => intrinsics::atomic_nand_relaxed(dst, val),
_ => intrinsics::atomic_nand(dst, val)
}
}
#[inline]
unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_or_acq(dst, val),
Release => intrinsics::atomic_or_rel(dst, val),
AcqRel => intrinsics::atomic_or_acqrel(dst, val),
Relaxed => intrinsics::atomic_or_relaxed(dst, val),
_ => intrinsics::atomic_or(dst, val)
}
}
#[inline]
unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
match order {
Acquire => intrinsics::atomic_xor_acq(dst, val),
Release => intrinsics::atomic_xor_rel(dst, val),
AcqRel => intrinsics::atomic_xor_acqrel(dst, val),
Relaxed => intrinsics::atomic_xor_relaxed(dst, val),
_ => intrinsics::atomic_xor(dst, val)
}
}
/// An atomic fence.
///
/// A fence 'A' which has `Release` ordering semantics, synchronizes with a
/// fence 'B' with (at least) `Acquire` semantics, if and only if there exists
/// atomic operations X and Y, both operating on some atomic object 'M' such
/// that A is sequenced before X, Y is synchronized before B and Y observers
/// the change to M. This provides a happens-before dependence between A and B.
///
/// Atomic operations with `Release` or `Acquire` semantics can also synchronize
/// with a fence.
///
/// A fence which has `SeqCst` ordering, in addition to having both `Acquire` and
/// `Release` semantics, participates in the global program order of the other
/// `SeqCst` operations and/or fences.
///
/// Accepts `Acquire`, `Release`, `AcqRel` and `SeqCst` orderings.
///
/// # Failure
///
/// Fails if `order` is `Relaxed`
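///
/// # Examples
///
/// ```
/// use std::sync::atomics::{fence, SeqCst};
///
/// // Illustrative: a full barrier - no memory operation may be
/// // reordered across it.
/// fence(SeqCst);
/// ```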
#[inline]
pub fn fence(order: Ordering) {
unsafe {
match order {
Acquire => intrinsics::atomic_fence_acq(),
Release => intrinsics::atomic_fence_rel(),
AcqRel => intrinsics::atomic_fence_acqrel(),
SeqCst => intrinsics::atomic_fence(),
Relaxed => fail!("there is no such thing as a relaxed fence")
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn bool_() {
let a = AtomicBool::new(false);
assert_eq!(a.compare_and_swap(false, true, SeqCst), false);
assert_eq!(a.compare_and_swap(false, true, SeqCst), true);
a.store(false, SeqCst);
assert_eq!(a.compare_and_swap(false, true, SeqCst), false);
}
#[test]
fn bool_and() {
let a = AtomicBool::new(true);
assert_eq!(a.fetch_and(false, SeqCst),true);
assert_eq!(a.load(SeqCst),false);
}
#[test]
fn uint_and() {
let x = AtomicUint::new(0xf731);
assert_eq!(x.fetch_and(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 & 0x137f);
}
#[test]
fn uint_or() {
let x = AtomicUint::new(0xf731);
assert_eq!(x.fetch_or(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 | 0x137f);
}
#[test]
fn uint_xor() {
let x = AtomicUint::new(0xf731);
assert_eq!(x.fetch_xor(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 ^ 0x137f);
}
#[test]
fn int_and() {
let x = AtomicInt::new(0xf731);
assert_eq!(x.fetch_and(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 & 0x137f);
}
#[test]
fn int_or() {
let x = AtomicInt::new(0xf731);
assert_eq!(x.fetch_or(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 | 0x137f);
}
#[test]
fn int_xor() {
let x = AtomicInt::new(0xf731);
assert_eq!(x.fetch_xor(0x137f, SeqCst), 0xf731);
assert_eq!(x.load(SeqCst), 0xf731 ^ 0x137f);
}
static mut S_BOOL : AtomicBool = INIT_ATOMIC_BOOL;
static mut S_INT : AtomicInt = INIT_ATOMIC_INT;
static mut S_UINT : AtomicUint = INIT_ATOMIC_UINT;
#[test]
fn static_init() {
unsafe {
assert!(!S_BOOL.load(SeqCst));
assert!(S_INT.load(SeqCst) == 0);
assert!(S_UINT.load(SeqCst) == 0);
}
}
#[test]
fn different_sizes() {
unsafe {
let mut slot = 0u16;
assert_eq!(super::atomic_swap(&mut slot, 1, SeqCst), 0);
let mut slot = 0u8;
assert_eq!(super::atomic_compare_and_swap(&mut slot, 1, 2, SeqCst), 0);
let slot = 0u32;
assert_eq!(super::atomic_load(&slot, SeqCst), 0);
let mut slot = 0u64;
super::atomic_store(&mut slot, 2, SeqCst);
}
}
}<|fim▁end|>
| |
<|file_name|>crypto_utils.ts<|end_file_name|><|fim▁begin|>// tslint:disable:no-var-requires
/**
* @module node_opcua_crypto
*/
import * as constants from "constants";
import * as crypto from "crypto";
import { createFastUninitializedBuffer } from "./buffer_utils";
import { Certificate, CertificatePEM, DER, PEM, PrivateKey, PrivateKeyPEM, PublicKey, PublicKeyPEM, Signature } from "./common";
import { combine_der } from "./crypto_explore_certificate";
import * as assert from "assert";
import { hexy } from "hexy";
const jsrsasign = require("jsrsasign");
const PEM_REGEX = /^(-----BEGIN (.*)-----\r?\n([/+=a-zA-Z0-9\r\n]*)\r?\n-----END \2-----\r?\n)/gm;
const PEM_TYPE_REGEX = /^(-----BEGIN (.*)-----)/m;
// Copyright 2012 The Obvious Corporation.
// identifyPemType
/*=
* Extract and identify the PEM file type represented in the given
* buffer. Returns the extracted type string or undefined if the
* buffer doesn't seem to be any sort of PEM format file.
*/
export function identifyPemType(rawKey: Buffer | string): undefined | string {
if (rawKey instanceof Buffer) {
rawKey = rawKey.toString("utf8");
}
const match = PEM_TYPE_REGEX.exec(rawKey);
return !match ? undefined : match[2];
}
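// Illustrative usage (example inputs, not part of this module):
//   identifyPemType("-----BEGIN CERTIFICATE-----\n...") // => "CERTIFICATE"
//   identifyPemType("definitely not PEM")               // => undefined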
export function convertPEMtoDER(raw_key: PEM): DER {
let match: any;
let pemType;
let base64str;
const parts: DER[] = [];
PEM_REGEX.lastIndex = 0;
// tslint:disable-next-line:no-conditional-assignment
while ((match = PEM_REGEX.exec(raw_key)) !== null) {
pemType = match[2];
// pemType shall be "RSA PRIVATE KEY" , "PUBLIC KEY", "CERTIFICATE", "X509 CRL"
base64str = match[3];
base64str = base64str.replace(/\r?\n/g, "");
parts.push(Buffer.from(base64str, "base64"));
}
return combine_der(parts);
}
/**
* @method toPem
* @param raw_key
* @param pem
* @return
*/
export function toPem(raw_key: Buffer | string, pem: string): string {
assert(raw_key, "expecting a key");
assert(typeof pem === "string");
let pemType = identifyPemType(raw_key);
if (pemType) {
return raw_key instanceof Buffer ? raw_key.toString("utf8") : raw_key;
} else {
pemType = pem;
assert(["CERTIFICATE REQUEST", "CERTIFICATE", "RSA PRIVATE KEY", "PUBLIC KEY", "X509 CRL"].indexOf(pemType) >= 0);
let b = (raw_key as Buffer).toString("base64");
let str = "-----BEGIN " + pemType + "-----\n";
while (b.length) {
str += b.substr(0, 64) + "\n";
b = b.substr(64);
}
str += "-----END " + pemType + "-----";
str += "\n";
return str;
}
}
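// Illustrative usage (derBuffer is an assumed DER-encoded certificate):
//   const pem = toPem(derBuffer, "CERTIFICATE");
//   // => "-----BEGIN CERTIFICATE-----\n<base64 in 64-char lines>\n-----END CERTIFICATE-----\n"
// Input that is already PEM is returned unchanged.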
// istanbul ignore next
export function hexDump(buffer: Buffer, width?: number): string {
if (!buffer) {
return "<>";
}
width = width || 32;
if (buffer.length > 1024) {
return hexy(buffer.slice(0, 1024), { width, format: "twos" }) + "\n .... ( " + buffer.length + ")";
} else {
return hexy(buffer, { width, format: "twos" });
}
}
interface MakeMessageChunkSignatureOptions {
signatureLength: number;
algorithm: string;
privateKey: CertificatePEM;
}
/**
* @method makeMessageChunkSignature
* @param chunk
* @param options
* @param options.signatureLength
* @param options.algorithm for example "RSA-SHA256"
* @param options.privateKey
* @return - the signature
*/
export function makeMessageChunkSignature(chunk: Buffer, options: MakeMessageChunkSignatureOptions): Buffer {
assert(Object.prototype.hasOwnProperty.call(options,"algorithm"));
assert(chunk instanceof Buffer);
assert(["RSA PRIVATE KEY", "PRIVATE KEY"].indexOf(identifyPemType(options.privateKey) as string) >= 0);
// signature length = 128 bytes
const signer = crypto.createSign(options.algorithm);
signer.update(chunk);
const signature = signer.sign(options.privateKey);
assert(!options.signatureLength || signature.length === options.signatureLength);
return signature;
}
export interface VerifyMessageChunkSignatureOptions {
signatureLength?: number;
algorithm: string;
publicKey: PublicKeyPEM;
}
/**
* @method verifyMessageChunkSignature
*
* const signer = {
* signatureLength : 128,
* algorithm : "RSA-SHA256",
* publicKey: "qsdqsdqsd"
* };
* @param blockToVerify
* @param signature
* @param options
* @param options.signatureLength
* @param options.algorithm for example "RSA-SHA256"
* @param options.publicKey
* @return true if the signature is valid
*/
export function verifyMessageChunkSignature(
blockToVerify: Buffer,
signature: Signature,
options: VerifyMessageChunkSignatureOptions
): boolean {
assert(blockToVerify instanceof Buffer);
assert(signature instanceof Buffer);
assert(typeof options.publicKey === "string");
assert(identifyPemType(options.publicKey));
const verify = crypto.createVerify(options.algorithm);
verify.update(blockToVerify);
return verify.verify(options.publicKey, signature);
}
export function makeSHA1Thumbprint(buffer: Buffer): Signature {
return crypto.createHash("sha1").update(buffer).digest();
}
// Basically when you encrypt something using an RSA key (whether public or private), the encrypted value must
// be smaller than the key (due to the maths used to do the actual encryption). So if you have a 1024-bit key,
// in theory you could encrypt any 1023-bit value (or a 1024-bit value smaller than the key) with that key.
// However, the PKCS#1 standard, which OpenSSL uses, specifies a padding scheme (so you can encrypt smaller
// quantities without losing security), and that padding scheme takes a minimum of 11 bytes (it will be longer
// if the value you're encrypting is smaller). So the highest number of bits you can encrypt with a 1024-bit
// key is 936 bits because of this (unless you disable the padding by adding the OPENSSL_NO_PADDING flag,
// in which case you can go up to 1023-1024 bits). With a 2048-bit key it's 1960 bits instead.
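// Worked example of the sizes above (a sketch, not enforced here): a 2048-bit
// key is 256 bytes, so PKCS#1 v1.5 padding (11 bytes minimum) leaves at most
// 256 - 11 = 245 plaintext bytes per block, while OAEP with SHA-1
// (2*20 + 2 = 42 bytes of overhead) leaves 256 - 42 = 214 bytes.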
export const RSA_PKCS1_OAEP_PADDING: number = constants.RSA_PKCS1_OAEP_PADDING;
export const RSA_PKCS1_PADDING: number = constants.RSA_PKCS1_PADDING;
export enum PaddingAlgorithm {
RSA_PKCS1_OAEP_PADDING = 4,
RSA_PKCS1_PADDING = 1,
}
assert(PaddingAlgorithm.RSA_PKCS1_OAEP_PADDING === constants.RSA_PKCS1_OAEP_PADDING);
assert(PaddingAlgorithm.RSA_PKCS1_PADDING === constants.RSA_PKCS1_PADDING);
// publicEncrypt and privateDecrypt only work with
// small buffers whose maximum size depends on the key size.
export function publicEncrypt_native(buffer: Buffer, publicKey: PublicKeyPEM, algorithm?: PaddingAlgorithm): Buffer {
if (algorithm === undefined) {
algorithm = PaddingAlgorithm.RSA_PKCS1_PADDING;
}
assert(algorithm === RSA_PKCS1_PADDING || algorithm === RSA_PKCS1_OAEP_PADDING);
assert(buffer instanceof Buffer, "Expecting a buffer");
return crypto.publicEncrypt(
{
key: publicKey,
padding: algorithm,
},
buffer
);
}
export function privateDecrypt_native(buffer: Buffer, privateKey: PrivateKeyPEM, algorithm?: PaddingAlgorithm): Buffer {
if (algorithm === undefined) {
algorithm = PaddingAlgorithm.RSA_PKCS1_PADDING;
}
assert(algorithm === RSA_PKCS1_PADDING || algorithm === RSA_PKCS1_OAEP_PADDING);
assert(buffer instanceof Buffer, "Expecting a buffer");
try {
return crypto.privateDecrypt(
{
key: privateKey,
padding: algorithm,
},
buffer
);
} catch (err) {
return Buffer.alloc(1);
}
}
export const publicEncrypt = publicEncrypt_native;
export const privateDecrypt = privateDecrypt_native;
export function publicEncrypt_long(
buffer: Buffer,
publicKey: PublicKeyPEM,
blockSize: number,
padding: number,
algorithm?: PaddingAlgorithm
): Buffer {
if (algorithm === undefined) {
algorithm = PaddingAlgorithm.RSA_PKCS1_PADDING;
}
assert(algorithm === RSA_PKCS1_PADDING || algorithm === RSA_PKCS1_OAEP_PADDING);
const chunk_size = blockSize - padding;
const nbBlocks = Math.ceil(buffer.length / chunk_size);
const outputBuffer = createFastUninitializedBuffer(nbBlocks * blockSize);
for (let i = 0; i < nbBlocks; i++) {
const currentBlock = buffer.slice(chunk_size * i, chunk_size * (i + 1));
const encrypted_chunk = publicEncrypt(currentBlock, publicKey, algorithm);
assert(encrypted_chunk.length === blockSize);
encrypted_chunk.copy(outputBuffer, i * blockSize);
}
return outputBuffer;
}
export function privateDecrypt_long(buffer: Buffer, privateKey: PrivateKeyPEM, blockSize: number, algorithm?: number): Buffer {
algorithm = algorithm || RSA_PKCS1_PADDING;
assert(algorithm === RSA_PKCS1_PADDING || algorithm === RSA_PKCS1_OAEP_PADDING);
const nbBlocks = Math.ceil(buffer.length / blockSize);
const outputBuffer = createFastUninitializedBuffer(nbBlocks * blockSize);
let total_length = 0;
for (let i = 0; i < nbBlocks; i++) {
const currentBlock = buffer.slice(blockSize * i, Math.min(blockSize * (i + 1), buffer.length));
const decrypted_buf = privateDecrypt(currentBlock, privateKey, algorithm);
decrypted_buf.copy(outputBuffer, total_length);
total_length += decrypted_buf.length;
}
return outputBuffer.slice(0, total_length);
}
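// Illustrative round trip (key material assumed): with a 2048-bit key pair
// and PKCS#1 v1.5 padding,
//   const blob = publicEncrypt_long(data, publicKeyPem, 256, 11, RSA_PKCS1_PADDING);
//   const back = privateDecrypt_long(blob, privateKeyPem, 256, RSA_PKCS1_PADDING);
// should satisfy back.equals(data): each chunk of up to 245 bytes becomes one
// 256-byte block on encryption and is decrypted block by block here.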
export function coerceCertificatePem(certificate: Certificate | CertificatePEM): CertificatePEM {
if (certificate instanceof Buffer) {
certificate = toPem(certificate, "CERTIFICATE");
}
assert(typeof certificate === "string");
return certificate;<|fim▁hole|>
export function coercePublicKeyPem(publicKey: PublicKey | PublicKeyPEM): PublicKeyPEM {
if (publicKey instanceof Buffer) {
publicKey = toPem(publicKey, "PUBLIC KEY");
}
assert(typeof publicKey === "string");
return publicKey;
}
/***
* @method rsa_length
 * A very expensive way to determine the RSA key length (i.e. 2048 bits or 1024 bits)
 * @param key a PEM public key or a PEM RSA private key
 * @return the key length in bytes.
*/
export function rsa_length(key: PublicKeyPEM | PublicKey): number {
key = coercePublicKeyPem(key);
assert(typeof key === "string");
const a = jsrsasign.KEYUTIL.getKey(key);
return a.n.toString(16).length / 2;
}
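// Sketch of the arithmetic above: a 2048-bit modulus prints as 512 hex
// digits, so rsa_length returns 512 / 2 = 256, i.e. the key length in bytes.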
export function extractPublicKeyFromCertificateSync(certificate: Certificate | CertificatePEM): PublicKeyPEM {
certificate = coerceCertificatePem(certificate);
const key = jsrsasign.KEYUTIL.getKey(certificate);
const publicKeyAsPem = jsrsasign.KEYUTIL.getPEM(key);
assert(typeof publicKeyAsPem === "string");
return publicKeyAsPem;
}
// https://github.com/kjur/jsrsasign/blob/master/x509-1.1.js
// tool to analyse asn1 base64 blocks : http://lapo.it/asn1js
/**
* extract the publickey from a certificate
* @async
*/
export function extractPublicKeyFromCertificate(
certificate: CertificatePEM | Certificate,
callback: (err: Error | null, publicKeyPEM?: PublicKeyPEM) => void
): void {
let err1: any = null;
let keyPem: PublicKeyPEM;
try {
keyPem = extractPublicKeyFromCertificateSync(certificate);
} catch (err) {
err1 = err;
}
setImmediate(() => {
callback(err1, keyPem);
});
}<|fim▁end|>
|
}
|
<|file_name|>constants.spec.ts<|end_file_name|><|fim▁begin|>import {
URL_REGEX,
MESSAGE_SENDER_REGEX,
MENTION_REGEX,
CARD_CALLOUT_REGEX,
CALLOUT_BOUNDARY_REGEX,
} from './constants';
describe('RegEx', () => {
describe('URL_REGEX', () => {
it('should match and capture whole url in main capture group', () => {
const test = [<|fim▁hole|> 'http://example.com',
'https://example.com',
'https://www.example.com',
];
test.forEach(str => {
const match = str.match(URL_REGEX);
expect(match).toBeDefined();
expect(match[0]).toBe(str);
});
});
it('should not match bad urls', () => {
const test = [
'htt://example.com',
'https:/example.com',
'https//www.example.com',
'www.example.com',
'example.com',
];
test.forEach(str =>
expect(str.match(URL_REGEX)).toBe(null)
);
});
});
describe('MESSAGE_SENDER_REGEX', () => {
it('should match and capture sender name in second capture group', () => {
const sender = 'sender';
const match = `${sender}: message`.match(MESSAGE_SENDER_REGEX);
expect(match).toBeDefined();
expect(match[1]).toBe(sender);
});
it('should not match if spaces before :', () => {
const test = [
' sender: message',
'sender : message',
' sender : message',
];
test.forEach(str =>
        expect(str.match(MESSAGE_SENDER_REGEX)).toBe(null)
);
});
});
describe('MENTION_REGEX', () => {
it('should match and capture user mentions in second capture group', () => {
expect('@mention'.match(MENTION_REGEX)[0]).toBe('@mention');
expect('@mention '.match(MENTION_REGEX)[0]).toBe('@mention');
expect(' @mention'.match(MENTION_REGEX)[0]).toBe(' @mention');
expect(' @mention '.match(MENTION_REGEX)[0]).toBe(' @mention');
expect('leading @mention'.match(MENTION_REGEX)[0]).toBe(' @mention');
expect('leading @mention trailing'.match(MENTION_REGEX)[0]).toBe(' @mention');
expect('@mention trailing'.match(MENTION_REGEX)[0]).toBe('@mention');
});
it('should not match preceded by character', () => {
const test = [
'leading@mention',
];
test.forEach(str =>
expect(str.match(MENTION_REGEX)).toBe(null)
);
});
});
});<|fim▁end|>
| |
<|file_name|>base_test_onramp.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Yannick Vaucher, Emanuel Cino
#
# The licence is in the file __openerp__.py
#
##############################################################################
import base64
import urllib2
import httplib
import simplejson
from openerp.tests import common
class TestOnramp(common.HttpCase):
""" Base class for all Onramp tests. """
def setUp(self):
super(TestOnramp, self).setUp()
self.server_url = self.env['ir.config_parameter'].get_param(
'web.base.url',
default='http://localhost:8069'
)
api_client_secret = base64.b64encode("client:secret")
self.rest_url = '{0}/onramp?secret={1}'.format(
self.server_url, api_client_secret)
params_post = 'grant_type=client_credentials&scope=read+write'
header_post = {
"Authorization": "Basic " + api_client_secret,
"Content-type": "application/x-www-form-urlencoded",
"Content-Length": 46,
"Expect": "100-continue",
"Connection": "Keep-Alive",
}
conn = httplib.HTTPSConnection('api2.compassion.com')
conn.request("POST", "/core/connect/token", params_post, header_post)
response = conn.getresponse()
data_token = simplejson.loads(response.read())
conn.close()
self.headers = {
'Content-type': 'application/json',
'Authorization': '{token_type} {access_token}'.format(
**data_token),
"x-cim-MessageType": "http://schemas.ci.org/ci/services/"
"communications/2015/09/SBCStructured",
"x-cim-FromAddress": "CHTest",<|fim▁hole|> """ Check we have an access denied if token is not provided
"""
del self.headers['Authorization']
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_bad_token(self):
""" Check we have an access denied if token is not valid
"""
self.headers['Authorization'] = 'Bearer notarealtoken'
with self.assertRaises(urllib2.HTTPError) as e:
self._send_post({'nothing': 'nothing'})
self.assertEqual(e.exception.code, 401)
self.assertEqual(e.exception.msg, 'UNAUTHORIZED')
def _test_body_no_json(self):
req = urllib2.Request(self.rest_url, "This is not json", self.headers)
with self.assertRaises(urllib2.HTTPError):
urllib2.urlopen(req)
def _send_post(self, vals):
data = simplejson.dumps(vals)
req = urllib2.Request(self.rest_url, data, self.headers)
return urllib2.urlopen(req)<|fim▁end|>
|
"x-cim-ToAddress": "CH",
}
def _test_no_token(self):
|
<|file_name|>mnist_benchmark.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate o2lsh;
extern crate test;
use test::Bencher;
#[bench]
fn build_table_from_mnist_and_query(b: &mut Bencher) {
let mnist_data = match o2lsh::util::get_mnist_vector("mnist1k.dts") {
Ok(v) => v,
Err(reason) => panic!("couldnt open because {}", reason)
};
let vec_length = mnist_data[0].len();
let num_hashes = 10;
let mnist_q = match o2lsh::util::get_mnist_vector("mnist1k.q") {
Ok(v) => v,
Err(reason) => panic!("couldnt open because {}", reason)
};
let zjs = o2lsh::multi::get_expected_zj_vals(num_hashes, 1.0);
let sets: Vec<o2lsh::multi::PerturbationSet> = o2lsh::multi::gen_perturbation_sets(&zjs)
.take(5)
.collect();
let ms: Vec<Vec<usize>> = sets.into_iter()
.map(|x| {x.data})
.collect();
let hash_boxes: Vec<_> = (1..num_hashes).map(|_| o2lsh::hashes::get_hash_closure(vec_length, 1.0)).collect();
let hash_table: o2lsh::table::LSHTable<Vec<f32>, Fn(&Vec<f32>) -> f32> = o2lsh::table::LSHTable::new_build(&mnist_data, hash_boxes, &ms);
let run_test = || {
for vec in &mnist_q {
hash_table.query_multiprobe(vec, 10);
}
};
<|fim▁hole|>fn build_many_tables_from_mnist_and_time_query(b: &mut Bencher) {
let mnist_data = match o2lsh::util::get_mnist_vector("mnist1k.dts") {
Ok(v) => v,
Err(reason) => panic!("couldnt open because {}", reason)
};
let vec_length = mnist_data[0].len();
let num_hashes = 10;
let mnist_q = match o2lsh::util::get_mnist_vector("mnist1k.q") {
Ok(v) => v,
Err(reason) => panic!("couldnt open because {}", reason)
};
let zjs = o2lsh::multi::get_expected_zj_vals(num_hashes, 1.0);
let sets: Vec<o2lsh::multi::PerturbationSet> = o2lsh::multi::gen_perturbation_sets(&zjs)
.take(5)
.collect();
let ms: Vec<Vec<usize>> = sets.into_iter()
.map(|x| {x.data})
.collect();
let mut all_tables = o2lsh::lsh::LSHLookup::new();
for _ in 1..10 {
let hash_boxes: Vec<_> = (1..num_hashes).map(|_| o2lsh::hashes::get_hash_closure(vec_length, 1.0)).collect();
let hash_table: o2lsh::table::LSHTable<Vec<f32>, Fn(&Vec<f32>) -> f32> = o2lsh::table::LSHTable::new_build(&mnist_data, hash_boxes, &ms);
all_tables.add_table(hash_table);
}
let run_test = || {
for vec in &mnist_q {
all_tables.query_vec(vec, 10);
}
};
b.iter(run_test);
}<|fim▁end|>
|
b.iter(run_test);
}
#[bench]
|
<|file_name|>S15.4.4.10_A5.6.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved.<|fim▁hole|>/**
* The slice property of Array has not prototype property
*
* @path ch15/15.4/15.4.4/15.4.4.10/S15.4.4.10_A5.6.js
* @description Checking Array.prototype.slice.prototype
*/
//CHECK#1
if (Array.prototype.slice.prototype !== undefined) {
$ERROR('#1: Array.prototype.slice.prototype === undefined. Actual: ' + (Array.prototype.slice.prototype));
}<|fim▁end|>
| |
<|file_name|>task_review.go<|end_file_name|><|fim▁begin|>package dao
import (
"context"
xsql "database/sql"
"encoding/json"
"fmt"
"strings"
"time"
"go-common/app/admin/main/videoup-task/model"
"go-common/library/database/sql"
"go-common/library/log"
"go-common/library/xstr"
)
const (
_reviewCfg = 3
_countSQL = "SELECT COUNT(*) FROM task_json_config"
_listConfsSQL = "SELECT id,conf_json,conf_type,btime,etime,state,uid,uname,description,mtime FROM task_json_config WHERE conf_type=3"
_reConfsSQL = "SELECT id,conf_json,conf_type,btime,etime,state,uid,uname,description,mtime FROM task_json_config WHERE conf_type=3"
_inConfSQL = "INSERT INTO task_json_config(conf_json,conf_type,btime,etime,state,uid,uname,description) VALUE (?,?,?,?,?,?,?,?)"
_upConfSQL = "UPDATE task_json_config SET conf_json=?,conf_type=?,btime=?,etime=?,state=?,uid=?,uname=?,description=? WHERE id=?"
_delConfSQL = "DELETE FROM task_json_config WHERE id=?"
_reviewSQL = "SELECT review_form FROM task_review WHERE task_id=?"
_inReviewSQL = "INSERT INTO task_review(task_id,review_form,uid,uname) VALUE (?,?,?,?)"
)
// ListConfs returns the list of review configurations
func (d *Dao) ListConfs(c context.Context, uids []int64, bt, et, sort string, pn, ps int64) (rcs []*model.ReviewConf, count int64, err error) {
var (
rows *sql.Rows
countstring, sqlstring, params string
wherecases []string
)
if len(uids) > 0 {
wherecases = append(wherecases, fmt.Sprintf("uid IN (%s)", xstr.JoinInts(uids)))
}
if len(bt) > 0 && len(et) > 0 {
wherecases = append(wherecases, fmt.Sprintf("mtime>='%s' AND mtime<='%s'", bt, et))
}
if len(wherecases) > 0 {
params = " AND " + strings.Join(wherecases, " AND ")
}
countstring = _countSQL + " WHERE conf_type=3" + params
	sqlstring = _listConfsSQL + params + fmt.Sprintf(" ORDER BY mtime %s LIMIT %d,%d", sort, (pn-1)*ps, ps)
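	// Illustrative result (values assumed): uids=[1,2], sort="DESC", pn=2, ps=20
	// yields ... WHERE conf_type=3 AND uid IN (1,2) ORDER BY mtime DESC LIMIT 20,20,
	// i.e. an offset of (pn-1)*ps rows and a page size of ps rows.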
if err = d.arcDB.QueryRow(c, countstring).Scan(&count); err != nil {
log.Error("d.arcDB.QueryRow(%s) error(%v)", countstring, err)
return
}
if count == 0 {
return
}
if rows, err = d.arcDB.Query(c, sqlstring); err != nil {
log.Error("d.arcDB.Query(%s) error(%v)", sqlstring, err)
return
}
defer rows.Close()
for rows.Next() {
var (
jsonCfg []byte
cfgType int8
)
trc := &model.ReviewConf{}
if err = rows.Scan(&trc.ID, &jsonCfg, &cfgType, &trc.Bt, &trc.Et, &trc.State, &trc.UID, &trc.Uname, &trc.Desc, &trc.Mt); err != nil {
log.Error("rows.Scan error(%v)", err)
continue
}
if err = json.Unmarshal(jsonCfg, trc); err != nil {
log.Error("json.Unmarshal error(%v)", err)
continue
}
trc.Refresh()
rcs = append(rcs, trc)
}
return
}
// ReviewConfs returns the re-review configurations
func (d *Dao) ReviewConfs(c context.Context) (rcs []*model.ReviewConf, err error) {
var rows *sql.Rows
if rows, err = d.arcDB.Query(c, _reConfsSQL); err != nil {
log.Error("d.arcDB.Query(%s, %d) error(%v)", _reConfsSQL, err)
return
}
defer rows.Close()
for rows.Next() {
var (
jsonCfg []byte
cfgType int8
)
trc := &model.ReviewConf{}<|fim▁hole|> continue
}
if err = json.Unmarshal(jsonCfg, trc); err != nil {
log.Error("json.Unmarshal error(%v)", err)
continue
}
trc.Refresh()
rcs = append(rcs, trc)
}
return
}
// InReviewConf 插入配置
func (d *Dao) InReviewConf(c context.Context, rc *model.ReviewConf) (lastid int64, err error) {
var (
res xsql.Result
jsonCfg []byte
)
v := new(struct {
		Types    []int64 `json:"types" params:"types"`       // categories
		UpFroms  []int64 `json:"upfroms" params:"upfroms"`   // submission sources
		UpGroups []int64 `json:"upgroups" params:"upgroups"` // user groups
		Uids     []int64 `json:"uids" params:"uids"`         // specific uids
		FansLow  int64   `json:"fanslow" params:"fanslow"`   // minimum fan count
		FansHigh int64   `json:"fanshigh" params:"fanshigh"` // maximum fan count
})
v.Types = rc.Types
v.UpFroms = rc.UpFroms
v.UpGroups = rc.UpGroups
v.Uids = rc.Uids
v.FansLow = rc.FansLow
v.FansHigh = rc.FansHigh
if rc.Bt.TimeValue().IsZero() {
rc.Bt = model.NewFormatTime(time.Now())
}
if jsonCfg, err = json.Marshal(v); err != nil {
log.Error("json.Marshal(%+v) error(%v)", rc, err)
return
}
if res, err = d.arcDB.Exec(c, _inConfSQL, jsonCfg, _reviewCfg, rc.Bt, rc.Et, 0, rc.UID, rc.Uname, rc.Desc); err != nil {
log.Error("d.arcDB.Exec(%+v) error(%s, %v)", _inConfSQL, rc, err)
return
}
return res.LastInsertId()
}
// UpReviewConf 更新指定配置
func (d *Dao) UpReviewConf(c context.Context, rc *model.ReviewConf) (lastid int64, err error) {
var (
res xsql.Result
jsonCfg []byte
)
v := new(struct {
		Types    []int64 `json:"types" params:"types"`       // categories
		UpFroms  []int64 `json:"upfroms" params:"upfroms"`   // submission sources
		UpGroups []int64 `json:"upgroups" params:"upgroups"` // user groups
		Uids     []int64 `json:"uids" params:"uids"`         // specific uids
		FansLow  int64   `json:"fanslow" params:"fanslow"`   // minimum fan count
		FansHigh int64   `json:"fanshigh" params:"fanshigh"` // maximum fan count
})
v.Types = rc.Types
v.UpFroms = rc.UpFroms
v.UpGroups = rc.UpGroups
v.Uids = rc.Uids
v.FansLow = rc.FansLow
v.FansHigh = rc.FansHigh
if rc.Bt.TimeValue().IsZero() {
rc.Bt = model.NewFormatTime(time.Now())
}
if jsonCfg, err = json.Marshal(v); err != nil {
log.Error("json.Marshal(%+v) error(%v)", rc, err)
return
}
if res, err = d.arcDB.Exec(c, _upConfSQL, jsonCfg, _reviewCfg, rc.Bt, rc.Et, rc.State, rc.UID, rc.Uname, rc.Desc, rc.ID); err != nil {
log.Error("d.arcDB.Exec(%s %+v) error(%v)", _upConfSQL, rc, err)
return
}
return res.RowsAffected()
}
// DelReviewConf 删除指定配置
func (d *Dao) DelReviewConf(c context.Context, id int) (lastid int64, err error) {
var res xsql.Result
if res, err = d.arcDB.Exec(c, _delConfSQL, id); err != nil {
log.Error("d.arcDB.Exec(%s %d) error(%v)", _delConfSQL, id, err)
return
}
return res.RowsAffected()
}
// ReviewForm 复审表单
func (d *Dao) ReviewForm(c context.Context, tid int64) (tsf *model.SubmitForm, err error) {
var form []byte
if err = d.arcDB.QueryRow(c, _reviewSQL, tid).Scan(&form); err != nil {
if err == sql.ErrNoRows {
log.Info("ReviewForm QueryRow empty(%d)", tid)
err = nil
return
}
log.Error("d.arcDB.QueryRow(%s, %d) error(%v)", _reviewSQL, tid, err)
return
}
tsf = &model.SubmitForm{}
if err = json.Unmarshal(form, tsf); err != nil {
log.Error("json.Unmarshal error(%v)", err)
tsf = nil
}
return
}
// InReviewForm insert submit form
func (d *Dao) InReviewForm(c context.Context, sf *model.SubmitForm, uid int64, uname string) (lastid int64, err error) {
var (
res xsql.Result
bsf []byte
)
if bsf, err = json.Marshal(sf); err != nil {
log.Error("json.Marshal error(%v)", err)
return
}
if res, err = d.arcDB.Exec(c, _inReviewSQL, sf.TaskID, bsf, uid, uname); err != nil {
log.Error("d.arcDB.Exec(%s,%d,%v,%d,%s) error(%v)", _inReviewSQL, sf.TaskID, bsf, uid, uname, err)
return
}
return res.LastInsertId()
}<|fim▁end|>
|
if err = rows.Scan(&trc.ID, &jsonCfg, &cfgType, &trc.Bt, &trc.Et, &trc.State, &trc.UID, &trc.Uname, &trc.Desc, &trc.Mt); err != nil {
log.Error("rows.Scan error(%v)", err)
|
<|file_name|>header.js<|end_file_name|><|fim▁begin|>module.exports = {
<% if (modules === 'systemjs') { -%>
templateUrl: 'app/header.html'<|fim▁hole|> template: require('./header.html')
<% } -%>
};<|fim▁end|>
|
<% } else { -%>
|
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__version__= "$Version: $"
__rcsid__="$Id: $"
import matplotlib
#matplotlib.use('WX')
from wx import MilliSleep
from wx import SplashScreen, SPLASH_CENTRE_ON_SCREEN, SPLASH_TIMEOUT
import os
import sys
import warnings
from . import zpickle
from .utils import *
from .dialogs.waxy import *
from .dialogs import *
from .run_sim import *
import threading
import pylab
gray=pylab.cm.gray
from matplotlib.backends.backend_wxagg import FigureCanvasWx as FigureCanvas
from matplotlib.backends.backend_wx import FigureManager
from matplotlib.figure import Figure
from matplotlib.axes import Subplot
class SimThread(threading.Thread):
def __init__(self,params,parent):
self.params=params
self.parent=parent
threading.Thread.__init__(self);
def run(self):
run_sim(self.params,self.parent)
def subplot(*args):
import pylab
if len(args)==1:
return pylab.subplot(args[0])
elif len(args)==3:
return pylab.subplot(args[0],args[1],args[2])
elif len(args)==4:
r=args[2]
c=args[3]
return pylab.subplot(args[0],args[1],c+(r-1)*args[1]);
else:
raise ValueError("invalid number of arguments")
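# Illustrative mapping for the 4-argument form above (a sketch):
# subplot(2, 2, 2, 1) selects row 2, column 1 of a 2x2 grid, which is
# pylab.subplot(2, 2, 3) because c + (r-1)*cols = 1 + (2-1)*2 = 3.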
class MainFrame(Frame):
def __init__(self,parent=None,title='',direction='H',
size=(750,750),lfname=None,params=None):
self.fig=None
# turn off security warning on tmpnam. why is it here?
warnings.filterwarnings('ignore')
fname=os.tempnam()
warnings.resetwarnings()
self.base_dir=os.path.dirname(__file__)
if not self.base_dir:
self.base_dir='.'
self.tmpfile=fname+"_plasticity.dat"
self.modified=False
self.running=False
self.stopping=False
self.quitting=False
self.plot_first=False
if not params:
self.params=default_params()
else:
self.params=params
for p in self.params['pattern_input']:
if not os.path.exists(p['filename']):
p['filename']=self.base_dir+"/"+p['filename']
if lfname:
if not self.__load_sim__(lfname):
self.plot_first=True
Frame.__init__(self,parent,title,direction,size)
def Body(self):
self.CreateMenu()
self.CenterOnScreen()
self.ResetTitle()
fname=self.base_dir+"/images/plasticity_small_icon.ico"
self.SetIcon(fname)
self.fig = Figure(figsize=(7,5),dpi=100)
self.canvas = FigureCanvas(self, -1, self.fig)
self.figmgr = FigureManager(self.canvas, 1, self)
self.axes = [self.fig.add_subplot(221),
self.fig.add_subplot(222),
self.fig.add_subplot(223),
self.fig.add_subplot(224)]
if self.plot_first:
sim=zpickle.load(self.tmpfile)
sim['params']['display']=True
self.Plot(sim)
def Stopping(self):
return self.stopping
def Yield(self):
wx.Yield()
def ResetTitle(self):
(root,sfname)=os.path.split(self.params['save_sim_file'])
if self.modified:
s=' (*)'
else:
s=''
title='Plasticity: %s%s' % (sfname,s)
self.SetTitle(title)
def Plot(self,sim):
if not sim['params']['display']:
return
if sim['params']['display_module']:
try:
module=__import__(sim['params']['display_module'],fromlist=['UserPlot'])
except ImportError:
sim['params']['display']=False
dlg = MessageDialog(self,
"Error","Error in Import: %s. Turning display off" % sim['params']['display_module'],
icon='error')
dlg.ShowModal()
dlg.Destroy()
return
try:
module.UserPlot(self,sim)
return
except ValueError:
sim['params']['display']=False
dlg = MessageDialog(self,
"Error","Error in display. Turning display off",
icon='error')
dlg.ShowModal()
dlg.Destroy()
return
try:
im=weights2image(sim['params'],sim['weights'])
self.axes[0].hold(False)
self.axes[0].set_axis_bgcolor('k')
self.axes[0].pcolor(im,cmap=gray,edgecolors='k')
self.axes[0].set_aspect('equal')
num_moments=sim['moments_mat'].shape[0]
self.axes[1].hold(False)
num_neurons=sim['moments_mat'].shape[1]
for k in range(num_neurons):
for i in range(num_moments):
self.axes[1].plot(sim['moments_mat'][i,k,:],'-o')
self.axes[1].hold(True)
self.axes[2].hold(False)
response_mat=sim['response_mat']
response_var_list=sim['response_var_list']
styles=['b-o','g-o']
for i,r in enumerate(response_var_list[-1]):
x=r[1]
y=r[2]
self.axes[2].plot(x,y,styles[i])
self.axes[2].hold(True)
self.axes[3].hold(False)
styles=['b-o','g-o']
for i,r in enumerate(response_mat):
self.axes[3].plot(r,styles[i])
self.axes[3].hold(True)
self.canvas.draw()
self.canvas.gui_repaint()
except ValueError:
sim['params']['display']=False
dlg = MessageDialog(self,
"Error","Error in display. Turning display off",
icon='error')
dlg.ShowModal()
dlg.Destroy()
def Run_Pause(self,event):
if not self.running:
# pylab.close()
self.params['tmpfile']=self.tmpfile
if os.path.exists(self.tmpfile):
self.params['continue']=1
self.modified=True
self.ResetTitle()
self.running=True
## d={}
## d['params']=self.params
## zpickle.save(d,'plasticity_tmpparams.dat')
## cmd='./run_sim.py --paramfile plasticity_tmpparams.dat --from_gui 1'
## os.system(cmd)
self.stopping=False
run_sim(self.params,self)<|fim▁hole|> self.Quit()
else:
self.stopping=True
def __load_sim__(self,lfname):
sim=zpickle.load(lfname)
params=sim['params']
params['save_sim_file']=self.params['save_sim_file']
params['load_sim_file']=''
params['continue']=False
try:
params['initial_weights']=sim['weights']
params['initial_moments']=sim['moments']
except KeyError:
self.params=params
return 1
params['load_sim_file']=self.tmpfile
params['continue']=True
sim['params']=params
self.params=params
zpickle.save(sim,self.tmpfile)
return 0
def Reset_Simulation(self,event=None):
if not os.path.exists(self.tmpfile):
return
self.canvas.Show(False)
if self.modified:
(root,sfname)=os.path.split(self.params['save_sim_file'])
dlg=MessageDialog(self,
text="Do you want to save the changes you made to %s?" % sfname,
title="Reset", ok=0, yes_no=1,cancel=1)
result=dlg.ShowModal()
dlg.Destroy()
if result == 'cancel':
self.canvas.Show(True)
return
elif result == 'yes':
filename=self.Save_Simulation()
if not filename: # cancelled the save
self.canvas.Show(True)
return
self.params['continue']=False
self.params['load_sim_file']=''
self.params['initial_weights']=[]
self.params['initial_moments']=[]
for a in self.axes:
a.cla()
self.canvas.draw()
self.canvas.Show(True)
def Restart(self,event=None):
if not os.path.exists(self.tmpfile):
return
self.canvas.Show(False)
if self.modified:
(root,sfname)=os.path.split(self.params['save_sim_file'])
dlg=MessageDialog(self,
text="Do you want to save the changes you made to %s?" % sfname,
title="Restart", ok=0, yes_no=1,cancel=1)
result=dlg.ShowModal()
dlg.Destroy()
if result == 'cancel':
self.canvas.Show(True)
return
elif result == 'yes':
filename=self.Save_Simulation()
if not filename: # cancelled the save
self.canvas.Show(True)
return
self.__load_sim__(self.tmpfile)
self.params['continue']=False
self.canvas.Show(True)
def Load_Simulation(self,event=None):
self.canvas.Show(False)
if self.modified:
(root,sfname)=os.path.split(self.params['save_sim_file'])
dlg=MessageDialog(self,
text="Do you want to save the changes you made to %s?" % sfname,
title="Load Simulation", ok=0, yes_no=1,cancel=1)
result=dlg.ShowModal()
dlg.Destroy()
if result == 'cancel':
pass
elif result == 'yes':
self.Save_Simulation()
lfname=''
dlg = FileDialog(self, "Load Simulation",default_dir=os.getcwd()+"/sims",
wildcard='DAT Files|*.dat|All Files|*.*')
result = dlg.ShowModal()
if result == 'ok':
lfname = dlg.GetPaths()[0]
dlg.Destroy()
if not lfname:
self.canvas.Show(True)
return
self.__load_sim__(lfname)
sim=zpickle.load(self.tmpfile)
self.Plot(sim)
self.canvas.Show(True)
def Save_Simulation(self,event=None):
if not self.modified:
return
sfname=self.params['save_sim_file']
def_sfname=default_params()['save_sim_file']
if sfname==def_sfname:
filename=self.Save_Simulation_As()
else:
filename=sfname
d=zpickle.load(self.tmpfile)
d['params']=self.params
zpickle.save(d,sfname)
self.modified=False
self.ResetTitle()
return filename
def Save_Simulation_As(self,event=None):
self.canvas.Show(False)
dlg = FileDialog(self, "Save Simulation As...",default_dir=os.getcwd()+"/sims/",
wildcard='DAT Files|*.dat|All Files|*.*',save=1)
result = dlg.ShowModal()
if result == 'ok':
filename = dlg.GetPaths()[0]
else:
filename=None
dlg.Destroy()
if filename:
d=zpickle.load(self.tmpfile)
self.params['save_sim_file']=filename
d['params']=self.params
zpickle.save(d,filename)
self.modified=False
self.ResetTitle()
self.canvas.Show(True)
return filename
def Set_Simulation_Parameters(self,event):
self.canvas.Show(False)
set_simulation_parameters(self.params,self)
self.canvas.Show(True)
def Set_Input_Parameters(self,event):
self.canvas.Show(False)
set_input_parameters(self.params,self)
self.canvas.Show(True)
def Set_Output_Parameters(self,event):
self.canvas.Show(False)
set_output_parameters(self.params,self)
self.canvas.Show(True)
def Set_Weight_Parameters(self,event):
self.canvas.Show(False)
set_weight_parameters(self.params,self)
self.canvas.Show(True)
def Save_Parameters_As(self,event):
save_parameters_as(self.params,self)
def Set_Parameter_Structure(self,event):
set_parameter_structure(self.params,self)
def Load_Parameters(self,event):
p=load_parameters(None,self)
if p:
self.params=p
def CreateMenu(self):
menubar = MenuBar()
menu = Menu(self)
menu.Append("L&oad State", self.Load_Simulation, "Load a Complete Simulation",hotkey="Ctrl+O")
menu.Append("Load &Parameters", self.Load_Parameters, "Load Simulation Parameters")
menu.AppendSeparator()
menu.Append("Save Parameters As...", self.Save_Parameters_As, "Save Simulation Parameters")
menu.Append("Save State As...", self.Save_Simulation_As, "Save a Complete Simulation")
menu.Append("Save State", self.Save_Simulation, "Save a Complete Simulation",hotkey="Ctrl+S")
menu.AppendSeparator()
menu.Append("&Run/Pause", self.Run_Pause, "Run a Simulation",hotkey="Ctrl+P")
menu.Append("Restart from Current State", self.Restart)
menu.Append("Reset Simulation", self.Reset_Simulation,hotkey="Ctrl+R")
menu.AppendSeparator()
menu.Append("Export Figure...", self.Export, "Export the Screen")
menu.Append("&Quit", self.Quit, "Quit",hotkey="Ctrl+Q")
menubar.Append(menu, "&File")
menu = Menu(self)
menu.Append("&Simulation Parameters", self.Set_Simulation_Parameters)
menu.Append("&Input Parameters", self.Set_Input_Parameters)
menu.Append("&Output Neuron Parameters", self.Set_Output_Parameters)
menu.Append("&Weight Parameters", self.Set_Weight_Parameters)
menu.AppendSeparator()
menu.Append("&Display", self.Display)
menu.Append("Make &New Input Files", self.Nop)
menu.Append("Parameter Structure", self.Set_Parameter_Structure)
menubar.Append(menu, "&Edit")
menu=Menu(self)
menu.Append("&Help", self.Nop)
menu.Append("&About", self.About)
menubar.Append(menu, "&Help")
self.SetMenuBar(menubar)
self.CreateStatusBar()
def Display(self,event=None):
self.canvas.Show(False)
dlg = FileDialog(self, "Choose Display Module",default_dir=os.getcwd()+"/",
wildcard='Python Plot Files|plot*.py|All Files|*.*')
result = dlg.ShowModal()
dlg.Destroy()
if result == 'ok':
lfname = dlg.GetPaths()[0]
modulename=os.path.splitext(os.path.split(lfname)[-1])[0]
self.params['display_module']=modulename
if os.path.exists(self.tmpfile):
sim=zpickle.load(self.tmpfile)
self.Plot(sim)
self.canvas.Show(True)
def About(self,event):
win=AboutWindow()
win.Show()
def Nop(self,event):
self.canvas.Show(False)
dlg = MessageDialog(self, "Error","Function Not Implemented",icon='error')
dlg.ShowModal()
dlg.Destroy()
self.canvas.Show(True)
def Export(self,event=None):
export_fig(self)
def Quit(self,event=None):
if self.running:
self.quitting=True
self.stopping=True
return
self.canvas.Show(False)
if self.modified:
(root,sfname)=os.path.split(self.params['save_sim_file'])
dlg=MessageDialog(self,
text="Do you want to save the changes you made to %s?" % sfname,
title="Quit", ok=0, yes_no=1,cancel=1)
result=dlg.ShowModal()
dlg.Destroy()
if result == 'cancel':
self.canvas.Show(True)
return
elif result == 'yes':
self.Save_Simulation()
self.Close()
if os.path.exists(self.tmpfile):
os.remove(self.tmpfile)
def run(lfname=None,params=None,use_splash=True):
if use_splash:
app1=Application(splash.SplashFrame)
app1.Run()
app = Application(MainFrame, title="Plasticity",lfname=lfname,
params=params)
app.Run()
if __name__ == '__main__':
from optparse import OptionParser
parser = OptionParser()
parser.add_option( "--nosplash",
action="store_false", dest="splash", default=True,
help="don't show the splash screen")
(options, args) = parser.parse_args()
if options.splash:
app1=Application(splash.SplashFrame)
app1.Run()
if len(args)>=1:
lfname=args[0]
else:
lfname=None
run(lfname)<|fim▁end|>
|
self.params['load_sim_file']=self.tmpfile
self.running=False
if self.quitting:
|
<|file_name|>ConcurrentNavigableMapTestSuiteBuilder.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing;
<|fim▁hole|>import java.util.List;
/**
* Creates, based on your criteria, a JUnit test suite that exhaustively tests
* a ConcurrentNavigableMap implementation.
*
* @author Louis Wasserman
*/
@GwtIncompatible
public class ConcurrentNavigableMapTestSuiteBuilder<K, V>
extends NavigableMapTestSuiteBuilder<K, V> {
public static <K, V> ConcurrentNavigableMapTestSuiteBuilder<K, V> using(
TestSortedMapGenerator<K, V> generator) {
ConcurrentNavigableMapTestSuiteBuilder<K, V> result =
new ConcurrentNavigableMapTestSuiteBuilder<K, V>();
result.usingGenerator(generator);
return result;
}
@Override
protected List<Class<? extends AbstractTester>> getTesters() {
List<Class<? extends AbstractTester>> testers = Helpers.copyToList(super.getTesters());
testers.addAll(ConcurrentMapTestSuiteBuilder.TESTERS);
return testers;
}
@Override
NavigableMapTestSuiteBuilder<K, V> subSuiteUsing(TestSortedMapGenerator<K, V> generator) {
return using(generator);
}
}<|fim▁end|>
|
import com.google.common.annotations.GwtIncompatible;
|
<|file_name|>reveal.js<|end_file_name|><|fim▁begin|>/*!
* reveal.js
* http://lab.hakim.se/reveal-js
* MIT licensed
*
* Copyright (C) 2016 Hakim El Hattab, http://hakim.se
*/
(function( root, factory ) {
if( typeof define === 'function' && define.amd ) {
// AMD. Register as an anonymous module.
define( function() {
root.Reveal = factory();
return root.Reveal;
} );
} else if( typeof exports === 'object' ) {
// Node. Does not work with strict CommonJS.
module.exports = factory();
} else {
// Browser globals.
root.Reveal = factory();
}
}( this, function() {
'use strict';
var Reveal;
// The reveal.js version
var VERSION = '3.3.0';
var SLIDES_SELECTOR = '.slides section',
HORIZONTAL_SLIDES_SELECTOR = '.slides>section',
VERTICAL_SLIDES_SELECTOR = '.slides>section.present>section',
HOME_SLIDE_SELECTOR = '.slides>section:first-of-type',
UA = navigator.userAgent,
// Configuration defaults, can be overridden at initialization time
config = {
// The "normal" size of the presentation, aspect ratio will be preserved
// when the presentation is scaled to fit different resolutions
width: 960,
height: 700,
// Factor of the display size that should remain empty around the content
margin: 0.1,
// Bounds for smallest/largest possible scale to apply to content
minScale: 0.2,
maxScale: 1.5,
// Display controls in the bottom right corner
controls: true,
// Display a presentation progress bar
progress: true,
// Display the page number of the current slide
slideNumber: false,
// Push each slide change to the browser history
history: false,
// Enable keyboard shortcuts for navigation
keyboard: true,
			// Optional function that blocks keyboard events when returning false
keyboardCondition: null,
// Enable the slide overview mode
overview: true,
// Vertical centering of slides
center: true,
// Enables touch navigation on devices with touch input
touch: true,
// Loop the presentation
loop: false,
// Change the presentation direction to be RTL
rtl: false,
// Randomizes the order of slides each time the presentation loads
shuffle: false,
// Turns fragments on and off globally
fragments: true,
// Flags if the presentation is running in an embedded mode,
// i.e. contained within a limited portion of the screen
embedded: false,
			// Flags if we should show a help overlay when the question mark
// key is pressed
help: true,
// Flags if it should be possible to pause the presentation (blackout)
pause: true,
// Flags if speaker notes should be visible to all viewers
showNotes: false,
// Number of milliseconds between automatically proceeding to the
// next slide, disabled when set to 0, this value can be overwritten
// by using a data-autoslide attribute on your slides
autoSlide: 0,
// Stop auto-sliding after user input
autoSlideStoppable: true,
// Use this method for navigation when auto-sliding (defaults to navigateNext)
autoSlideMethod: null,
// Enable slide navigation via mouse wheel
mouseWheel: false,
// Apply a 3D roll to links on hover
rollingLinks: false,
// Hides the address bar on mobile devices
hideAddressBar: true,
// Opens links in an iframe preview overlay
previewLinks: false,
// Exposes the reveal.js API through window.postMessage
postMessage: true,
// Dispatches all reveal.js events to the parent window through postMessage
postMessageEvents: false,
			// Focuses body when page changes visibility to ensure keyboard shortcuts work
focusBodyOnPageVisibilityChange: true,
// Transition style
transition: 'fade', // none/fade/slide/convex/concave/zoom
// Transition speed
transitionSpeed: 'default', // default/fast/slow
// Transition style for full page slide backgrounds
backgroundTransition: 'fade', // none/fade/slide/convex/concave/zoom
// Parallax background image
parallaxBackgroundImage: '', // CSS syntax, e.g. "a.jpg"
// Parallax background size
parallaxBackgroundSize: '', // CSS syntax, e.g. "3000px 2000px"
// Amount of pixels to move the parallax background per slide step
parallaxBackgroundHorizontal: null,
parallaxBackgroundVertical: null,
// Number of slides away from the current that are visible
viewDistance: 3,
// Script dependencies to load
dependencies: []
},
// Flags if reveal.js is loaded (has dispatched the 'ready' event)
loaded = false,
// Flags if the overview mode is currently active
overview = false,
// Holds the dimensions of our overview slides, including margins
overviewSlideWidth = null,
overviewSlideHeight = null,
// The horizontal and vertical index of the currently active slide
indexh,
indexv,
// The previous and current slide HTML elements
previousSlide,
currentSlide,
previousBackground,
// Slides may hold a data-state attribute which we pick up and apply
// as a class to the body. This list contains the combined state of
// all current slides.
state = [],
// The current scale of the presentation (see width/height config)
scale = 1,
// CSS transform that is currently applied to the slides container,
// split into two groups
slidesTransform = { layout: '', overview: '' },
// Cached references to DOM elements
dom = {},
// Features supported by the browser, see #checkCapabilities()
features = {},
// Client is a mobile device, see #checkCapabilities()
isMobileDevice,
// Client is a desktop Chrome, see #checkCapabilities()
isChrome,
// Throttles mouse wheel navigation
lastMouseWheelStep = 0,
// Delays updates to the URL due to a Chrome thumbnailer bug
writeURLTimeout = 0,
// Flags if the interaction event listeners are bound
eventsAreBound = false,
// The current auto-slide duration
autoSlide = 0,
// Auto slide properties
autoSlidePlayer,
autoSlideTimeout = 0,
autoSlideStartTime = -1,
autoSlidePaused = false,
// Holds information about the currently ongoing touch input
touch = {
startX: 0,
startY: 0,
startSpan: 0,
startCount: 0,
captured: false,
threshold: 40
},
// Holds information about the keyboard shortcuts
keyboardShortcuts = {
'N , SPACE': 'Next slide',
'P': 'Previous slide',
'← , H': 'Navigate left',
'→ , L': 'Navigate right',
'↑ , K': 'Navigate up',
'↓ , J': 'Navigate down',
'Home': 'First slide',
'End': 'Last slide',
'B , .': 'Pause',
'F': 'Fullscreen',
'ESC, O': 'Slide overview'
};
/**
* Starts up the presentation if the client is capable.
*/
function initialize( options ) {
checkCapabilities();
if( !features.transforms2d && !features.transforms3d ) {
document.body.setAttribute( 'class', 'no-transforms' );
// Since JS won't be running any further, we load all lazy
// loading elements upfront
var images = toArray( document.getElementsByTagName( 'img' ) ),
iframes = toArray( document.getElementsByTagName( 'iframe' ) );
var lazyLoadable = images.concat( iframes );
for( var i = 0, len = lazyLoadable.length; i < len; i++ ) {
var element = lazyLoadable[i];
if( element.getAttribute( 'data-src' ) ) {
element.setAttribute( 'src', element.getAttribute( 'data-src' ) );
element.removeAttribute( 'data-src' );
}
}
// If the browser doesn't support core features we won't be
// using JavaScript to control the presentation
return;
}
// Cache references to key DOM elements
dom.wrapper = document.querySelector( '.reveal' );
dom.slides = document.querySelector( '.reveal .slides' );
// Force a layout when the whole page, incl fonts, has loaded
window.addEventListener( 'load', layout, false );
var query = Reveal.getQueryHash();
// Do not accept new dependencies via query config to avoid
// the potential of malicious script injection
if( typeof query['dependencies'] !== 'undefined' ) delete query['dependencies'];
// Copy options over to our config object
extend( config, options );
extend( config, query );
// Hide the address bar in mobile browsers
hideAddressBar();
// Loads the dependencies and continues to #start() once done
load();
}
/**
* Inspect the client to see what it's capable of, this
	 * should only happen once per runtime.
*/
function checkCapabilities() {
isMobileDevice = /(iphone|ipod|ipad|android)/gi.test( UA );
isChrome = /chrome/i.test( UA ) && !/edge/i.test( UA );
var testElement = document.createElement( 'div' );
features.transforms3d = 'WebkitPerspective' in testElement.style ||
'MozPerspective' in testElement.style ||
'msPerspective' in testElement.style ||
'OPerspective' in testElement.style ||
'perspective' in testElement.style;
features.transforms2d = 'WebkitTransform' in testElement.style ||
'MozTransform' in testElement.style ||
'msTransform' in testElement.style ||
'OTransform' in testElement.style ||
'transform' in testElement.style;
features.requestAnimationFrameMethod = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
features.requestAnimationFrame = typeof features.requestAnimationFrameMethod === 'function';
features.canvas = !!document.createElement( 'canvas' ).getContext;
		// Transitions in the overview are disabled in desktop Safari
		// due to lag
features.overviewTransitions = !/Version\/[\d\.]+.*Safari/.test( UA );
// Flags if we should use zoom instead of transform to scale
// up slides. Zoom produces crisper results but has a lot of
// xbrowser quirks so we only use it in whitelsited browsers.
features.zoom = 'zoom' in testElement.style && !isMobileDevice &&
( isChrome || /Version\/[\d\.]+.*Safari/.test( UA ) );
}
/**
* Loads the dependencies of reveal.js. Dependencies are
* defined via the configuration option 'dependencies'
* and will be loaded prior to starting/binding reveal.js.
* Some dependencies may have an 'async' flag, if so they
* will load after reveal.js has been started up.
*/
function load() {
var scripts = [],
scriptsAsync = [],
scriptsToPreload = 0;
// Called once synchronous scripts finish loading
function proceed() {
if( scriptsAsync.length ) {
// Load asynchronous scripts
head.js.apply( null, scriptsAsync );
}
start();
}
function loadScript( s ) {
head.ready( s.src.match( /([\w\d_\-]*)\.?js$|[^\\\/]*$/i )[0], function() {
// Extension may contain callback functions
if( typeof s.callback === 'function' ) {
s.callback.apply( this );
}
if( --scriptsToPreload === 0 ) {
proceed();
}
});
}
for( var i = 0, len = config.dependencies.length; i < len; i++ ) {
var s = config.dependencies[i];
// Load if there's no condition or the condition is truthy
if( !s.condition || s.condition() ) {
if( s.async ) {
scriptsAsync.push( s.src );
}
else {
scripts.push( s.src );
}
loadScript( s );
}
}
if( scripts.length ) {
scriptsToPreload = scripts.length;
// Load synchronous scripts
head.js.apply( null, scripts );
}
else {
proceed();
}
}
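	/**
	 * Illustrative dependencies config consumed by #load() above
	 * (plugin paths are assumed, adjust them to your checkout):
	 *
	 *   Reveal.initialize({
	 *     dependencies: [
	 *       // loaded synchronously, and only if the condition is truthy
	 *       { src: 'plugin/markdown/markdown.js',
	 *         condition: function() { return !!document.querySelector( '[data-markdown]' ); } },
	 *       // flagged async, so it loads after start() has run
	 *       { src: 'plugin/notes/notes.js', async: true }
	 *     ]
	 *   });
	 */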
/**
* Starts up reveal.js by binding input events and navigating
* to the current URL deeplink if there is one.
*/
function start() {
// Make sure we've got all the DOM elements we need
setupDOM();
// Listen to messages posted to this window
setupPostMessage();
// Prevent the slides from being scrolled out of view
setupScrollPrevention();
// Resets all vertical slides so that only the first is visible
resetVerticalSlides();
// Updates the presentation to match the current configuration values
configure();
// Read the initial hash
readURL();
// Update all backgrounds
updateBackground( true );
// Notify listeners that the presentation is ready but use a 1ms
// timeout to ensure it's not fired synchronously after #initialize()
setTimeout( function() {
// Enable transitions now that we're loaded
dom.slides.classList.remove( 'no-transition' );
loaded = true;
dispatchEvent( 'ready', {
'indexh': indexh,
'indexv': indexv,
'currentSlide': currentSlide
} );
}, 1 );
// Special setup and config is required when printing to PDF
if( isPrintingPDF() ) {
removeEventListeners();
// The document needs to have loaded for the PDF layout
// measurements to be accurate
if( document.readyState === 'complete' ) {
setupPDF();
}
else {
window.addEventListener( 'load', setupPDF );
}
}
}
/**
* Finds and stores references to DOM elements which are
* required by the presentation. If a required element is
* not found, it is created.
*/
function setupDOM() {
// Prevent transitions while we're loading
dom.slides.classList.add( 'no-transition' );
// Background element
dom.background = createSingletonNode( dom.wrapper, 'div', 'backgrounds', null );
// Progress bar
dom.progress = createSingletonNode( dom.wrapper, 'div', 'progress', '<span></span>' );
dom.progressbar = dom.progress.querySelector( 'span' );
// Arrow controls
createSingletonNode( dom.wrapper, 'aside', 'controls',
'<button class="navigate-left" aria-label="previous slide"></button>' +
'<button class="navigate-right" aria-label="next slide"></button>' +
'<button class="navigate-up" aria-label="above slide"></button>' +
'<button class="navigate-down" aria-label="below slide"></button>' );
// Slide number
dom.slideNumber = createSingletonNode( dom.wrapper, 'div', 'slide-number', '' );
// Element containing notes that are visible to the audience
dom.speakerNotes = createSingletonNode( dom.wrapper, 'div', 'speaker-notes', null );
dom.speakerNotes.setAttribute( 'data-prevent-swipe', '' );
// Overlay graphic which is displayed during the paused mode
createSingletonNode( dom.wrapper, 'div', 'pause-overlay', null );
// Cache references to elements
dom.controls = document.querySelector( '.reveal .controls' );
dom.theme = document.querySelector( '#theme' );
dom.wrapper.setAttribute( 'role', 'application' );
// There can be multiple instances of controls throughout the page
dom.controlsLeft = toArray( document.querySelectorAll( '.navigate-left' ) );
dom.controlsRight = toArray( document.querySelectorAll( '.navigate-right' ) );
dom.controlsUp = toArray( document.querySelectorAll( '.navigate-up' ) );
dom.controlsDown = toArray( document.querySelectorAll( '.navigate-down' ) );
dom.controlsPrev = toArray( document.querySelectorAll( '.navigate-prev' ) );
dom.controlsNext = toArray( document.querySelectorAll( '.navigate-next' ) );
dom.statusDiv = createStatusDiv();
}
/**
* Creates a hidden div with role aria-live to announce the
* current slide content. Hide the div off-screen to make it
* available only to Assistive Technologies.
*/
function createStatusDiv() {
var statusDiv = document.getElementById( 'aria-status-div' );
if( !statusDiv ) {
statusDiv = document.createElement( 'div' );
statusDiv.style.position = 'absolute';
statusDiv.style.height = '1px';
statusDiv.style.width = '1px';
statusDiv.style.overflow ='hidden';
statusDiv.style.clip = 'rect( 1px, 1px, 1px, 1px )';
statusDiv.setAttribute( 'id', 'aria-status-div' );
statusDiv.setAttribute( 'aria-live', 'polite' );
statusDiv.setAttribute( 'aria-atomic','true' );
dom.wrapper.appendChild( statusDiv );
}
return statusDiv;
}
/**
* Configures the presentation for printing to a static
* PDF.
*/
function setupPDF() {
var slideSize = getComputedSlideSize( window.innerWidth, window.innerHeight );
// Dimensions of the PDF pages
var pageWidth = Math.floor( slideSize.width * ( 1 + config.margin ) ),
pageHeight = Math.floor( slideSize.height * ( 1 + config.margin ) );
// Dimensions of slides within the pages
var slideWidth = slideSize.width,
slideHeight = slideSize.height;
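		// Worked example with the default config above: width 960, height 700
		// and margin 0.1 give a 1056 x 770 px page around a 960 x 700 slide.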
// Let the browser know what page size we want to print
injectStyleSheet( '@page{size:'+ pageWidth +'px '+ pageHeight +'px; margin: 0;}' );
// Limit the size of certain elements to the dimensions of the slide
injectStyleSheet( '.reveal section>img, .reveal section>video, .reveal section>iframe{max-width: '+ slideWidth +'px; max-height:'+ slideHeight +'px}' );
document.body.classList.add( 'print-pdf' );
document.body.style.width = pageWidth + 'px';
document.body.style.height = pageHeight + 'px';
// Add each slide's index as attributes on itself, we need these
// indices to generate slide numbers below
toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) ).forEach( function( hslide, h ) {
hslide.setAttribute( 'data-index-h', h );
if( hslide.classList.contains( 'stack' ) ) {
toArray( hslide.querySelectorAll( 'section' ) ).forEach( function( vslide, v ) {
vslide.setAttribute( 'data-index-h', h );
vslide.setAttribute( 'data-index-v', v );
} );
}
} );
// Slide and slide background layout
toArray( dom.wrapper.querySelectorAll( SLIDES_SELECTOR ) ).forEach( function( slide ) {
// Vertical stacks are not centred since their section
// children will be
if( slide.classList.contains( 'stack' ) === false ) {
// Center the slide inside of the page, giving the slide some margin
var left = ( pageWidth - slideWidth ) / 2,
top = ( pageHeight - slideHeight ) / 2;
var contentHeight = getAbsoluteHeight( slide );
var numberOfPages = Math.max( Math.ceil( contentHeight / pageHeight ), 1 );
// Center slides vertically
if( numberOfPages === 1 && config.center || slide.classList.contains( 'center' ) ) {
top = Math.max( ( pageHeight - contentHeight ) / 2, 0 );
}
// Position the slide inside of the page
slide.style.left = left + 'px';
slide.style.top = top + 'px';
slide.style.width = slideWidth + 'px';
// TODO Backgrounds need to be multiplied when the slide
// stretches over multiple pages
var background = slide.querySelector( '.slide-background' );
if( background ) {
background.style.width = pageWidth + 'px';
background.style.height = ( pageHeight * numberOfPages ) + 'px';
background.style.top = -top + 'px';
background.style.left = -left + 'px';
}
// Inject notes if `showNotes` is enabled
if( config.showNotes ) {
var notes = getSlideNotes( slide );
if( notes ) {
var notesSpacing = 8;
var notesElement = document.createElement( 'div' );
notesElement.classList.add( 'speaker-notes' );
notesElement.classList.add( 'speaker-notes-pdf' );
notesElement.innerHTML = notes;
notesElement.style.left = ( notesSpacing - left ) + 'px';
notesElement.style.bottom = ( notesSpacing - top ) + 'px';
notesElement.style.width = ( pageWidth - notesSpacing*2 ) + 'px';
slide.appendChild( notesElement );
}
}
// Inject slide numbers if `slideNumbers` are enabled
if( config.slideNumber ) {
var slideNumberH = parseInt( slide.getAttribute( 'data-index-h' ), 10 ) + 1,
slideNumberV = parseInt( slide.getAttribute( 'data-index-v' ), 10 ) + 1;
var numberElement = document.createElement( 'div' );
numberElement.classList.add( 'slide-number' );
numberElement.classList.add( 'slide-number-pdf' );
numberElement.innerHTML = formatSlideNumber( slideNumberH, '.', slideNumberV );
// The matching background element may be absent; guard before appending
if( background ) {
background.appendChild( numberElement );
}
}
}
} );
// Show all fragments
toArray( dom.wrapper.querySelectorAll( SLIDES_SELECTOR + ' .fragment' ) ).forEach( function( fragment ) {
fragment.classList.add( 'visible' );
} );
}
/**
* This is an unfortunate necessity. Some actions – such as
* an input field being focused in an iframe or using the
* keyboard to expand text selection beyond the bounds of
* a slide – can trigger our content to be pushed out of view.
* This scrolling cannot be prevented by hiding overflow in
* CSS (we already do) so we have to resort to repeatedly
* checking if the slides have been offset :(
*/
function setupScrollPrevention() {
setInterval( function() {
if( dom.wrapper.scrollTop !== 0 || dom.wrapper.scrollLeft !== 0 ) {
dom.wrapper.scrollTop = 0;
dom.wrapper.scrollLeft = 0;
}
}, 1000 );
}
/**
* Creates an HTML element and returns a reference to it.
* If the element already exists the existing instance will
* be returned.
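*
* Example: createSingletonNode( dom.wrapper, 'aside', 'progress' )
* returns the wrapper's existing .progress child if one exists,
* otherwise it creates, appends and returns a new one.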
*/
function createSingletonNode( container, tagname, classname, innerHTML ) {
// Find all nodes matching the description
var nodes = container.querySelectorAll( '.' + classname );
// Check all matches to find one which is a direct child of
// the specified container
for( var i = 0; i < nodes.length; i++ ) {
var testNode = nodes[i];
if( testNode.parentNode === container ) {
return testNode;
}
}
// If no node was found, create it now
var node = document.createElement( tagname );
node.classList.add( classname );
if( typeof innerHTML === 'string' ) {
node.innerHTML = innerHTML;
}
container.appendChild( node );
return node;
}
/**
* Creates the slide background elements and appends them
* to the background container. One element is created per
* slide regardless of whether the slide has a visible background.
*/
function createBackgrounds() {
var printMode = isPrintingPDF();
// Clear prior backgrounds
dom.background.innerHTML = '';
dom.background.classList.add( 'no-transition' );
// Iterate over all horizontal slides
toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) ).forEach( function( slideh ) {
var backgroundStack;
if( printMode ) {
backgroundStack = createBackground( slideh, slideh );
}
else {
backgroundStack = createBackground( slideh, dom.background );
}
// Iterate over all vertical slides
toArray( slideh.querySelectorAll( 'section' ) ).forEach( function( slidev ) {
if( printMode ) {
createBackground( slidev, slidev );
}
else {
createBackground( slidev, backgroundStack );
}
backgroundStack.classList.add( 'stack' );
} );
} );
// Add parallax background if specified
if( config.parallaxBackgroundImage ) {
dom.background.style.backgroundImage = 'url("' + config.parallaxBackgroundImage + '")';
dom.background.style.backgroundSize = config.parallaxBackgroundSize;
// Make sure the below properties are set on the element - these properties are
// needed for proper transitions to be set on the element via CSS. To remove
// annoying background slide-in effect when the presentation starts, apply
// these properties after short time delay
setTimeout( function() {
dom.wrapper.classList.add( 'has-parallax-background' );
}, 1 );
}
else {
dom.background.style.backgroundImage = '';
dom.wrapper.classList.remove( 'has-parallax-background' );
}
}
/**
* Creates a background for the given slide.
*
* @param {HTMLElement} slide
* @param {HTMLElement} container The element that the background
* should be appended to
*/
function createBackground( slide, container ) {
var data = {
background: slide.getAttribute( 'data-background' ),
backgroundSize: slide.getAttribute( 'data-background-size' ),
backgroundImage: slide.getAttribute( 'data-background-image' ),
backgroundVideo: slide.getAttribute( 'data-background-video' ),
backgroundIframe: slide.getAttribute( 'data-background-iframe' ),
backgroundColor: slide.getAttribute( 'data-background-color' ),
backgroundRepeat: slide.getAttribute( 'data-background-repeat' ),
backgroundPosition: slide.getAttribute( 'data-background-position' ),
backgroundTransition: slide.getAttribute( 'data-background-transition' )
};
var element = document.createElement( 'div' );
// Carry over custom classes from the slide to the background
element.className = 'slide-background ' + slide.className.replace( /present|past|future/, '' );
if( data.background ) {
// Treat the value as an image if it looks like a URL or image file
if( /^(http|file|\/\/)/gi.test( data.background ) || /\.(svg|png|jpg|jpeg|gif|bmp)$/gi.test( data.background ) ) {
slide.setAttribute( 'data-background-image', data.background );
}
else {
element.style.background = data.background;
}
}
// Create a hash for this combination of background settings.
// This is used to determine when two slide backgrounds are
// the same.
if( data.background || data.backgroundColor || data.backgroundImage || data.backgroundVideo || data.backgroundIframe ) {
element.setAttribute( 'data-background-hash', data.background +
data.backgroundSize +
data.backgroundImage +
data.backgroundVideo +
data.backgroundIframe +
data.backgroundColor +
data.backgroundRepeat +
data.backgroundPosition +
data.backgroundTransition );
}
// Additional and optional background properties
if( data.backgroundSize ) element.style.backgroundSize = data.backgroundSize;
if( data.backgroundColor ) element.style.backgroundColor = data.backgroundColor;
if( data.backgroundRepeat ) element.style.backgroundRepeat = data.backgroundRepeat;
if( data.backgroundPosition ) element.style.backgroundPosition = data.backgroundPosition;
if( data.backgroundTransition ) element.setAttribute( 'data-background-transition', data.backgroundTransition );
container.appendChild( element );
// If backgrounds are being recreated, clear old classes
slide.classList.remove( 'has-dark-background' );
slide.classList.remove( 'has-light-background' );
// If this slide has a background color, add a class that
// signals if it is light or dark. If the slide has no background
// color, no class will be set
var computedBackgroundColor = window.getComputedStyle( element ).backgroundColor;
if( computedBackgroundColor ) {
var rgb = colorToRgb( computedBackgroundColor );
// Ignore fully transparent backgrounds. Some browsers return
// rgba(0,0,0,0) when reading the computed background color of
// an element with no background
if( rgb && rgb.a !== 0 ) {
if( colorBrightness( computedBackgroundColor ) < 128 ) {
slide.classList.add( 'has-dark-background' );
}
else {
slide.classList.add( 'has-light-background' );
}
}
}
return element;
}
/**
* Registers a listener to postMessage events, this makes it
* possible to call all reveal.js API methods from another
* window. For example:
*
* revealWindow.postMessage( JSON.stringify({
* method: 'slide',
* args: [ 2 ]
* }), '*' );
*/
function setupPostMessage() {
if( config.postMessage ) {
window.addEventListener( 'message', function ( event ) {
var data = event.data;
// Make sure we're dealing with JSON
if( typeof data === 'string' && data.charAt( 0 ) === '{' && data.charAt( data.length - 1 ) === '}' ) {
data = JSON.parse( data );
// Check if the requested method can be found
if( data.method && typeof Reveal[data.method] === 'function' ) {
Reveal[data.method].apply( Reveal, data.args );
}
}
}, false );
}
}
/**
* Applies the configuration settings from the config
* object. May be called multiple times.
*/
function configure( options ) {
var numberOfSlides = dom.wrapper.querySelectorAll( SLIDES_SELECTOR ).length;
dom.wrapper.classList.remove( config.transition );
// New config options may be passed when this method
// is invoked through the API after initialization
if( typeof options === 'object' ) extend( config, options );
// Force linear transition based on browser capabilities
if( features.transforms3d === false ) config.transition = 'linear';
dom.wrapper.classList.add( config.transition );
dom.wrapper.setAttribute( 'data-transition-speed', config.transitionSpeed );
dom.wrapper.setAttribute( 'data-background-transition', config.backgroundTransition );
dom.controls.style.display = config.controls ? 'block' : 'none';
dom.progress.style.display = config.progress ? 'block' : 'none';
dom.slideNumber.style.display = config.slideNumber && !isPrintingPDF() ? 'block' : 'none';
if( config.shuffle ) {
shuffle();
}
if( config.rtl ) {
dom.wrapper.classList.add( 'rtl' );
}
else {
dom.wrapper.classList.remove( 'rtl' );
}
if( config.center ) {
dom.wrapper.classList.add( 'center' );
}
else {
dom.wrapper.classList.remove( 'center' );
}
// Exit the paused mode if it was configured off
if( config.pause === false ) {
resume();
}
if( config.showNotes ) {
dom.speakerNotes.classList.add( 'visible' );
}
else {
dom.speakerNotes.classList.remove( 'visible' );
}
if( config.mouseWheel ) {
document.addEventListener( 'DOMMouseScroll', onDocumentMouseScroll, false ); // FF
document.addEventListener( 'mousewheel', onDocumentMouseScroll, false );
}
else {
document.removeEventListener( 'DOMMouseScroll', onDocumentMouseScroll, false ); // FF
document.removeEventListener( 'mousewheel', onDocumentMouseScroll, false );
}
// Rolling 3D links
if( config.rollingLinks ) {
enableRollingLinks();
}
else {
disableRollingLinks();
}
// Iframe link previews
if( config.previewLinks ) {
enablePreviewLinks();
}
else {
disablePreviewLinks();
enablePreviewLinks( '[data-preview-link]' );
}
// Remove existing auto-slide controls
if( autoSlidePlayer ) {
autoSlidePlayer.destroy();
autoSlidePlayer = null;
}
// Generate auto-slide controls if needed
if( numberOfSlides > 1 && config.autoSlide && config.autoSlideStoppable && features.canvas && features.requestAnimationFrame ) {
autoSlidePlayer = new Playback( dom.wrapper, function() {
return Math.min( Math.max( ( Date.now() - autoSlideStartTime ) / autoSlide, 0 ), 1 );
} );
autoSlidePlayer.on( 'click', onAutoSlidePlayerClick );
autoSlidePaused = false;
}
// When fragments are turned off they should be visible
if( config.fragments === false ) {
toArray( dom.slides.querySelectorAll( '.fragment' ) ).forEach( function( element ) {
element.classList.add( 'visible' );
element.classList.remove( 'current-fragment' );
} );
}
sync();
}
/**
* Binds all event listeners.
*/
function addEventListeners() {
eventsAreBound = true;
window.addEventListener( 'hashchange', onWindowHashChange, false );
window.addEventListener( 'resize', onWindowResize, false );
if( config.touch ) {
dom.wrapper.addEventListener( 'touchstart', onTouchStart, false );
dom.wrapper.addEventListener( 'touchmove', onTouchMove, false );
dom.wrapper.addEventListener( 'touchend', onTouchEnd, false );
// Support pointer-style touch interaction as well
if( window.navigator.pointerEnabled ) {
// IE 11 uses un-prefixed version of pointer events
dom.wrapper.addEventListener( 'pointerdown', onPointerDown, false );
dom.wrapper.addEventListener( 'pointermove', onPointerMove, false );
dom.wrapper.addEventListener( 'pointerup', onPointerUp, false );
}
else if( window.navigator.msPointerEnabled ) {
// IE 10 uses prefixed version of pointer events
dom.wrapper.addEventListener( 'MSPointerDown', onPointerDown, false );
dom.wrapper.addEventListener( 'MSPointerMove', onPointerMove, false );
dom.wrapper.addEventListener( 'MSPointerUp', onPointerUp, false );
}
}
if( config.keyboard ) {
document.addEventListener( 'keydown', onDocumentKeyDown, false );
document.addEventListener( 'keypress', onDocumentKeyPress, false );
}
if( config.progress && dom.progress ) {
dom.progress.addEventListener( 'click', onProgressClicked, false );
}
if( config.focusBodyOnPageVisibilityChange ) {
var visibilityChange;
if( 'hidden' in document ) {
visibilityChange = 'visibilitychange';
}
else if( 'msHidden' in document ) {
visibilityChange = 'msvisibilitychange';
}
else if( 'webkitHidden' in document ) {
visibilityChange = 'webkitvisibilitychange';
}
if( visibilityChange ) {
document.addEventListener( visibilityChange, onPageVisibilityChange, false );
}
}
// Listen to both touch and click events, in case the device
// supports both
var pointerEvents = [ 'touchstart', 'click' ];
// Only support touch for Android, fixes double navigations in
// stock browser
if( UA.match( /android/gi ) ) {
pointerEvents = [ 'touchstart' ];
}
pointerEvents.forEach( function( eventName ) {
dom.controlsLeft.forEach( function( el ) { el.addEventListener( eventName, onNavigateLeftClicked, false ); } );
dom.controlsRight.forEach( function( el ) { el.addEventListener( eventName, onNavigateRightClicked, false ); } );
dom.controlsUp.forEach( function( el ) { el.addEventListener( eventName, onNavigateUpClicked, false ); } );
dom.controlsDown.forEach( function( el ) { el.addEventListener( eventName, onNavigateDownClicked, false ); } );
dom.controlsPrev.forEach( function( el ) { el.addEventListener( eventName, onNavigatePrevClicked, false ); } );
dom.controlsNext.forEach( function( el ) { el.addEventListener( eventName, onNavigateNextClicked, false ); } );
} );
}
/**
* Unbinds all event listeners.
*/
function removeEventListeners() {
eventsAreBound = false;
document.removeEventListener( 'keydown', onDocumentKeyDown, false );
document.removeEventListener( 'keypress', onDocumentKeyPress, false );
window.removeEventListener( 'hashchange', onWindowHashChange, false );
window.removeEventListener( 'resize', onWindowResize, false );
dom.wrapper.removeEventListener( 'touchstart', onTouchStart, false );
dom.wrapper.removeEventListener( 'touchmove', onTouchMove, false );
dom.wrapper.removeEventListener( 'touchend', onTouchEnd, false );
// IE11
if( window.navigator.pointerEnabled ) {
dom.wrapper.removeEventListener( 'pointerdown', onPointerDown, false );
dom.wrapper.removeEventListener( 'pointermove', onPointerMove, false );
dom.wrapper.removeEventListener( 'pointerup', onPointerUp, false );
}
// IE10
else if( window.navigator.msPointerEnabled ) {
dom.wrapper.removeEventListener( 'MSPointerDown', onPointerDown, false );
dom.wrapper.removeEventListener( 'MSPointerMove', onPointerMove, false );
dom.wrapper.removeEventListener( 'MSPointerUp', onPointerUp, false );
}
if ( config.progress && dom.progress ) {
dom.progress.removeEventListener( 'click', onProgressClicked, false );
}
[ 'touchstart', 'click' ].forEach( function( eventName ) {
dom.controlsLeft.forEach( function( el ) { el.removeEventListener( eventName, onNavigateLeftClicked, false ); } );
dom.controlsRight.forEach( function( el ) { el.removeEventListener( eventName, onNavigateRightClicked, false ); } );
dom.controlsUp.forEach( function( el ) { el.removeEventListener( eventName, onNavigateUpClicked, false ); } );
dom.controlsDown.forEach( function( el ) { el.removeEventListener( eventName, onNavigateDownClicked, false ); } );
dom.controlsPrev.forEach( function( el ) { el.removeEventListener( eventName, onNavigatePrevClicked, false ); } );
dom.controlsNext.forEach( function( el ) { el.removeEventListener( eventName, onNavigateNextClicked, false ); } );
} );
}
/**
* Extend object a with the properties of object b.
* If there's a conflict, object b takes precedence.
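*
* Note: this is a shallow, in-place merge; nested objects are copied
* by reference. Example: extend( { a: 1 }, { a: 2, b: 3 } ) mutates
* the first argument into { a: 2, b: 3 }.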
*/
function extend( a, b ) {
for( var i in b ) {
a[ i ] = b[ i ];
}
}
/**
* Converts the target object to an array.
*/
function toArray( o ) {
return Array.prototype.slice.call( o );
}
/**
* Utility for deserializing a value.
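*
* Examples (illustrative):
*   deserialize( 'null' )  // -> null
*   deserialize( 'true' )  // -> true
*   deserialize( '42' )    // -> 42 (as a number)
*   deserialize( 'foo' )   // -> 'foo', returned unchanged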
*/
function deserialize( value ) {
if( typeof value === 'string' ) {
if( value === 'null' ) return null;
else if( value === 'true' ) return true;
else if( value === 'false' ) return false;
else if( value.match( /^\d+$/ ) ) return parseFloat( value );
}
return value;
}
/**
* Measures the distance in pixels between point a
* and point b.
*
* @param {Object} a point with x/y properties
* @param {Object} b point with x/y properties
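*
* Example: distanceBetween( { x: 0, y: 0 }, { x: 3, y: 4 } ) returns 5.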
*/
function distanceBetween( a, b ) {
var dx = a.x - b.x,
dy = a.y - b.y;
return Math.sqrt( dx*dx + dy*dy );
}
/**
* Applies a CSS transform to the target element.
*/
function transformElement( element, transform ) {
element.style.WebkitTransform = transform;
element.style.MozTransform = transform;
element.style.msTransform = transform;
element.style.transform = transform;
}
/**
* Applies CSS transforms to the slides container. The container
* is transformed from two separate sources: layout and the overview
* mode.
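*
* Example: transformSlides( { layout: 'scale(0.5)' } ) replaces only
* the layout component and reapplies it combined with the current
* overview transform.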
*/
function transformSlides( transforms ) {
// Pick up new transforms from arguments
if( typeof transforms.layout === 'string' ) slidesTransform.layout = transforms.layout;
if( typeof transforms.overview === 'string' ) slidesTransform.overview = transforms.overview;
// Apply the transforms to the slides container
if( slidesTransform.layout ) {
transformElement( dom.slides, slidesTransform.layout + ' ' + slidesTransform.overview );
}
else {
transformElement( dom.slides, slidesTransform.overview );
}
}
/**
* Injects the given CSS styles into the DOM.
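* The styleSheet branch targets old versions of IE, which do not
* allow appending text nodes to style elements.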
*/
function injectStyleSheet( value ) {
var tag = document.createElement( 'style' );
tag.type = 'text/css';
if( tag.styleSheet ) {
tag.styleSheet.cssText = value;
}
else {
tag.appendChild( document.createTextNode( value ) );
}
document.getElementsByTagName( 'head' )[0].appendChild( tag );
}
/**
* Converts various color input formats to an {r:0,g:0,b:0} object.
*
* @param {String} color The string representation of a color,
* the following formats are supported:
* - #000
* - #000000
* - rgb(0,0,0)
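* - rgba(0,0,0,0) (the alpha value is included in the result as "a")
*
* Example: colorToRgb( '#fff' ) returns { r: 255, g: 255, b: 255 }.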
*/
function colorToRgb( color ) {
var hex3 = color.match( /^#([0-9a-f]{3})$/i );
if( hex3 && hex3[1] ) {
hex3 = hex3[1];
return {
r: parseInt( hex3.charAt( 0 ), 16 ) * 0x11,
g: parseInt( hex3.charAt( 1 ), 16 ) * 0x11,
b: parseInt( hex3.charAt( 2 ), 16 ) * 0x11
};
}
var hex6 = color.match( /^#([0-9a-f]{6})$/i );
if( hex6 && hex6[1] ) {
hex6 = hex6[1];
return {
r: parseInt( hex6.substr( 0, 2 ), 16 ),
g: parseInt( hex6.substr( 2, 2 ), 16 ),
b: parseInt( hex6.substr( 4, 2 ), 16 )
};
}
var rgb = color.match( /^rgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$/i );
if( rgb ) {
return {
r: parseInt( rgb[1], 10 ),
g: parseInt( rgb[2], 10 ),
b: parseInt( rgb[3], 10 )
};
}
var rgba = color.match( /^rgba\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*([\d]+|[\d]*\.[\d]+)\s*\)$/i );
if( rgba ) {
return {
r: parseInt( rgba[1], 10 ),
g: parseInt( rgba[2], 10 ),
b: parseInt( rgba[3], 10 ),
a: parseFloat( rgba[4] )
};
}
return null;
}
/**
* Calculates brightness on a scale of 0-255.
*
* @param color See colorToRgb for supported formats.
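*
* Uses the W3C perceived-brightness weights: ( R*299 + G*587 + B*114 ) / 1000.
* For example, pure red (#f00) scores about 76 and counts as dark.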
*/
function colorBrightness( color ) {
if( typeof color === 'string' ) color = colorToRgb( color );
if( color ) {
return ( color.r * 299 + color.g * 587 + color.b * 114 ) / 1000;
}
return null;
}
/**
* Retrieves the height of the given element by looking
* at the position and height of its immediate children.
*/
function getAbsoluteHeight( element ) {
var height = 0;
if( element ) {
var absoluteChildren = 0;
toArray( element.childNodes ).forEach( function( child ) {
if( typeof child.offsetTop === 'number' && child.style ) {
// Count the number of absolutely positioned children
if( window.getComputedStyle( child ).position === 'absolute' ) {
absoluteChildren += 1;
}
height = Math.max( height, child.offsetTop + child.offsetHeight );
}
} );
// If there are no absolute children, use offsetHeight
if( absoluteChildren === 0 ) {
height = element.offsetHeight;
}
}
return height;
}
/**
* Returns the remaining height within the parent of the
* target element.
*
* remaining height = [ configured parent height ] - [ current parent height ]
*/
function getRemainingHeight( element, height ) {
height = height || 0;
if( element ) {
var newHeight, oldHeight = element.style.height;
// Change the .stretch element height to 0 in order to find the
// height of all the other elements in the parent
element.style.height = '0px';
newHeight = height - element.parentNode.offsetHeight;
// Restore the old height, just in case
element.style.height = oldHeight;
return newHeight;
}
return height;
}
/**
* Checks if this instance is being used to print a PDF.
*/
function isPrintingPDF() {
return ( /print-pdf/gi ).test( window.location.search );
}
/**
* Hides the address bar if we're on a mobile device.
*/
function hideAddressBar() {
if( config.hideAddressBar && isMobileDevice ) {
// Events that should trigger the address bar to hide
window.addEventListener( 'load', removeAddressBar, false );
window.addEventListener( 'orientationchange', removeAddressBar, false );
}
}
/**
* Causes the address bar to hide on mobile devices,
* more vertical space ftw.
*/
function removeAddressBar() {
setTimeout( function() {
window.scrollTo( 0, 1 );
}, 10 );
}
/**
* Dispatches an event of the specified type from the
* reveal DOM element.
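*
* Example: dispatchEvent( 'slidechanged', { indexh: 1 } ) fires an
* event that external code can observe via
* Reveal.addEventListener( 'slidechanged', callback ).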
*/
function dispatchEvent( type, args ) {
var event = document.createEvent( 'HTMLEvents' );
event.initEvent( type, true, true );
extend( event, args );
dom.wrapper.dispatchEvent( event );
// If we're in an iframe, post each reveal.js event to the
// parent window. Used by the notes plugin
if( config.postMessageEvents && window.parent !== window.self ) {
window.parent.postMessage( JSON.stringify({ namespace: 'reveal', eventName: type, state: getState() }), '*' );
}
}
/**
* Wrap all links in 3D goodness.
*/
function enableRollingLinks() {
if( features.transforms3d && !( 'msPerspective' in document.body.style ) ) {
var anchors = dom.wrapper.querySelectorAll( SLIDES_SELECTOR + ' a' );
for( var i = 0, len = anchors.length; i < len; i++ ) {
var anchor = anchors[i];
if( anchor.textContent && !anchor.querySelector( '*' ) && ( !anchor.className || !anchor.classList.contains( 'roll' ) ) ) {
var span = document.createElement('span');
span.setAttribute('data-title', anchor.text);
span.innerHTML = anchor.innerHTML;
anchor.classList.add( 'roll' );
anchor.innerHTML = '';
anchor.appendChild(span);
}
}
}
}
/**
* Unwrap all 3D links.
*/
function disableRollingLinks() {
var anchors = dom.wrapper.querySelectorAll( SLIDES_SELECTOR + ' a.roll' );
for( var i = 0, len = anchors.length; i < len; i++ ) {
var anchor = anchors[i];
var span = anchor.querySelector( 'span' );
if( span ) {
anchor.classList.remove( 'roll' );
anchor.innerHTML = span.innerHTML;
}
}
}
/**
* Bind preview frame links.
*/
function enablePreviewLinks( selector ) {
var anchors = toArray( document.querySelectorAll( selector ? selector : 'a' ) );
anchors.forEach( function( element ) {
if( /^(http|www)/gi.test( element.getAttribute( 'href' ) ) ) {
element.addEventListener( 'click', onPreviewLinkClicked, false );
}
} );
}
/**
* Unbind preview frame links.
*/
function disablePreviewLinks() {
var anchors = toArray( document.querySelectorAll( 'a' ) );
anchors.forEach( function( element ) {
if( /^(http|www)/gi.test( element.getAttribute( 'href' ) ) ) {
element.removeEventListener( 'click', onPreviewLinkClicked, false );
}
} );
}
/**
* Opens a preview window for the target URL.
*/
function showPreview( url ) {
closeOverlay();
dom.overlay = document.createElement( 'div' );
dom.overlay.classList.add( 'overlay' );
dom.overlay.classList.add( 'overlay-preview' );
dom.wrapper.appendChild( dom.overlay );
dom.overlay.innerHTML = [
'<header>',
'<a class="close" href="#"><span class="icon"></span></a>',
'<a class="external" href="'+ url +'" target="_blank"><span class="icon"></span></a>',
'</header>',
'<div class="spinner"></div>',
'<div class="viewport">',
'<iframe src="'+ url +'"></iframe>',
'</div>'
].join('');
dom.overlay.querySelector( 'iframe' ).addEventListener( 'load', function( event ) {
dom.overlay.classList.add( 'loaded' );
}, false );
dom.overlay.querySelector( '.close' ).addEventListener( 'click', function( event ) {
closeOverlay();
event.preventDefault();
}, false );
dom.overlay.querySelector( '.external' ).addEventListener( 'click', function( event ) {
closeOverlay();
}, false );
setTimeout( function() {
dom.overlay.classList.add( 'visible' );
}, 1 );
}
/**
* Opens an overlay window with help material.
*/
function showHelp() {
if( config.help ) {
closeOverlay();
dom.overlay = document.createElement( 'div' );
dom.overlay.classList.add( 'overlay' );
dom.overlay.classList.add( 'overlay-help' );
dom.wrapper.appendChild( dom.overlay );
var html = '<p class="title">Keyboard Shortcuts</p><br/>';
html += '<table><tr><th>KEY</th><th>ACTION</th></tr>';
for( var key in keyboardShortcuts ) {
html += '<tr><td>' + key + '</td><td>' + keyboardShortcuts[ key ] + '</td></tr>';
}
html += '</table>';
dom.overlay.innerHTML = [
'<header>',
'<a class="close" href="#"><span class="icon"></span></a>',
'</header>',
'<div class="viewport">',
'<div class="viewport-inner">'+ html +'</div>',
'</div>'
].join('');
dom.overlay.querySelector( '.close' ).addEventListener( 'click', function( event ) {
closeOverlay();
event.preventDefault();
}, false );
setTimeout( function() {
dom.overlay.classList.add( 'visible' );
}, 1 );
}
}
/**
* Closes any currently open overlay.
*/
function closeOverlay() {
if( dom.overlay ) {
dom.overlay.parentNode.removeChild( dom.overlay );
dom.overlay = null;
}
}
/**
* Applies JavaScript-controlled layout rules to the
* presentation.
*/
function layout() {
if( dom.wrapper && !isPrintingPDF() ) {
var size = getComputedSlideSize();
var slidePadding = 20; // TODO Dig this out of DOM
// Layout the contents of the slides
layoutSlideContents( config.width, config.height, slidePadding );
dom.slides.style.width = size.width + 'px';
dom.slides.style.height = size.height + 'px';
// Determine scale of content to fit within available space
scale = Math.min( size.presentationWidth / size.width, size.presentationHeight / size.height );
// Respect max/min scale settings
scale = Math.max( scale, config.minScale );
scale = Math.min( scale, config.maxScale );
// Don't apply any scaling styles if scale is 1
if( scale === 1 ) {
dom.slides.style.zoom = '';
dom.slides.style.left = '';
dom.slides.style.top = '';
dom.slides.style.bottom = '';
dom.slides.style.right = '';
transformSlides( { layout: '' } );
}
else {
// Prefer zoom for scaling up so that content remains crisp.
// Don't use zoom to scale down since that can lead to shifts
// in text layout/line breaks.
if( scale > 1 && features.zoom ) {
dom.slides.style.zoom = scale;
dom.slides.style.left = '';
dom.slides.style.top = '';
dom.slides.style.bottom = '';
dom.slides.style.right = '';
transformSlides( { layout: '' } );
}
// Apply scale transform as a fallback
else {
dom.slides.style.zoom = '';
dom.slides.style.left = '50%';
dom.slides.style.top = '50%';
dom.slides.style.bottom = 'auto';
dom.slides.style.right = 'auto';
transformSlides( { layout: 'translate(-50%, -50%) scale('+ scale +')' } );
}
}
// Select all slides, vertical and horizontal
var slides = toArray( dom.wrapper.querySelectorAll( SLIDES_SELECTOR ) );
for( var i = 0, len = slides.length; i < len; i++ ) {
var slide = slides[ i ];
// Don't bother updating invisible slides
if( slide.style.display === 'none' ) {
continue;
}
if( config.center || slide.classList.contains( 'center' ) ) {
// Vertical stacks are not centred since their section
// children will be
if( slide.classList.contains( 'stack' ) ) {
slide.style.top = 0;
}
else {
slide.style.top = Math.max( ( ( size.height - getAbsoluteHeight( slide ) ) / 2 ) - slidePadding, 0 ) + 'px';
}
}
else {
slide.style.top = '';
}
}
updateProgress();
updateParallax();
}
}
/**
* Applies layout logic to the contents of all slides in
* the presentation.
*/
function layoutSlideContents( width, height, padding ) {
// Handle sizing of elements with the 'stretch' class
toArray( dom.slides.querySelectorAll( 'section > .stretch' ) ).forEach( function( element ) {
// Determine how much vertical space we can use
var remainingHeight = getRemainingHeight( element, height );
// Consider the aspect ratio of media elements
if( /(img|video)/gi.test( element.nodeName ) ) {
var nw = element.naturalWidth || element.videoWidth,
nh = element.naturalHeight || element.videoHeight;
var es = Math.min( width / nw, remainingHeight / nh );
element.style.width = ( nw * es ) + 'px';
element.style.height = ( nh * es ) + 'px';
}
else {
element.style.width = width + 'px';
element.style.height = remainingHeight + 'px';
}
} );
}
/**
* Calculates the computed pixel size of our slides. These
* values are based on the width and height configuration
* options.
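*
* Example: with config.width set to '90%', a 1000px wide wrapper and
* a margin of 0.1, the available width is 900px and the slide width
* resolves to 810px.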
*/
function getComputedSlideSize( presentationWidth, presentationHeight ) {
var size = {
// Slide size
width: config.width,
height: config.height,
// Presentation size
presentationWidth: presentationWidth || dom.wrapper.offsetWidth,
presentationHeight: presentationHeight || dom.wrapper.offsetHeight
};
// Reduce available space by margin
size.presentationWidth -= ( size.presentationWidth * config.margin );
size.presentationHeight -= ( size.presentationHeight * config.margin );
// Slide width may be a percentage of available width
if( typeof size.width === 'string' && /%$/.test( size.width ) ) {
size.width = parseInt( size.width, 10 ) / 100 * size.presentationWidth;
}
// Slide height may be a percentage of available height
if( typeof size.height === 'string' && /%$/.test( size.height ) ) {
size.height = parseInt( size.height, 10 ) / 100 * size.presentationHeight;
}
return size;
}
/**
* Stores the vertical index of a stack so that the same
* vertical slide can be selected when navigating to and
* from the stack.
*
* @param {HTMLElement} stack The vertical stack element
* @param {int} v Index to memorize
*/
function setPreviousVerticalIndex( stack, v ) {
if( typeof stack === 'object' && typeof stack.setAttribute === 'function' ) {
stack.setAttribute( 'data-previous-indexv', v || 0 );
}
}
/**
* Retrieves the vertical index which was stored using
* #setPreviousVerticalIndex() or 0 if no previous index
* exists.
*
* @param {HTMLElement} stack The vertical stack element
*/
function getPreviousVerticalIndex( stack ) {
if( typeof stack === 'object' && typeof stack.setAttribute === 'function' && stack.classList.contains( 'stack' ) ) {
// Prefer manually defined start-indexv
var attributeName = stack.hasAttribute( 'data-start-indexv' ) ? 'data-start-indexv' : 'data-previous-indexv';
return parseInt( stack.getAttribute( attributeName ) || 0, 10 );
}
return 0;
}
/**
* Displays the overview of slides (quick nav) by scaling
* down and arranging all slide elements.
*/
function activateOverview() {
// Only proceed if enabled in config
if( config.overview && !isOverview() ) {
overview = true;
dom.wrapper.classList.add( 'overview' );
dom.wrapper.classList.remove( 'overview-deactivating' );
if( features.overviewTransitions ) {
setTimeout( function() {
dom.wrapper.classList.add( 'overview-animated' );
}, 1 );
}
// Don't auto-slide while in overview mode
cancelAutoSlide();
// Move the backgrounds element into the slide container so
// that the same scaling is applied
dom.slides.appendChild( dom.background );
// Clicking on an overview slide navigates to it
toArray( dom.wrapper.querySelectorAll( SLIDES_SELECTOR ) ).forEach( function( slide ) {
if( !slide.classList.contains( 'stack' ) ) {
slide.addEventListener( 'click', onOverviewSlideClicked, true );
}
} );
// Calculate slide sizes
var margin = 70;
var slideSize = getComputedSlideSize();
overviewSlideWidth = slideSize.width + margin;
overviewSlideHeight = slideSize.height + margin;
// Reverse in RTL mode
if( config.rtl ) {
overviewSlideWidth = -overviewSlideWidth;
}
updateSlidesVisibility();
layoutOverview();
updateOverview();
layout();
// Notify observers of the overview showing
dispatchEvent( 'overviewshown', {
'indexh': indexh,
'indexv': indexv,
'currentSlide': currentSlide
} );
}
}
/**
* Uses CSS transforms to position all slides in a grid for
* display inside of the overview mode.
*/
function layoutOverview() {
// Layout slides
toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) ).forEach( function( hslide, h ) {
hslide.setAttribute( 'data-index-h', h );
transformElement( hslide, 'translate3d(' + ( h * overviewSlideWidth ) + 'px, 0, 0)' );
if( hslide.classList.contains( 'stack' ) ) {
toArray( hslide.querySelectorAll( 'section' ) ).forEach( function( vslide, v ) {
vslide.setAttribute( 'data-index-h', h );
vslide.setAttribute( 'data-index-v', v );
transformElement( vslide, 'translate3d(0, ' + ( v * overviewSlideHeight ) + 'px, 0)' );
} );
}
} );
// Layout slide backgrounds
toArray( dom.background.childNodes ).forEach( function( hbackground, h ) {
transformElement( hbackground, 'translate3d(' + ( h * overviewSlideWidth ) + 'px, 0, 0)' );
toArray( hbackground.querySelectorAll( '.slide-background' ) ).forEach( function( vbackground, v ) {
transformElement( vbackground, 'translate3d(0, ' + ( v * overviewSlideHeight ) + 'px, 0)' );
} );
} );
}
/**
* Moves the overview viewport to the current slides.
* Called each time the current slide changes.
*/
function updateOverview() {
transformSlides( {
overview: [
'translateX('+ ( -indexh * overviewSlideWidth ) +'px)',
'translateY('+ ( -indexv * overviewSlideHeight ) +'px)',
'translateZ('+ ( window.innerWidth < 400 ? -1000 : -2500 ) +'px)'
].join( ' ' )
} );
}
/**
* Exits the slide overview and enters the currently
* active slide.
*/
function deactivateOverview() {
// Only proceed if enabled in config
if( config.overview ) {
overview = false;
dom.wrapper.classList.remove( 'overview' );
dom.wrapper.classList.remove( 'overview-animated' );
// Temporarily add a class so that transitions can do different things
// depending on whether they are exiting/entering overview, or just
// moving from slide to slide
dom.wrapper.classList.add( 'overview-deactivating' );
setTimeout( function () {
dom.wrapper.classList.remove( 'overview-deactivating' );
}, 1 );
// Move the background element back out
dom.wrapper.appendChild( dom.background );
// Clean up changes made to slides
toArray( dom.wrapper.querySelectorAll( SLIDES_SELECTOR ) ).forEach( function( slide ) {
transformElement( slide, '' );
slide.removeEventListener( 'click', onOverviewSlideClicked, true );
} );
// Clean up changes made to backgrounds
toArray( dom.background.querySelectorAll( '.slide-background' ) ).forEach( function( background ) {
transformElement( background, '' );
} );
transformSlides( { overview: '' } );
slide( indexh, indexv );
layout();
cueAutoSlide();
// Notify observers of the overview hiding
dispatchEvent( 'overviewhidden', {
'indexh': indexh,
'indexv': indexv,
'currentSlide': currentSlide
} );
}
}
/**
* Toggles the slide overview mode on and off.
*
* @param {Boolean} override Optional flag which overrides the
* toggle logic and forcibly sets the desired state. True means
* overview is open, false means it's closed.
*/
function toggleOverview( override ) {
if( typeof override === 'boolean' ) {
override ? activateOverview() : deactivateOverview();
}
else {
isOverview() ? deactivateOverview() : activateOverview();
}
}
/**
* Checks if the overview is currently active.
*
* @return {Boolean} true if the overview is active,
* false otherwise
*/
function isOverview() {
return overview;
}
/**
* Checks if the current or specified slide is vertical
* (nested within another slide).
*
* @param {HTMLElement} slide [optional] The slide to check
* orientation of
*/
function isVerticalSlide( slide ) {
// Prefer slide argument, otherwise use current slide
slide = slide ? slide : currentSlide;
return slide && slide.parentNode && !!slide.parentNode.nodeName.match( /section/i );
}
/**
* Handling the fullscreen functionality via the fullscreen API
*
* @see http://fullscreen.spec.whatwg.org/
* @see https://developer.mozilla.org/en-US/docs/DOM/Using_fullscreen_mode
*/
function enterFullscreen() {
var element = document.body;
// Check which implementation is available
var requestMethod = element.requestFullScreen ||
element.webkitRequestFullscreen ||
element.webkitRequestFullScreen ||
element.mozRequestFullScreen ||
element.msRequestFullscreen;
if( requestMethod ) {
requestMethod.apply( element );
}
}
/**
* Enters the paused mode which fades everything on screen to
* black.
*/
function pause() {
if( config.pause ) {
var wasPaused = dom.wrapper.classList.contains( 'paused' );
cancelAutoSlide();
dom.wrapper.classList.add( 'paused' );
if( wasPaused === false ) {
dispatchEvent( 'paused' );
}
}
}
/**
* Exits from the paused mode.
*/
function resume() {
var wasPaused = dom.wrapper.classList.contains( 'paused' );
dom.wrapper.classList.remove( 'paused' );
cueAutoSlide();
if( wasPaused ) {
dispatchEvent( 'resumed' );
}
}
/**
* Toggles the paused mode on and off.
*/
function togglePause( override ) {
if( typeof override === 'boolean' ) {
override ? pause() : resume();
}
else {
isPaused() ? resume() : pause();
}
}
/**
* Checks if we are currently in the paused mode.
*/
function isPaused() {
return dom.wrapper.classList.contains( 'paused' );
}
/**
* Toggles the auto slide mode on and off.
*
* @param {Boolean} override Optional flag which sets the desired state.
* True means autoplay starts, false means it stops.
*/
function toggleAutoSlide( override ) {
if( typeof override === 'boolean' ) {
override ? resumeAutoSlide() : pauseAutoSlide();
}
else {
autoSlidePaused ? resumeAutoSlide() : pauseAutoSlide();
}
}
/**
* Checks if the auto slide mode is currently on.
*/
function isAutoSliding() {
return !!( autoSlide && !autoSlidePaused );
}
/**
* Steps from the current point in the presentation to the
* slide which matches the specified horizontal and vertical
* indices.
*
* @param {int} h Horizontal index of the target slide
* @param {int} v Vertical index of the target slide
* @param {int} f Optional index of a fragment within the
* target slide to activate
* @param {int} o Optional origin for use in multimaster environments
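*
* Example: slide( 2, 0 ) navigates to the third horizontal slide,
* showing its first vertical child if it is a stack. This is exposed
* publicly as Reveal.slide().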
*/
function slide( h, v, f, o ) {
// Remember where we were at before
previousSlide = currentSlide;
// Query all horizontal slides in the deck
var horizontalSlides = dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR );
// If no vertical index is specified and the upcoming slide is a
// stack, resume at its previous vertical index
if( v === undefined && !isOverview() ) {
v = getPreviousVerticalIndex( horizontalSlides[ h ] );
}
// If we were on a vertical stack, remember what vertical index
// it was on so we can resume at the same position when returning
if( previousSlide && previousSlide.parentNode && previousSlide.parentNode.classList.contains( 'stack' ) ) {
setPreviousVerticalIndex( previousSlide.parentNode, indexv );
}
// Remember the state before this slide
var stateBefore = state.concat();
// Reset the state array
state.length = 0;
var indexhBefore = indexh || 0,
indexvBefore = indexv || 0;
// Activate and transition to the new slide
indexh = updateSlides( HORIZONTAL_SLIDES_SELECTOR, h === undefined ? indexh : h );
indexv = updateSlides( VERTICAL_SLIDES_SELECTOR, v === undefined ? indexv : v );
// Update the visibility of slides now that the indices have changed
updateSlidesVisibility();
layout();
// Apply the new state
stateLoop: for( var i = 0, len = state.length; i < len; i++ ) {
// Check if this state existed on the previous slide. If it
// did, we will avoid adding it repeatedly
for( var j = 0; j < stateBefore.length; j++ ) {
if( stateBefore[j] === state[i] ) {
stateBefore.splice( j, 1 );
continue stateLoop;
}
}
document.documentElement.classList.add( state[i] );
// Dispatch custom event matching the state's name
dispatchEvent( state[i] );
}
// Clean up the remains of the previous state
while( stateBefore.length ) {
document.documentElement.classList.remove( stateBefore.pop() );
}
// Update the overview if it's currently active
if( isOverview() ) {
updateOverview();
}
// Find the current horizontal slide and any possible vertical slides
// within it
var currentHorizontalSlide = horizontalSlides[ indexh ],
currentVerticalSlides = currentHorizontalSlide.querySelectorAll( 'section' );
// Store references to the previous and current slides
currentSlide = currentVerticalSlides[ indexv ] || currentHorizontalSlide;
// Show fragment, if specified
if( typeof f !== 'undefined' ) {
navigateFragment( f );
}
// Dispatch an event if the slide changed
var slideChanged = ( indexh !== indexhBefore || indexv !== indexvBefore );
if( slideChanged ) {
dispatchEvent( 'slidechanged', {
'indexh': indexh,
'indexv': indexv,
'previousSlide': previousSlide,
'currentSlide': currentSlide,
'origin': o
} );
}
else {
// Ensure that the previous slide is never the same as the current
previousSlide = null;
}
// Solves an edge case where the previous slide maintains the
// 'present' class when navigating between adjacent vertical
// stacks
if( previousSlide ) {
previousSlide.classList.remove( 'present' );
previousSlide.setAttribute( 'aria-hidden', 'true' );
// Reset all slides upon navigate to home
// Issue: #285
if ( dom.wrapper.querySelector( HOME_SLIDE_SELECTOR ).classList.contains( 'present' ) ) {
// Launch async task
setTimeout( function () {
var slides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR + '.stack') ), i;
for( i in slides ) {
if( slides[i] ) {
// Reset stack
setPreviousVerticalIndex( slides[i], 0 );
}
}
}, 0 );
}
}
// Handle embedded content
if( slideChanged || !previousSlide ) {
stopEmbeddedContent( previousSlide );
startEmbeddedContent( currentSlide );
}
// Announce the current slide contents, for screen readers
dom.statusDiv.textContent = currentSlide.textContent;
updateControls();
updateProgress();
updateBackground();
updateParallax();
updateSlideNumber();
updateNotes();
// Update the URL hash
writeURL();
cueAutoSlide();
}
/**
* Syncs the presentation with the current DOM. Useful
* when new slides or control elements are added or when
* the configuration has changed.
*/
function sync() {
// Subscribe to input
removeEventListeners();
addEventListeners();
// Force a layout to make sure the current config is accounted for
layout();
// Reflect the current autoSlide value
autoSlide = config.autoSlide;
// Start auto-sliding if it's enabled
cueAutoSlide();
// Re-create the slide backgrounds
createBackgrounds();
// Write the current hash to the URL
writeURL();
sortAllFragments();
updateControls();
updateProgress();
updateBackground( true );
updateSlideNumber();
updateSlidesVisibility();
updateNotes();
formatEmbeddedContent();
startEmbeddedContent( currentSlide );
if( isOverview() ) {
layoutOverview();
}
}
/**
* Resets all vertical slides so that only the first
* is visible.
*/
function resetVerticalSlides() {
var horizontalSlides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) );
horizontalSlides.forEach( function( horizontalSlide ) {
var verticalSlides = toArray( horizontalSlide.querySelectorAll( 'section' ) );
verticalSlides.forEach( function( verticalSlide, y ) {
if( y > 0 ) {
verticalSlide.classList.remove( 'present' );
verticalSlide.classList.remove( 'past' );
verticalSlide.classList.add( 'future' );
verticalSlide.setAttribute( 'aria-hidden', 'true' );
}
} );
} );
}
/**
* Sorts and formats all of the fragments in the
* presentation.
*/
function sortAllFragments() {
var horizontalSlides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) );
horizontalSlides.forEach( function( horizontalSlide ) {
var verticalSlides = toArray( horizontalSlide.querySelectorAll( 'section' ) );
verticalSlides.forEach( function( verticalSlide, y ) {
sortFragments( verticalSlide.querySelectorAll( '.fragment' ) );
} );
if( verticalSlides.length === 0 ) sortFragments( horizontalSlide.querySelectorAll( '.fragment' ) );
} );
}
/**
* Randomly shuffles all slides in the deck.
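*
* Note: each slide is re-inserted before a randomly picked sibling,
* which is a simple shuffle but not a uniform (Fisher-Yates) one.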
*/
function shuffle() {
var slides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) );
slides.forEach( function( slide ) {
// Insert this slide next to another random slide. This may
// cause the slide to insert before itself but that's fine.
dom.slides.insertBefore( slide, slides[ Math.floor( Math.random() * slides.length ) ] );
} );
}
/**
* Updates one dimension of slides by showing the slide
* with the specified index.
*
* @param {String} selector A CSS selector that will fetch
* the group of slides we are working with
* @param {Number} index The index of the slide that should be
* shown
*
* @return {Number} The index of the slide that is now shown,
* might differ from the passed in index if it was out of
* bounds.
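*
* Example: with config.loop enabled and five slides, an index of -1
* wraps around to 4; with looping disabled it clamps to 0.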
*/
function updateSlides( selector, index ) {
// Select all slides and convert the NodeList result to
// an array
var slides = toArray( dom.wrapper.querySelectorAll( selector ) ),
slidesLength = slides.length;
var printMode = isPrintingPDF();
if( slidesLength ) {
// Should the index loop?
if( config.loop ) {
index %= slidesLength;
if( index < 0 ) {
index = slidesLength + index;
}
}
// Enforce max and minimum index bounds
index = Math.max( Math.min( index, slidesLength - 1 ), 0 );
for( var i = 0; i < slidesLength; i++ ) {
var element = slides[i];
var reverse = config.rtl && !isVerticalSlide( element );
element.classList.remove( 'past' );
element.classList.remove( 'present' );
element.classList.remove( 'future' );
// http://www.w3.org/html/wg/drafts/html/master/editing.html#the-hidden-attribute
element.setAttribute( 'hidden', '' );
element.setAttribute( 'aria-hidden', 'true' );
// If this element contains vertical slides
if( element.querySelector( 'section' ) ) {
element.classList.add( 'stack' );
}
// If we're printing static slides, all slides are "present"
if( printMode ) {
element.classList.add( 'present' );
continue;
}
if( i < index ) {
// Any element previous to index is given the 'past' class
element.classList.add( reverse ? 'future' : 'past' );
if( config.fragments ) {
var pastFragments = toArray( element.querySelectorAll( '.fragment' ) );
// Show all fragments on prior slides
while( pastFragments.length ) {
var pastFragment = pastFragments.pop();
pastFragment.classList.add( 'visible' );
pastFragment.classList.remove( 'current-fragment' );
}
}
}
else if( i > index ) {
// Any element subsequent to index is given the 'future' class
element.classList.add( reverse ? 'past' : 'future' );
if( config.fragments ) {
var futureFragments = toArray( element.querySelectorAll( '.fragment.visible' ) );
// No fragments in future slides should be visible ahead of time
while( futureFragments.length ) {
var futureFragment = futureFragments.pop();
futureFragment.classList.remove( 'visible' );
futureFragment.classList.remove( 'current-fragment' );
}
}
}
}
// Mark the current slide as present
slides[index].classList.add( 'present' );
slides[index].removeAttribute( 'hidden' );
slides[index].removeAttribute( 'aria-hidden' );
// If this slide has a state associated with it, add it
// onto the current state of the deck
var slideState = slides[index].getAttribute( 'data-state' );
if( slideState ) {
state = state.concat( slideState.split( ' ' ) );
}
}
else {
// Since there are no slides we can't be anywhere beyond the
// zeroth index
index = 0;
}
return index;
}
/**
* Optimization method; hide all slides that are far away
* from the present slide.
*/
function updateSlidesVisibility() {
// Select all slides and convert the NodeList result to
// an array
var horizontalSlides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) ),
horizontalSlidesLength = horizontalSlides.length,
distanceX,
distanceY;
if( horizontalSlidesLength && typeof indexh !== 'undefined' ) {
// The number of steps away from the present slide that will
// be visible
var viewDistance = isOverview() ? 10 : config.viewDistance;
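// e.g. with a viewDistance of 3, the current slide plus up to three
// slides in each direction stay in the DOM; everything further away
// is hidden via hideSlide()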
// Limit view distance on weaker devices
if( isMobileDevice ) {
viewDistance = isOverview() ? 6 : 2;
}
// All slides need to be visible when exporting to PDF
if( isPrintingPDF() ) {
viewDistance = Number.MAX_VALUE;
}
for( var x = 0; x < horizontalSlidesLength; x++ ) {
var horizontalSlide = horizontalSlides[x];
var verticalSlides = toArray( horizontalSlide.querySelectorAll( 'section' ) ),
verticalSlidesLength = verticalSlides.length;
// Determine how far away this slide is from the present
distanceX = Math.abs( ( indexh || 0 ) - x ) || 0;
// If the presentation is looped, distance should measure
// 1 between the first and last slides
if( config.loop ) {
distanceX = Math.abs( ( ( indexh || 0 ) - x ) % ( horizontalSlidesLength - viewDistance ) ) || 0;
}
// Show the horizontal slide if it's within the view distance
if( distanceX < viewDistance ) {
showSlide( horizontalSlide );
}
else {
hideSlide( horizontalSlide );
}
if( verticalSlidesLength ) {
var oy = getPreviousVerticalIndex( horizontalSlide );
for( var y = 0; y < verticalSlidesLength; y++ ) {
var verticalSlide = verticalSlides[y];
distanceY = x === ( indexh || 0 ) ? Math.abs( ( indexv || 0 ) - y ) : Math.abs( y - oy );
if( distanceX + distanceY < viewDistance ) {
showSlide( verticalSlide );
}
else {
hideSlide( verticalSlide );
}
}
}
}
}
}
/**
* Pick up notes from the current slide and display them
* to the viewer.
*
* @see `showNotes` config value
*/
function updateNotes() {
if( config.showNotes && dom.speakerNotes && currentSlide && !isPrintingPDF() ) {
dom.speakerNotes.innerHTML = getSlideNotes() || '';
}
}
/**
* Updates the progress bar to reflect the current slide.
*/
function updateProgress() {
// Update progress if enabled
if( config.progress && dom.progressbar ) {
dom.progressbar.style.width = getProgress() * dom.wrapper.offsetWidth + 'px';
}
}
/**
* Updates the slide number div to reflect the current slide.
*
* The following slide number formats are available:
* "h.v": horizontal . vertical slide number (default)
* "h/v": horizontal / vertical slide number
* "c": flattened slide number
* "c/t": flattened slide number / total slides
*/
function updateSlideNumber() {
// Update slide number if enabled
if( config.slideNumber && dom.slideNumber ) {
var value = [];
var format = 'h.v';
// Check if a custom number format is available
if( typeof config.slideNumber === 'string' ) {
format = config.slideNumber;
}
switch( format ) {
case 'c':
value.push( getSlidePastCount() + 1 );
break;
case 'c/t':
value.push( getSlidePastCount() + 1, '/', getTotalSlides() );
break;
case 'h/v':
value.push( indexh + 1 );
if( isVerticalSlide() ) value.push( '/', indexv + 1 );
break;
default:
value.push( indexh + 1 );
if( isVerticalSlide() ) value.push( '.', indexv + 1 );
}
dom.slideNumber.innerHTML = formatSlideNumber( value[0], value[1], value[2] );
}
}
/**
* Applies HTML formatting to a slide number before it's
* written to the DOM.
*/
function formatSlideNumber( a, delimiter, b ) {
if( typeof b === 'number' && !isNaN( b ) ) {
return '<span class="slide-number-a">'+ a +'</span>' +
'<span class="slide-number-delimiter">'+ delimiter +'</span>' +
'<span class="slide-number-b">'+ b +'</span>';
}
else {
return '<span class="slide-number-a">'+ a +'</span>';
}
}
/**
* Updates the state of all control/navigation arrows.
*/
function updateControls() {
var routes = availableRoutes();
var fragments = availableFragments();
// Remove the 'enabled' class from all directions
dom.controlsLeft.concat( dom.controlsRight )
.concat( dom.controlsUp )
.concat( dom.controlsDown )
.concat( dom.controlsPrev )
.concat( dom.controlsNext ).forEach( function( node ) {
node.classList.remove( 'enabled' );
node.classList.remove( 'fragmented' );
} );
// Add the 'enabled' class to the available routes
if( routes.left ) dom.controlsLeft.forEach( function( el ) { el.classList.add( 'enabled' ); } );
if( routes.right ) dom.controlsRight.forEach( function( el ) { el.classList.add( 'enabled' ); } );
if( routes.up ) dom.controlsUp.forEach( function( el ) { el.classList.add( 'enabled' ); } );
if( routes.down ) dom.controlsDown.forEach( function( el ) { el.classList.add( 'enabled' ); } );
// Prev/next buttons
if( routes.left || routes.up ) dom.controlsPrev.forEach( function( el ) { el.classList.add( 'enabled' ); } );
if( routes.right || routes.down ) dom.controlsNext.forEach( function( el ) { el.classList.add( 'enabled' ); } );
// Highlight fragment directions
if( currentSlide ) {
// Always apply fragment decorator to prev/next buttons
if( fragments.prev ) dom.controlsPrev.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
if( fragments.next ) dom.controlsNext.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
// Apply fragment decorators to directional buttons based on
// what slide axis they are in
if( isVerticalSlide( currentSlide ) ) {
if( fragments.prev ) dom.controlsUp.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
if( fragments.next ) dom.controlsDown.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
}
else {
if( fragments.prev ) dom.controlsLeft.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
if( fragments.next ) dom.controlsRight.forEach( function( el ) { el.classList.add( 'fragmented', 'enabled' ); } );
}
}
}
/**
* Updates the background elements to reflect the current
* slide.
*
* @param {Boolean} includeAll If true, the backgrounds of
* all vertical slides (not just the present) will be updated.
*/
function updateBackground( includeAll ) {
var currentBackground = null;
// Reverse past/future classes when in RTL mode
var horizontalPast = config.rtl ? 'future' : 'past',
horizontalFuture = config.rtl ? 'past' : 'future';
// Update the classes of all backgrounds to match the
// states of their slides (past/present/future)
toArray( dom.background.childNodes ).forEach( function( backgroundh, h ) {
backgroundh.classList.remove( 'past' );
backgroundh.classList.remove( 'present' );
backgroundh.classList.remove( 'future' );
if( h < indexh ) {
backgroundh.classList.add( horizontalPast );
}
else if ( h > indexh ) {
backgroundh.classList.add( horizontalFuture );
}
else {
backgroundh.classList.add( 'present' );
// Store a reference to the current background element
currentBackground = backgroundh;
}
if( includeAll || h === indexh ) {
toArray( backgroundh.querySelectorAll( '.slide-background' ) ).forEach( function( backgroundv, v ) {
backgroundv.classList.remove( 'past' );
backgroundv.classList.remove( 'present' );
backgroundv.classList.remove( 'future' );
if( v < indexv ) {
backgroundv.classList.add( 'past' );
}
else if ( v > indexv ) {
backgroundv.classList.add( 'future' );
}
else {
backgroundv.classList.add( 'present' );
// Only if this is the present horizontal and vertical slide
if( h === indexh ) currentBackground = backgroundv;
}
} );
}
} );
// Stop any currently playing video background
if( previousBackground ) {
var previousVideo = previousBackground.querySelector( 'video' );
if( previousVideo ) previousVideo.pause();
}
if( currentBackground ) {
// Start video playback
var currentVideo = currentBackground.querySelector( 'video' );
if( currentVideo ) {
var startVideo = function() {
currentVideo.currentTime = 0;
currentVideo.play();
currentVideo.removeEventListener( 'loadeddata', startVideo );
};
if( currentVideo.readyState > 1 ) {
startVideo();
}
else {
currentVideo.addEventListener( 'loadeddata', startVideo );
}
}
var backgroundImageURL = currentBackground.style.backgroundImage || '';
// Restart GIFs (doesn't work in Firefox)
if( /\.gif/i.test( backgroundImageURL ) ) {
currentBackground.style.backgroundImage = '';
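// Reading a computed style forces a synchronous reflow, so the
// cleared image is flushed before being re-applied below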
window.getComputedStyle( currentBackground ).opacity;
currentBackground.style.backgroundImage = backgroundImageURL;
}
// Don't transition between identical backgrounds. This
// prevents unwanted flicker.
var previousBackgroundHash = previousBackground ? previousBackground.getAttribute( 'data-background-hash' ) : null;
var currentBackgroundHash = currentBackground.getAttribute( 'data-background-hash' );
if( currentBackgroundHash && currentBackgroundHash === previousBackgroundHash && currentBackground !== previousBackground ) {
dom.background.classList.add( 'no-transition' );
}
previousBackground = currentBackground;
}
// If there's a background brightness flag for this slide,
// bubble it to the .reveal container
if( currentSlide ) {
[ 'has-light-background', 'has-dark-background' ].forEach( function( classToBubble ) {
if( currentSlide.classList.contains( classToBubble ) ) {
dom.wrapper.classList.add( classToBubble );
}
else {
dom.wrapper.classList.remove( classToBubble );
}
} );
}
// Allow the first background to apply without transition
setTimeout( function() {
dom.background.classList.remove( 'no-transition' );
}, 1 );
}
/**
* Updates the position of the parallax background based
* on the current slide index.
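*
* Example: a 3840px wide background image over a 960px wide viewport
* with four horizontal slides pans by ( 3840 - 960 ) / 3 = 960px per
* horizontal step.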
*/
function updateParallax() {
if( config.parallaxBackgroundImage ) {
var horizontalSlides = dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ),
verticalSlides = dom.wrapper.querySelectorAll( VERTICAL_SLIDES_SELECTOR );
var backgroundSize = dom.background.style.backgroundSize.split( ' ' ),
backgroundWidth, backgroundHeight;
if( backgroundSize.length === 1 ) {
backgroundWidth = backgroundHeight = parseInt( backgroundSize[0], 10 );
}
else {
backgroundWidth = parseInt( backgroundSize[0], 10 );
backgroundHeight = parseInt( backgroundSize[1], 10 );
}
var slideWidth = dom.background.offsetWidth,
horizontalSlideCount = horizontalSlides.length,
horizontalOffsetMultiplier,
horizontalOffset;
if( typeof config.parallaxBackgroundHorizontal === 'number' ) {
horizontalOffsetMultiplier = config.parallaxBackgroundHorizontal;
}
else {
horizontalOffsetMultiplier = horizontalSlideCount > 1 ? ( backgroundWidth - slideWidth ) / ( horizontalSlideCount-1 ) : 0;
}
horizontalOffset = horizontalOffsetMultiplier * indexh * -1;
var slideHeight = dom.background.offsetHeight,
verticalSlideCount = verticalSlides.length,
verticalOffsetMultiplier,
verticalOffset;
if( typeof config.parallaxBackgroundVertical === 'number' ) {
verticalOffsetMultiplier = config.parallaxBackgroundVertical;
}
else {
verticalOffsetMultiplier = ( backgroundHeight - slideHeight ) / ( verticalSlideCount-1 );
}
verticalOffset = verticalSlideCount > 0 ? verticalOffsetMultiplier * indexv * 1 : 0;
dom.background.style.backgroundPosition = horizontalOffset + 'px ' + -verticalOffset + 'px';
}
}
/**
* Called when the given slide is within the configured view
* distance. Shows the slide element and loads any content
* that is set to load lazily (data-src).
*/
function showSlide( slide ) {
// Show the slide element
slide.style.display = 'block';
// Media elements with data-src attributes
toArray( slide.querySelectorAll( 'img[data-src], video[data-src], audio[data-src]' ) ).forEach( function( element ) {
element.setAttribute( 'src', element.getAttribute( 'data-src' ) );
element.removeAttribute( 'data-src' );
} );
// Media elements with <source> children
toArray( slide.querySelectorAll( 'video, audio' ) ).forEach( function( media ) {
var sources = 0;
toArray( media.querySelectorAll( 'source[data-src]' ) ).forEach( function( source ) {
source.setAttribute( 'src', source.getAttribute( 'data-src' ) );
source.removeAttribute( 'data-src' );
sources += 1;
} );
// If we rewrote sources for this video/audio element, we need
// to manually tell it to load from its new origin
if( sources > 0 ) {
media.load();
}
} );
// Show the corresponding background element
var indices = getIndices( slide );
var background = getSlideBackground( indices.h, indices.v );
if( background ) {
background.style.display = 'block';
// If the background contains media, load it
if( background.hasAttribute( 'data-loaded' ) === false ) {
background.setAttribute( 'data-loaded', 'true' );
var backgroundImage = slide.getAttribute( 'data-background-image' ),
backgroundVideo = slide.getAttribute( 'data-background-video' ),
backgroundVideoLoop = slide.hasAttribute( 'data-background-video-loop' ),
backgroundVideoMuted = slide.hasAttribute( 'data-background-video-muted' ),
backgroundIframe = slide.getAttribute( 'data-background-iframe' );
// Images
if( backgroundImage ) {
background.style.backgroundImage = 'url('+ backgroundImage +')';
}
// Videos
else if ( backgroundVideo && !isSpeakerNotes() ) {
var video = document.createElement( 'video' );
if( backgroundVideoLoop ) {
video.setAttribute( 'loop', '' );
}
if( backgroundVideoMuted ) {
video.muted = true;
}
// Support comma separated lists of video sources
backgroundVideo.split( ',' ).forEach( function( source ) {
video.innerHTML += '<source src="'+ source +'">';
} );
background.appendChild( video );
}
// Iframes
else if( backgroundIframe ) {
var iframe = document.createElement( 'iframe' );
iframe.setAttribute( 'src', backgroundIframe );
iframe.style.width = '100%';
iframe.style.height = '100%';
iframe.style.maxHeight = '100%';
iframe.style.maxWidth = '100%';
background.appendChild( iframe );
}
}
}
}
/**
* Called when the given slide is moved outside of the
* configured view distance.
*/
function hideSlide( slide ) {
// Hide the slide element
slide.style.display = 'none';
// Hide the corresponding background element
var indices = getIndices( slide );
var background = getSlideBackground( indices.h, indices.v );
if( background ) {
background.style.display = 'none';
}
}
/**
* Determine what available routes there are for navigation.
*
* @return {Object} containing four booleans: left/right/up/down
*/
function availableRoutes() {
var horizontalSlides = dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ),
verticalSlides = dom.wrapper.querySelectorAll( VERTICAL_SLIDES_SELECTOR );
var routes = {
left: indexh > 0 || config.loop,
right: indexh < horizontalSlides.length - 1 || config.loop,
up: indexv > 0,
down: indexv < verticalSlides.length - 1
};
// reverse horizontal controls for rtl
if( config.rtl ) {
var left = routes.left;
routes.left = routes.right;
routes.right = left;
}
return routes;
}
/**
* Returns an object describing the available fragment
* directions.
*
* @return {Object} two boolean properties: prev/next
*/
function availableFragments() {
if( currentSlide && config.fragments ) {
var fragments = currentSlide.querySelectorAll( '.fragment' );
var hiddenFragments = currentSlide.querySelectorAll( '.fragment:not(.visible)' );
return {
prev: fragments.length - hiddenFragments.length > 0,
next: !!hiddenFragments.length
};
}
else {
return { prev: false, next: false };
}
}
/**
* Enforces origin-specific format rules for embedded media.
*/
function formatEmbeddedContent() {
var _appendParamToIframeSource = function( sourceAttribute, sourceURL, param ) {
toArray( dom.slides.querySelectorAll( 'iframe['+ sourceAttribute +'*="'+ sourceURL +'"]' ) ).forEach( function( el ) {
var src = el.getAttribute( sourceAttribute );
if( src && src.indexOf( param ) === -1 ) {
el.setAttribute( sourceAttribute, src + ( !/\?/.test( src ) ? '?' : '&' ) + param );
}
});
};
// YouTube frames must include "?enablejsapi=1"
_appendParamToIframeSource( 'src', 'youtube.com/embed/', 'enablejsapi=1' );
_appendParamToIframeSource( 'data-src', 'youtube.com/embed/', 'enablejsapi=1' );
// Vimeo frames must include "?api=1"
_appendParamToIframeSource( 'src', 'player.vimeo.com/', 'api=1' );
_appendParamToIframeSource( 'data-src', 'player.vimeo.com/', 'api=1' );
}
/**
* Start playback of any embedded content inside of
* the targeted slide.
*/
function startEmbeddedContent( slide ) {
if( slide && !isSpeakerNotes() ) {
// Restart GIFs
toArray( slide.querySelectorAll( 'img[src$=".gif"]' ) ).forEach( function( el ) {
// Re-setting the same, unchanged source like this was confirmed
// to restart the GIF in Chrome, FF & Safari
el.setAttribute( 'src', el.getAttribute( 'src' ) );
} );
// HTML5 media elements
toArray( slide.querySelectorAll( 'video, audio' ) ).forEach( function( el ) {
if( el.hasAttribute( 'data-autoplay' ) && typeof el.play === 'function' ) {
el.play();
}
} );
// Normal iframes
toArray( slide.querySelectorAll( 'iframe[src]' ) ).forEach( function( el ) {
startEmbeddedIframe( { target: el } );
} );
// Lazy loading iframes
toArray( slide.querySelectorAll( 'iframe[data-src]' ) ).forEach( function( el ) {
if( el.getAttribute( 'src' ) !== el.getAttribute( 'data-src' ) ) {
el.removeEventListener( 'load', startEmbeddedIframe ); // remove first to avoid dupes
el.addEventListener( 'load', startEmbeddedIframe );
el.setAttribute( 'src', el.getAttribute( 'data-src' ) );
}
} );
}
}
/**
* "Starts" the content of an embedded iframe using the
* postmessage API.
*/
function startEmbeddedIframe( event ) {
var iframe = event.target;
// YouTube postMessage API
if( /youtube\.com\/embed\//.test( iframe.getAttribute( 'src' ) ) && iframe.hasAttribute( 'data-autoplay' ) ) {
iframe.contentWindow.postMessage( '{"event":"command","func":"playVideo","args":""}', '*' );
}
// Vimeo postMessage API
else if( /player\.vimeo\.com\//.test( iframe.getAttribute( 'src' ) ) && iframe.hasAttribute( 'data-autoplay' ) ) {
iframe.contentWindow.postMessage( '{"method":"play"}', '*' );
}
// Generic postMessage API
else {
iframe.contentWindow.postMessage( 'slide:start', '*' );
}
}
/**
* Stop playback of any embedded content inside of
* the targeted slide.
*/
function stopEmbeddedContent( slide ) {
if( slide && slide.parentNode ) {
// HTML5 media elements
toArray( slide.querySelectorAll( 'video, audio' ) ).forEach( function( el ) {
if( !el.hasAttribute( 'data-ignore' ) && typeof el.pause === 'function' ) {
el.pause();
}
} );
// Generic postMessage API for non-lazy loaded iframes
toArray( slide.querySelectorAll( 'iframe' ) ).forEach( function( el ) {
el.contentWindow.postMessage( 'slide:stop', '*' );
el.removeEventListener( 'load', startEmbeddedIframe );
});
// YouTube postMessage API
toArray( slide.querySelectorAll( 'iframe[src*="youtube.com/embed/"]' ) ).forEach( function( el ) {
if( !el.hasAttribute( 'data-ignore' ) && typeof el.contentWindow.postMessage === 'function' ) {
el.contentWindow.postMessage( '{"event":"command","func":"pauseVideo","args":""}', '*' );
}
});
// Vimeo postMessage API
toArray( slide.querySelectorAll( 'iframe[src*="player.vimeo.com/"]' ) ).forEach( function( el ) {
if( !el.hasAttribute( 'data-ignore' ) && typeof el.contentWindow.postMessage === 'function' ) {
el.contentWindow.postMessage( '{"method":"pause"}', '*' );
}
});
// Lazy loading iframes
toArray( slide.querySelectorAll( 'iframe[data-src]' ) ).forEach( function( el ) {
// Only removing the src doesn't actually unload the frame
// in all browsers (Firefox) so we set it to blank first
el.setAttribute( 'src', 'about:blank' );
el.removeAttribute( 'src' );
} );
}
}
/**
* Returns the number of past slides. This can be used as a global
* flattened index for slides.
*/
function getSlidePastCount() {
var horizontalSlides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) );
// The number of past slides
var pastCount = 0;
// Step through all slides and count the past ones
mainLoop: for( var i = 0; i < horizontalSlides.length; i++ ) {
var horizontalSlide = horizontalSlides[i];
var verticalSlides = toArray( horizontalSlide.querySelectorAll( 'section' ) );
for( var j = 0; j < verticalSlides.length; j++ ) {
// Stop as soon as we arrive at the present
if( verticalSlides[j].classList.contains( 'present' ) ) {
break mainLoop;
}
pastCount++;
}
// Stop as soon as we arrive at the present
if( horizontalSlide.classList.contains( 'present' ) ) {
break;
}
// Don't count the wrapping section for vertical slides
if( horizontalSlide.classList.contains( 'stack' ) === false ) {
pastCount++;
}
}
return pastCount;
}
/**
* Returns a value ranging from 0-1 that represents
* how far into the presentation we have navigated.
*/
function getProgress() {
// The number of past and total slides
var totalCount = getTotalSlides();
var pastCount = getSlidePastCount();
if( currentSlide ) {
var allFragments = currentSlide.querySelectorAll( '.fragment' );
// If there are fragments in the current slide those should be
// accounted for in the progress.
if( allFragments.length > 0 ) {
var visibleFragments = currentSlide.querySelectorAll( '.fragment.visible' );
// This value represents how large a portion of the slide progress
// is made up by its fragments (0-1)
var fragmentWeight = 0.9;
// Add fragment progress to the past slide count
pastCount += ( visibleFragments.length / allFragments.length ) * fragmentWeight;
}
}
return pastCount / ( totalCount - 1 );
}
/**
* Checks if this presentation is running inside of the
* speaker notes window.
*/
function isSpeakerNotes() {
return !!window.location.search.match( /receiver/gi );
}
/**
* Reads the current URL (hash) and navigates accordingly.
*/
function readURL() {
var hash = window.location.hash;
// Attempt to parse the hash as either an index or name
var bits = hash.slice( 2 ).split( '/' ),
name = hash.replace( /#|\//gi, '' );
// If the first bit is invalid and there is a name we can
// assume that this is a named link
if( isNaN( parseInt( bits[0], 10 ) ) && name.length ) {
var element;
// Ensure the named link is a valid HTML ID attribute
if( /^[a-zA-Z][\w:.-]*$/.test( name ) ) {
// Find the slide with the specified ID
element = document.getElementById( name );
}
if( element ) {
// Find the position of the named slide and navigate to it
var indices = Reveal.getIndices( element );
slide( indices.h, indices.v );
}
// If the slide doesn't exist, navigate to the current slide
else {
slide( indexh || 0, indexv || 0 );
}
}
else {
// Read the index components of the hash
var h = parseInt( bits[0], 10 ) || 0,
v = parseInt( bits[1], 10 ) || 0;
if( h !== indexh || v !== indexv ) {
slide( h, v );
}
}
}
/**
* Updates the page URL (hash) to reflect the current
* state.
*
* @param {Number} delay The time in ms to wait before
* writing the hash
*/
function writeURL( delay ) {
if( config.history ) {
// Make sure there's never more than one timeout running
clearTimeout( writeURLTimeout );
// If a delay is specified, timeout this call
if( typeof delay === 'number' ) {
writeURLTimeout = setTimeout( writeURL, delay );
}
else if( currentSlide ) {
var url = '/';
// Attempt to create a named link based on the slide's ID
var id = currentSlide.getAttribute( 'id' );
if( id ) {
id = id.replace( /[^a-zA-Z0-9\-\_\:\.]/g, '' );
}
// If the current slide has an ID, use that as a named link
if( typeof id === 'string' && id.length ) {
url = '/' + id;
}
// Otherwise use the /h/v index
else {
if( indexh > 0 || indexv > 0 ) url += indexh;
if( indexv > 0 ) url += '/' + indexv;
}
window.location.hash = url;
}
}
}
/**
* Retrieves the h/v location of the current, or specified,
* slide.
*
* @param {HTMLElement} slide If specified, the returned
* index will be for this slide rather than the currently
* active one
*
* @return {Object} { h: <int>, v: <int>, f: <int> }
*/
function getIndices( slide ) {
// By default, return the current indices
var h = indexh,
v = indexv,
f;
// If a slide is specified, return the indices of that slide
if( slide ) {
var isVertical = isVerticalSlide( slide );
var slideh = isVertical ? slide.parentNode : slide;
// Select all horizontal slides
var horizontalSlides = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) );
// Now that we know which horizontal slide it is, get its index
h = Math.max( horizontalSlides.indexOf( slideh ), 0 );
// Assume we're not vertical
v = undefined;
// If this is a vertical slide, grab the vertical index
if( isVertical ) {
v = Math.max( toArray( slide.parentNode.querySelectorAll( 'section' ) ).indexOf( slide ), 0 );
}
}
if( !slide && currentSlide ) {
var hasFragments = currentSlide.querySelectorAll( '.fragment' ).length > 0;
if( hasFragments ) {
var currentFragment = currentSlide.querySelector( '.current-fragment' );
if( currentFragment && currentFragment.hasAttribute( 'data-fragment-index' ) ) {
f = parseInt( currentFragment.getAttribute( 'data-fragment-index' ), 10 );
}
else {
f = currentSlide.querySelectorAll( '.fragment.visible' ).length - 1;
}
}
}
return { h: h, v: v, f: f };
}
/**
* Retrieves the total number of slides in this presentation.
*/
function getTotalSlides() {
return dom.wrapper.querySelectorAll( SLIDES_SELECTOR + ':not(.stack)' ).length;
}
/**
* Returns the slide element matching the specified index.
*/
function getSlide( x, y ) {
var horizontalSlide = dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR )[ x ];
var verticalSlides = horizontalSlide && horizontalSlide.querySelectorAll( 'section' );
if( verticalSlides && verticalSlides.length && typeof y === 'number' ) {
return verticalSlides[ y ];
}
return horizontalSlide;
}
/**
* Returns the background element for the given slide.
* All slides, even the ones with no background properties
* defined, have a background element, so as long as the
* index is valid an element will be returned.
*/
function getSlideBackground( x, y ) {
// When printing to PDF the slide backgrounds are nested
// inside of the slides
if( isPrintingPDF() ) {
var slide = getSlide( x, y );
if( slide ) {
var background = slide.querySelector( '.slide-background' );
if( background && background.parentNode === slide ) {
return background;
}
}
return undefined;
}
var horizontalBackground = dom.wrapper.querySelectorAll( '.backgrounds>.slide-background' )[ x ];
var verticalBackgrounds = horizontalBackground && horizontalBackground.querySelectorAll( '.slide-background' );
if( verticalBackgrounds && verticalBackgrounds.length && typeof y === 'number' ) {
return verticalBackgrounds[ y ];
}
return horizontalBackground;
}
/**
* Retrieves the speaker notes from a slide. Notes can be
* defined in two ways:
* 1. As a data-notes attribute on the slide <section>
* 2. As an <aside class="notes"> inside of the slide
*/
function getSlideNotes( slide ) {
// Default to the current slide
slide = slide || currentSlide;
// Notes can be specified via the data-notes attribute...
if( slide.hasAttribute( 'data-notes' ) ) {
return slide.getAttribute( 'data-notes' );
}
// ... or using an <aside class="notes"> element
var notesElement = slide.querySelector( 'aside.notes' );
if( notesElement ) {
return notesElement.innerHTML;
}
return null;
}
/**
* Retrieves the current state of the presentation as
* an object. This state can then be restored at any
* time.
*/
function getState() {
var indices = getIndices();
return {
indexh: indices.h,
indexv: indices.v,
indexf: indices.f,
paused: isPaused(),
overview: isOverview()
};
}
/**
* Restores the presentation to the given state.
*
* @param {Object} state As generated by getState()
*/
function setState( state ) {
if( typeof state === 'object' ) {
slide( deserialize( state.indexh ), deserialize( state.indexv ), deserialize( state.indexf ) );
var pausedFlag = deserialize( state.paused ),
overviewFlag = deserialize( state.overview );
if( typeof pausedFlag === 'boolean' && pausedFlag !== isPaused() ) {
togglePause( pausedFlag );
}
if( typeof overviewFlag === 'boolean' && overviewFlag !== isOverview() ) {
toggleOverview( overviewFlag );
}
}
}
/**
* Return a sorted fragments list, ordered by an increasing
* "data-fragment-index" attribute.
*
* Fragments will be revealed in the order that they are returned by
* this function, so you can use the index attributes to control the
* order of fragment appearance.
*
* To maintain a sensible default fragment order, fragments are presumed
* to be passed in document order. This function adds a "data-fragment-index"
* attribute to each node if such an attribute is not already present,
* and sets that attribute to an integer value which is the position of
* the fragment within the fragments list.
*/
function sortFragments( fragments ) {
fragments = toArray( fragments );
var ordered = [],
unordered = [],
sorted = [];
// Group ordered and unordered elements
fragments.forEach( function( fragment, i ) {
if( fragment.hasAttribute( 'data-fragment-index' ) ) {
var index = parseInt( fragment.getAttribute( 'data-fragment-index' ), 10 );
if( !ordered[index] ) {
ordered[index] = [];
}
ordered[index].push( fragment );
}
else {
unordered.push( [ fragment ] );
}
} );
// Append fragments without explicit indices in their
// DOM order
ordered = ordered.concat( unordered );
// Manually count the index up per group to ensure there
// are no gaps
var index = 0;
// Push all fragments in their sorted order to an array,
// this flattens the groups
ordered.forEach( function( group ) {
group.forEach( function( fragment ) {
sorted.push( fragment );
fragment.setAttribute( 'data-fragment-index', index );
} );
index ++;
} );
return sorted;
}
/**
* Navigate to the specified slide fragment.
*
* @param {Number} index The index of the fragment that
* should be shown, -1 means all are invisible
* @param {Number} offset Integer offset to apply to the
* fragment index
*
* @return {Boolean} true if a change was made in any
* fragments visibility as part of this call
*/
function navigateFragment( index, offset ) {
if( currentSlide && config.fragments ) {
var fragments = sortFragments( currentSlide.querySelectorAll( '.fragment' ) );
if( fragments.length ) {
// If no index is specified, find the current
if( typeof index !== 'number' ) {
var lastVisibleFragment = sortFragments( currentSlide.querySelectorAll( '.fragment.visible' ) ).pop();
if( lastVisibleFragment ) {
index = parseInt( lastVisibleFragment.getAttribute( 'data-fragment-index' ) || 0, 10 );
}
else {
index = -1;
}
}
// If an offset is specified, apply it to the index
if( typeof offset === 'number' ) {
index += offset;
}
var fragmentsShown = [],
fragmentsHidden = [];
toArray( fragments ).forEach( function( element, i ) {
if( element.hasAttribute( 'data-fragment-index' ) ) {
i = parseInt( element.getAttribute( 'data-fragment-index' ), 10 );
}
// Visible fragments
if( i <= index ) {
if( !element.classList.contains( 'visible' ) ) fragmentsShown.push( element );
element.classList.add( 'visible' );
element.classList.remove( 'current-fragment' );
// Announce the fragments one by one to the Screen Reader
dom.statusDiv.textContent = element.textContent;
if( i === index ) {
element.classList.add( 'current-fragment' );
}
}
// Hidden fragments
else {
if( element.classList.contains( 'visible' ) ) fragmentsHidden.push( element );
element.classList.remove( 'visible' );
element.classList.remove( 'current-fragment' );
}
} );
if( fragmentsHidden.length ) {
dispatchEvent( 'fragmenthidden', { fragment: fragmentsHidden[0], fragments: fragmentsHidden } );
}
if( fragmentsShown.length ) {
dispatchEvent( 'fragmentshown', { fragment: fragmentsShown[0], fragments: fragmentsShown } );
}
updateControls();
updateProgress();
return !!( fragmentsShown.length || fragmentsHidden.length );
}
}
return false;
}
/**
* Navigate to the next slide fragment.
*
* @return {Boolean} true if there was a next fragment,
* false otherwise
*/
function nextFragment() {
return navigateFragment( null, 1 );
}
/**
* Navigate to the previous slide fragment.
*
* @return {Boolean} true if there was a previous fragment,
* false otherwise
*/
function previousFragment() {
return navigateFragment( null, -1 );
}
/**
* Cues a new automated slide if enabled in the config.
*/
function cueAutoSlide() {
cancelAutoSlide();
if( currentSlide ) {
var currentFragment = currentSlide.querySelector( '.current-fragment' );
var fragmentAutoSlide = currentFragment ? currentFragment.getAttribute( 'data-autoslide' ) : null;
var parentAutoSlide = currentSlide.parentNode ? currentSlide.parentNode.getAttribute( 'data-autoslide' ) : null;
var slideAutoSlide = currentSlide.getAttribute( 'data-autoslide' );
// Pick value in the following priority order:
// 1. Current fragment's data-autoslide
// 2. Current slide's data-autoslide
// 3. Parent slide's data-autoslide
// 4. Global autoSlide setting
if( fragmentAutoSlide ) {
autoSlide = parseInt( fragmentAutoSlide, 10 );
}
else if( slideAutoSlide ) {
autoSlide = parseInt( slideAutoSlide, 10 );
}
else if( parentAutoSlide ) {
autoSlide = parseInt( parentAutoSlide, 10 );
}
else {
autoSlide = config.autoSlide;
}
// If there are media elements with data-autoplay,
// automatically set the autoSlide duration to the
// length of that media. Not applicable if the slide
// is divided up into fragments.
if( currentSlide.querySelectorAll( '.fragment' ).length === 0 ) {
toArray( currentSlide.querySelectorAll( 'video, audio' ) ).forEach( function( el ) {
if( el.hasAttribute( 'data-autoplay' ) ) {
if( autoSlide && el.duration * 1000 > autoSlide ) {
autoSlide = ( el.duration * 1000 ) + 1000;
}
}
} );
}
// Cue the next auto-slide if:
// - There is an autoSlide value
// - Auto-sliding isn't paused by the user
// - The presentation isn't paused
// - The overview isn't active
// - The presentation isn't over
if( autoSlide && !autoSlidePaused && !isPaused() && !isOverview() && ( !Reveal.isLastSlide() || availableFragments().next || config.loop === true ) ) {
autoSlideTimeout = setTimeout( function() {
typeof config.autoSlideMethod === 'function' ? config.autoSlideMethod() : navigateNext();
cueAutoSlide();
}, autoSlide );
autoSlideStartTime = Date.now();
}
if( autoSlidePlayer ) {
autoSlidePlayer.setPlaying( autoSlideTimeout !== -1 );
}
}
}
/**
* Cancels any ongoing request to auto-slide.
*/
function cancelAutoSlide() {
clearTimeout( autoSlideTimeout );
autoSlideTimeout = -1;
}
function pauseAutoSlide() {
if( autoSlide && !autoSlidePaused ) {
autoSlidePaused = true;
dispatchEvent( 'autoslidepaused' );
clearTimeout( autoSlideTimeout );
if( autoSlidePlayer ) {
autoSlidePlayer.setPlaying( false );
}
}
}
function resumeAutoSlide() {
if( autoSlide && autoSlidePaused ) {
autoSlidePaused = false;
dispatchEvent( 'autoslideresumed' );
cueAutoSlide();
}
}
function navigateLeft() {
// Reverse for RTL
if( config.rtl ) {
if( ( isOverview() || nextFragment() === false ) && availableRoutes().left ) {
slide( indexh + 1 );
}
}
// Normal navigation
else if( ( isOverview() || previousFragment() === false ) && availableRoutes().left ) {
slide( indexh - 1 );
}
}
function navigateRight() {
// Reverse for RTL
if( config.rtl ) {
if( ( isOverview() || previousFragment() === false ) && availableRoutes().right ) {
slide( indexh - 1 );
}
}
// Normal navigation
else if( ( isOverview() || nextFragment() === false ) && availableRoutes().right ) {
slide( indexh + 1 );
}
}
function navigateUp() {
// Prioritize hiding fragments
if( ( isOverview() || previousFragment() === false ) && availableRoutes().up ) {
slide( indexh, indexv - 1 );
}
}
function navigateDown() {
// Prioritize revealing fragments
if( ( isOverview() || nextFragment() === false ) && availableRoutes().down ) {
slide( indexh, indexv + 1 );
}
}
/**
* Navigates backwards, prioritized in the following order:
* 1) Previous fragment
* 2) Previous vertical slide
* 3) Previous horizontal slide
*/
function navigatePrev() {
// Prioritize revealing fragments
if( previousFragment() === false ) {
if( availableRoutes().up ) {
navigateUp();
}
else {
// Fetch the previous horizontal slide, if there is one
var previousSlide;
if( config.rtl ) {
previousSlide = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR + '.future' ) ).pop();
}
else {
previousSlide = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR + '.past' ) ).pop();
}
if( previousSlide ) {
var v = ( previousSlide.querySelectorAll( 'section' ).length - 1 ) || undefined;
var h = indexh - 1;
slide( h, v );
}
}
}
}
/**
* The reverse of #navigatePrev().
*/
function navigateNext() {
// Prioritize revealing fragments
if( nextFragment() === false ) {
if( availableRoutes().down ) {
navigateDown();
}
else if( config.rtl ) {
navigateLeft();
}
else {
navigateRight();
}
}
}
/**
* Checks if the target element prevents the triggering of
* swipe navigation.
*/
function isSwipePrevented( target ) {
while( target && typeof target.hasAttribute === 'function' ) {
if( target.hasAttribute( 'data-prevent-swipe' ) ) return true;
target = target.parentNode;
}
return false;
}
// --------------------------------------------------------------------//
// ----------------------------- EVENTS -------------------------------//
// --------------------------------------------------------------------//
/**
* Called by all event handlers that are based on user
* input.
*/
function onUserInput( event ) {
if( config.autoSlideStoppable ) {
pauseAutoSlide();
}
}
/**
* Handler for the document level 'keypress' event.
*/
function onDocumentKeyPress( event ) {
// Check if the pressed key is question mark
if( event.shiftKey && event.charCode === 63 ) {
if( dom.overlay ) {
closeOverlay();
}
else {
showHelp( true );
}
}
}
/**
* Handler for the document level 'keydown' event.
*/
function onDocumentKeyDown( event ) {
// If there's a condition specified and it returns false,
// ignore this event
if( typeof config.keyboardCondition === 'function' && config.keyboardCondition() === false ) {
return true;
}
// Remember if auto-sliding was paused so we can toggle it
var autoSlideWasPaused = autoSlidePaused;
onUserInput( event );
// Check if there's a focused element that could be using
// the keyboard
var activeElementIsCE = document.activeElement && document.activeElement.contentEditable !== 'inherit';
var activeElementIsInput = document.activeElement && document.activeElement.tagName && /input|textarea/i.test( document.activeElement.tagName );
// Disregard the event if there's a focused element or a
// keyboard modifier key is present
if( activeElementIsCE || activeElementIsInput || (event.shiftKey && event.keyCode !== 32) || event.altKey || event.ctrlKey || event.metaKey ) return;
// While paused, only allow keyboard events that can resume: 'b', '.' and '/'
var resumeKeyCodes = [66,190,191];
var key;
// Custom key bindings for togglePause should be able to resume
if( typeof config.keyboard === 'object' ) {
for( key in config.keyboard ) {
if( config.keyboard[key] === 'togglePause' ) {
resumeKeyCodes.push( parseInt( key, 10 ) );
}
}
}
if( isPaused() && resumeKeyCodes.indexOf( event.keyCode ) === -1 ) {
return false;
}
var triggered = false;
// 1. User defined key bindings
if( typeof config.keyboard === 'object' ) {
for( key in config.keyboard ) {
// Check if this binding matches the pressed key
if( parseInt( key, 10 ) === event.keyCode ) {
var value = config.keyboard[ key ];
// Callback function
if( typeof value === 'function' ) {
value.apply( null, [ event ] );
}
// String shortcuts to reveal.js API
else if( typeof value === 'string' && typeof Reveal[ value ] === 'function' ) {
Reveal[ value ].call();
}
triggered = true;
}
}
}
// 2. System defined key bindings
if( triggered === false ) {
// Assume true and try to prove false
triggered = true;
switch( event.keyCode ) {
// p, page up
case 80: case 33: navigatePrev(); break;
// n, page down
case 78: case 34: navigateNext(); break;
// h, left
case 72: case 37: navigateLeft(); break;
// l, right
case 76: case 39: navigateRight(); break;
// k, up
case 75: case 38: navigateUp(); break;
// j, down
case 74: case 40: navigateDown(); break;
// home
case 36: slide( 0 ); break;
// end
case 35: slide( Number.MAX_VALUE ); break;
// space
case 32: isOverview() ? deactivateOverview() : event.shiftKey ? navigatePrev() : navigateNext(); break;
// return
case 13: isOverview() ? deactivateOverview() : triggered = false; break;
// colon, semicolon, b, period, forward slash, Logitech presenter tools "black screen" button
case 58: case 59: case 66: case 190: case 191: togglePause(); break;
// f
case 70: enterFullscreen(); break;
// a
case 65: if ( config.autoSlideStoppable ) toggleAutoSlide( autoSlideWasPaused ); break;
default:
triggered = false;
}
}
// If the input resulted in a triggered action we should prevent
// the browsers default behavior
if( triggered ) {
event.preventDefault && event.preventDefault();
}
// ESC or O key
else if ( ( event.keyCode === 27 || event.keyCode === 79 ) && features.transforms3d ) {
if( dom.overlay ) {
closeOverlay();
}
else {
toggleOverview();
}
event.preventDefault && event.preventDefault();
}
// If auto-sliding is enabled we need to cue up
// another timeout
cueAutoSlide();
}
/**
* Handler for the 'touchstart' event, enables support for
* swipe and pinch gestures.
*/
function onTouchStart( event ) {
if( isSwipePrevented( event.target ) ) return true;
touch.startX = event.touches[0].clientX;
touch.startY = event.touches[0].clientY;
touch.startCount = event.touches.length;
// If there's two touches we need to memorize the distance
// between those two points to detect pinching
if( event.touches.length === 2 && config.overview ) {
touch.startSpan = distanceBetween( {
x: event.touches[1].clientX,
y: event.touches[1].clientY
}, {
x: touch.startX,
y: touch.startY
} );
}
}
/**
* Handler for the 'touchmove' event.
*/
function onTouchMove( event ) {
if( isSwipePrevented( event.target ) ) return true;
// Each touch should only trigger one action
if( !touch.captured ) {
onUserInput( event );
var currentX = event.touches[0].clientX;
var currentY = event.touches[0].clientY;
// If the touch started with two points and still has
// two active touches; test for the pinch gesture
if( event.touches.length === 2 && touch.startCount === 2 && config.overview ) {
// The current distance in pixels between the two touch points
var currentSpan = distanceBetween( {
x: event.touches[1].clientX,
y: event.touches[1].clientY
}, {
x: touch.startX,
y: touch.startY
} );
// If the span has changed by more than the desired amount we've
// got ourselves a pinch
if( Math.abs( touch.startSpan - currentSpan ) > touch.threshold ) {
touch.captured = true;
if( currentSpan < touch.startSpan ) {
activateOverview();
}
else {
deactivateOverview();
}
}
event.preventDefault();
}
// There was only one touch point, look for a swipe
else if( event.touches.length === 1 && touch.startCount !== 2 ) {
var deltaX = currentX - touch.startX,
deltaY = currentY - touch.startY;
if( deltaX > touch.threshold && Math.abs( deltaX ) > Math.abs( deltaY ) ) {
touch.captured = true;
navigateLeft();
}
else if( deltaX < -touch.threshold && Math.abs( deltaX ) > Math.abs( deltaY ) ) {
touch.captured = true;
navigateRight();
}
else if( deltaY > touch.threshold ) {
touch.captured = true;
navigateUp();
}
else if( deltaY < -touch.threshold ) {
touch.captured = true;
navigateDown();
}
// If we're embedded, only block touch events if they have
// triggered an action
if( config.embedded ) {
if( touch.captured || isVerticalSlide( currentSlide ) ) {
event.preventDefault();
}
}
// Not embedded? Block them all to avoid needless tossing
// around of the viewport in iOS
else {
event.preventDefault();
}
}
}
// There's a bug with swiping on some Android devices unless
// the default action is always prevented
else if( UA.match( /android/gi ) ) {
event.preventDefault();
}
}
/**
* Handler for the 'touchend' event.
*/
function onTouchEnd( event ) {
touch.captured = false;
}
/**
* Convert pointer down to touch start.
*/
function onPointerDown( event ) {
if( event.pointerType === event.MSPOINTER_TYPE_TOUCH || event.pointerType === "touch" ) {
event.touches = [{ clientX: event.clientX, clientY: event.clientY }];
onTouchStart( event );
}
}
/**
* Convert pointer move to touch move.
*/
function onPointerMove( event ) {
if( event.pointerType === event.MSPOINTER_TYPE_TOUCH || event.pointerType === "touch" ) {
event.touches = [{ clientX: event.clientX, clientY: event.clientY }];
onTouchMove( event );
}
}
/**
* Convert pointer up to touch end.
*/
function onPointerUp( event ) {
if( event.pointerType === event.MSPOINTER_TYPE_TOUCH || event.pointerType === "touch" ) {
event.touches = [{ clientX: event.clientX, clientY: event.clientY }];
onTouchEnd( event );
}
}
/**
* Handles mouse wheel scrolling, throttled to avoid skipping
* multiple slides.
*/
function onDocumentMouseScroll( event ) {
if( Date.now() - lastMouseWheelStep > 600 ) {
lastMouseWheelStep = Date.now();
var delta = event.detail || -event.wheelDelta;
if( delta > 0 ) {
navigateNext();
}
else {
navigatePrev();
}
}
}
/**
* Clicking on the progress bar results in a navigation to the
* closest approximate horizontal slide using this equation:
*
* ( clickX / presentationWidth ) * numberOfSlides
*/
function onProgressClicked( event ) {
onUserInput( event );
event.preventDefault();
var slidesTotal = toArray( dom.wrapper.querySelectorAll( HORIZONTAL_SLIDES_SELECTOR ) ).length;
var slideIndex = Math.floor( ( event.clientX / dom.wrapper.offsetWidth ) * slidesTotal );
if( config.rtl ) {
slideIndex = slidesTotal - slideIndex;
}
slide( slideIndex );
}
/**
* Event handler for navigation control buttons.
*/
function onNavigateLeftClicked( event ) { event.preventDefault(); onUserInput(); navigateLeft(); }
function onNavigateRightClicked( event ) { event.preventDefault(); onUserInput(); navigateRight(); }
function onNavigateUpClicked( event ) { event.preventDefault(); onUserInput(); navigateUp(); }
function onNavigateDownClicked( event ) { event.preventDefault(); onUserInput(); navigateDown(); }
function onNavigatePrevClicked( event ) { event.preventDefault(); onUserInput(); navigatePrev(); }
function onNavigateNextClicked( event ) { event.preventDefault(); onUserInput(); navigateNext(); }
/**
* Handler for the window level 'hashchange' event.
*/
function onWindowHashChange( event ) {
readURL();
}
/**
* Handler for the window level 'resize' event.
*/
function onWindowResize( event ) {
layout();
}
/**
* Handler for the window level 'visibilitychange' event.
*/
function onPageVisibilityChange( event ) {
var isHidden = document.webkitHidden ||
document.msHidden ||
document.hidden;
// If we're coming back after e.g. clicking a link, focus
// document.body to ensure we can use keyboard shortcuts
if( isHidden === false && document.activeElement !== document.body ) {
// Not all elements support .blur() - SVGs among them.
if( typeof document.activeElement.blur === 'function' ) {
document.activeElement.blur();
}
document.body.focus();
}
}
/**
* Invoked when a slide is clicked and we're in the overview.
*/
function onOverviewSlideClicked( event ) {
// TODO There's a bug here where the event listeners are not
// removed after deactivating the overview.
if( eventsAreBound && isOverview() ) {
event.preventDefault();
var element = event.target;
while( element && !element.nodeName.match( /section/gi ) ) {
element = element.parentNode;
}
if( element && !element.classList.contains( 'disabled' ) ) {
deactivateOverview();
if( element.nodeName.match( /section/gi ) ) {
var h = parseInt( element.getAttribute( 'data-index-h' ), 10 ),
v = parseInt( element.getAttribute( 'data-index-v' ), 10 );
slide( h, v );
}
}
}
}
/**
* Handles clicks on links that are set to preview in the
* iframe overlay.
*/
function onPreviewLinkClicked( event ) {
if( event.currentTarget && event.currentTarget.hasAttribute( 'href' ) ) {
var url = event.currentTarget.getAttribute( 'href' );
if( url ) {
showPreview( url );
event.preventDefault();
}
}
}
/**
* Handles click on the auto-sliding controls element.
*/
function onAutoSlidePlayerClick( event ) {
// Replay
if( Reveal.isLastSlide() && config.loop === false ) {
slide( 0, 0 );
resumeAutoSlide();
}
// Resume
else if( autoSlidePaused ) {
resumeAutoSlide();
}
// Pause
else {
pauseAutoSlide();
}
}
// --------------------------------------------------------------------//
// ------------------------ PLAYBACK COMPONENT ------------------------//
// --------------------------------------------------------------------//
/**
* Constructor for the playback component, which displays
* play/pause/progress controls.
*
* @param {HTMLElement} container The component will append
* itself to this
* @param {Function} progressCheck A method which will be
* called frequently to get the current progress on a range
* of 0-1
*/
function Playback( container, progressCheck ) {
// Cosmetics
this.diameter = 100;
this.diameter2 = this.diameter/2;
this.thickness = 6;
// Flags if we are currently playing
this.playing = false;
// Current progress on a 0-1 range
this.progress = 0;
// Used to loop the animation smoothly
this.progressOffset = 1;
this.container = container;
this.progressCheck = progressCheck;
this.canvas = document.createElement( 'canvas' );
this.canvas.className = 'playback';
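// The canvas is created at twice its CSS size so the controls
// render crisply on high-DPI (retina) displays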
this.canvas.width = this.diameter;
this.canvas.height = this.diameter;
this.canvas.style.width = this.diameter2 + 'px';
this.canvas.style.height = this.diameter2 + 'px';
this.context = this.canvas.getContext( '2d' );
this.container.appendChild( this.canvas );
this.render();
}
Playback.prototype.setPlaying = function( value ) {
var wasPlaying = this.playing;
this.playing = value;
// Start repainting if we weren't already
if( !wasPlaying && this.playing ) {
this.animate();
}
else {
this.render();
}
};
Playback.prototype.animate = function() {
var progressBefore = this.progress;
this.progress = this.progressCheck();
// When we loop, offset the progress so that it eases
// smoothly rather than immediately resetting
if( progressBefore > 0.8 && this.progress < 0.2 ) {
this.progressOffset = this.progress;
}
this.render();
if( this.playing ) {
features.requestAnimationFrameMethod.call( window, this.animate.bind( this ) );
}
};
/**
* Renders the current progress and playback state.
*/
Playback.prototype.render = function() {
var progress = this.playing ? this.progress : 0,
radius = ( this.diameter2 ) - this.thickness,
x = this.diameter2,
y = this.diameter2,
iconSize = 28;
// Ease towards 1
this.progressOffset += ( 1 - this.progressOffset ) * 0.1;
var endAngle = ( - Math.PI / 2 ) + ( progress * ( Math.PI * 2 ) );
var startAngle = ( - Math.PI / 2 ) + ( this.progressOffset * ( Math.PI * 2 ) );
this.context.save();
this.context.clearRect( 0, 0, this.diameter, this.diameter );
// Solid background color
this.context.beginPath();
this.context.arc( x, y, radius + 4, 0, Math.PI * 2, false );
this.context.fillStyle = 'rgba( 0, 0, 0, 0.4 )';
this.context.fill();
// Draw progress track
this.context.beginPath();
this.context.arc( x, y, radius, 0, Math.PI * 2, false );
this.context.lineWidth = this.thickness;
this.context.strokeStyle = '#666';
this.context.stroke();
if( this.playing ) {
// Draw progress on top of track
this.context.beginPath();
this.context.arc( x, y, radius, startAngle, endAngle, false );
this.context.lineWidth = this.thickness;
this.context.strokeStyle = '#fff';
this.context.stroke();
}
this.context.translate( x - ( iconSize / 2 ), y - ( iconSize / 2 ) );
// Draw play/pause icons
if( this.playing ) {
this.context.fillStyle = '#fff';
this.context.fillRect( 0, 0, iconSize / 2 - 4, iconSize );
this.context.fillRect( iconSize / 2 + 4, 0, iconSize / 2 - 4, iconSize );
}
else {
this.context.beginPath();
this.context.translate( 4, 0 );
this.context.moveTo( 0, 0 );
this.context.lineTo( iconSize - 4, iconSize / 2 );
this.context.lineTo( 0, iconSize );
this.context.fillStyle = '#fff';
this.context.fill();
}
this.context.restore();
};
Playback.prototype.on = function( type, listener ) {
this.canvas.addEventListener( type, listener, false );
};
Playback.prototype.off = function( type, listener ) {
this.canvas.removeEventListener( type, listener, false );
};
Playback.prototype.destroy = function() {
this.playing = false;
if( this.canvas.parentNode ) {
this.container.removeChild( this.canvas );
}
};
// --------------------------------------------------------------------//
// ------------------------------- API --------------------------------//
// --------------------------------------------------------------------//
Reveal = {
VERSION: VERSION,
initialize: initialize,
configure: configure,
sync: sync,
// Navigation methods
slide: slide,
left: navigateLeft,
right: navigateRight,
up: navigateUp,
down: navigateDown,
prev: navigatePrev,
next: navigateNext,
// Fragment methods
navigateFragment: navigateFragment,
prevFragment: previousFragment,
nextFragment: nextFragment,
// Deprecated aliases
navigateTo: slide,
navigateLeft: navigateLeft,
navigateRight: navigateRight,
navigateUp: navigateUp,
navigateDown: navigateDown,
navigatePrev: navigatePrev,
navigateNext: navigateNext,
// Forces an update in slide layout
layout: layout,
// Randomizes the order of slides
shuffle: shuffle,
// Returns an object with the available routes as booleans (left/right/top/bottom)
availableRoutes: availableRoutes,
// Returns an object with the available fragments as booleans (prev/next)
availableFragments: availableFragments,
// Toggles the overview mode on/off
toggleOverview: toggleOverview,
// Toggles the "black screen" mode on/off
togglePause: togglePause,
// Toggles the auto slide mode on/off
toggleAutoSlide: toggleAutoSlide,
// State checks
isOverview: isOverview,
isPaused: isPaused,
isAutoSliding: isAutoSliding,
// Adds or removes all internal event listeners (such as keyboard)
addEventListeners: addEventListeners,
removeEventListeners: removeEventListeners,
// Facility for persisting and restoring the presentation state
getState: getState,
setState: setState,
// Presentation progress on range of 0-1
getProgress: getProgress,
// Returns the indices of the current, or specified, slide
getIndices: getIndices,
getTotalSlides: getTotalSlides,
// Returns the slide element at the specified index
getSlide: getSlide,
// Returns the slide background element at the specified index
getSlideBackground: getSlideBackground,
// Returns the speaker notes string for a slide, or null
getSlideNotes: getSlideNotes,
// Returns the previous slide element, may be null
getPreviousSlide: function() {
return previousSlide;
},
// Returns the current slide element
getCurrentSlide: function() {
return currentSlide;
},
// Returns the current scale of the presentation content
getScale: function() {
return scale;
},
// Returns the current configuration object
getConfig: function() {
return config;
},
// Helper method, retrieves query string as a key/value hash
getQueryHash: function() {
var query = {};
location.search.replace( /[A-Z0-9]+?=([\w\.%-]*)/gi, function(a) {
query[ a.split( '=' ).shift() ] = a.split( '=' ).pop();
} );
// Basic deserialization
for( var i in query ) {
var value = query[ i ];
query[ i ] = deserialize( unescape( value ) );
}
return query;
},
// Returns true if we're currently on the first slide
isFirstSlide: function() {
return ( indexh === 0 && indexv === 0 );
},
// Returns true if we're currently on the last slide
isLastSlide: function() {
if( currentSlide ) {
// Does this slide have a next sibling?
if( currentSlide.nextElementSibling ) return false;
// If it's vertical, does its parent have a next sibling?
if( isVerticalSlide( currentSlide ) && currentSlide.parentNode.nextElementSibling ) return false;
return true;
}
return false;
},
// Checks if reveal.js has been loaded and is ready for use
isReady: function() {
return loaded;
},
// Forward event binding to the reveal DOM element
addEventListener: function( type, listener, useCapture ) {
if( 'addEventListener' in window ) {
( dom.wrapper || document.querySelector( '.reveal' ) ).addEventListener( type, listener, useCapture );
}
},
removeEventListener: function( type, listener, useCapture ) {
if( 'addEventListener' in window ) {
( dom.wrapper || document.querySelector( '.reveal' ) ).removeEventListener( type, listener, useCapture );
}
},
// Programmatically triggers a keyboard event
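// e.g. Reveal.triggerKey( 39 ) acts like pressing the right arrow key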
triggerKey: function( keyCode ) {
onDocumentKeyDown( { keyCode: keyCode } );
},
// Registers a new shortcut to include in the help overlay
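// e.g. Reveal.registerKeyboardShortcut( 'T', 'Start timer' )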
registerKeyboardShortcut: function( key, value ) {
keyboardShortcuts[key] = value;
}
};
return Reveal;
}));<|fim▁end|>
|
// the other elements
element.style.height = '0px';
newHeight = height - element.parentNode.offsetHeight;
|
<|file_name|>basic_sniff.py<|end_file_name|><|fim▁begin|>'''
The MIT License (MIT)
<|fim▁hole|>of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from scapy.all import sniff
from pcap_to_json import pcap_to_json
if __name__ == '__main__':
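# Note: capturing live packets with scapy usually requires
# root/administrator privileges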
# Sniff two packets using scapy
a = sniff(count=2)
# Convert the captured packets to json
json_data = pcap_to_json(a, json_indent=2)
print(json_data)<|fim▁end|>
|
Copyright (c) 2015 Dan Gunter
Permission is hereby granted, free of charge, to any person obtaining a copy
|
<|file_name|>foodentery.directive.ts<|end_file_name|><|fim▁begin|>import { Directive, ElementRef ,OnInit} from "@angular/core";
@Directive({
selector: '[scoller]',
})
export class FoodEnteryDirective implements OnInit{
private el: ElementRef;
constructor(el: ElementRef) {
this.el=el;
}<|fim▁hole|>
// these two 'this' values are not the same object
el.nativeElement.addEventListener("touchstart", function (event: TouchEvent) {
this.startX = event.changedTouches["0"].pageX;
this.startY = event.changedTouches["0"].pageY;
this.movestart = this.startX;
this.clientX = event.view.innerWidth;
});
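// On touchend, compare the horizontal swipe distance against half the
// viewport width: past that threshold the active panel and indicator
// dot are swapped, otherwise both panels snap back to translateX(0)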
el.nativeElement.addEventListener("touchend", function (event: TouchEvent) {
var distanceX = event.changedTouches["0"].pageX - this.startX;
let elActive: any;
let elNotActive: any;
let indicatorP: any;
let indicatorActive: any;
let indicatorNotActive: any;
for (var i = 0; i < el.nativeElement.childNodes.length; i++) {
if (el.nativeElement.childNodes[i].className == 'food_items items_active') {
elActive = el.nativeElement.childNodes[i];
}
if (el.nativeElement.childNodes[i].className == 'food_items') {
elNotActive = el.nativeElement.childNodes[i];
}
if (el.nativeElement.childNodes[i].className == 'mint-swipe-indicators') {
indicatorP = el.nativeElement.childNodes[i];
for (var j = 0; j < indicatorP.childNodes.length; j++) {
if (indicatorP.childNodes[j].className == 'mint-swipe-indicator is-active') {
indicatorActive = indicatorP.childNodes[j];
console.log("indicatorActive:" + indicatorActive);
}
if (indicatorP.childNodes[j].className == 'mint-swipe-indicator') {
indicatorNotActive = indicatorP.childNodes[j];
console.log("indicatorNotActive:" + indicatorNotActive);
}
}
}
}
if (Math.abs(distanceX) >= (this.clientX / 2)) {
elActive.className = '';
elActive.setAttribute('class', 'food_items');
elNotActive.setAttribute('class', 'food_items items_active');
elNotActive.style.transform = 'translateX(0px)';
elActive.style.transform = 'translateX(0px)';
indicatorNotActive.setAttribute('class', '');
indicatorNotActive.setAttribute('class', 'mint-swipe-indicator is-active');
indicatorActive.setAttribute('class', '');
indicatorActive.setAttribute('class', 'mint-swipe-indicator');
}
if (Math.abs(distanceX) < (this.clientX / 2)) {
elNotActive.style.transform = 'translateX(0px)'
elActive.style.transform = 'translateX(0px)';
}
});
el.nativeElement.addEventListener("touchmove", function (event: TouchEvent) {
// determine the swipe direction
var distanceX = event.changedTouches["0"].pageX - this.startX;
if (event.changedTouches["0"].pageX > this.movestart) {
for (var i = 0; i < el.nativeElement.childNodes.length; i++) {
if (el.nativeElement.childNodes[i].className == 'food_items items_active') {
el.nativeElement.childNodes[i].style.transform = 'translateX(' + distanceX + 'px)';
// console.log("move");
}
if (el.nativeElement.childNodes[i].className == 'food_items') {
el.nativeElement.childNodes[i].style.transform = 'translateX(-' + (this.clientX - distanceX) + 'px)';
}
// console.log("->distanceX", distanceX);
}
// console.log("swipe right");
this.movestart = event.changedTouches["0"].pageX;
}
if (event.changedTouches["0"].pageX < this.movestart) {
for (var i = 0; i < el.nativeElement.childNodes.length; i++) {
if (el.nativeElement.childNodes[i].className == 'food_items items_active') {
el.nativeElement.childNodes[i].style.transform = 'translateX(' + distanceX + 'px)';
}
if (el.nativeElement.childNodes[i].className == 'food_items') {
if (-(-this.clientX + distanceX) >= this.clientX) {
el.nativeElement.childNodes[i].style.transform = 'translateX(' + (this.clientX + distanceX) + 'px)';
} else {
el.nativeElement.childNodes[i].style.transform = 'translateX(' + (-this.clientX + distanceX) + 'px)';
}
}
// console.log("<-distanceX", distanceX);
}
//console.log("swipe left");
this.movestart = event.changedTouches["0"].pageX;
}
}, false);
}
}<|fim▁end|>
|
ngOnInit(){
var el=this.el;
|
<|file_name|>repeating_task.cc<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_base/task_utils/repeating_task.h"
#include "absl/memory/memory.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/task_utils/pending_task_safety_flag.h"
#include "webrtc/rtc_base/task_utils/to_queued_task.h"
#include "webrtc/rtc_base/time_utils.h"
namespace webrtc {
namespace webrtc_repeating_task_impl {
RepeatingTaskBase::RepeatingTaskBase(
TaskQueueBase* task_queue,
TimeDelta first_delay,
Clock* clock,
rtc::scoped_refptr<PendingTaskSafetyFlag> alive_flag)
: task_queue_(task_queue),
clock_(clock),
next_run_time_(clock_->CurrentTime() + first_delay),
alive_flag_(std::move(alive_flag)) {}
RepeatingTaskBase::~RepeatingTaskBase() = default;
bool RepeatingTaskBase::Run() {
RTC_DCHECK_RUN_ON(task_queue_);
// Return true to tell the TaskQueue to destruct this object.<|fim▁hole|> TimeDelta delay = RunClosure();
// The closure might have stopped this task, in which case we return true to
// destruct this object.
if (!alive_flag_->alive())
return true;
RTC_DCHECK(delay.IsFinite());
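// Schedule relative to the intended run time rather than "now" so that
// time spent inside the closure does not make the repetition drift.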
TimeDelta lost_time = clock_->CurrentTime() - next_run_time_;
next_run_time_ += delay;
delay -= lost_time;
delay = std::max(delay, TimeDelta::Zero());
task_queue_->PostDelayedTask(absl::WrapUnique(this), delay.ms());
// Return false to tell the TaskQueue to not destruct this object since we
// have taken ownership with absl::WrapUnique.
return false;
}
} // namespace webrtc_repeating_task_impl
void RepeatingTaskHandle::Stop() {
if (repeating_task_) {
repeating_task_->SetNotAlive();
repeating_task_ = nullptr;
}
}
bool RepeatingTaskHandle::Running() const {
return repeating_task_ != nullptr;
}
namespace webrtc_repeating_task_impl {
// These methods are empty, but can be externally equipped with actions using
// dtrace.
void RepeatingTaskHandleDTraceProbeStart() {}
void RepeatingTaskHandleDTraceProbeDelayedStart() {}
void RepeatingTaskImplDTraceProbeRun() {}
} // namespace webrtc_repeating_task_impl
} // namespace webrtc<|fim▁end|>
|
if (!alive_flag_->alive())
return true;
|
<|file_name|>test_modeling_flax_gpt2.py<|end_file_name|><|fim▁begin|># Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import unittest
import numpy as np
import transformers
from transformers import GPT2Config, GPT2Tokenizer, is_flax_available, is_torch_available
from transformers.testing_utils import is_pt_flax_cross_test, require_flax, slow
from .test_generation_flax_utils import FlaxGenerationTesterMixin
from .test_modeling_flax_common import FlaxModelTesterMixin, ids_tensor, random_attention_mask
if is_flax_available():
import jax
import jax.numpy as jnp
from transformers.modeling_flax_pytorch_utils import (
convert_pytorch_state_dict_to_flax,
load_flax_weights_in_pytorch_model,
)
from transformers.models.gpt2.modeling_flax_gpt2 import FlaxGPT2LMHeadModel, FlaxGPT2Model
if is_torch_available():
import torch
class FlaxGPT2ModelTester:
def __init__(
self,
parent,
batch_size=14,
seq_length=7,
is_training=True,
use_input_mask=True,
use_token_type_ids=False,
use_labels=True,
vocab_size=99,
hidden_size=32,
num_hidden_layers=5,
num_attention_heads=4,
intermediate_size=37,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
initializer_range=0.02,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_input_mask = use_input_mask
self.use_token_type_ids = use_token_type_ids
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.hidden_act = hidden_act
self.hidden_dropout_prob = hidden_dropout_prob
self.attention_probs_dropout_prob = attention_probs_dropout_prob
self.max_position_embeddings = max_position_embeddings
self.initializer_range = initializer_range
self.scope = None
self.bos_token_id = vocab_size - 1
self.eos_token_id = vocab_size - 1
self.pad_token_id = vocab_size - 1
def prepare_config_and_inputs(self, gradient_checkpointing=False):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
config = GPT2Config(
vocab_size=self.vocab_size,
n_embd=self.hidden_size,
n_layer=self.num_hidden_layers,
n_head=self.num_attention_heads,
n_positions=self.max_position_embeddings,
n_ctx=self.max_position_embeddings,
use_cache=False,
bos_token_id=self.bos_token_id,
eos_token_id=self.eos_token_id,
pad_token_id=self.pad_token_id,
gradient_checkpointing=gradient_checkpointing,
)
return (config, input_ids, input_mask)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, input_ids, attention_mask = config_and_inputs
inputs_dict = {"input_ids": input_ids, "attention_mask": attention_mask}
return config, inputs_dict
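    # Incremental-decoding sanity check: run all but the last token through the
    # model with an initialized cache, then feed the final token on its own, and
    # require the resulting logits to match a full uncached forward pass.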
def check_use_cache_forward(self, model_class_name, config, input_ids, attention_mask):
max_decoder_length = 20
model = model_class_name(config)
past_key_values = model.init_cache(input_ids.shape[0], max_decoder_length)
attention_mask = jnp.ones((input_ids.shape[0], max_decoder_length), dtype="i4")
position_ids = jnp.broadcast_to(
jnp.arange(input_ids.shape[-1] - 1)[None, :], (input_ids.shape[0], input_ids.shape[-1] - 1)
)
outputs_cache = model(
input_ids[:, :-1],
attention_mask=attention_mask,
past_key_values=past_key_values,
position_ids=position_ids,
)
position_ids = jnp.array(input_ids.shape[0] * [[input_ids.shape[-1] - 1]], dtype="i4")
outputs_cache_next = model(
input_ids[:, -1:],
attention_mask=attention_mask,
past_key_values=outputs_cache.past_key_values,
position_ids=position_ids,
)
outputs = model(input_ids)
diff = np.max(np.abs((outputs_cache_next[0][:, -1, :5] - outputs[0][:, -1, :5])))
self.parent.assertTrue(diff < 1e-3, msg=f"Max diff is {diff}")
def check_use_cache_forward_with_attn_mask(self, model_class_name, config, input_ids, attention_mask):
max_decoder_length = 20
model = model_class_name(config)
attention_mask_cache = jnp.concatenate(
[attention_mask, jnp.zeros((attention_mask.shape[0], max_decoder_length - attention_mask.shape[1]))],
axis=-1,
)
past_key_values = model.init_cache(input_ids.shape[0], max_decoder_length)
position_ids = jnp.broadcast_to(
jnp.arange(input_ids.shape[-1] - 1)[None, :], (input_ids.shape[0], input_ids.shape[-1] - 1)
)
outputs_cache = model(
input_ids[:, :-1],
attention_mask=attention_mask_cache,
past_key_values=past_key_values,
position_ids=position_ids,
)
position_ids = jnp.array(input_ids.shape[0] * [[input_ids.shape[-1] - 1]], dtype="i4")
outputs_cache_next = model(
input_ids[:, -1:],
past_key_values=outputs_cache.past_key_values,
attention_mask=attention_mask_cache,
position_ids=position_ids,
)
outputs = model(input_ids, attention_mask=attention_mask)
diff = np.max(np.abs((outputs_cache_next[0][:, -1, :5] - outputs[0][:, -1, :5])))
self.parent.assertTrue(diff < 1e-3, msg=f"Max diff is {diff}")
@require_flax
class FlaxGPT2ModelTest(FlaxModelTesterMixin, FlaxGenerationTesterMixin, unittest.TestCase):
all_model_classes = (FlaxGPT2Model, FlaxGPT2LMHeadModel) if is_flax_available() else ()
all_generative_model_classes = (FlaxGPT2LMHeadModel,) if is_flax_available() else ()
def setUp(self):
self.model_tester = FlaxGPT2ModelTester(self)
def test_use_cache_forward(self):
for model_class_name in self.all_model_classes:
config, input_ids, attention_mask = self.model_tester.prepare_config_and_inputs()
self.model_tester.check_use_cache_forward(model_class_name, config, input_ids, attention_mask)
def test_use_cache_forward_with_attn_mask(self):
for model_class_name in self.all_model_classes:
config, input_ids, attention_mask = self.model_tester.prepare_config_and_inputs()
self.model_tester.check_use_cache_forward_with_attn_mask(
model_class_name, config, input_ids, attention_mask
)
@slow
def test_batch_generation(self):
tokenizer = GPT2Tokenizer.from_pretrained("gpt2", pad_token="</s>", padding_side="left")
inputs = tokenizer(["Hello this is a long string", "Hey"], return_tensors="jax", padding=True, truncation=True)
model = FlaxGPT2LMHeadModel.from_pretrained("gpt2")
model.do_sample = False
model.config.pad_token_id = model.config.eos_token_id
jit_generate = jax.jit(model.generate)
output_sequences = jit_generate(inputs["input_ids"], attention_mask=inputs["attention_mask"]).sequences
output_string = tokenizer.batch_decode(output_sequences, skip_special_tokens=True)
expected_string = [
"Hello this is a long string of words. I'm going to try to explain what I mean.",
"Hey, I'm not sure if I'm going to be able to do",
]
self.assertListEqual(output_string, expected_string)
# overwrite from common since `attention_mask` in combination
    # with `causal_mask` behaves slightly differently
@is_pt_flax_cross_test
def test_equivalence_pt_to_flax(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
with self.subTest(model_class.__name__):
# prepare inputs
prepared_inputs_dict = self._prepare_for_class(inputs_dict, model_class)<|fim▁hole|> pt_model_class = getattr(transformers, pt_model_class_name)
batch_size, seq_length = pt_inputs["input_ids"].shape
rnd_start_indices = np.random.randint(0, seq_length - 1, size=(batch_size,))
for batch_idx, start_index in enumerate(rnd_start_indices):
pt_inputs["attention_mask"][batch_idx, :start_index] = 0
pt_inputs["attention_mask"][batch_idx, start_index:] = 1
prepared_inputs_dict["attention_mask"][batch_idx, :start_index] = 0
prepared_inputs_dict["attention_mask"][batch_idx, start_index:] = 1
pt_model = pt_model_class(config).eval()
fx_model = model_class(config, dtype=jnp.float32)
fx_state = convert_pytorch_state_dict_to_flax(pt_model.state_dict(), fx_model)
fx_model.params = fx_state
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs).to_tuple()
fx_outputs = fx_model(**prepared_inputs_dict).to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
for fx_output, pt_output in zip(fx_outputs, pt_outputs):
self.assert_almost_equals(fx_output[:, -1], pt_output[:, -1].numpy(), 4e-2)
with tempfile.TemporaryDirectory() as tmpdirname:
pt_model.save_pretrained(tmpdirname)
fx_model_loaded = model_class.from_pretrained(tmpdirname, from_pt=True)
fx_outputs_loaded = fx_model_loaded(**prepared_inputs_dict).to_tuple()
self.assertEqual(
len(fx_outputs_loaded), len(pt_outputs), "Output lengths differ between Flax and PyTorch"
)
for fx_output_loaded, pt_output in zip(fx_outputs_loaded, pt_outputs):
self.assert_almost_equals(fx_output_loaded[:, -1], pt_output[:, -1].numpy(), 4e-2)
# overwrite from common since `attention_mask` in combination
    # with `causal_mask` behaves slightly differently
@is_pt_flax_cross_test
def test_equivalence_flax_to_pt(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
with self.subTest(model_class.__name__):
# prepare inputs
prepared_inputs_dict = self._prepare_for_class(inputs_dict, model_class)
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in prepared_inputs_dict.items()}
# load corresponding PyTorch class
pt_model_class_name = model_class.__name__[4:] # Skip the "Flax" at the beginning
pt_model_class = getattr(transformers, pt_model_class_name)
pt_model = pt_model_class(config).eval()
fx_model = model_class(config, dtype=jnp.float32)
pt_model = load_flax_weights_in_pytorch_model(pt_model, fx_model.params)
batch_size, seq_length = pt_inputs["input_ids"].shape
rnd_start_indices = np.random.randint(0, seq_length - 1, size=(batch_size,))
for batch_idx, start_index in enumerate(rnd_start_indices):
pt_inputs["attention_mask"][batch_idx, :start_index] = 0
pt_inputs["attention_mask"][batch_idx, start_index:] = 1
prepared_inputs_dict["attention_mask"][batch_idx, :start_index] = 0
prepared_inputs_dict["attention_mask"][batch_idx, start_index:] = 1
# make sure weights are tied in PyTorch
pt_model.tie_weights()
with torch.no_grad():
pt_outputs = pt_model(**pt_inputs).to_tuple()
fx_outputs = fx_model(**prepared_inputs_dict).to_tuple()
self.assertEqual(len(fx_outputs), len(pt_outputs), "Output lengths differ between Flax and PyTorch")
for fx_output, pt_output in zip(fx_outputs, pt_outputs):
self.assert_almost_equals(fx_output[:, -1], pt_output[:, -1].numpy(), 4e-2)
with tempfile.TemporaryDirectory() as tmpdirname:
fx_model.save_pretrained(tmpdirname)
pt_model_loaded = pt_model_class.from_pretrained(tmpdirname, from_flax=True)
with torch.no_grad():
pt_outputs_loaded = pt_model_loaded(**pt_inputs).to_tuple()
self.assertEqual(
len(fx_outputs), len(pt_outputs_loaded), "Output lengths differ between Flax and PyTorch"
)
for fx_output, pt_output in zip(fx_outputs, pt_outputs_loaded):
self.assert_almost_equals(fx_output[:, -1], pt_output[:, -1].numpy(), 4e-2)
@slow
def test_model_from_pretrained(self):
for model_class_name in self.all_model_classes:
model = model_class_name.from_pretrained("gpt2", from_pt=True)
outputs = model(np.ones((1, 1)))
self.assertIsNotNone(outputs)<|fim▁end|>
|
pt_inputs = {k: torch.tensor(v.tolist()) for k, v in prepared_inputs_dict.items()}
# load corresponding PyTorch class
pt_model_class_name = model_class.__name__[4:] # Skip the "Flax" at the beginning
|
<|file_name|>integer.py<|end_file_name|><|fim▁begin|>from lexer import lang
from ..tree import Node
class Integer(Node):
    """AST node for an integer literal."""
    datatype = lang.SEMANTIC_INT_TYPE
def __init__(self, symbol, token):
super().__init__(symbol, token)
<|fim▁hole|><|fim▁end|>
|
def generate_code(self, **cond):
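        # Emits a LIT instruction that pushes this literal onto the stack
        # (assumption: the backend is a PL/0-style stack machine where
        # "LIT value, level" loads a constant).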
array, line = Node.assignated_array()
Node.array_append(array, f'{line} LIT {self.symbol}, 0')
|
<|file_name|>multiplication.js<|end_file_name|><|fim▁begin|>/**
 * Multiplication operator
*/
define( function ( require, exports, modules ) {
var kity = require( "kity" );
return kity.createClass( 'MultiplicationOperator', {
base: require( "operator/binary-opr/left-right" ),
constructor: function () {
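            // The multiplication sign is built from two thin black rectangles,
            // each anchored at its center and rotated 45 degrees so they cross.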
var ltr = new kity.Rect( 0, 20, 43, 3, 3 ).fill( "black"),<|fim▁hole|>
this.callBase( "Multiplication" );
this.addOperatorShape( ltr.setAnchor( 22, 22 ).rotate( 45 ) );
this.addOperatorShape( rtl.setAnchor( 22, 22 ).rotate( 45 ) );
}
} );
} );<|fim▁end|>
|
rtl = new kity.Rect( 20, 0, 3, 43, 3 ).fill( "black" );
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate uuid;
extern crate proc_macro2;
extern crate quote;
#[cfg(feature = "native_coin_cbc")]
extern crate coin_cbc;
#[cfg(feature = "minilp")]
extern crate minilp;
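// Solver backends are feature-gated so downstream crates only compile and link
// the native dependencies they actually enable.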
pub mod util;
pub mod dsl {
pub mod variables;
pub use self::variables::*;
pub mod operations;
pub use self::operations::*;
pub mod problem;
pub use self::problem::*;
}
<|fim▁hole|>
pub mod solvers;<|fim▁end|>
|
pub mod format {
pub mod lp_format;
}
|
<|file_name|>defaults_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1_test
import (
"encoding/json"
"reflect"
"testing"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
versioned "k8s.io/kubernetes/pkg/api/v1"
sccutil "k8s.io/kubernetes/pkg/securitycontextconstraints/util"
)
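// roundTrip applies the registered defaulting functions by encoding obj with
// the legacy v1 codec and decoding it back, then converts the result to the
// input's type so callers can inspect the defaulted fields.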
func roundTrip(t *testing.T, obj runtime.Object) runtime.Object {
codec := api.Codecs.LegacyCodec(v1.SchemeGroupVersion)
data, err := runtime.Encode(codec, obj)
if err != nil {
t.Errorf("%v\n %#v", err, obj)
return nil
}
obj2, err := runtime.Decode(codec, data)
if err != nil {
t.Errorf("%v\nData: %s\nSource: %#v", err, string(data), obj)
return nil
}
obj3 := reflect.New(reflect.TypeOf(obj).Elem()).Interface().(runtime.Object)
err = api.Scheme.Convert(obj2, obj3, nil)
if err != nil {
t.Errorf("%v\nSource: %#v", err, obj2)
return nil
}
return obj3
}
func TestSetDefaultReplicationController(t *testing.T) {
tests := []struct {
rc *v1.ReplicationController
expectLabels bool
expectSelector bool
}{
{
rc: &v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectLabels: true,
expectSelector: true,
},
{
rc: &v1.ReplicationController{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"bar": "foo",
},
},
Spec: v1.ReplicationControllerSpec{
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectLabels: false,
expectSelector: true,
},
{
rc: &v1.ReplicationController{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"bar": "foo",
},
},
Spec: v1.ReplicationControllerSpec{
Selector: map[string]string{
"some": "other",
},
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectLabels: false,
expectSelector: false,
},
{
rc: &v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Selector: map[string]string{
"some": "other",
},
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectLabels: true,
expectSelector: false,
},
}
for _, test := range tests {
rc := test.rc
obj2 := roundTrip(t, runtime.Object(rc))
rc2, ok := obj2.(*v1.ReplicationController)
if !ok {
t.Errorf("unexpected object: %v", rc2)
t.FailNow()
}
if test.expectSelector != reflect.DeepEqual(rc2.Spec.Selector, rc2.Spec.Template.Labels) {
if test.expectSelector {
t.Errorf("expected: %v, got: %v", rc2.Spec.Template.Labels, rc2.Spec.Selector)
} else {
t.Errorf("unexpected equality: %v", rc.Spec.Selector)
}
}
if test.expectLabels != reflect.DeepEqual(rc2.Labels, rc2.Spec.Template.Labels) {
if test.expectLabels {
t.Errorf("expected: %v, got: %v", rc2.Spec.Template.Labels, rc2.Labels)
} else {
t.Errorf("unexpected equality: %v", rc.Labels)
}
}
}
}
func newInt(val int32) *int32 {
p := new(int32)
*p = val
return p
}
func TestSetDefaultReplicationControllerReplicas(t *testing.T) {
tests := []struct {
rc v1.ReplicationController
expectReplicas int32
}{
{
rc: v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectReplicas: 1,
},
{
rc: v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Replicas: newInt(0),
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectReplicas: 0,
},
{
rc: v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Replicas: newInt(3),
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
},
},
},
expectReplicas: 3,
},
}
for _, test := range tests {
rc := &test.rc
obj2 := roundTrip(t, runtime.Object(rc))
rc2, ok := obj2.(*v1.ReplicationController)
if !ok {
t.Errorf("unexpected object: %v", rc2)
t.FailNow()
}
if rc2.Spec.Replicas == nil {
t.Errorf("unexpected nil Replicas")
} else if test.expectReplicas != *rc2.Spec.Replicas {
t.Errorf("expected: %d replicas, got: %d", test.expectReplicas, *rc2.Spec.Replicas)
}
}
}
func TestSetDefaultReplicationControllerImagePullPolicy(t *testing.T) {
containersWithoutPullPolicy, _ := json.Marshal([]map[string]interface{}{
{
"name": "install",
"image": "busybox:latest",
},
})
containersWithPullPolicy, _ := json.Marshal([]map[string]interface{}{
{
"name": "install",
"imagePullPolicy": "IfNotPresent",
},
})
tests := []struct {
rc v1.ReplicationController
expectPullPolicy v1.PullPolicy
}{
{
rc: v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Annotations: map[string]string{
"pod.beta.kubernetes.io/init-containers": string(containersWithoutPullPolicy),
},
},
},
},
},
expectPullPolicy: v1.PullAlways,
},
{
rc: v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Annotations: map[string]string{
"pod.beta.kubernetes.io/init-containers": string(containersWithPullPolicy),
},
},
},
},
},
expectPullPolicy: v1.PullIfNotPresent,
},
}
for _, test := range tests {
rc := &test.rc
obj2 := roundTrip(t, runtime.Object(rc))
rc2, ok := obj2.(*v1.ReplicationController)
if !ok {
t.Errorf("unexpected object: %v", rc2)
t.FailNow()
}
if test.expectPullPolicy != rc2.Spec.Template.Spec.InitContainers[0].ImagePullPolicy {
t.Errorf("expected ImagePullPolicy: %s, got: %s",
test.expectPullPolicy,
rc2.Spec.Template.Spec.InitContainers[0].ImagePullPolicy,
)
}
}
}
func TestSetDefaultService(t *testing.T) {
svc := &v1.Service{}
obj2 := roundTrip(t, runtime.Object(svc))
svc2 := obj2.(*v1.Service)
if svc2.Spec.SessionAffinity != v1.ServiceAffinityNone {
t.Errorf("Expected default session affinity type:%s, got: %s", v1.ServiceAffinityNone, svc2.Spec.SessionAffinity)
}
if svc2.Spec.Type != v1.ServiceTypeClusterIP {
t.Errorf("Expected default type:%s, got: %s", v1.ServiceTypeClusterIP, svc2.Spec.Type)
}
}
func TestSetDefaultSecretVolumeSource(t *testing.T) {
s := v1.PodSpec{}
s.Volumes = []v1.Volume{
{
VolumeSource: v1.VolumeSource{
Secret: &v1.SecretVolumeSource{},
},
},
}
pod := &v1.Pod{
Spec: s,
}
output := roundTrip(t, runtime.Object(pod))
pod2 := output.(*v1.Pod)
defaultMode := pod2.Spec.Volumes[0].VolumeSource.Secret.DefaultMode
expectedMode := v1.SecretVolumeSourceDefaultMode
if defaultMode == nil || *defaultMode != expectedMode {
t.Errorf("Expected secret DefaultMode %v, got %v", expectedMode, defaultMode)
}
}
func TestSetDefaultConfigMapVolumeSource(t *testing.T) {
s := v1.PodSpec{}
s.Volumes = []v1.Volume{
{
VolumeSource: v1.VolumeSource{
ConfigMap: &v1.ConfigMapVolumeSource{},
},
},
}
pod := &v1.Pod{
Spec: s,
}
output := roundTrip(t, runtime.Object(pod))
pod2 := output.(*v1.Pod)
defaultMode := pod2.Spec.Volumes[0].VolumeSource.ConfigMap.DefaultMode
expectedMode := v1.ConfigMapVolumeSourceDefaultMode
if defaultMode == nil || *defaultMode != expectedMode {
t.Errorf("Expected ConfigMap DefaultMode %v, got %v", expectedMode, defaultMode)
}
}
func TestSetDefaultDownwardAPIVolumeSource(t *testing.T) {
s := v1.PodSpec{}
s.Volumes = []v1.Volume{
{
VolumeSource: v1.VolumeSource{
DownwardAPI: &v1.DownwardAPIVolumeSource{},
},
},
}
pod := &v1.Pod{
Spec: s,
}
output := roundTrip(t, runtime.Object(pod))
pod2 := output.(*v1.Pod)
defaultMode := pod2.Spec.Volumes[0].VolumeSource.DownwardAPI.DefaultMode
expectedMode := v1.DownwardAPIVolumeSourceDefaultMode
if defaultMode == nil || *defaultMode != expectedMode {
t.Errorf("Expected DownwardAPI DefaultMode %v, got %v", expectedMode, defaultMode)
}
}
func TestSetDefaultProjectedVolumeSource(t *testing.T) {
s := v1.PodSpec{}
s.Volumes = []v1.Volume{
{
VolumeSource: v1.VolumeSource{
Projected: &v1.ProjectedVolumeSource{},
},
},
}
pod := &v1.Pod{
Spec: s,
}
output := roundTrip(t, runtime.Object(pod))
pod2 := output.(*v1.Pod)
defaultMode := pod2.Spec.Volumes[0].VolumeSource.Projected.DefaultMode
expectedMode := v1.ProjectedVolumeSourceDefaultMode
if defaultMode == nil || *defaultMode != expectedMode {
t.Errorf("Expected ProjectedVolumeSource DefaultMode %v, got %v", expectedMode, defaultMode)
}
}
func TestSetDefaultSecret(t *testing.T) {
s := &v1.Secret{}
obj2 := roundTrip(t, runtime.Object(s))
s2 := obj2.(*v1.Secret)
if s2.Type != v1.SecretTypeOpaque {
t.Errorf("Expected secret type %v, got %v", v1.SecretTypeOpaque, s2.Type)
}
}
func TestSetDefaultPersistentVolume(t *testing.T) {
pv := &v1.PersistentVolume{}
obj2 := roundTrip(t, runtime.Object(pv))
pv2 := obj2.(*v1.PersistentVolume)
if pv2.Status.Phase != v1.VolumePending {
t.Errorf("Expected volume phase %v, got %v", v1.VolumePending, pv2.Status.Phase)
}
if pv2.Spec.PersistentVolumeReclaimPolicy != v1.PersistentVolumeReclaimRetain {
t.Errorf("Expected pv reclaim policy %v, got %v", v1.PersistentVolumeReclaimRetain, pv2.Spec.PersistentVolumeReclaimPolicy)
}
}
func TestSetDefaultPersistentVolumeClaim(t *testing.T) {
pvc := &v1.PersistentVolumeClaim{}
obj2 := roundTrip(t, runtime.Object(pvc))
pvc2 := obj2.(*v1.PersistentVolumeClaim)
if pvc2.Status.Phase != v1.ClaimPending {
t.Errorf("Expected claim phase %v, got %v", v1.ClaimPending, pvc2.Status.Phase)
}
}
func TestSetDefaultEndpointsProtocol(t *testing.T) {
in := &v1.Endpoints{Subsets: []v1.EndpointSubset{
{Ports: []v1.EndpointPort{{}, {Protocol: "UDP"}, {}}},
}}
obj := roundTrip(t, runtime.Object(in))
out := obj.(*v1.Endpoints)
for i := range out.Subsets {
for j := range out.Subsets[i].Ports {
if in.Subsets[i].Ports[j].Protocol == "" {
if out.Subsets[i].Ports[j].Protocol != v1.ProtocolTCP {
t.Errorf("Expected protocol %s, got %s", v1.ProtocolTCP, out.Subsets[i].Ports[j].Protocol)
}
} else {
if out.Subsets[i].Ports[j].Protocol != in.Subsets[i].Ports[j].Protocol {
t.Errorf("Expected protocol %s, got %s", in.Subsets[i].Ports[j].Protocol, out.Subsets[i].Ports[j].Protocol)
}
}
}
}
}
func TestSetDefaultServiceTargetPort(t *testing.T) {
in := &v1.Service{Spec: v1.ServiceSpec{Ports: []v1.ServicePort{{Port: 1234}}}}
obj := roundTrip(t, runtime.Object(in))
out := obj.(*v1.Service)
if out.Spec.Ports[0].TargetPort != intstr.FromInt(1234) {
t.Errorf("Expected TargetPort to be defaulted, got %v", out.Spec.Ports[0].TargetPort)
}
in = &v1.Service{Spec: v1.ServiceSpec{Ports: []v1.ServicePort{{Port: 1234, TargetPort: intstr.FromInt(5678)}}}}
obj = roundTrip(t, runtime.Object(in))
out = obj.(*v1.Service)
if out.Spec.Ports[0].TargetPort != intstr.FromInt(5678) {
t.Errorf("Expected TargetPort to be unchanged, got %v", out.Spec.Ports[0].TargetPort)
}
}
func TestSetDefaultServicePort(t *testing.T) {
// Unchanged if set.
in := &v1.Service{Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
{Protocol: "UDP", Port: 9376, TargetPort: intstr.FromString("p")},
{Protocol: "UDP", Port: 8675, TargetPort: intstr.FromInt(309)},
},
}}
out := roundTrip(t, runtime.Object(in)).(*v1.Service)
if out.Spec.Ports[0].Protocol != v1.ProtocolUDP {
t.Errorf("Expected protocol %s, got %s", v1.ProtocolUDP, out.Spec.Ports[0].Protocol)
}
if out.Spec.Ports[0].TargetPort != intstr.FromString("p") {
t.Errorf("Expected port %v, got %v", in.Spec.Ports[0].Port, out.Spec.Ports[0].TargetPort)
}
if out.Spec.Ports[1].Protocol != v1.ProtocolUDP {
t.Errorf("Expected protocol %s, got %s", v1.ProtocolUDP, out.Spec.Ports[1].Protocol)
}
if out.Spec.Ports[1].TargetPort != intstr.FromInt(309) {
t.Errorf("Expected port %v, got %v", in.Spec.Ports[1].Port, out.Spec.Ports[1].TargetPort)
}
// Defaulted.
in = &v1.Service{Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
{Protocol: "", Port: 9376, TargetPort: intstr.FromString("")},
{Protocol: "", Port: 8675, TargetPort: intstr.FromInt(0)},
},
}}
out = roundTrip(t, runtime.Object(in)).(*v1.Service)
if out.Spec.Ports[0].Protocol != v1.ProtocolTCP {
t.Errorf("Expected protocol %s, got %s", v1.ProtocolTCP, out.Spec.Ports[0].Protocol)
}
if out.Spec.Ports[0].TargetPort != intstr.FromInt(int(in.Spec.Ports[0].Port)) {
t.Errorf("Expected port %v, got %v", in.Spec.Ports[0].Port, out.Spec.Ports[0].TargetPort)
}
if out.Spec.Ports[1].Protocol != v1.ProtocolTCP {
t.Errorf("Expected protocol %s, got %s", v1.ProtocolTCP, out.Spec.Ports[1].Protocol)
}
if out.Spec.Ports[1].TargetPort != intstr.FromInt(int(in.Spec.Ports[1].Port)) {
t.Errorf("Expected port %v, got %v", in.Spec.Ports[1].Port, out.Spec.Ports[1].TargetPort)
}
}
func TestSetDefaultNamespace(t *testing.T) {
s := &v1.Namespace{}
obj2 := roundTrip(t, runtime.Object(s))
s2 := obj2.(*v1.Namespace)
if s2.Status.Phase != v1.NamespaceActive {
t.Errorf("Expected phase %v, got %v", v1.NamespaceActive, s2.Status.Phase)
}
}
func TestSetDefaultPodSpecHostNetwork(t *testing.T) {
portNum := int32(8080)
s := v1.PodSpec{}
s.HostNetwork = true
s.Containers = []v1.Container{
{
Ports: []v1.ContainerPort{
{
ContainerPort: portNum,
},
},
},
}
s.InitContainers = []v1.Container{
{
Ports: []v1.ContainerPort{
{
ContainerPort: portNum,
},
},
},
}
pod := &v1.Pod{
Spec: s,
}
obj2 := roundTrip(t, runtime.Object(pod))
pod2 := obj2.(*v1.Pod)
s2 := pod2.Spec
hostPortNum := s2.Containers[0].Ports[0].HostPort
if hostPortNum != portNum {
t.Errorf("Expected container port to be defaulted, was made %d instead of %d", hostPortNum, portNum)
}
hostPortNum = s2.InitContainers[0].Ports[0].HostPort
if hostPortNum != portNum {
t.Errorf("Expected container port to be defaulted, was made %d instead of %d", hostPortNum, portNum)
}
}
func TestSetDefaultNodeExternalID(t *testing.T) {
name := "node0"
n := &v1.Node{}
n.Name = name
obj2 := roundTrip(t, runtime.Object(n))
n2 := obj2.(*v1.Node)
if n2.Spec.ExternalID != name {
t.Errorf("Expected default External ID: %s, got: %s", name, n2.Spec.ExternalID)
}
if n2.Spec.ProviderID != "" {
t.Errorf("Expected empty default Cloud Provider ID, got: %s", n2.Spec.ProviderID)
}
}
func TestSetDefaultNodeStatusAllocatable(t *testing.T) {
capacity := v1.ResourceList{
v1.ResourceCPU: resource.MustParse("1000m"),
v1.ResourceMemory: resource.MustParse("10G"),
}
allocatable := v1.ResourceList{
v1.ResourceCPU: resource.MustParse("500m"),
v1.ResourceMemory: resource.MustParse("5G"),
}
tests := []struct {
capacity v1.ResourceList
allocatable v1.ResourceList
expectedAllocatable v1.ResourceList
}{{ // Everything set, no defaulting.
capacity: capacity,
allocatable: allocatable,
expectedAllocatable: allocatable,
}, { // Allocatable set, no defaulting.
capacity: nil,
allocatable: allocatable,
expectedAllocatable: allocatable,
}, { // Capacity set, allocatable defaults to capacity.
capacity: capacity,
allocatable: nil,
expectedAllocatable: capacity,
}, { // Nothing set, allocatable "defaults" to capacity.
capacity: nil,
allocatable: nil,
expectedAllocatable: nil,
}}
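	// Deep-copy the fixtures so each round trip mutates its own ResourceList
	// rather than the shared capacity/allocatable values above.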
copyResourceList := func(rl v1.ResourceList) v1.ResourceList {
if rl == nil {
return nil
}
copy := make(v1.ResourceList, len(rl))
for k, v := range rl {
copy[k] = *v.Copy()
}
return copy
}
resourceListsEqual := func(a v1.ResourceList, b v1.ResourceList) bool {
if len(a) != len(b) {
return false
}
for k, v := range a {
vb, found := b[k]
if !found {
return false
}
if v.Cmp(vb) != 0 {
return false
}
}
return true
}
for i, testcase := range tests {
node := v1.Node{
Status: v1.NodeStatus{
Capacity: copyResourceList(testcase.capacity),
Allocatable: copyResourceList(testcase.allocatable),
},
}
node2 := roundTrip(t, runtime.Object(&node)).(*v1.Node)
actual := node2.Status.Allocatable
expected := testcase.expectedAllocatable
if !resourceListsEqual(expected, actual) {
t.Errorf("[%d] Expected NodeStatus.Allocatable: %+v; Got: %+v", i, expected, actual)
}
}
}
func TestSetDefaultObjectFieldSelectorAPIVersion(t *testing.T) {
s := v1.PodSpec{
Containers: []v1.Container{
{
Env: []v1.EnvVar{
{
ValueFrom: &v1.EnvVarSource{
FieldRef: &v1.ObjectFieldSelector{},
},
},
},
},
},
}
pod := &v1.Pod{
Spec: s,
}
obj2 := roundTrip(t, runtime.Object(pod))
pod2 := obj2.(*v1.Pod)
s2 := pod2.Spec
apiVersion := s2.Containers[0].Env[0].ValueFrom.FieldRef.APIVersion
if apiVersion != "v1" {
t.Errorf("Expected default APIVersion v1, got: %v", apiVersion)
}
}
func TestSetMinimumScalePod(t *testing.T) {
	// verify sub-milli resource requests are rounded up to the minimum scale (1m)
s := v1.PodSpec{}
s.Containers = []v1.Container{
{
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("1n"),
},
Limits: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("2n"),
},
},
},
}
s.InitContainers = []v1.Container{
{
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("1n"),
},
Limits: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("2n"),
},
},
},
}
pod := &v1.Pod{
Spec: s,
}
v1.SetObjectDefaults_Pod(pod)
if expect := resource.MustParse("1m"); expect.Cmp(pod.Spec.Containers[0].Resources.Requests[v1.ResourceMemory]) != 0 {
t.Errorf("did not round resources: %#v", pod.Spec.Containers[0].Resources)
}
if expect := resource.MustParse("1m"); expect.Cmp(pod.Spec.InitContainers[0].Resources.Requests[v1.ResourceMemory]) != 0 {
t.Errorf("did not round resources: %#v", pod.Spec.InitContainers[0].Resources)
}
}
func TestSetDefaultRequestsPod(t *testing.T) {
// verify we default if limits are specified (and that request=0 is preserved)
s := v1.PodSpec{}
s.Containers = []v1.Container{
{
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("0"),
},
Limits: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("100m"),
v1.ResourceMemory: resource.MustParse("1Gi"),
},
},
},
}
s.InitContainers = []v1.Container{
{
Resources: v1.ResourceRequirements{
Requests: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("0"),
},
Limits: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("100m"),
v1.ResourceMemory: resource.MustParse("1Gi"),
},
},
},
}
pod := &v1.Pod{
Spec: s,
}
output := roundTrip(t, runtime.Object(pod))
pod2 := output.(*v1.Pod)
defaultRequest := pod2.Spec.Containers[0].Resources.Requests
if requestValue := defaultRequest[v1.ResourceCPU]; requestValue.String() != "100m" {
t.Errorf("Expected request cpu: %s, got: %s", "100m", requestValue.String())
}
if requestValue := defaultRequest[v1.ResourceMemory]; requestValue.String() != "0" {
t.Errorf("Expected request memory: %s, got: %s", "0", requestValue.String())
}
defaultRequest = pod2.Spec.InitContainers[0].Resources.Requests
if requestValue := defaultRequest[v1.ResourceCPU]; requestValue.String() != "100m" {
t.Errorf("Expected request cpu: %s, got: %s", "100m", requestValue.String())
}
if requestValue := defaultRequest[v1.ResourceMemory]; requestValue.String() != "0" {
t.Errorf("Expected request memory: %s, got: %s", "0", requestValue.String())
}
// verify we do nothing if no limits are specified
s = v1.PodSpec{}
s.Containers = []v1.Container{{}}
s.InitContainers = []v1.Container{{}}
pod = &v1.Pod{
Spec: s,
}
output = roundTrip(t, runtime.Object(pod))
pod2 = output.(*v1.Pod)
defaultRequest = pod2.Spec.Containers[0].Resources.Requests
if requestValue := defaultRequest[v1.ResourceCPU]; requestValue.String() != "0" {
t.Errorf("Expected 0 request value, got: %s", requestValue.String())
}
defaultRequest = pod2.Spec.InitContainers[0].Resources.Requests
if requestValue := defaultRequest[v1.ResourceCPU]; requestValue.String() != "0" {
t.Errorf("Expected 0 request value, got: %s", requestValue.String())
}
}
func TestDefaultRequestIsNotSetForReplicationController(t *testing.T) {
s := v1.PodSpec{}
s.Containers = []v1.Container{
{
Resources: v1.ResourceRequirements{
Limits: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("100m"),
},
},
},
}
rc := &v1.ReplicationController{
Spec: v1.ReplicationControllerSpec{
Replicas: newInt(3),
Template: &v1.PodTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Labels: map[string]string{
"foo": "bar",
},
},
Spec: s,
},
},
}
output := roundTrip(t, runtime.Object(rc))
rc2 := output.(*v1.ReplicationController)
defaultRequest := rc2.Spec.Template.Spec.Containers[0].Resources.Requests
requestValue := defaultRequest[v1.ResourceCPU]
if requestValue.String() != "0" {
t.Errorf("Expected 0 request value, got: %s", requestValue.String())
}
}
func TestSetDefaultLimitRangeItem(t *testing.T) {
limitRange := &v1.LimitRange{
ObjectMeta: metav1.ObjectMeta{
Name: "test-defaults",
},
Spec: v1.LimitRangeSpec{
Limits: []v1.LimitRangeItem{{
Type: v1.LimitTypeContainer,
Max: v1.ResourceList{
v1.ResourceCPU: resource.MustParse("100m"),
},
Min: v1.ResourceList{
v1.ResourceMemory: resource.MustParse("100Mi"),
},
Default: v1.ResourceList{},
DefaultRequest: v1.ResourceList{},
}},
},
}
output := roundTrip(t, runtime.Object(limitRange))
limitRange2 := output.(*v1.LimitRange)
defaultLimit := limitRange2.Spec.Limits[0].Default
defaultRequest := limitRange2.Spec.Limits[0].DefaultRequest
// verify that default cpu was set to the max
defaultValue := defaultLimit[v1.ResourceCPU]
if defaultValue.String() != "100m" {
t.Errorf("Expected default cpu: %s, got: %s", "100m", defaultValue.String())
}
// verify that default request was set to the limit
requestValue := defaultRequest[v1.ResourceCPU]
if requestValue.String() != "100m" {
t.Errorf("Expected request cpu: %s, got: %s", "100m", requestValue.String())
}<|fim▁hole|> t.Errorf("Expected request memory: %s, got: %s", "100Mi", requestMinValue.String())
}
}
func TestSetDefaultProbe(t *testing.T) {
originalProbe := v1.Probe{}
expectedProbe := v1.Probe{
InitialDelaySeconds: 0,
TimeoutSeconds: 1,
PeriodSeconds: 10,
SuccessThreshold: 1,
FailureThreshold: 3,
}
pod := &v1.Pod{
Spec: v1.PodSpec{
Containers: []v1.Container{{LivenessProbe: &originalProbe}},
},
}
output := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
actualProbe := *output.Spec.Containers[0].LivenessProbe
if actualProbe != expectedProbe {
t.Errorf("Expected probe: %+v\ngot: %+v\n", expectedProbe, actualProbe)
}
}
func TestSetDefaultSchedulerName(t *testing.T) {
pod := &v1.Pod{}
output := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
if output.Spec.SchedulerName != v1.DefaultSchedulerName {
t.Errorf("Expected scheduler name: %+v\ngot: %+v\n", v1.DefaultSchedulerName, output.Spec.SchedulerName)
}
}
func TestDefaultSecurityContextConstraints(t *testing.T) {
tests := map[string]struct {
scc *versioned.SecurityContextConstraints
expectedFSGroup versioned.FSGroupStrategyType
expectedSupGroup versioned.SupplementalGroupsStrategyType
}{
"shouldn't default": {
scc: &versioned.SecurityContextConstraints{
FSGroup: versioned.FSGroupStrategyOptions{
Type: versioned.FSGroupStrategyMustRunAs,
},
SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
Type: versioned.SupplementalGroupsStrategyMustRunAs,
},
},
expectedFSGroup: versioned.FSGroupStrategyMustRunAs,
expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
},
"default fsgroup runAsAny": {
scc: &versioned.SecurityContextConstraints{
RunAsUser: versioned.RunAsUserStrategyOptions{
Type: versioned.RunAsUserStrategyRunAsAny,
},
SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
Type: versioned.SupplementalGroupsStrategyMustRunAs,
},
},
expectedFSGroup: versioned.FSGroupStrategyRunAsAny,
expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
},
"default sup group runAsAny": {
scc: &versioned.SecurityContextConstraints{
RunAsUser: versioned.RunAsUserStrategyOptions{
Type: versioned.RunAsUserStrategyRunAsAny,
},
FSGroup: versioned.FSGroupStrategyOptions{
Type: versioned.FSGroupStrategyMustRunAs,
},
},
expectedFSGroup: versioned.FSGroupStrategyMustRunAs,
expectedSupGroup: versioned.SupplementalGroupsStrategyRunAsAny,
},
"default fsgroup runAsAny with mustRunAs UID strat": {
scc: &versioned.SecurityContextConstraints{
RunAsUser: versioned.RunAsUserStrategyOptions{
Type: versioned.RunAsUserStrategyMustRunAsRange,
},
SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
Type: versioned.SupplementalGroupsStrategyMustRunAs,
},
},
expectedFSGroup: versioned.FSGroupStrategyRunAsAny,
expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
},
"default sup group runAsAny with mustRunAs UID strat": {
scc: &versioned.SecurityContextConstraints{
RunAsUser: versioned.RunAsUserStrategyOptions{
Type: versioned.RunAsUserStrategyMustRunAsRange,
},
FSGroup: versioned.FSGroupStrategyOptions{
Type: versioned.FSGroupStrategyMustRunAs,
},
},
expectedFSGroup: versioned.FSGroupStrategyMustRunAs,
expectedSupGroup: versioned.SupplementalGroupsStrategyRunAsAny,
},
}
for k, v := range tests {
output := roundTrip(t, runtime.Object(v.scc))
scc := output.(*versioned.SecurityContextConstraints)
if scc.FSGroup.Type != v.expectedFSGroup {
t.Errorf("%s has invalid fsgroup. Expected: %v got: %v", k, v.expectedFSGroup, scc.FSGroup.Type)
}
if scc.SupplementalGroups.Type != v.expectedSupGroup {
t.Errorf("%s has invalid supplemental group. Expected: %v got: %v", k, v.expectedSupGroup, scc.SupplementalGroups.Type)
}
}
}
func TestDefaultSCCVolumes(t *testing.T) {
tests := map[string]struct {
scc *versioned.SecurityContextConstraints
expectedVolumes []versioned.FSType
expectedHostDir bool
}{
// this expects the volumes to default to all for an empty volume slice
// but since the host dir setting is false it should be all - host dir
"old client - default allow* fields, no volumes slice": {
scc: &versioned.SecurityContextConstraints{},
expectedVolumes: versioned.StringSetToFSType(sccutil.GetAllFSTypesExcept(string(versioned.FSTypeHostPath))),
expectedHostDir: false,
},
// this expects the volumes to default to all for an empty volume slice
"old client - set allowHostDir true fields, no volumes slice": {
scc: &versioned.SecurityContextConstraints{
AllowHostDirVolumePlugin: true,
},
expectedVolumes: []versioned.FSType{versioned.FSTypeAll},
expectedHostDir: true,
},
"new client - allow* fields set with matching volume slice": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
AllowHostDirVolumePlugin: true,
},
expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
expectedHostDir: true,
},
"new client - allow* fields set with mismatch host dir volume slice": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
AllowHostDirVolumePlugin: false,
},
expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir},
expectedHostDir: false,
},
"new client - allow* fields set with mismatch FSTypeAll volume slice": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{versioned.FSTypeAll},
AllowHostDirVolumePlugin: false,
},
expectedVolumes: versioned.StringSetToFSType(sccutil.GetAllFSTypesExcept(string(versioned.FSTypeHostPath))),
expectedHostDir: false,
},
"new client - allow* fields unset with volume slice": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
},
expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir},
expectedHostDir: false,
},
"new client - extra volume params retained": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath, versioned.FSTypeGitRepo},
},
expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeGitRepo},
expectedHostDir: false,
},
"new client - empty volume slice, host dir true": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{},
AllowHostDirVolumePlugin: true,
},
expectedVolumes: []versioned.FSType{versioned.FSTypeHostPath},
expectedHostDir: true,
},
"new client - empty volume slice, host dir false": {
scc: &versioned.SecurityContextConstraints{
Volumes: []versioned.FSType{},
AllowHostDirVolumePlugin: false,
},
expectedVolumes: []versioned.FSType{},
expectedHostDir: false,
},
}
for k, v := range tests {
output := roundTrip(t, runtime.Object(v.scc))
scc := output.(*versioned.SecurityContextConstraints)
if !reflect.DeepEqual(scc.Volumes, v.expectedVolumes) {
t.Errorf("%s has invalid volumes. Expected: %v got: %v", k, v.expectedVolumes, scc.Volumes)
}
if scc.AllowHostDirVolumePlugin != v.expectedHostDir {
t.Errorf("%s has invalid host dir. Expected: %v got: %v", k, v.expectedHostDir, scc.AllowHostDirVolumePlugin)
}
}
}<|fim▁end|>
|
// verify that if a min is provided, it will be the default if no limit is specified
requestMinValue := defaultRequest[v1.ResourceMemory]
if requestMinValue.String() != "100Mi" {
|
<|file_name|>gatewaydef.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from dace.processinstance.gateway import (
ExclusiveGateway, ParallelGateway, InclusiveGateway)
from .core import FlowNodeDefinition, Path
class GatewayDefinition(FlowNodeDefinition):
factory = NotImplemented
class ExclusiveGatewayDefinition(GatewayDefinition):
factory = ExclusiveGateway
def find_startable_paths(self, source_path, source):
for transition in self.outgoing:
if transition.condition(None):
nodedef = self.process[transition.target_id]
initial_path = source_path.clone()
source_transaction = source_path.transaction.__parent__
source_transaction.remove_subtransaction(
source_path.transaction)
source_transaction.start_subtransaction(type='Find',
path=initial_path,
initiator=self)
initial_path.add_transition(transition)
startable_paths = nodedef.find_startable_paths(
initial_path, self)
for startable_path in startable_paths:
yield startable_path
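# A parallel gateway acts as a join: it may only fire once every incoming
# transition has been validated by a discovered path, after which it explores
# each outgoing transition whose condition holds.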
class ParallelGatewayDefinition(GatewayDefinition):
factory = ParallelGateway
def find_startable_paths(self, source_path, source):
global_transaction = source_path.transaction.get_global_transaction()
paths = global_transaction.find_allsubpaths_for(self, 'Find')
test_path = Path()
for path in paths:
test_path.add_transition(path.transitions)
multiple_target = test_path.get_multiple_target
if multiple_target:
for node in multiple_target:
if isinstance(self.process[node.__name__], ExclusiveGatewayDefinition):
return
alllatest_transitions = []
for path in paths:
alllatest_transitions.extend(path.latest)
validated_nodes = set(t.source_id for t in alllatest_transitions)
validated = True
incoming_nodes = (t.source_id for t in self.incoming)
for node in incoming_nodes:
if not node in validated_nodes:
validated = False
break
if validated:
for transition in self.outgoing:
if transition.condition(None):
nodedef = self.process[transition.target_id]
for path in paths:
initial_path = path.clone()
source_transaction = path.transaction.__parent__
source_transaction.remove_subtransaction(
path.transaction)
source_transaction.start_subtransaction(type='Find',
path=initial_path, <|fim▁hole|> for startable_path in startable_paths:
yield startable_path
class InclusiveGatewayDefinition(GatewayDefinition):
factory = InclusiveGateway<|fim▁end|>
|
initiator=self)
initial_path.add_transition(transition)
startable_paths = nodedef.find_startable_paths(
initial_path, self)
|
<|file_name|>publisher.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from werkzeug.exceptions import ServiceUnavailable, NotFound
from r5d4.flask_redis import get_conf_db
def publish_transaction(channel, tr_type, payload):
conf_db = get_conf_db()
if tr_type not in ["insert", "delete"]:
raise ValueError("Unknown transaction type", tr_type)
subscribed = conf_db.scard("Subscriptions:%s:ActiveAnalytics" % channel)
if subscribed == 0:
raise NotFound(("Channel not found",
"Channel '%(channel)s' is not found or has 0 "
"subscriptions" % locals()))
listened = conf_db.publish(
channel,
'{'
' "tr_type" : "' + tr_type + '", '
' "payload" : ' + payload +
'}'
)<|fim▁hole|> listened,
subscribed
)
))<|fim▁end|>
|
if listened != subscribed:
raise ServiceUnavailable((
"Subscription-Listened mismatch",
"Listened count = %d doesn't match Subscribed count = %d" % (
|
<|file_name|>CMDParser.cpp<|end_file_name|><|fim▁begin|>/********************************************************************************
-- Halo Dev Controls
Copyright © 2011 Jesus7Freak
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*********************************************************************************
File: CMDParser.cpp
Project: HDC
Author: Jesus7Freak
Date: 11/22/2011
Game: Halo and Halo Custom Edition
Version: all
*********************************************************************************/
#include "dllmain.h"
wchar_t *FailLocalCmd = L"Failed: Local Command Only.",
*FailSvCmd = L"Failed: Server Command Only.",
*FailInvalNum = L"Failed: Invalid number(s).",
*FailInvalBool = L"Failed: Invalid boolean.",
*FailBadSpawn = L"Failed: Unable to create Object(s) next to %s",
//*FailBadExec = L"Failed: Execution failed, is dev enabled?.",
*FailPlyrNtFnd = L"Failed: Player(s) not found.",
*FailBadTeleLoc = L"Failed: Teleport location is not defined.",
*FailMissingLoc = L"Failed: Missing teleport location name.",
*FailNoSpaces = L"Failed: No spaces allowed in teleport location name.",
*FailLowAdminLvl = L"Failed: %s has a higher admin level.",
*FailPlyrNtSpwn = L"Failed: %s hasn't respawned.",
*FailPlyrNtInVeh = L"Failed: %s is not in a vehicle.",
*FailPlyrNoWep = L"Failed: %s does not have a weapon.";
//kill messages
wchar_t *SuccededKillMsgs[8] =
{
L"%s was given lethal injection",
L"%s was sent to a gas chamber",
L"%s was killed by firing squad",
L"%s was killed by hanging",
L"%s was killed by The Guardians",
L"THIS IS SPARTA!!!!!!!!! (%s gets kicked into a bottomless pit)",
L"%s was killed by a vehicle",
L"GIVE UP GIVE UP AND FEED THE MACHINE!!!!!!! (%s was fed to the machine)"
};
typedef BOOL (__fastcall *CmdFunc)(wchar_t *cmd_args, short exec_player_index);
CmdFunc HaloCmdFuncs[HALO_CMDS_SIZE] =
{
Halo::CommandHelp,
Halo::ListCommands,
Halo::ListTeleportLocs,
Halo::EnableConsole,
Halo::EnableDevMode,
Halo::CheatsDeathless,
Halo::CheatsInfiniteAmmo,
Halo::CheatsBottomlessClip,
Halo::ShowHudFunc,
Halo::LetterBoxFunc,
Halo::RiderEjectionFunc,
Halo::CheatsOmnipotent,
Halo::CheatsJetPack,
Halo::CheatsBumpPossession,
Halo::CheatsSuperJump,
Halo::CheatsMedusa,
Halo::CheatsReflexiveDamage,
Halo::CheatsXboxController,
Halo::ShowWireFrame,
Halo::ShowFog,
Halo::ShowFogPlane,
Halo::ShowFPS,
Halo::Game_Speed,
Halo::Rapid_Fire,
Halo::Time_Freeze,
Halo::Grav_Boots,
Halo::Vehicle_NTR,
Halo::Marines_HUD
};
CmdFunc RpgCmdFuncs[RPGB_CMDS_SIZE] =
{
RPG::Environment_Day,
RPG::Environment_Rain,
RPG::Environment_Night,
RPG::AirBase_Alarm,
RPG::AirBase_LockDown,
RPG::Fire_Halo,
RPG::LockDown_Timer,
RPG::Halo_Timer
};
CmdFunc PlayerCmdFuncs[PLAYER_CMDS_SIZE] =
{
Player::Speed,
Player::ActiveCamo,
Player::Suspend,
Player::Teleport,
Player::Jump_Teleport,
Player::Velocity,
Player::Ammo,
Player::Battery,
Player::Health,
Player::Shield,
Player::AFK,
Player::Team_Change,
Player::Kick,
Player::Ban,
Player::Kill,
Player::Eject,
Player::Flip_Vehicle,
Player::Admin,
Player::Set_Teleport_Loc,
Player::Spawn_Biped,
Player::Spawn_Hog,
Player::Spawn_All_Vehicles,
Player::Spawn_All_Weapons,
Player::Spawn_All_Powerups,
Player::Copy_Vehicle,
Player::Copy_Weapon,
Player::Destroy_Objects_Mode,
Player::Destroy_Weapon,
Player::Say,
Player::ObjectScale
};
CmdFunc *AllCmdFuncs[CMD_SET_SIZE] =
{
PlayerCmdFuncs,
HaloCmdFuncs,
RpgCmdFuncs
};
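/* Matches players by name against the expression at the start of cmd_str and
   fills player_index_array (pi_found receives the match count). Supported
   syntax, per the /help [pExpression] text below: quotes around names that
   contain spaces, '?' for a single unknown character, and '*' to match every
   player or any run of characters at the start or end of a name. Returns the
   number of wchars consumed, or 0 if nothing matched. */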
int ParseCMDStrPlayers(wchar_t *cmd_str, short* player_index_array, int &pi_found)
{
int max_players_to_find, wchars_processed = 0;
HaloCE_lib::DATA_HEADER *Players = *Players_ptr;
int NumOfPlayers = Players->NumOfItems;
if (pi_found)
{
max_players_to_find = pi_found;
pi_found = 0;
}
else
max_players_to_find = NumOfPlayers;
if (*cmd_str == L'\"')
{
cmd_str++;
wchars_processed += 2;
}
for (int pi = 0, vpi = 0; pi_found < max_players_to_find && vpi < NumOfPlayers; pi++)
{
if (!players[pi].PlayerID) continue;
else vpi++;
int j = 0;
wchar_t *player_name = players[pi].PlayerName0;
bool str_contain_search = false;
for (int i = 0; i < HaloCE_lib::PlayerNameMaxSize; i++)
{
wchar_t cmd_str_wchar = cmd_str[j];
if (cmd_str_wchar == L'*')
{
wchar_t next_wchar = cmd_str[j + 1];
if (!next_wchar || next_wchar == L' ' || next_wchar == L'\"')
{
wchars_processed = j;
player_index_array[pi_found++] = pi;
break;
}
else
{
cmd_str_wchar = cmd_str[++j];
str_contain_search = true;
}
}
if (cmd_str_wchar == L'?') continue;
if (cmd_str_wchar != player_name[i])
{
if (!str_contain_search) break;
else continue;
}
if (!player_name[i + 1])
{
wchars_processed = j;
player_index_array[pi_found++] = pi;
break;
}
j++;
}
}
if (pi_found > 0) wchars_processed += 1;
else wchars_processed = 0;
return wchars_processed;
}
//functions calling this need to test if first CMD_CALL_INFO::cmd[0] == '/'
DWORD __fastcall CMDParser(wchar_t *cmd_str, short exec_player_index)
{
if (!TempAdmin[exec_player_index]) return FALSE;
int cmd_name_length = 0;
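	//advance past the command name; if no argument follows, append a trailing
	//space so the argument parsers below always see a terminator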
while (cmd_str[cmd_name_length] && cmd_str[cmd_name_length++] != L' ');
wchar_t *add_space = &cmd_str[cmd_name_length];
if (!add_space[0])
{
add_space[1] = 0;
add_space[0] = ' ';
cmd_name_length++;
}
bool found = false;
BOOL succeded = FALSE;
for (int cmd_group_i = 0; !found && cmd_group_i < CMD_SET_SIZE; cmd_group_i++)
{
CMDsLib::COMMANDS *cmd_group = CMDsLib::all_commands[cmd_group_i];
char **cmd_strs = cmd_group->cmd_strs;
int group_size = cmd_group->size;
CmdFunc *CmdGroupFuncs = AllCmdFuncs[cmd_group_i];
for (int i = 0; i < group_size; i++)
{
			//skip the / as it has already been checked by the hook
if (str_cmpAW(&cmd_strs[i][1], cmd_str, cmd_name_length))
{
found = true;
succeded = (*CmdGroupFuncs[i])(&cmd_str[cmd_name_length], exec_player_index);
break;
}
}
}
return succeded;
}
BOOL __fastcall Halo::CommandHelp(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
//uses the space like a null terminator
short cmd_arg_length = 0; while (cmd_args[cmd_arg_length]) cmd_arg_length++;
cmd_args[cmd_arg_length + 1] = 0;
cmd_args[cmd_arg_length] = ' ';
wchar_t *chat_header = NULL, *chat_usage = NULL, *chat_descript = NULL;
for (int cmd_group_i = 0; !chat_header && cmd_group_i < 3; cmd_group_i++)
{
CMDsLib::COMMANDS *cmd_group = CMDsLib::all_commands[cmd_group_i];
char **cmd_strs = cmd_group->cmd_strs;
int group_size = cmd_group->size;
for (int i = 0; !chat_header && i < group_size; i++)
{
if (str_cmpAW(cmd_strs[i], cmd_args, cmd_arg_length))
{
CMDsLib::CMD_DESCRIPT *pCDS = cmd_group->cmd_descripts;
chat_header = pCDS[i].cmd_header;
chat_usage = pCDS[i].cmd_usage;
chat_descript = pCDS[i].cmd_descript;
break;
}
}
}
if (!chat_header && str_cmpAW("[pExpression] ", cmd_args, cmd_arg_length))
{
HaloSay(L"example use of [pExpression]: Shadow, AoO Aurora, N®Þ»Jedi", exec_player_index);
HaloSay(L"/spd Shadow 4 - normal use", exec_player_index);
HaloSay(L"/spd \"AoO Aurora\" 4 - use quotes when the name has spaces", exec_player_index);
HaloSay(L"/spd ????Jedi 4 - use ? when you don't know the character", exec_player_index);
chat_header = L"/spd *Jedi 4 - all players with Jedi at the end of their name";
chat_usage = L"/spd AoO* 4 - all players with AoO at the beggining of their name";
chat_descript = L"/spd * 4 - all players";
}
	//default to help description
else if (!chat_header)
{
CMDsLib::CMD_DESCRIPT *pCDS = CMDsLib::halo_cmd_descripts;
chat_header = pCDS[0].cmd_header;
chat_usage = pCDS[0].cmd_usage;
}
if (chat_header) succeded = TRUE;
else chat_header = L"Failed: command not found.";
HaloSay(chat_header, exec_player_index);
if (chat_usage) HaloSay(chat_usage, exec_player_index);
if (chat_descript) HaloSay(chat_descript, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::ListCommands(wchar_t *cmd_args, short exec_player_index)
{
int list_i = 0;
wchar_t list_str[128];
list_str[list_i++] = ' ';
for (int cmd_group_i = 0; cmd_group_i < 3; cmd_group_i++)
{
CMDsLib::COMMANDS *cmd_group = CMDsLib::all_commands[cmd_group_i];
char **cmd_strs = cmd_group->cmd_strs;
int group_size = cmd_group->size;
for (int i = 0; i < group_size; i++)
{
char *cmd_str = cmd_strs[i];
int j = 0;
char _char = ' ';
do
{
list_str[list_i++] = (wchar_t)_char;
_char = cmd_str[j++];
}while (_char);
if (list_i > 112)
{
list_str[list_i] = 0;//add null terminator
HaloSay(list_str, exec_player_index);
list_i = 1;
}
}
//print the last line of the group
if (list_i > 1)
{
list_str[list_i] = 0;//add null terminator
HaloSay(list_str, exec_player_index);
list_i = 1;
}
}
return TRUE;
}
BOOL __fastcall Halo::ListTeleportLocs(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
int list_i = 0;
//seems like a good idea
wchar_t list_str[128];
list_str[list_i++] = ' ';
unsigned int map_i = 0;
if (FindMapIndex(&maps_tele_sites, Current_Map_Str, map_i))
{
int loc_num = maps_tele_sites[map_i].teleport_locations.size();
TELEPORT_LOCATION *locs = &maps_tele_sites[map_i].teleport_locations[0];
for (int loc_i = 0; loc_i < loc_num; loc_i++)
{
wchar_t *loc_str = locs[loc_i].teleport_loc_name;
int j = 0;
wchar_t _wchar = L' ';
do
{
list_str[list_i++] = _wchar;
_wchar = loc_str[j++];
}while (_wchar);
//extra padding
list_str[list_i++] = L' ';
if (list_i > 112)
{
list_str[list_i] = 0;//add null terminator
HaloSay(list_str, exec_player_index);
list_i = 1;
}
}
//print the last line of the group
if (list_i > 1)
{
list_str[list_i] = 0;//add null terminator
HaloSay(list_str, exec_player_index);
}
succeded = TRUE;
}
else
HaloSay(
L"Failed: Their are no teleport locations defined, for this map.",
exec_player_index);//, Current_Map_Str); is not a wchar_t*
return succeded;
}
BOOL __fastcall Halo::EnableConsole(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
*Console_enabled = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::EnableDevMode(wchar_t *cmd_args, short exec_player_index)
{
if (running_gt != haloce)
{
HaloSay(L"Failed: Halo Custom Edition only command", exec_player_index);
return FALSE;
}
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
*Dev_enabled = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::CheatsDeathless(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Deathless;
GenericMsg = L"Deathless is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Deathless = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Deathless has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsInfiniteAmmo(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Infinite_Ammo;
GenericMsg = L"Infinite Ammo is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Infinite_Ammo = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Infinite Ammo has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsBottomlessClip(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Bottomless_Clip;
GenericMsg = L"Bottomless Clip is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Bottomless_Clip = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Bottomless Clip has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::ShowHudFunc(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
*ShowHud = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::LetterBoxFunc(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
*LetterBox = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::RiderEjectionFunc(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = *RiderEjection;
GenericMsg = L"Rider Ejection is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
*RiderEjection = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Rider Ejection has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsOmnipotent(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Omnipotent;
GenericMsg = L"Omnipotent is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Omnipotent = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Omnipotent has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsJetPack(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = !cheats->JetPack;
GenericMsg = L"Fall Damage is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
		cheats->JetPack = !(BYTE)_bool;//store the inverse: fall damage on = JetPack off
succeded = TRUE;
GenericMsg = L"Fall Damage has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsBumpPossession(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Bmp_Possession;
GenericMsg = L"Bump Possession is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Bmp_Possession = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Bump Possession has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsSuperJump(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Super_jump;
GenericMsg = L"Super Jump is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Super_jump = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Super Jump has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsReflexiveDamage(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Reflexive_damage = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::CheatsMedusa(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = cheats->Medusa;
GenericMsg = L"Medusa is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Medusa = (BYTE)_bool;
succeded = TRUE;
GenericMsg = L"Medusa has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::CheatsXboxController(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
cheats->Controller = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::ShowWireFrame(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
rasterizer->WireFrame = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::ShowFog(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
rasterizer->FogAtmosphere = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::ShowFogPlane(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
rasterizer->FogPlane = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::ShowFPS(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
BOOL _bool;
if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
rasterizer->FPS = (BYTE)_bool;
succeded = TRUE;
}
else
HaloSay(FailInvalBool, exec_player_index);
return succeded;
}
BOOL __fastcall Halo::Game_Speed(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
float fnumber;
if (!*cmd_args)
{
fnumber = *game_speed;
GenericMsg = L"Game Speed is set at %.2f";
}
else if (CMDsLib::ParseStrFloat(cmd_args, &fnumber))
{
*game_speed = fnumber;
succeded = TRUE;
GenericMsg = L"Game Speed has been set to %.2f";
}
else
		GenericMsg = FailInvalNum;
HaloSay(GenericMsg, exec_player_index, fnumber);
return succeded;
}
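//Note: passing -1 to the ckbx_*_CheckedChanged handlers below appears to query
//the current state without toggling it (inferred from the display-only branches).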
BOOL __fastcall Halo::Rapid_Fire(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = ckbx_rapid_fire_CheckedChanged(-1);
GenericMsg = L"Rapid Fire is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
ckbx_rapid_fire_CheckedChanged((BYTE)_bool);
succeded = TRUE;
GenericMsg = L"Rapid Fire has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::Time_Freeze(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = ckbx_time_freeze_CheckedChanged(-1);
GenericMsg = L"Time Freeze is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
ckbx_time_freeze_CheckedChanged((BYTE)_bool);
succeded = TRUE;
GenericMsg = L"Time Freeze has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::Grav_Boots(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = ckbx_grav_boots_CheckedChanged(-1);
GenericMsg = L"Grav Boots is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
ckbx_grav_boots_CheckedChanged((BYTE)_bool);
succeded = TRUE;
GenericMsg = L"Grav Boots has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
BOOL __fastcall Halo::Vehicle_NTR(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL _bool;
if (!*cmd_args)
{
_bool = ckbx_vehicle_ntr_CheckedChanged(-1);
GenericMsg = L"Vehicle Team Restriction is set at %i";
}
else if (CMDsLib::ParseStrBool(cmd_args, &_bool))
{
ckbx_vehicle_ntr_CheckedChanged((BYTE)_bool);
succeded = TRUE;
GenericMsg = L"Vehicle Team Restriction has been set to %i";
}
else
GenericMsg = FailInvalBool;
HaloSay(GenericMsg, exec_player_index, _bool);
return succeded;
}
/*BOOL __stdcall Halo::Execute_Console_Func(CMD_CALL_INFO *pCCI)
{
BOOL succeded = FALSE;
char *console_cmd = &Chat_Buffer_64A[cmd_info_array_index][pCCI->cmd_name_length];
//for names with quotes
if (*console_cmd == '\"')
{
int last_char_i = pCCI->cmd_length - pCCI->cmd_name_length - 1;
if (console_cmd[last_char_i] == '\"')
{
console_cmd[last_char_i] = 0;
console_cmd++;
}
}
//this would be bad if this was allowed
if (!str_cmpA(console_cmd, "quit"))
{
//if (!Console(cmd))
// HaloSay(FailBadExec);
__asm
{
PUSH 0
//compiler puts this on the stack, not a register...
MOV EDI,[console_cmd];
CALL DWORD PTR [Console_func_address]
ADD ESP,4
TEST AL,AL
JNE SHORT console_succeded
}
HaloSay(FailBadExec, exec_player_index);
__asm
{
console_succeded:
MOV succeded,TRUE
}
}
return succeded;
}*/
BOOL __fastcall Halo::Marines_HUD(wchar_t *cmd_args, short exec_player_index)
{
//host only cmd
if (exec_player_index)
{
HaloSay(FailLocalCmd, exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
int number = 0;
if (CMDsLib::ParseStrInt(cmd_args, &number))
{
if (MV_chkBx_CheckedChanged(number))
succeded = TRUE;
}
else
HaloSay(FailInvalNum, exec_player_index);
return succeded;
}
BOOL __fastcall RPG::Environment_Day(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
short *setting = (short*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::setting_offset);
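	//the hs global selects the map environment: 0 = day, 1 = rain, 2 = night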
if (*setting != 0)
{
*setting = 0;
HaloSay(L"*the sun has come up*", -1);
}
return TRUE;
}
BOOL __fastcall RPG::Environment_Rain(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
short *setting = (short*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::setting_offset);
if (*setting != 1)
{
*setting = 1;
HaloSay(L"*a dense fog covers the entire area as it starts to rain*", -1);
}
return TRUE;
}
BOOL __fastcall RPG::Environment_Night(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
short *setting = (short*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::setting_offset);
if (*setting != 2)
{
*setting = 2;
HaloSay(L"*the sun has gone down*", -1);
}
return TRUE;
}
BOOL __fastcall RPG::AirBase_Alarm(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
	wchar_t *GenericMsg = NULL;
bool *alarmed = (bool*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::alarmed_offset);
bool *alarm_control_2 = (bool*)((*Device_Groups_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_device_groups::alarm_control_2_offset);
int number = 0;
if (CMDsLib::ParseStrBool(cmd_args, &number))
{
if (number == 1)
{
if (!*alarmed) *alarm_control_2 = true;
GenericMsg = L"The Levis Station's alarm has been triggered.";
}
else if (number == 0)
{
if (*alarmed) *alarm_control_2 = true;
GenericMsg = L"The Levis Station's alarm has been switched off.";
}
succeded = TRUE;
}
else
		GenericMsg = FailInvalBool;
HaloSay(GenericMsg, -1);
return succeded;
}
BOOL __fastcall RPG::AirBase_LockDown(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
wchar_t *GenericMsg;
bool *locked = (bool*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::locked_offset);
bool *lock_control = (bool*)((*Device_Groups_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_device_groups::lock_control_offset);
if (!*locked)
{
*lock_control = true;
succeded = TRUE;
GenericMsg = L"Levis Station's lockdown procedures have been initiated.";
}
else
GenericMsg = L"Failed: Levis Station has already been locked down.";
HaloSay(GenericMsg, -1);
return succeded;
}
BOOL __fastcall RPG::Fire_Halo(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
wchar_t *GenericMsg;
bool *nuked = (bool*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::nuked_offset);
bool *boom_control = (bool*)((*Device_Groups_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_device_groups::boom_control_offset);
if (!*nuked)
{
*boom_control = true;
succeded = TRUE;
GenericMsg = L"Halo will be fired when someone is in close proximity to the control room.";
}
else
GenericMsg = L"Failed: Halo is not ready to fire.";
HaloSay(GenericMsg, -1);
return succeded;
}
BOOL __fastcall RPG::LockDown_Timer(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short *lock_timer = (short*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::lock_timer_offset);
int number;
	//display current value
if (!*cmd_args)
{
succeded = TRUE;
GenericMsg = L"lock_timer var is set at %i seconds.";
}
else if (CMDsLib::ParseStrInt(cmd_args, &number))
{
*lock_timer = (short)(number * 30);
succeded = TRUE;
GenericMsg = L"lock_timer var has been set to %i seconds.";
}
else
GenericMsg = FailInvalNum;
HaloSay(GenericMsg, exec_player_index, *lock_timer / 30);
return succeded;
}
BOOL __fastcall RPG::Halo_Timer(wchar_t *cmd_args, short exec_player_index)
{
if (!rpgb6_2_running)
{
HaloSay(L"Failed: rpg_beta6_2 only command", exec_player_index);
return FALSE;
}
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short *boom_timer = (short*)((*HS_Globals_ptr)->FirstItem
+ HCE_Lib::rpg_beta6_2_hs_global::boom_timer_offset);
int number;
	//display current value
if (!*cmd_args)
{
succeded = TRUE;
GenericMsg = L"boom_timer var is set at %i seconds.";
}
else if (CMDsLib::ParseStrInt(cmd_args, &number))
{
*boom_timer = (short)(number * 30);
succeded = TRUE;
GenericMsg = L"boom_timer var has been set to %i seconds.";
}
else
GenericMsg = FailInvalNum;
HaloSay(GenericMsg, exec_player_index, *boom_timer / 30);
return succeded;
}
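//Resolves an object-table index to the object's address.
//Returns NULL for an invalid index (-1) or an empty/stale slot (ObjectID == 0).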
inline DWORD GetObj(short obj_index)
{
	DWORD obj_address = 0;
if(obj_index != -1)//valid index?
if (objects[obj_index].ObjectID)//valid ID?
obj_address = objects[obj_index].Object_ptr;
return obj_address;
}
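//Returns the player's biped object, or NULL if the player isn't spawned.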
inline HaloCE_lib::SPARTAN* GetPlayerObj(short player_index)
{
return (HaloCE_lib::SPARTAN*)GetObj(players[player_index].PlayerObjTag.Index);
}
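//Returns the object of the vehicle the player occupies, or NULL if on foot.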
inline HaloCE_lib::VEHICLE_OBJECT* GetPlayerVehObj(HaloCE_lib::SPARTAN* player_obj)
{
HaloCE_lib::VEHICLE_OBJECT *veh_obj_address = NULL;
if (player_obj->VehicleTag.Index != -1)
veh_obj_address = (HaloCE_lib::VEHICLE_OBJECT*)GetObj(player_obj->VehicleTag.Index);
return veh_obj_address;
}
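//Flags the player's biped for death; returns false if the player isn't spawned.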
inline bool KillPlayer(short player_index)
{
bool succeded = false;
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(player_index);
if(player_object)
{
player_object->KillPlayer = 0x20;
succeded = true;
}
return succeded;
}
//HaloCE_lib::OBJECT_TAG CreateObject(HaloCE_lib::OBJECT_TAG ObjTypeTag, float coordinates[3])
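//Register contract (inferred from the call sites): ECX = pointer to the x/y/z
//spawn coordinates, EDX = the object type tag to create. The pushes below build
//what is presumably the game's object-placement block (0x84 bytes, released by
//ADD ESP,84h) before calling the game's create-object routine.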
void __declspec(naked) CreateObject()
{
//HaloCE_lib::OBJECT_TAG NewObjTag;
__asm
{
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
PUSH 0x3F800000
//spin vector
PUSH 0//0x3F800000
PUSH 0
PUSH 0
//m_Scale
PUSH 0x3F800000//0xBF800000
PUSH 0x80000000
PUSH 0x80000000
//m_LowerRot
PUSH 0
PUSH 0//0xBEEDC4BB
PUSH 0x3F800000//0x3F62B8A6
//m_Velocity?
PUSH 0
PUSH 0
PUSH 0
PUSH 0
//coordinates
PUSH DWORD PTR [ECX+8]//0x42DD6B85//0x42DF3BE8
PUSH DWORD PTR [ECX+4]//0x441BDE9D//0x441D0C44
PUSH DWORD PTR [ECX]//0xC3933039//0xC38D246E
PUSH 0x0000FFFF
PUSH 0
PUSH 0xFFFFFFFF
PUSH 0xFFFFFFFF
PUSH 0
PUSH EDX ;//ObjTypeTag
MOV EDX,ESP
PUSH ECX;//saving ECX too
PUSH 0
PUSH EDX
CALL DWORD PTR [CreateObj_func_address]
ADD ESP,8
POP ECX
POP EDX ;//restore EDX
ADD ESP,84h
RETN
}
//return NewObjTag;
}
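//Usage (inferred): "<players>" alone reports the speed modifier; "<players> <float>"
//sets it. If the target is driving, the vehicle's speed is patched instead.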
BOOL __fastcall Player::Speed(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
float fnumber = 0;
		//display current value
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
HaloCE_lib::STATIC_PLAYER *pSP = &players[Player_Indexes[i]];
HaloSay(
L"%s's speed modifier is %.2f",
exec_player_index,
pSP->PlayerName0,
pSP->SpeedModifier);
}
succeded = TRUE;
}
else if (CMDsLib::ParseStrFloat(++cmd_args, &fnumber))
{
for (int i = 0; i < pi_found; i++)
{
HaloCE_lib::STATIC_PLAYER *pSP = &players[Player_Indexes[i]];
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]),
*vehicle_object;
if (player_object &&
(vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object)))
{
					__asm
					{
						MOV EDX,vehicle_object
						MOV EDX,DWORD PTR [EDX]			;//vehicle's object tag ID
						AND EDX,0xFFFF					;//tag index
						SHL EDX,5						;//*32 = size of a tag-array entry (assumed)
						MOV ECX,0x00816DE4				;//tag array base pointer (presumably)
						MOV ECX,DWORD PTR [ECX]
						MOV EAX,DWORD PTR [EDX+ECX+14h]	;//tag definition address
						MOV EDX,fnumber
						MOV DWORD PTR [EAX+2F8h],EDX	;//speed field in the vehicle tag (assumed offset)
					}
SpecificMsg = L"%s's vehicle's speed modifier has been set to %.2f";
}
else
{
pSP->SpeedModifier = fnumber;
SpecificMsg = L"%s's speed modifier has been set to %.2f";
}
succeded = TRUE;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
pSP->PlayerName0,
fnumber);
}
}
}
else GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::ActiveCamo(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
int seconds;
if (CMDsLib::ParseStrInt((++cmd_args += arg_len), &seconds))
{
			short duration = seconds * 30; //halo time units = 30 * seconds
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//player_object->IsInvisible = buffer_num;
DWORD playertag = players[Player_Indexes[i]].PlayerID;
playertag <<= 16;
playertag |= Player_Indexes[i];
__asm
{
							MOVSX EDX,duration
PUSH EDX
PUSH 0
MOV EBX,playertag
CALL DWORD PTR [ActiveCamo_func_address]
ADD ESP,8
MOV succeded,TRUE
}
SpecificMsg = L"%s has been given active camouflage for %i seconds";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
						duration / 30);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg) HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Suspend(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
BOOL suspend;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
if (CMDsLib::ParseStrBool((++cmd_args += arg_len), &suspend))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
//if (vehicle_object) player_object = vehicle_object;
player_object->IsSuspended = (BYTE)suspend;
succeded = TRUE;
if (suspend) SpecificMsg = L"%s is now suspended.";
else SpecificMsg = L"%s is now unsuspended.";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
			GenericMsg = FailInvalBool;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
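//Usage (inferred): "<players> <x> <y> <z>", "<players> <player2>", "<players> <location>",
//or "remove <location>" to delete a saved teleport site on the current map.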
BOOL __fastcall Player::Teleport(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL, *teleport_to;
short Player_Indexes[16];
int pi_found = 0;
float coordinates[3];
int teleport_type = 0;
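	//teleport_type: 0 = unresolved, 1 = raw x/y/z, 2 = to another player, 3 = saved location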
int arg_len;
if (str_cmpW(cmd_args, L"remove ", 7))
{
cmd_args += 7;
if (*cmd_args)
{
unsigned int map_i = 0, tele_loc_i = 0;
if (FindMapIndex(&maps_tele_sites, Current_Map_Str, map_i) &&
FindTeleLocNameIndex(&maps_tele_sites[map_i].teleport_locations, cmd_args, tele_loc_i))
{
std::vector<TELEPORT_LOCATION> *tl = &maps_tele_sites[map_i].teleport_locations;
tl->erase(tl->begin() + tele_loc_i);
				//delete the map entry if there are no more locations
if (!tl->size())
maps_tele_sites.erase(maps_tele_sites.begin() + map_i);
WriteLocationsToFile(LocationsFilePath, &maps_tele_sites);
SpecificMsg = L"\"%s\" has been removed.";
}
else
SpecificMsg = FailBadTeleLoc;
}
else
SpecificMsg = FailMissingLoc;
HaloSay(
SpecificMsg,
exec_player_index,
cmd_args);
}
else if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
int arg_count = CMDsLib::GetCMDArgCount(cmd_args);
if (arg_count == 3)
{
			//use x y z coordinates (reuses the outer arg_len)
			if ((arg_len = CMDsLib::ParseStrFloat(++cmd_args, &coordinates[0])) &&
(arg_len = CMDsLib::ParseStrFloat((++cmd_args += arg_len), &coordinates[1])) &&
CMDsLib::ParseStrFloat((++cmd_args += arg_len), &coordinates[2]))
{
teleport_type = 1;
}
else
GenericMsg = FailInvalNum;
}
else if (arg_count == 1)
{
int pi2_to_find = 1; short player2_index;
if (ParseCMDStrPlayers(++cmd_args, &player2_index, pi2_to_find))
{
HaloCE_lib::SPARTAN *player2_object = GetPlayerObj(player2_index);
if (player2_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player2_object);
if (vehicle_object) player2_object = vehicle_object;
for (int i = 0; i < 3; i++)
coordinates[i] = player2_object->m_World[i];
teleport_to = players[player2_index].PlayerName0;
teleport_type = 2;
}
}
if (!teleport_type)
{
unsigned int map_i = 0, tele_loc_i = 0;
if (FindMapIndex(&maps_tele_sites, Current_Map_Str, map_i) &&
FindTeleLocNameIndex(&maps_tele_sites[map_i].teleport_locations, cmd_args, tele_loc_i))
{
TELEPORT_LOCATION *pTL = &maps_tele_sites[map_i].teleport_locations[tele_loc_i];
for (int i = 0; i < 3; i++)
coordinates[i] = pTL->coordinates[i];
teleport_to = pTL->teleport_loc_name;
teleport_type = 3;
}
else
GenericMsg = FailBadTeleLoc;
}
}
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg) HaloSay(GenericMsg, exec_player_index);
if (teleport_type)
{
for (int i = 0; i < pi_found; i++)
		{
			SpecificMsg = NULL;//reset per player, as in the other handlers
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
player_object->m_World[0] = coordinates[0];
player_object->m_World[1] = coordinates[1];
player_object->m_World[2] = coordinates[2] + 1 * i;
succeded = TRUE;
switch (teleport_type)
{
case 1:
HaloSay(
L"%s has been teleported to %.2fx %.2fy %.2fz",
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
coordinates[0],
coordinates[1],
coordinates[2]);
break;
case 2:
SpecificMsg = L"%s has been teleported to %s";
							player_object->m_World[2] += 1;//lift 1 unit so the two players don't overlap
break;
case 3:
SpecificMsg = L"%s has been teleported to \"%s\"";
break;
}
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
teleport_to);
}
}
}
return succeded;
}
BOOL __fastcall Player::Jump_Teleport(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
float coordinates[3];
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
for (int i = 0; i < 3; i++)
coordinates[i] = player_object->m_World[i];
succeded = TRUE;
SpecificMsg = L"%s's current coordinates is %.2fx %.2fy %.2fz";
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
coordinates[0],
coordinates[1],
coordinates[2]);
}
}
}
else if ((arg_len = CMDsLib::ParseStrFloat(++cmd_args, &coordinates[0])) &&
(arg_len = CMDsLib::ParseStrFloat((++cmd_args += arg_len), &coordinates[1])) &&
CMDsLib::ParseStrFloat((++cmd_args += arg_len), &coordinates[2]))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
for (int i = 0; i < 3; i++)
player_object->m_World[i] += coordinates[i];
succeded = TRUE;
SpecificMsg = L"%s's coordinates has been adjusted by %.2fx %.2fy %.2fz";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
coordinates[0],
coordinates[1],
coordinates[2]);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg) HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Velocity(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
float vectors[3];
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
for (int i = 0; i < 3; i++)
vectors[i] = player_object->m_Velocity[i];
succeded = TRUE;
SpecificMsg = L"%s's current vector is %.2fx %.2fy %.2fz";
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
vectors[0],
vectors[1],
vectors[2]);
}
}
}
else if ((arg_len = CMDsLib::ParseStrFloat(++cmd_args, &vectors[0])) &&
(arg_len = CMDsLib::ParseStrFloat((++cmd_args += arg_len), &vectors[1])) &&
CMDsLib::ParseStrFloat((++cmd_args += arg_len), &vectors[2]))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
for (int i = 0; i < 3; i++)
player_object->m_Velocity[i] = vectors[i];
succeded = TRUE;
SpecificMsg = L"%s's vector has been changed to %.2fx %.2fy %.2fz";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else<|fim▁hole|> if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
vectors[0],
vectors[1],
vectors[2]);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Ammo(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
int number;
		//display current value
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
HaloCE_lib::WEAPON_OBJECT *weapon_object = (HaloCE_lib::WEAPON_OBJECT*)GetObj(player_object->WeaponTag.Index);
if (weapon_object)
{
number = weapon_object->rounds_total;
succeded = TRUE;
SpecificMsg = L"%s's weapon's ammo is at %i";
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
number);
}
}
}
else if (CMDsLib::ParseStrInt(++cmd_args, &number))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
HaloCE_lib::WEAPON_OBJECT *weapon_object = (HaloCE_lib::WEAPON_OBJECT*)GetObj(player_object->WeaponTag.Index);
if (weapon_object)
{
weapon_object->rounds_total = (short)number;
succeded = TRUE;
SpecificMsg = L"%s's weapon's ammo has been changed to %i";
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
number);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Battery(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
float fnumber;
		//display current value
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
HaloCE_lib::WEAPON_OBJECT *weapon_object = (HaloCE_lib::WEAPON_OBJECT*)GetObj(player_object->WeaponTag.Index);
if (weapon_object)
{
fnumber = 100.0f - (weapon_object->battery_used * 100.0f);
succeded = TRUE;
SpecificMsg = L"%s's weapon's battery is at %.2f%%";
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else if (CMDsLib::ParseStrFloat(++cmd_args, &fnumber))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
HaloCE_lib::WEAPON_OBJECT *weapon_object = (HaloCE_lib::WEAPON_OBJECT*)GetObj(player_object->WeaponTag.Index);
if (weapon_object)
{
weapon_object->battery_used = (100.0f - fnumber)/ 100.0f;
succeded = TRUE;
SpecificMsg = L"%s's weapon's battery has been changed to %.2f%%";
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Health(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
float fnumber;
		//display current value
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
fnumber = player_object->Health * 100.0f;
succeded = TRUE;
SpecificMsg = L"%s's health is at %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else if (CMDsLib::ParseStrFloat(++cmd_args, &fnumber))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
player_object->Health = fnumber / 100.0f;
succeded = TRUE;
SpecificMsg = L"%s's health has been set to %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Shield(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
float fnumber;
		//display current value
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
fnumber = player_object->Shield_00 * 100.0f;
succeded = TRUE;
SpecificMsg = L"%s's shield is at %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else if (CMDsLib::ParseStrFloat(++cmd_args, &fnumber))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN* player_object = GetPlayerObj(Player_Indexes[i]);
if(player_object)
{
player_object->Shield_00 = fnumber / 100.0f;
succeded = TRUE;
SpecificMsg = L"%s's shield has been set to %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
fnumber);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::AFK(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::OBJECT_TAG ObjTag = players[Player_Indexes[i]].PlayerObjTag;
if (ObjTag.Tag != -1)
{
DWORD playertag = players[Player_Indexes[i]].PlayerID;
playertag <<= 16;
playertag |= Player_Indexes[i];
__asm
{
PUSH 0x7FFFFFF ;//respawn time after death
MOV EBX,playertag
CALL DWORD PTR [PlayerDeath_func_address]
ADD ESP,4
}
SpecificMsg = L"%s is now afk.";
}
else
{
players[Player_Indexes[i]].RespawnTimer = 30;//1 sec
SpecificMsg = L"%s is no longer afk.";
}
succeded = TRUE;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Team_Change(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
DWORD Team = 0;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL, *team_str;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
KillPlayer(Player_Indexes[i]);
Team = players[Player_Indexes[i]].Team;
if (Team)
{
Team = 0;
team_str = L"Red";
}
else
{
Team = 1;
team_str = L"Blue";
}
players[Player_Indexes[i]].Team = Team;
succeded = TRUE;
SpecificMsg = L"%s has been switched to %s team.";
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0,
team_str);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Kick(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
if (Player_Indexes[i])
{
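					//sv_kick takes the 1-based player number as an ASCII string,
					//so indexes 9..15 become "10".."16"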
char _pi_char[3] = {'\0','\0','\0'};
if (Player_Indexes[i] > 8)
{
_pi_char[0] = '1';
_pi_char[1] = (Player_Indexes[i] - 10) + '1';
}
else
{
_pi_char[0] = Player_Indexes[i] + '1';
_pi_char[1] = '\0';
}
__asm
{
LEA EAX,[_pi_char]
CALL DWORD PTR [sv_kick_func_address]
MOV succeded,TRUE
}
SpecificMsg = L"%s has been kicked.";
}
else
SpecificMsg = L"Failed: Cannot kick host (%s)!";
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Ban(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
if (Player_Indexes[i])
{
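					//sv_ban takes the same 1-based player-number string plus what
					//appears to be an empty duration (dhms) argument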
char *ban_params[2];
char player_index_char[3] = {'\0','\0','\0'};
if (Player_Indexes[i] > 8)
{
player_index_char[0] = '1';
player_index_char[1] = (Player_Indexes[i] - 10) + '1';
}
else
player_index_char[0] = Player_Indexes[i] + '1';
char dhms_chars = '\0';
ban_params[0] = player_index_char;
ban_params[1] = &dhms_chars;
__asm
{
LEA ECX,[ban_params]
MOV EAX,1
CALL DWORD PTR [sv_ban_func_address]
MOV succeded,TRUE
}
SpecificMsg = L"%s has been banned.";
}
else
SpecificMsg = L"Failed: Cannot ban host (%s)!";
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Kill(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
if (KillPlayer(Player_Indexes[i]))
{
succeded = TRUE;
//get random kill msg
SYSTEMTIME systime;
GetLocalTime(&systime);
int rand = (systime.wMilliseconds >> 2) + Player_Indexes[i];
					rand &= 7;//only indexes 0 to 7 are valid
SpecificMsg = SuccededKillMsgs[rand];
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Eject(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
HaloCE_lib::VEHICLE_OBJECT *vehicle_object = GetPlayerVehObj(player_object);
if (vehicle_object)
{
HaloCE_lib::OBJECT_TAG PlayerObjTag = players[Player_Indexes[i]].PlayerObjTag;
__asm
{
MOV EAX,PlayerObjTag
CALL UnitExitVehicle_func_address
}
succeded = TRUE;
SpecificMsg = L"%s has been ejected from a vehicle.";
}
else
SpecificMsg = FailPlyrNtInVeh;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Flip_Vehicle(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object)
{
player_object = vehicle_object;
//player_object->m_LowerRot[0] = 0;
//player_object->m_LowerRot[1] = 0;
//player_object->m_LowerRot[2] = 1;
						//weird, but it's really "m_LowerRot"
player_object->m_Scale[0] = 0;
player_object->m_Scale[1] = 0;
float z_axis = player_object->m_Scale[2];
//flip 180
if (z_axis < 0) z_axis = 1;
else z_axis = -1;
player_object->m_Scale[2] = z_axis;
succeded = TRUE;
SpecificMsg = L"%s's vehicle has been fliped 180°.";
}
else
SpecificMsg = FailPlyrNtInVeh;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Admin(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short Player_Indexes[16];
int buffer_num = 0, pi_found = 0;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
if (!*cmd_args)
{
succeded = TRUE;
for (int i = 0; i < pi_found; i++)
{
HaloSay(
L"%s's current admin level is %u.",
-1,
players[Player_Indexes[i]].PlayerName0,
TempAdmin[Player_Indexes[i]]);
}
}
else if (CMDsLib::ParseStrInt(++cmd_args, &buffer_num))
{
DWORD admin_level = (DWORD)buffer_num;
if (TempAdmin[exec_player_index] >= admin_level)
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
if (Player_Indexes[i])//same as != 0
{
TempAdmin[Player_Indexes[i]] = admin_level;
succeded = TRUE;
SpecificMsg = L"%s has been changed to admin level %u.";
}
else
SpecificMsg = L"Failed: Cannot change host (%s)'s admin level";
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
-1,
players[Player_Indexes[i]].PlayerName0,
admin_level);
}
}
}
else
GenericMsg = L"Failed: You can't promote an admin higher yous.";
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Set_Teleport_Loc(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
TELEPORT_LOCATION tele_site;
wchar_t *GenericMsg = NULL;
short Player_Index;
int pi_found = 1;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, &Player_Index, pi_found)))
{
cmd_args += arg_len;
if (*++cmd_args)
{
			//location names may not contain spaces
bool space_found = false;
wchar_t *cmd_w_ptr = cmd_args;
while (*cmd_w_ptr++)
if (*cmd_w_ptr == ' ') space_found = true;
if (!space_found)
{
int i = 0; wchar_t wchar;
do
{
wchar = cmd_args[i];
tele_site.teleport_loc_name[i] = wchar;
}
				while (wchar && ++i < TELE_LOC_NAME_SIZE);//pre-increment: never writes one past the buffer
//add null at end if too long
tele_site.teleport_loc_name[TELE_LOC_NAME_SIZE - 1] = '\0';
wchar_t *SpecificMsg = NULL;
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Index);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
for (int i = 0; i < 3; i++)
tele_site.coordinates[i] = player_object->m_World[i];
unsigned int map_i = 0, tele_loc_i = 0;
if (!FindMapIndex(&maps_tele_sites, Current_Map_Str, map_i))
{
MAPS map_loc;
for (int i = 0; i < MAP_STR_SIZE; i++)
map_loc.map_name[i] = Current_Map_Str[i];
maps_tele_sites.push_back(map_loc);
map_i = maps_tele_sites.size() - 1;
}
//if the tele site exists, overwrite it
if (FindTeleLocNameIndex(&maps_tele_sites[map_i].teleport_locations, tele_site.teleport_loc_name, tele_loc_i))
maps_tele_sites[map_i].teleport_locations[tele_loc_i] = tele_site;
else
maps_tele_sites[map_i].teleport_locations.push_back(tele_site);
WriteLocationsToFile(LocationsFilePath, &maps_tele_sites);
succeded = TRUE;
					//unfortunately the location name comes first, but in the error messages the player comes first
HaloSay(
L"New location \"%s\", has been set for %s's current position.",
-1,
cmd_args,
players[Player_Index].PlayerName0);
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Index].PlayerName0);
}
}
else
GenericMsg = FailNoSpaces;
}
else
GenericMsg = FailMissingLoc;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Spawn_Biped(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
	int arg_len;
	if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
	{
		cmd_args += arg_len;
int how_many_to_spawn;
if (CMDsLib::ParseStrInt(++cmd_args, &how_many_to_spawn))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
float coords[3];
coords[0] = player_object->m_World[0] + 1;
coords[1] = player_object->m_World[1] + 1;
coords[2] = player_object->m_World[2] + 0.5f;
__asm
{
MOV ECX,DWORD PTR [ObjTagList_ptr_address]
MOV ECX, DWORD PTR [ECX]
MOV EDX,DWORD PTR [ECX+174h]
MOV EDX,DWORD PTR [EDX+0Ch]
CMP EDX,0xFF
JE SHORT biped_inval
;//6E2280->addr+168h]->addr+1Ch]-> player obj type tag
MOV ECX,DWORD PTR [ECX+168h]
;//__fastcall params
MOV EDX,DWORD PTR [ECX+1Ch]
LEA ECX,[coords]
XOR ESI,ESI
continue_biped_create_loop:
CMP ESI,how_many_to_spawn
JGE SHORT bipeds_spawned//exit_biped_create_loop
CALL CreateObject
INC ESI
JMP SHORT continue_biped_create_loop
//exit_biped_create_loop:
biped_inval:
MOV ECX,DWORD PTR [FailBadSpawn]
MOV SpecificMsg,ECX
JMP SHORT biped_failed
bipeds_spawned:
MOV succeded,TRUE
}
SpecificMsg = L"Biped(s) spawned next to %s";
__asm biped_failed:
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index);
return succeded;
}
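//The spawn helper always operates on player 0, so Player0_index is temporarily
//patched to each target player (its page must be made writable first) and
//restored to 0 afterwards; the Spawn_All_* commands below use the same trick.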
BOOL __fastcall Player::Spawn_Hog(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
DWORD dwOldProtect;
if (VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
PAGE_EXECUTE_READWRITE,
&dwOldProtect))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
*Player0_index = Player_Indexes[i];
//doesnt spawn on some custom maps
//__asm CALL SpawnHog_func_address
__asm
{
//DWORD objlist = *(DWORD*)ObjTagList_ptr_address;
MOV ECX,DWORD PTR [ObjTagList_ptr_address]
MOV ECX, DWORD PTR [ECX]
//DWORD num_of_objs = *(DWORD*)(objlist+0x164);
//if (num_of_objs)
CMP DWORD PTR [ECX+164h],0
JE SHORT hog_bad_list
//DWORD vehicle_list_header = *(DWORD*)(objlist+0x168);
MOV ECX,DWORD PTR [ECX+168h]
//DWORD num_of_veh = *(WORD*)(vehicle_list_header+0x20);
MOVZX EAX,WORD PTR [ECX+20h]
//DWORD vehicle_list = *(DWORD*)(vehicle_list_header+0x24);
MOV ECX,DWORD PTR [ECX+24h]
//if (num_of_veh)
TEST EAX,EAX
JE SHORT hog_bad_list
PUSH 1// num_of_veh
PUSH ECX//vehicle_list
CALL DWORD PTR [SpawnObjAtPlayer0_func_address]
ADD ESP,8
CMP EAX,-1
JNZ SHORT hog_val_player
MOV ECX,DWORD PTR [FailPlyrNtSpwn]
MOV SpecificMsg,ECX
JMP SHORT hog_failed
hog_val_player:
TEST AX,AX
JNZ SHORT hog_spawned
hog_bad_list:
MOV ECX,DWORD PTR [FailBadSpawn]
MOV SpecificMsg,ECX
JMP SHORT hog_failed
hog_spawned:
MOV succeded,TRUE
}
SpecificMsg = L"Warthog spawned next to %s";
__asm hog_failed:
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
*Player0_index = 0;//change back to 0 when finished
VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
dwOldProtect,
&dwOldProtect);
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Spawn_All_Vehicles(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
DWORD dwOldProtect;
if (VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
PAGE_EXECUTE_READWRITE,
&dwOldProtect))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
//[40848BF0+i*10]= vehicle tag
//[40848BE4+0C]->E3D40260 (1st one?)
//[40848B44+24]->40848BE4
//[40848198+168]->40848B44
//[6E2280]->40848198
//[ObjTagList_ptr_address]->6E2280
*Player0_index = Player_Indexes[i];
__asm
{
//DWORD objlist = *(DWORD*)ObjTagList_ptr_address;
MOV ECX,DWORD PTR [ObjTagList_ptr_address]
MOV ECX, DWORD PTR [ECX]
//DWORD num_of_objs = *(DWORD*)(objlist+0x164);
//if (num_of_objs)
CMP DWORD PTR [ECX+164h],0
JE SHORT vehs_bad_list
//DWORD vehicle_list_header = *(DWORD*)(objlist+0x168);
MOV ECX,DWORD PTR [ECX+168h]
//DWORD num_of_veh = *(WORD*)(vehicle_list_header+0x20);
MOVZX EAX,WORD PTR [ECX+20h]
//DWORD vehicle_list = *(DWORD*)(vehicle_list_header+0x24);
MOV ECX,DWORD PTR [ECX+24h]
//if (num_of_veh)
TEST EAX,EAX
JE SHORT vehs_bad_list
PUSH EAX
PUSH ECX
						CALL DWORD PTR [SpawnObjAtPlayer0_func_address]
ADD ESP,8
CMP EAX,-1
JNZ SHORT vehs_val_player
MOV ECX,DWORD PTR [FailPlyrNtSpwn]
MOV SpecificMsg,ECX
JMP SHORT vehs_failed
vehs_val_player:
TEST AX,AX
JNZ SHORT vehs_spawned
vehs_bad_list:
MOV ECX,DWORD PTR [FailBadSpawn]
MOV SpecificMsg,ECX
JMP SHORT vehs_failed
vehs_spawned:
MOV succeded,TRUE
}
SpecificMsg = L"Vehicles spawned next to %s";
__asm vehs_failed:
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
*Player0_index = 0;//change back to 0 when finished
VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
dwOldProtect,
&dwOldProtect);
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Spawn_All_Weapons(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
DWORD dwOldProtect;
if (VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
PAGE_EXECUTE_READWRITE,
&dwOldProtect))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
*Player0_index = Player_Indexes[i];
__asm
{
//DWORD objlist = *(DWORD*)ObjTagList_ptr_address;
MOV ECX,DWORD PTR [ObjTagList_ptr_address]
MOV ECX, DWORD PTR [ECX]
//DWORD num_of_objs = *(DWORD*)(objlist+0x14C);
//if (num_of_objs)
MOV EAX,DWORD PTR [ECX+14Ch]
TEST EAX,EAX
JE SHORT weps_bad_list
//DWORD weapon_list = *(DWORD*)(objlist+0x150);
MOV ECX,DWORD PTR [ECX+150h]
//if (weapon_list)
TEST ECX,ECX
JE SHORT weps_bad_list
PUSH EAX
PUSH ECX
CALL DWORD PTR [SpawnObjAtPlayer0_func_address]
ADD ESP,8
CMP EAX,-1
JNZ SHORT weps_val_player
MOV ECX,DWORD PTR [FailPlyrNtSpwn]
MOV SpecificMsg,ECX
JMP SHORT weps_failed
weps_val_player:
TEST AX,AX
JNZ SHORT weps_spawned
weps_bad_list:
MOV ECX,DWORD PTR [FailBadSpawn]
MOV SpecificMsg,ECX
JMP SHORT weps_failed
weps_spawned:
						MOV succeded,TRUE
}
SpecificMsg = L"Weapons spawned next to %s";
__asm weps_failed:
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
*Player0_index = 0;//change back to 0 when finished
VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
dwOldProtect,
&dwOldProtect);
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Spawn_All_Powerups(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
DWORD dwOldProtect;
if (VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
PAGE_EXECUTE_READWRITE,
&dwOldProtect))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
*Player0_index = Player_Indexes[i];
__asm
{
//DWORD objlist = *(DWORD*)ObjTagList_ptr_address;
MOV ECX,DWORD PTR [ObjTagList_ptr_address]
MOV ECX, DWORD PTR [ECX]
//DWORD num_of_objs = *(DWORD*)(objlist+0x158);
//if (num_of_objs)
MOV EAX,DWORD PTR [ECX+158h]
TEST EAX,EAX
JE SHORT pwrups_bad_list
//DWORD powerup_list = *(DWORD*)(objlist+0x15C);
MOV ECX,DWORD PTR [ECX+15Ch]
//if (powerup_list)
TEST ECX,ECX
JE SHORT pwrups_bad_list
PUSH EAX
PUSH ECX
CALL DWORD PTR [SpawnObjAtPlayer0_func_address]
ADD ESP,8
CMP EAX,-1
JNZ SHORT pwrups_val_player
MOV ECX,DWORD PTR [FailPlyrNtSpwn]
MOV SpecificMsg,ECX
JMP SHORT pwrups_failed
pwrups_val_player:
TEST AX,AX
JNZ SHORT pwrups_spawned
pwrups_bad_list:
MOV ECX,DWORD PTR [FailBadSpawn]
MOV SpecificMsg,ECX
JMP SHORT pwrups_failed
pwrups_spawned:
MOV succeded,TRUE
}
SpecificMsg = L"Powerups spawned next to %s";
__asm pwrups_failed:
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
*Player0_index = 0;//change back to 0 when finished
VirtualProtect(
(LPVOID)Player0_index,
sizeof(WORD),
dwOldProtect,
&dwOldProtect);
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Copy_Vehicle(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
wchar_t *GenericMsg = NULL;
int pi2_to_find = 1; short player2_index;
	int arg_len;
	if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)) &&
		ParseCMDStrPlayers((++cmd_args += arg_len), &player2_index, pi2_to_find))
{
if (TempAdmin[exec_player_index] >= TempAdmin[player2_index])
{
HaloCE_lib::SPARTAN *player2_object = GetPlayerObj(player2_index);
if (player2_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player2_object);
if (vehicle_object) player2_object = vehicle_object;
float coords[3];
coords[0] = player2_object->m_World[0] + 1;
coords[1] = player2_object->m_World[1] + 1;
coords[2] = player2_object->m_World[2] + 0.5f;
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object)
{
__asm
{
;//__fastcall params
MOV EDX,DWORD PTR [vehicle_object]
MOV EDX,DWORD PTR [EDX]
LEA ECX,[coords]
CALL CreateObject
MOV succeded,TRUE
}
HaloSay(
L"%s's vehicle has been spawned next to %s",
exec_player_index,
players[player2_index].PlayerName0,
players[Player_Indexes[i]].PlayerName0);
}
else
SpecificMsg = FailPlyrNtInVeh;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
GenericMsg = FailPlyrNtSpwn;
}
else
GenericMsg = FailLowAdminLvl;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index, players[player2_index].PlayerName0);
return succeded;
}
BOOL __fastcall Player::Copy_Weapon(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
wchar_t *GenericMsg = NULL;
int pi2_to_find = 1; short player2_index;
	int arg_len;
	if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)) &&
		ParseCMDStrPlayers((++cmd_args += arg_len), &player2_index, pi2_to_find))
{
if (TempAdmin[exec_player_index] >= TempAdmin[player2_index])
{
HaloCE_lib::SPARTAN *player2_object = GetPlayerObj(player2_index);
if (player2_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player2_object);
if (vehicle_object) player2_object = vehicle_object;
float coords[3];
coords[0] = player2_object->m_World[0] + 1;
coords[1] = player2_object->m_World[1] + 1;
coords[2] = player2_object->m_World[2] + 0.5f;
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
short weapon_index = player_object->WeaponTag.Index;
//if player is in a vehicle, use vehicle's weapon
//doesn't work on vehicle weapons
//short veh_wep_index = player_object->VehicleWeaponTag.Index;
//if (veh_wep_index != -1) weapon_index = veh_wep_index;
DWORD weapon_obj = GetObj(weapon_index);
if (weapon_obj)
{
__asm
{
;//__fastcall params
MOV EDX,DWORD PTR [weapon_obj]
MOV EDX,DWORD PTR [EDX]
LEA ECX,[coords]
CALL CreateObject
MOV succeded,TRUE
}
HaloSay(
L"%s's weapon has been spawned next to %s",
exec_player_index,
players[player2_index].PlayerName0,
players[Player_Indexes[i]].PlayerName0);
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
GenericMsg = FailPlyrNtSpwn;
}
else
GenericMsg = FailLowAdminLvl;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg)
HaloSay(GenericMsg, exec_player_index, players[player2_index].PlayerName0);
return succeded;
}
BOOL __fastcall Player::Destroy_Objects_Mode(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL;
int enable = 0;//initialized in case parsing fails before it is assigned
if (!*cmd_args)
{
enable = DestroyObjsEnabled;
GenericMsg = L"Destroy objects mode is set at %u";
succeded = TRUE;
}
else if (CMDsLib::ParseStrInt(cmd_args, &enable))
{
DestroyObjsEnabled = enable;
GenericMsg = L"Destroy objects mode has been set to %u";
succeded = TRUE;
}
else
GenericMsg = FailInvalNum;
HaloSay(GenericMsg, exec_player_index, enable);
return succeded;
}
BOOL __fastcall Player::Destroy_Weapon(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
if (ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found))
{
for (int i = 0; i < pi_found; i++)
{
wchar_t *SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
HaloCE_lib::OBJECT_TAG WepObjTag = player_object->WeaponTag;
//use vehicle turret if valid tag
HaloCE_lib::OBJECT_TAG VehWepObjTag = player_object->VehicleWeaponTag;
if (VehWepObjTag.Tag != -1) WepObjTag = VehWepObjTag;
if (WepObjTag.Tag != -1)
{
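//the engine's DestroyObj routine takes the object tag in EAX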
__asm
{
MOV EAX,WepObjTag
CALL DWORD PTR [DestroyObj_func_address]
MOV succeded,TRUE
}
SpecificMsg = L"%s's weapon has been destroyed";
}
else
SpecificMsg = FailPlyrNoWep;
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
}
else
HaloSay(FailPlyrNtFnd, exec_player_index);
return succeded;
}
BOOL __fastcall Player::Say(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
short Player_Indexes[16];
int pi_found = 0;
wchar_t *msg_to = cmd_args, *GenericMsg = NULL;
if (*ServerType == HOST)
{
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
if (pi_found == (*Players_ptr)->NumOfItems)
{
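//every player matched, so broadcast once (player index -1 = everyone)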
HaloSay(++cmd_args, -1);
}
else
{
msg_to[cmd_args - msg_to] = 0;//add null terminator
wchar_t *sv_buffer = &HaloSay_server_buffer[SV_NAME_SIZE];
swprintf_s(
sv_buffer,
SV_BUFFER_SIZE,
L"[Private Message][from %s][to %s]: %s",
players[exec_player_index].PlayerName0,
msg_to,
++cmd_args);
CHAT_INFO chat_info;
chat_info.ChatType = Server;
chat_info.From_PlayerIndex = 0;
chat_info.msg_str = sv_buffer;
for (int i = 0; i < pi_found; i++)
{
ServerSay(chat_info, players[Player_Indexes[i]].PlayerChatIndex);
HaloSay(
L"Message sent to %s",
exec_player_index,
players[Player_Indexes[i]].PlayerName0);
}
}
succeded = TRUE;
}
else
GenericMsg = FailPlyrNtFnd;
}
else
GenericMsg = FailSvCmd;
if (GenericMsg) HaloSay(GenericMsg, exec_player_index);
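//OR in DO_NOT_SEND_MSG so the caller does not relay the original chat line publicly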
return succeded | DO_NOT_SEND_MSG;
}
BOOL __fastcall Player::ObjectScale(wchar_t *cmd_args, short exec_player_index)
{
BOOL succeded = FALSE;
wchar_t *GenericMsg = NULL, *SpecificMsg = NULL;
short Player_Indexes[16];
int pi_found = 0;
float scale;
int arg_len;
if ((arg_len = ParseCMDStrPlayers(cmd_args, Player_Indexes, pi_found)))
{
cmd_args += arg_len;
if (!*cmd_args)
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
succeded = TRUE;
SpecificMsg = L"%s's current object scale is %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
player_object ? player_object->obj_scale * 100.0f : 0.0f);//player_object is NULL when FailPlyrNtSpwn is set
}
}
}
else if (CMDsLib::ParseStrFloat(++cmd_args, &scale))
{
for (int i = 0; i < pi_found; i++)
{
SpecificMsg = NULL;
if (TempAdmin[exec_player_index] >= TempAdmin[Player_Indexes[i]])
{
HaloCE_lib::SPARTAN *player_object = GetPlayerObj(Player_Indexes[i]);
if (player_object)
{
//if player is in a vehicle, use vehicle's coords_or_vectors
HaloCE_lib::SPARTAN *vehicle_object = (HaloCE_lib::SPARTAN*)GetPlayerVehObj(player_object);
if (vehicle_object) player_object = vehicle_object;
player_object->obj_scale = scale;
succeded = TRUE;
SpecificMsg = L"%s's object scale has been changed to %.2f%%";
}
else
SpecificMsg = FailPlyrNtSpwn;
}
else
SpecificMsg = FailLowAdminLvl;
if (SpecificMsg)
{
HaloSay(
SpecificMsg,
exec_player_index,
players[Player_Indexes[i]].PlayerName0,
scale * 100.0f);
}
}
}
else
GenericMsg = FailInvalNum;
}
else
GenericMsg = FailPlyrNtFnd;
if (GenericMsg) HaloSay(GenericMsg, exec_player_index);
return succeded;
}<|fim▁end|>
|
SpecificMsg = FailLowAdminLvl;
|
<|file_name|>screenshot.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* eslint-env browser */
"use strict";
const {
TAKE_SCREENSHOT_START,
TAKE_SCREENSHOT_END,
} = require("./index");
const { getFormatStr } = require("../utils/l10n");
const { getToplevelWindow } = require("sdk/window/utils");
const { Task: { spawn } } = require("devtools/shared/task");
const e10s = require("../utils/e10s");
const Services = require("Services");
const CAMERA_AUDIO_URL = "resource://devtools/client/themes/audio/shutter.wav";
const animationFrame = () => new Promise(resolve => {
window.requestAnimationFrame(resolve);
});
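// Builds a localized, date/time-stamped default filename for the saved screenshot.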
function getFileName() {
let date = new Date();
let month = ("0" + (date.getMonth() + 1)).substr(-2);
let day = ("0" + date.getDate()).substr(-2);
let dateString = [date.getFullYear(), month, day].join("-");
let timeString = date.toTimeString().replace(/:/g, ".").split(" ")[0];
return getFormatStr("responsive.screenshotGeneratedFilename", dateString,
timeString);
}
function createScreenshotFor(node) {
let mm = node.frameLoader.messageManager;
return e10s.request(mm, "RequestScreenshot");
}
function saveToFile(data, filename) {
return spawn(function* () {
const chromeWindow = getToplevelWindow(window);
const chromeDocument = chromeWindow.document;
// append .png extension to filename if it doesn't exist
filename = filename.replace(/\.png$|$/i, ".png");
chromeWindow.saveURL(data, filename, null,
true, true,
chromeDocument.documentURIObject, chromeDocument);
});
}
<|fim▁hole|> cameraAudio.play();
}
node.animate({ opacity: [ 0, 1 ] }, 500);
}
module.exports = {
takeScreenshot() {
return function* (dispatch, getState) {
yield dispatch({ type: TAKE_SCREENSHOT_START });
// Wait for the next repaint to ensure the React components
// can be properly rendered after the action dispatched above
yield animationFrame();
let iframe = document.querySelector("iframe");
let data = yield createScreenshotFor(iframe);
simulateCameraEffects(iframe);
yield saveToFile(data, getFileName());
dispatch({ type: TAKE_SCREENSHOT_END });
};
}
};<|fim▁end|>
|
function simulateCameraEffects(node) {
if (Services.prefs.getBoolPref("devtools.screenshot.audio.enabled")) {
let cameraAudio = new window.Audio(CAMERA_AUDIO_URL);
|
<|file_name|>hook.go<|end_file_name|><|fim▁begin|>// Copyright 2012, 2013 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
// Package hook provides types that define the hooks known to the Uniter.
package hook
import (
"fmt"
"gopkg.in/juju/charm.v6-unstable/hooks"
"gopkg.in/juju/names.v2"
)
// TODO(fwereade): move these definitions to juju/charm/hooks.
const (
LeaderElected hooks.Kind = "leader-elected"
LeaderDeposed hooks.Kind = "leader-deposed"
LeaderSettingsChanged hooks.Kind = "leader-settings-changed"
)
// Info holds details required to execute a hook. Not all fields are
// relevant to all Kind values.
type Info struct {
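// Kind is the kind of hook this info describes.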
Kind hooks.Kind `yaml:"kind"`
// RelationId identifies the relation associated with the hook. It is
// only set when Kind indicates a relation hook.
RelationId int `yaml:"relation-id,omitempty"`
// RemoteUnit is the name of the unit that triggered the hook. It is only
// set when Kind indicates a relation hook other than relation-broken.
RemoteUnit string `yaml:"remote-unit,omitempty"`
// ChangeVersion identifies the most recent unit settings change
// associated with RemoteUnit. It is only set when RemoteUnit is set.
ChangeVersion int64 `yaml:"change-version,omitempty"`
// StorageId is the ID of the storage instance relevant to the hook.
StorageId string `yaml:"storage-id,omitempty"`
}
// Validate returns an error if the info is not valid.
func (hi Info) Validate() error {
switch hi.Kind {
case hooks.RelationJoined, hooks.RelationChanged, hooks.RelationDeparted:
if hi.RemoteUnit == "" {
return fmt.Errorf("%q hook requires a remote unit", hi.Kind)
}
fallthrough
case hooks.Install, hooks.Start, hooks.ConfigChanged, hooks.UpgradeCharm, hooks.Stop, hooks.RelationBroken,<|fim▁hole|> case hooks.StorageAttached, hooks.StorageDetaching:
if !names.IsValidStorage(hi.StorageId) {
return fmt.Errorf("invalid storage ID %q", hi.StorageId)
}
return nil
// TODO(fwereade): define these in charm/hooks...
case LeaderElected, LeaderDeposed, LeaderSettingsChanged:
return nil
}
return fmt.Errorf("unknown hook kind %q", hi.Kind)
}
// Committer is an interface that may be used to convey the fact that the
// specified hook has been successfully executed, and committed.
type Committer interface {
CommitHook(Info) error
}
// Validator is an interface that may be used to validate a hook execution
// request prior to executing it.
type Validator interface {
ValidateHook(Info) error
}<|fim▁end|>
|
hooks.CollectMetrics, hooks.MeterStatusChanged, hooks.UpdateStatus:
return nil
case hooks.Action:
return fmt.Errorf("hooks.Kind Action is deprecated")
|
<|file_name|>_color.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="scatter.unselected.textfont", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
role=kwargs.pop("role", "style"),<|fim▁hole|><|fim▁end|>
|
**kwargs
)
|
<|file_name|>dataSetToJS.js<|end_file_name|><|fim▁begin|>import explicitElementToString from './elementToString';
import * as util from './util';
/**
* converts an explicit dataSet to a javascript object
* @param dataSet
* @param options
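 * @returns {Object} plain object keyed by element tag, with string values where convertible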
*/
export default function explicitDataSetToJS (dataSet, options) {
if (dataSet === undefined) {
throw 'dicomParser.explicitDataSetToJS: missing required parameter dataSet';
}
options = options || {
omitPrivateAttibutes: true, // true if private elements should be omitted
maxElementLength: 128 // maximum element length to try and convert to string format
};
var result = {
};
for (var tag in dataSet.elements) {
var element = dataSet.elements[tag];
// skip this element if it a private element and our options specify that we should
if (options.omitPrivateAttibutes === true && util.isPrivateTag(tag)) {
continue;
}
if (element.items) {
// handle sequences
var sequenceItems = [];
for (var i = 0; i < element.items.length; i++) {
sequenceItems.push(explicitDataSetToJS(element.items[i].dataSet, options));
}
result[tag] = sequenceItems;
} else {
var asString;<|fim▁hole|> }
if (asString !== undefined) {
result[tag] = asString;
} else {
result[tag] = {
dataOffset: element.dataOffset,
length: element.length
};
}
}
}
return result;
}<|fim▁end|>
|
asString = undefined;
if (element.length < options.maxElementLength) {
asString = explicitElementToString(dataSet, element);
|
<|file_name|>zlb.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render_to_response
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from django.views.decorators.cache import never_cache
from django.http import HttpResponse, HttpResponseRedirect
from session_csrf import anonymous_csrf
from ..models import ZLB, ZLBVirtualServer, ZLBVirtualServerRule, ZLBVirtualServerProtection
from ..models import ZLBRule, ZLBProtection, Offender, ZLBVirtualServerPref
from ..forms import ZLBForm, VirtualServerConfirm
from BanHammer.blacklist.management import zeus
import BanHammer.blacklist.tasks as tasks
from BanHammer import settings
@anonymous_csrf
@never_cache
def index(request, zlb=None, action=None):
request.session['order_by'] = request.GET.get('order_by', 'hostname')
request.session['order'] = request.GET.get('order', 'asc')
order_by = request.session.get('order_by', 'address')
order = request.session.get('order', 'asc')
zlbs = ZLB.objects.all()
if order_by == 'created_date':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.created_date)
elif order_by == 'updated_date':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.updated_date)
elif order_by == 'name':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.name)
elif order_by == 'hostname':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.hostname)
elif order_by == 'datacenter':
zlbs = sorted(list(zlbs), key=lambda zlb: zlb.datacenter)
if order == 'desc':
zlbs.reverse()
data = {'zlbs': zlbs}
if action == 'update':
data['zlb'] = zlb
data['action'] = 'update'
data['testing_env'] = settings.TESTING_ENV
return render_to_response(
'zlb/index.html',
data,
context_instance = RequestContext(request)
)
@anonymous_csrf
def new(request):
if request.method == 'POST':<|fim▁hole|> if form.is_valid():
name = form.cleaned_data['name']
hostname = form.cleaned_data['hostname']
datacenter = form.cleaned_data['datacenter']
doc_url = form.cleaned_data['doc_url']
login = form.cleaned_data['login']
password = form.cleaned_data['password']
comment = form.cleaned_data['comment']
zlb = ZLB(
name=name,
hostname=hostname,
datacenter=datacenter,
doc_url=doc_url,
login=login,
password=password,
comment=comment,
)
zlb.save()
return HttpResponseRedirect('/zlbs')
else:
form = ZLBForm()
return render_to_response(
'zlb/new.html',
{'form': form},
context_instance = RequestContext(request)
)
@anonymous_csrf
def edit(request, id):
if request.method == 'POST':
form = ZLBForm(request.POST)
if form.is_valid():
zlb = ZLB.objects.get(id=id)
zlb.name = form.cleaned_data['name']
zlb.hostname = form.cleaned_data['hostname']
zlb.datacenter = form.cleaned_data['datacenter']
zlb.doc_url = form.cleaned_data['doc_url']
zlb.comment = form.cleaned_data['comment']
zlb.login = form.cleaned_data['login']
if form.cleaned_data['password']:
zlb.password = form.cleaned_data['password']
zlb.save()
return HttpResponseRedirect('/zlbs')
else:
initial = ZLB.objects.get(id=id)
initial = initial.__dict__
id = initial['id']
initial['password'] = ''
form = ZLBForm(initial)
return render_to_response(
'zlb/edit.html',
{'form': form, 'id': id},
context_instance = RequestContext(request)
)
@anonymous_csrf
def delete(request, id):
zlb = ZLB.objects.get(id=id)
zlb.delete()
return HttpResponseRedirect('/zlbs')
@anonymous_csrf
@never_cache
def show(request, id):
zlb = ZLB.objects.get(id=id)
if zlb.updating:
return render_to_response(
'zlb/updating.html',
{'zlb': zlb,},
context_instance = RequestContext(request)
)
vs = ZLBVirtualServer.objects.filter(zlb_id=zlb.id)
prefs_o = ZLBVirtualServerPref.objects.filter(zlb=zlb)
prefs = {}
for p in prefs_o:
prefs[p.vs_name] = p
pr = {}
rul = {}
return render_to_response(
'zlb/show.html',
{'zlb': zlb,
'prefs': prefs,
'vs': vs,
'testing_env': settings.TESTING_ENV,},
context_instance = RequestContext(request)
)
@anonymous_csrf
@never_cache
def update(request, id):
tasks.update_zlb.delay(id)
zlb = ZLB.objects.get(id=id)
return HttpResponseRedirect('/zlbs')
def _parse_addr(addresses):
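"""Resolve a comma-separated address list, mapping each entry (with optional /cidr) to a known Offender, or keeping it as a plain string."""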
addr_list = addresses.split(', ')
addresses = []
for addr in addr_list:
network = addr.split('/')
addr = network[0]
if len(network) == 2:
cidr = network[1]
else:
cidr = None
if cidr:
offender = Offender.objects.filter(address=addr, cidr=cidr)
else:
offender = Offender.objects.filter(address=addr)
if offender.count() != 0:
addresses.append(offender[0])
else:
addresses.append(addr)
return addresses
@anonymous_csrf
def index_protection(request, zlb_id):
zlb = ZLB.objects.get(id=zlb_id)
protections = ZLBProtection.objects.filter(zlb_id=zlb_id)
for p in protections:
p.allowed_addresses = _parse_addr(p.allowed_addresses)
p.banned_addresses = _parse_addr(p.banned_addresses)
p.virtual_servers = ZLBVirtualServerProtection.objects.filter(zlb_id=zlb_id, protection_id=p.id)
return render_to_response(
'zlb/protections.html',
{'zlb': zlb,
'protections': protections,},
context_instance = RequestContext(request)
)
@anonymous_csrf
def index_rules(request, zlb_id):
zlb = ZLB.objects.get(id=zlb_id)
rules = ZLBRule.objects.filter(zlb_id=zlb_id)
for rule in rules:
rule.virtual_servers = ZLBVirtualServerRule.objects.filter(zlb_id=zlb_id, rule_id=rule.id)
return render_to_response(
'zlb/rules.html',
{'zlb': zlb,
'rules': rules,},
context_instance = RequestContext(request)
)
@never_cache
@anonymous_csrf
def virtual_server(request, zlb_id, vs_id):
if request.method == 'POST':
form = VirtualServerConfirm(request.POST)
if form.is_valid():
confirm = form.cleaned_data['confirm']
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.filter(zlb_id=zlb_id,vs_name=vs.name)
if pref.count() == 0:
p = ZLBVirtualServerPref(
zlb_id=zlb_id,
vs_name=vs.name,
favorite=False,
confirm=confirm,
)
p.save()
else:
pref = pref[0]
pref.confirm = confirm
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
else:
form = VirtualServerConfirm()
zlb = ZLB.objects.get(id=zlb_id)
virtual_server = ZLBVirtualServer.objects.get(id=vs_id)
prefs = ZLBVirtualServerPref.objects.filter(zlb=zlb,vs_name=virtual_server.name)
rules = ZLBVirtualServerRule.objects.filter(virtualserver=virtual_server)
protections = ZLBVirtualServerProtection.objects.filter(virtualserver=virtual_server)
for p in protections:
p.protection.allowed_addresses = _parse_addr(p.protection.allowed_addresses)
p.protection.banned_addresses = _parse_addr(p.protection.banned_addresses)
return render_to_response(
'zlb/virtual_server.html',
{'zlb': zlb,
'virtual_server': virtual_server,
'prefs': prefs,
'rules': rules,
'protections': protections,
'form': form,},
context_instance = RequestContext(request)
)
@never_cache
@anonymous_csrf
def virtual_server_name(request, zlb_id, vs_name):
virtual_server_o = ZLBVirtualServer.objects.get(zlb_id=zlb_id, name=vs_name)
return virtual_server(request, zlb_id, virtual_server_o.id)
@anonymous_csrf
def virtual_server_favorite(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.filter(zlb_id=zlb_id,vs_name=vs.name)
if pref.count() == 0:
p = ZLBVirtualServerPref(
zlb_id=zlb_id,
vs_name=vs.name,
favorite=True,
)
p.save()
else:
pref = pref[0]
pref.favorite = True
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
@anonymous_csrf
def virtual_server_unfavorite(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.get(zlb_id=zlb_id,vs_name=vs.name)
pref.favorite = False
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))
@anonymous_csrf
def virtual_server_unconfirm(request, zlb_id, vs_id):
vs = ZLBVirtualServer.objects.get(id=vs_id)
pref = ZLBVirtualServerPref.objects.get(zlb_id=zlb_id,vs_name=vs.name)
pref.confirm = ''
pref.save()
return HttpResponseRedirect('/zlb/%s/virtual_server/%s' % (zlb_id, vs_id))<|fim▁end|>
|
form = ZLBForm(request.POST)
|
<|file_name|>stata.py<|end_file_name|><|fim▁begin|>"""
pygments.lexers.stata
~~~~~~~~~~~~~~~~~~~~~
Lexer for Stata
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
from pygments.lexers._stata_builtins import builtins_base, builtins_functions
__all__ = ['StataLexer']
class StataLexer(RegexLexer):
"""
For `Stata <http://www.stata.com/>`_ do files.
.. versionadded:: 2.2
"""
# Syntax based on
# - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
# - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
# - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
name = 'Stata'
aliases = ['stata', 'do']
filenames = ['*.do', '*.ado']
mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
include('comments'),
include('strings'),
include('macros'),<|fim▁hole|> (r'.', Text),
],
# Comments are a complicated beast in Stata because they can be
# nested and there are a few corner cases with that. See:
# - github.com/kylebarron/language-stata/issues/90
# - statalist.org/forums/forum/general-stata-discussion/general/1448244
'comments': [
(r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
(r'^\s*\*', Comment.Single, 'comments-star'),
(r'/\*', Comment.Multiline, 'comments-block'),
(r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
],
'comments-block': [
(r'/\*', Comment.Multiline, '#push'),
# this ends and restarts a comment block. but need to catch this so
# that it doesn\'t start _another_ level of comment blocks
(r'\*/\*', Comment.Multiline),
(r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
# Match anything else as a character inside the comment
(r'.', Comment.Multiline),
],
'comments-star': [
(r'///.*?\n', Comment.Single,
('#pop', 'comments-triple-slash')),
(r'(^//|(?<=\s)//)(?!/)', Comment.Single,
('#pop', 'comments-double-slash')),
(r'/\*', Comment.Multiline, 'comments-block'),
(r'.(?=\n)', Comment.Single, '#pop'),
(r'.', Comment.Single),
],
'comments-triple-slash': [
(r'\n', Comment.Special, '#pop'),
# A // breaks out of a comment for the rest of the line
(r'//.*?(?=\n)', Comment.Single, '#pop'),
(r'.', Comment.Special),
],
'comments-double-slash': [
(r'\n', Text, '#pop'),
(r'.', Comment.Single),
],
# `"compound string"' and regular "string"; note the former are
# nested.
'strings': [
(r'`"', String, 'string-compound'),
(r'(?<!`)"', String, 'string-regular'),
],
'string-compound': [
(r'`"', String, '#push'),
(r'"\'', String, '#pop'),
(r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
include('macros'),
(r'.', String)
],
'string-regular': [
(r'(")(?!\')|(?=\n)', String, '#pop'),
(r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
include('macros'),
(r'.', String)
],
# A local is usually
# `\w{0,31}'
# `:extended macro'
# `=expression'
# `[rsen](results)'
# `(++--)scalar(++--)'
#
# However, there are all sorts of weird rules wrt edge
# cases. Instead of writing 27 exceptions, anything inside
# `' is a local.
#
# A global is more restricted, so we do follow rules. Note only
# locals explicitly enclosed ${} can be nested.
'macros': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
],
'macro-local': [
(r'`', Name.Variable, '#push'),
(r"'", Name.Variable, '#pop'),
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'.', Name.Variable), # fallback
],
'macro-global-nested': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
(r'\}', Name.Variable.Global, '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
(r'\w', Name.Variable.Global), # fallback
default('#pop'),
],
'macro-global-name': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
(r'`', Name.Variable, 'macro-local', '#pop'),
(r'\w{1,32}', Name.Variable.Global, '#pop'),
],
# Built in functions and statements
'keywords': [
(words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
Name.Function),
(words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
Keyword),
],
# http://www.stata.com/help.cgi?operators
'operators': [
(r'-|==|<=|>=|<|>|&|!=', Operator),
(r'\*|\+|\^|/|!|~|==|~=', Operator)
],
# Stata numbers
'numbers': [
# decimal number
(r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
Number),
],
# Stata formats
'format': [
(r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
(r'%(21x|16H|16L|8H|8L)', Name.Other),
(r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
(r'%[-~]?\d{1,4}s', Name.Other),
]
}<|fim▁end|>
|
include('numbers'),
include('keywords'),
include('operators'),
include('format'),
|
<|file_name|>wxsimagetreeeditordlg.cpp<|end_file_name|><|fim▁begin|>/** \file wxsimagetreeeditordlg.cpp
*
* This file is part of wxSmith plugin for Code::Blocks Studio
* Copyright (C) 2010 Gary Harris
*
* wxSmith is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* wxSmith is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with wxSmith. If not, see <http://www.gnu.org/licenses/>.
*
* This code was taken from the wxSmithImage plug-in, copyright Ron Collins
* and released under the GPL.
*
*/
#include "wxsimagetreeeditordlg.h"
//(*InternalHeaders(wxsImageTreeEditDialog)
#include <wx/font.h>
#include <wx/intl.h>
#include <wx/string.h>
//*)
#include <wx/msgdlg.h>
#include "../properties/wxsimagelisteditordlg.h"
//(*IdInit(wxsImageTreeEditorDlg)
const long wxsImageTreeEditorDlg::ID_STATICTEXT1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_TREECTRL1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_IMAGEBUTTON1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_IMAGEBUTTON2 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT3 = wxNewId();
const long wxsImageTreeEditorDlg::ID_IMAGEBUTTON3 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT4 = wxNewId();
const long wxsImageTreeEditorDlg::ID_IMAGEBUTTON4 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT5 = wxNewId();
const long wxsImageTreeEditorDlg::ID_BUTTON3 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT6 = wxNewId();
const long wxsImageTreeEditorDlg::ID_CHECKBOX1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_IMAGEBUTTON5 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT11 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT12 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT13 = wxNewId();
const long wxsImageTreeEditorDlg::ID_COMBOBOX1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT7 = wxNewId();
const long wxsImageTreeEditorDlg::ID_COMBOBOX2 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT8 = wxNewId();
const long wxsImageTreeEditorDlg::ID_COMBOBOX3 = wxNewId();<|fim▁hole|>const long wxsImageTreeEditorDlg::ID_BUTTON1 = wxNewId();
const long wxsImageTreeEditorDlg::ID_BUTTON2 = wxNewId();
//*)
BEGIN_EVENT_TABLE(wxsImageTreeEditorDlg, wxDialog)
//(*EventTable(wxsImageTreeEditorDlg)
//*)
END_EVENT_TABLE()
wxsImageTreeEditorDlg::wxsImageTreeEditorDlg(wxWindow *parent, wxWindowID id, const wxPoint &pos, const wxSize &size)
{
//(*Initialize(wxsImageTreeEditorDlg)
wxGridSizer* GridSizer1;
wxBoxSizer* BoxSizer3;
wxBoxSizer* BoxSizer10;
wxBoxSizer* BoxSizer7;
wxBoxSizer* BoxSizer11;
wxBoxSizer* BoxSizer13;
wxBoxSizer* BoxSizer2;
wxBoxSizer* BoxSizer9;
wxBoxSizer* BoxSizer4;
wxBoxSizer* BoxSizer8;
wxBoxSizer* BoxSizer1;
wxBoxSizer* BoxSizer12;
wxBoxSizer* BoxSizer6;
wxBoxSizer* BoxSizer5;
Create(parent, wxID_ANY, _("Tree Item Editor"), wxDefaultPosition, wxDefaultSize, wxDEFAULT_DIALOG_STYLE, _T("wxID_ANY"));
wxFont thisFont(8,wxSWISS,wxFONTSTYLE_NORMAL,wxNORMAL,false,_T("Arial"),wxFONTENCODING_DEFAULT);
SetFont(thisFont);
BoxSizer1 = new wxBoxSizer(wxVERTICAL);
BoxSizer2 = new wxBoxSizer(wxHORIZONTAL);
StaticText1 = new wxStaticText(this, ID_STATICTEXT1, _("Edit Tree Items"), wxPoint(0,0), wxSize(400,20), wxST_NO_AUTORESIZE|wxALIGN_CENTRE, _T("ID_STATICTEXT1"));
wxFont StaticText1Font(10,wxSWISS,wxFONTSTYLE_NORMAL,wxBOLD,false,_T("Arial"),wxFONTENCODING_DEFAULT);
StaticText1->SetFont(StaticText1Font);
BoxSizer2->Add(StaticText1, 1, wxEXPAND|wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL, 0);
BoxSizer1->Add(BoxSizer2, 0, wxEXPAND|wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL, 0);
BoxSizer3 = new wxBoxSizer(wxHORIZONTAL);
BoxSizer5 = new wxBoxSizer(wxHORIZONTAL);
StaticBoxSizer1 = new wxStaticBoxSizer(wxHORIZONTAL, this, _("tree-name"));
Tree1 = new wxTreeCtrl(this, ID_TREECTRL1, wxPoint(2,36), wxSize(246,359), wxTR_EDIT_LABELS|wxTR_DEFAULT_STYLE, wxDefaultValidator, _T("ID_TREECTRL1"));
StaticBoxSizer1->Add(Tree1, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer5->Add(StaticBoxSizer1, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer3->Add(BoxSizer5, 0, wxALL|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 0);
BoxSizer6 = new wxBoxSizer(wxHORIZONTAL);
StaticBoxSizer2 = new wxStaticBoxSizer(wxVERTICAL, this, _("Attributes"));
BoxSizer7 = new wxBoxSizer(wxHORIZONTAL);
bAddItem = new wxBitmapButton(this, ID_IMAGEBUTTON1, wxNullBitmap, wxPoint(256,36), wxSize(24,23), wxBU_AUTODRAW, wxDefaultValidator, _T("ID_IMAGEBUTTON1"));
bAddItem->SetToolTip(_("Add A New Item"));
BoxSizer7->Add(bAddItem, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText2 = new wxStaticText(this, wxID_ANY, _("Add Item"), wxPoint(290,40), wxDefaultSize, 0, _T("wxID_ANY"));
BoxSizer7->Add(StaticText2, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer7, 0, wxEXPAND|wxALIGN_LEFT|wxALIGN_TOP, 0);
BoxSizer8 = new wxBoxSizer(wxHORIZONTAL);
bAddSubItem = new wxBitmapButton(this, ID_IMAGEBUTTON2, wxNullBitmap, wxPoint(256,66), wxSize(24,23), wxBU_AUTODRAW, wxDefaultValidator, _T("ID_IMAGEBUTTON2"));
bAddSubItem->SetToolTip(_("Add A New Child"));
BoxSizer8->Add(bAddSubItem, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText3 = new wxStaticText(this, ID_STATICTEXT3, _("Add Sub-Item"), wxPoint(290,70), wxDefaultSize, 0, _T("ID_STATICTEXT3"));
BoxSizer8->Add(StaticText3, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer8, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer9 = new wxBoxSizer(wxHORIZONTAL);
bDelItem = new wxBitmapButton(this, ID_IMAGEBUTTON3, wxNullBitmap, wxPoint(256,96), wxSize(24,23), wxBU_AUTODRAW, wxDefaultValidator, _T("ID_IMAGEBUTTON3"));
bDelItem->SetToolTip(_("Delete Current Item"));
BoxSizer9->Add(bDelItem, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText4 = new wxStaticText(this, ID_STATICTEXT4, _("Delete Current Item"), wxPoint(290,100), wxDefaultSize, 0, _T("ID_STATICTEXT4"));
BoxSizer9->Add(StaticText4, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer9, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer10 = new wxBoxSizer(wxHORIZONTAL);
bDelAllItems = new wxBitmapButton(this, ID_IMAGEBUTTON4, wxNullBitmap, wxPoint(256,126), wxSize(24,23), wxBU_AUTODRAW, wxDefaultValidator, _T("ID_IMAGEBUTTON4"));
bDelAllItems->SetToolTip(_("Delete All Items"));
BoxSizer10->Add(bDelAllItems, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText5 = new wxStaticText(this, ID_STATICTEXT5, _("Delete All Items"), wxPoint(290,130), wxDefaultSize, 0, _T("ID_STATICTEXT5"));
BoxSizer10->Add(StaticText5, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer10, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer11 = new wxBoxSizer(wxHORIZONTAL);
bItemColor = new wxButton(this, ID_BUTTON3, _("C"), wxPoint(256,156), wxSize(24,24), 0, wxDefaultValidator, _T("ID_BUTTON3"));
wxFont bItemColorFont(10,wxSWISS,wxFONTSTYLE_NORMAL,wxBOLD,false,_T("Arial Black"),wxFONTENCODING_DEFAULT);
bItemColor->SetFont(bItemColorFont);
bItemColor->SetToolTip(_("Set Item Text Color"));
BoxSizer11->Add(bItemColor, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText6 = new wxStaticText(this, ID_STATICTEXT6, _("Set Item Text Color"), wxPoint(290,160), wxDefaultSize, 0, _T("ID_STATICTEXT6"));
BoxSizer11->Add(StaticText6, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer11, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer12 = new wxBoxSizer(wxHORIZONTAL);
cxItemBold = new wxCheckBox(this, ID_CHECKBOX1, _(" Set Item Text Bold"), wxPoint(262,192), wxDefaultSize, 0, wxDefaultValidator, _T("ID_CHECKBOX1"));
cxItemBold->SetValue(false);
cxItemBold->SetToolTip(_("Set Item Text Bold"));
BoxSizer12->Add(cxItemBold, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer12, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer13 = new wxBoxSizer(wxHORIZONTAL);
bEditItem = new wxBitmapButton(this, ID_IMAGEBUTTON5, wxNullBitmap, wxPoint(256,216), wxSize(24,23), wxBU_AUTODRAW, wxDefaultValidator, _T("ID_IMAGEBUTTON5"));
bEditItem->SetToolTip(_("Start Editor On Current Item"));
BoxSizer13->Add(bEditItem, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
StaticText11 = new wxStaticText(this, ID_STATICTEXT11, _("Edit Current Item"), wxPoint(290,220), wxDefaultSize, 0, _T("ID_STATICTEXT11"));
BoxSizer13->Add(StaticText11, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticBoxSizer2->Add(BoxSizer13, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
GridSizer1 = new wxGridSizer(5, 2, 0, 0);
StaticText12 = new wxStaticText(this, ID_STATICTEXT12, _("Image-List"), wxPoint(256,272), wxDefaultSize, 0, _T("ID_STATICTEXT12"));
GridSizer1->Add(StaticText12, 0, wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
StaticText13 = new wxStaticText(this, ID_STATICTEXT13, _("Label"), wxPoint(310,272), wxSize(82,14), wxST_NO_AUTORESIZE, _T("ID_STATICTEXT13"));
StaticText13->SetForegroundColour(wxColour(0,0,255));
GridSizer1->Add(StaticText13, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5);
cbNormal = new wxBitmapComboBox(this, ID_COMBOBOX1, wxEmptyString, wxPoint(256,296), wxSize(48,22), 0, NULL, wxCB_READONLY, wxDefaultValidator, _T("ID_COMBOBOX1"));
GridSizer1->Add(cbNormal, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 3);
StaticText7 = new wxStaticText(this, ID_STATICTEXT7, _("Normal Image"), wxPoint(310,300), wxDefaultSize, 0, _T("ID_STATICTEXT7"));
GridSizer1->Add(StaticText7, 0, wxTOP|wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
cbSelected = new wxBitmapComboBox(this, ID_COMBOBOX2, wxEmptyString, wxPoint(256,326), wxSize(48,22), 0, NULL, wxCB_READONLY, wxDefaultValidator, _T("ID_COMBOBOX2"));
GridSizer1->Add(cbSelected, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
StaticText8 = new wxStaticText(this, ID_STATICTEXT8, _("Selected Image"), wxPoint(310,330), wxDefaultSize, 0, _T("ID_STATICTEXT8"));
GridSizer1->Add(StaticText8, 0, wxTOP|wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
cbExpanded = new wxBitmapComboBox(this, ID_COMBOBOX3, wxEmptyString, wxPoint(256,356), wxSize(48,22), 0, NULL, wxCB_READONLY, wxDefaultValidator, _T("ID_COMBOBOX3"));
GridSizer1->Add(cbExpanded, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
StaticText9 = new wxStaticText(this, ID_STATICTEXT9, _("Expanded Image"), wxPoint(310,360), wxDefaultSize, 0, _T("ID_STATICTEXT9"));
GridSizer1->Add(StaticText9, 0, wxTOP|wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
cbSelExpanded = new wxBitmapComboBox(this, ID_COMBOBOX4, wxEmptyString, wxPoint(256,386), wxSize(48,22), 0, NULL, wxCB_READONLY, wxDefaultValidator, _T("ID_COMBOBOX4"));
GridSizer1->Add(cbSelExpanded, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 3);
StaticText10 = new wxStaticText(this, ID_STATICTEXT10, _("Sel+Exp Image"), wxPoint(310,390), wxDefaultSize, 0, _T("ID_STATICTEXT10"));
GridSizer1->Add(StaticText10, 0, wxTOP|wxLEFT|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 3);
StaticBoxSizer2->Add(GridSizer1, 0, wxTOP|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer6->Add(StaticBoxSizer2, 0, wxLEFT|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer3->Add(BoxSizer6, 0, wxALL|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 0);
BoxSizer1->Add(BoxSizer3, 0, wxALL|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 0);
BoxSizer4 = new wxBoxSizer(wxHORIZONTAL);
bOK = new wxButton(this, ID_BUTTON1, _("OK"), wxPoint(48,440), wxDefaultSize, 0, wxDefaultValidator, _T("ID_BUTTON1"));
bOK->SetDefault();
BoxSizer4->Add(bOK, 0, wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer4->Add(-1,-1,1, wxLEFT|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
bCancel = new wxButton(this, ID_BUTTON2, _("Cancel"), wxPoint(280,440), wxDefaultSize, 0, wxDefaultValidator, _T("ID_BUTTON2"));
BoxSizer4->Add(bCancel, 0, wxLEFT|wxALIGN_LEFT|wxALIGN_BOTTOM, 5);
BoxSizer1->Add(BoxSizer4, 0, wxTOP|wxEXPAND|wxALIGN_LEFT|wxALIGN_BOTTOM, 0);
SetSizer(BoxSizer1);
static const char *ImageList1_0_XPM[] = {
"16 16 3 1",
". c Black",
"X c #00C000",
"_ c None",
"________......._",
"________.XXXXX._",
"___..___.XXXXX._",
"___..___.XXXXX._",
"_......_.XXXXX._",
"_......_.XXXXX._",
"___..___.XXXXX._",
"___..___.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XX.XX._",
"________.X._.X._",
"________..___.._",
"................"
};
static const char *ImageList1_1_XPM[] = {
"16 16 4 1",
"o c Black",
". c #000080",
"X c #0000FF",
"_ c None",
"________......._",
"________.XXXXX._",
"___oo___.XXXXX._",
"___oo___.XXXXX._",
"_oooooo_.XXXXX._",
"_oooooo_.XXXXX._",
"___oo___.XXXXX._",
"___oo___.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XX.XX._",
"________.X._.X._",
"________..___.._",
"oooooooooooooooo"
};
static const char *ImageList1_2_XPM[] = {
"16 16 3 1",
". c Black",
"_ c None",
"X c #FF4040",
"________......._",
"________.XXXXX._",
"__.___._.XXXXX._",
"__.._.._.XXXXX._",
"___...__.XXXXX._",
"____.___.XXXXX._",
"___...__.XXXXX._",
"__.._.._.XXXXX._",
"__.___._.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XXXXX._",
"________.XX.XX._",
"________.X._.X._",
"________..___.._",
"................"
};
static const char *ImageList1_3_XPM[] = {
"16 16 22 1",
"4 c Black",
"3 c #A5AEBD",
"= c #5478B4",
"1 c #95A3BB",
"O c #9AA7BC",
": c #758EB7",
"$ c #6986B6",
"# c #4971B2",
"* c #8A9CBA",
"X c #8598B9",
"o c #ABB2BE",
"; c #7F95B9",
"- c #4E74B3",
"2 c #A0ABBC",
"+ c #6F8AB7",
"_ c None",
". c #B5B9BF",
"@ c #3E69B1",
"< c #90A0BA",
"> c #6483B5",
", c #5A7BB4",
"& c #5F7FB5",
"________________",
"____.Xo______OO_",
"____+@#.____$@&_",
"____*@@X__.=@=o_",
"_____-@-_.=@=.__",
"_____;@@X=@=.___",
"_____.#@@@$.____",
"______:@@>______",
"_____:@@@+______",
"___.,@#&@@._____",
"__o=@=oO@@<_____",
"_1#@=._.@@-_____",
"*@@$____>@@2____",
":#*_____3#,.____",
"________________",
"4444444444444444"
};
static const char *ImageList1_4_XPM[] = {
"16 16 2 1",
". c Black",
"_ c None",
"________________",
"______..________",
"______..._______",
"_____...._______",
"_____._...______",
"____.._...______",
"____.___..______",
"___..___..._____",
"___._____.._____",
"___.........____",
"__.._____...____",
"__._______...___",
"_.._______...___",
"_....___......._",
"________________",
"................"
};
ImageList1 = new wxImageList(16, 16, 6);
ImageList1->Add(wxBitmap(ImageList1_0_XPM));
ImageList1->Add(wxBitmap(ImageList1_1_XPM));
ImageList1->Add(wxBitmap(ImageList1_2_XPM));
ImageList1->Add(wxBitmap(ImageList1_3_XPM));
ImageList1->Add(wxBitmap(ImageList1_4_XPM));
ColourDialog1 = new wxColourDialog(this);
BoxSizer1->Fit(this);
BoxSizer1->SetSizeHints(this);
Connect(ID_TREECTRL1,wxEVT_COMMAND_TREE_SEL_CHANGED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnTreeCtrl1SelectionChanged);
// Set the bitmaps for bAddItem.
bAddItem->SetBitmapLabel(ImageList1->GetBitmap(0));
Connect(ID_IMAGEBUTTON1,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbAddItemClick);
// Set the bitmaps for bAddSubItem.
bAddSubItem->SetBitmapLabel(ImageList1->GetBitmap(1));
Connect(ID_IMAGEBUTTON2,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbAddSubItemClick);
// Set the bitmaps for bDelItem.
bDelItem->SetBitmapLabel(ImageList1->GetBitmap(2));
Connect(ID_IMAGEBUTTON3,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbDelItemClick);
// Set the bitmaps for bDelAllItems.
bDelAllItems->SetBitmapLabel(ImageList1->GetBitmap(3));
Connect(ID_IMAGEBUTTON4,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbDelAllItemsClick);
Connect(ID_BUTTON3,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbItemColorClick);
Connect(ID_CHECKBOX1,wxEVT_COMMAND_CHECKBOX_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OncxItemBoldClick);
// Set the bitmaps for bEditItem.
bEditItem->SetBitmapLabel(ImageList1->GetBitmap(4));
Connect(ID_IMAGEBUTTON5,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbEditItemClick);
Connect(ID_COMBOBOX1,wxEVT_COMMAND_COMBOBOX_SELECTED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OncbNormalSelect);
Connect(ID_COMBOBOX2,wxEVT_COMMAND_COMBOBOX_SELECTED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OncbSelectedSelect);
Connect(ID_COMBOBOX3,wxEVT_COMMAND_COMBOBOX_SELECTED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OncbExpandedSelect);
Connect(ID_COMBOBOX4,wxEVT_COMMAND_COMBOBOX_SELECTED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OncbSelExpandedSelect);
Connect(ID_BUTTON1,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbOKClick);
Connect(ID_BUTTON2,wxEVT_COMMAND_BUTTON_CLICKED,(wxObjectEventFunction)&wxsImageTreeEditorDlg::OnbCancelClick);
//*)
}
wxsImageTreeEditorDlg::~wxsImageTreeEditorDlg()
{
//(*Destroy(wxsImageTreeEditorDlg)
//*)
}
/*! \brief Run the dialogue.
*
* \param aItems wxArrayString&
* \return bool
*
*/
bool wxsImageTreeEditorDlg::Execute(wxArrayString &aItems)
{
int i, n;
int jv, j1, j2, j3, j4;
wxColor jc;
bool jb;
wxString jt;
wxTreeItemId jp[32];
wxString ss, tt;
wxTreeItemId root;
wxTreeItemId item;
wxBitmap bmp;
wxsImageList *ilist;
// get name of combo-box and image-list
n = aItems.GetCount();
m_sTreeName = _("<unknown>");
m_sImageName = _("<none>");
if(n >= 1){
m_sTreeName = aItems.Item(0);
}
if(n >= 2){
m_sImageName = aItems.Item(1);
}
// show the names
ss = _("Tree Control: ") + m_sTreeName;
StaticBoxSizer1->GetStaticBox()->SetLabel(ss);
ss = m_sImageName;
StaticText13->SetLabel(ss);
// clear old junk
Tree1->DeleteAllItems();
// a valid image-list given?
m_imageList.RemoveAll();
ilist = (wxsImageList *) wxsImageListEditorDlg::FindTool(NULL, m_sImageName);
if(ilist != NULL){
ilist->GetImageList(m_imageList);
}
SetImageList(m_imageList);
// add all the new items
n = aItems.GetCount();
for(i = 2; i < n; i++){
ss = aItems.Item(i);
ParseTreeItem(ss, jv, jc, jb, j1, j2, j3, j4, jt);
if(jv == 0){
item = Tree1->AddRoot(jt);
}
else{
item = Tree1->AppendItem(jp[jv-1], jt);
}
jp[jv] = item;
if(jc.IsOk()){
Tree1->SetItemTextColour(item, jc);
}
Tree1->SetItemBold(item, jb);
Tree1->SetItemImage(item, j1, wxTreeItemIcon_Normal);
Tree1->SetItemImage(item, j2, wxTreeItemIcon_Selected);
Tree1->SetItemImage(item, j3, wxTreeItemIcon_Expanded);
Tree1->SetItemImage(item, j4, wxTreeItemIcon_SelectedExpanded);
}
Tree1->ExpandAll();
// show the dialog and wait for a response
n = ShowModal();
// save all new stuff?
if(n == wxOK){
// must save combo-box name and image-list name
aItems.Clear();
aItems.Add(m_sTreeName);
aItems.Add(m_sImageName);
// save the root item and all it's children
// this effectively saves every item in the tree
// I wanted to use a simple loop here, but it works MUCH easier with a recursive function
root = Tree1->GetRootItem();
if(root.IsOk()){
EncodeTreeItems(root, 0, aItems);
}
}
// done
return (n == wxOK);
}
/*! \brief Set the image list.
*
* \param inImageList wxImageList&
* \return void
*
*/
void wxsImageTreeEditorDlg::SetImageList(wxImageList &inImageList)
{
int i, n;
wxString ss, tt;
wxBitmap bmp;
// save the image list in the tree control
Tree1->SetImageList(&inImageList);
// valid list given?
n = inImageList.GetImageCount();
if(n <= 0){
cbNormal->Enable(false);
cbSelected->Enable(false);
cbExpanded->Enable(false);
cbSelExpanded->Enable(false);
}
else {
cbNormal->Enable(true);
cbSelected->Enable(true);
cbExpanded->Enable(true);
cbSelExpanded->Enable(true);
}
// set images in the drop-down lists
cbNormal->Clear();
cbSelected->Clear();
cbExpanded->Clear();
cbSelExpanded->Clear();
ss = _("<none>");
cbNormal->Append(ss);
cbSelected->Append(ss);
cbExpanded->Append(ss);
cbSelExpanded->Append(ss);
for(i = 0; i < n; i++){
ss.Printf(wxT("%d"), i);
bmp = inImageList.GetBitmap(i);
cbNormal->Append(ss, bmp);
cbSelected->Append(ss, bmp);
cbExpanded->Append(ss, bmp);
cbSelExpanded->Append(ss, bmp);
}
// default selections
cbNormal->SetSelection(0);
cbSelected->SetSelection(0);
cbExpanded->SetSelection(0);
cbSelExpanded->SetSelection(0);
}
/*! \brief Add a new item as a sibling of the current item.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbAddItemClick(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
// how many items?
n = Tree1->GetCount();
// and current selection
current = Tree1->GetSelection();
// add a root item?
if(n <= 0){
current.Unset();
AddItem(current);
}
// no current item?
else if(! current.IsOk()){
current = Tree1->GetRootItem();
AddItem(current);
}
// else a sibling
else {
current = Tree1->GetItemParent(current);
AddItem(current);
}
}
/*! \brief Add a new item as a child of the current item.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbAddSubItemClick(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
// how many items?
n = Tree1->GetCount();
// and current selection
current = Tree1->GetSelection();
// add a root item?
if(n <= 0){
current.Unset();
AddItem(current);
}
// no current item?
else if(! current.IsOk()){
current = Tree1->GetRootItem();
AddItem(current);
}
// else a child
else {
AddItem(current);
}
// make sure it is expanded
Tree1->Expand(current);
}
/*! \brief Add a new item to the tree.
*
* \param inParent wxTreeItemId&
* \return void
*
*/
void wxsImageTreeEditorDlg::AddItem(wxTreeItemId &inParent){
int n;
wxString ss, tt;
wxTreeItemId parent, current;
wxColour cc;
bool b;
// how many items?
n = Tree1->GetCount();
// add a root item?
if(n <= 0){
ss = _("root");
current = Tree1->AddRoot(ss);
}
// bad parent?
else if(! inParent.IsOk()){
ss.Printf(_("item %d"), n);
parent = Tree1->GetRootItem();
current = Tree1->AppendItem(parent, ss);
}
// else a child of whatever
else {
ss.Printf(_("item %d"), n);
current = Tree1->AppendItem(inParent, ss);
}
// if it failed, skip the rest of this
if(! current.IsOk()){
return;
}
// set text colour
cc = bItemColor->GetForegroundColour();
Tree1->SetItemTextColour(current, cc);
// bold or plain
b = cxItemBold->GetValue();
Tree1->SetItemBold(current, b);
// the images
n = cbNormal->GetSelection() - 1;
if(n >= 0){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Normal);
}
n = cbSelected->GetSelection() - 1;
if(n >= 0){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Selected);
}
n = cbExpanded->GetSelection() - 1;
if(n >= 0){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Expanded);
}
n = cbSelExpanded->GetSelection() - 1;
if(n >= 0){
Tree1->SetItemImage(current, n, wxTreeItemIcon_SelectedExpanded);
}
// redraw the whole thing
Tree1->Refresh();
}
/*! \brief Delete a tree item.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbDelItemClick(wxCommandEvent &event)
{
wxTreeItemId current;
// current selection
current = Tree1->GetSelection();
// delete it
if(current.IsOk()){
Tree1->Delete(current);
}
}
/*! \brief Delete all tree items.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbDelAllItemsClick(wxCommandEvent &event)
{
int n;
wxString ss;
n = wxMessageBox(_("Delete ALL Items In Tree?"), _("Clear"), wxYES_NO);
if(n == wxYES){
Tree1->DeleteAllItems();
}
}
/*! \brief Select the item's colour.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbItemColorClick(wxCommandEvent &event)
{
int n;
wxColourData cd;
wxColour cc;
wxTreeItemId current;
// ask user for a new color
n = ColourDialog1->ShowModal();
if(n != wxID_OK){
return;
}
// get the color
cd = ColourDialog1->GetColourData();
cc = cd.GetColour();
// set the button text
bItemColor->SetForegroundColour(cc);
// and the current item
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemTextColour(current, cc);
}
}
/*! \brief Make the item text bold.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OncxItemBoldClick(wxCommandEvent &event)
{
bool b;
wxTreeItemId current;
// get checkbox value
b = cxItemBold->GetValue();
// and set the current item
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemBold(current, b);
}
}
/*! \brief Edit an item.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbEditItemClick(wxCommandEvent &event)
{
wxTreeItemId current;
// current selection
current = Tree1->GetSelection();
// delete it
if(current.IsOk()){
Tree1->EditLabel(current);
}
}
/*! \brief Select the normal state image.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OncbNormalSelect(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
n = cbNormal->GetSelection();
n -= 1;
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Normal);
}
}
/*! \brief Select the selected state image.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OncbSelectedSelect(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
n = cbSelected->GetSelection();
n -= 1;
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Selected);
}
}
/*! \brief Select the expanded state image.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OncbExpandedSelect(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
n = cbExpanded->GetSelection();
n -= 1;
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemImage(current, n, wxTreeItemIcon_Expanded);
}
}
/*! \brief Select the selected and expanded state image.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OncbSelExpandedSelect(wxCommandEvent &event)
{
int n;
wxTreeItemId current;
n = cbSelExpanded->GetSelection();
n -= 1;
current = Tree1->GetSelection();
if(current.IsOk()){
Tree1->SetItemImage(current, n, wxTreeItemIcon_SelectedExpanded);
}
}
/*! \brief The tree item selection was changed.
*
* \param event wxTreeEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnTreeCtrl1SelectionChanged(wxTreeEvent &event)
{
int n;
wxTreeItemId current;
wxColour cc;
bool b;
// get current item
current = Tree1->GetSelection();
if(! current.IsOk()){
return;
}
// current text colour
cc = Tree1->GetItemTextColour(current);
bItemColor->SetForegroundColour(cc);
// bold or plain
b = Tree1->IsBold(current);
cxItemBold->SetValue(b);
// image indices
n = Tree1->GetItemImage(current, wxTreeItemIcon_Normal);
n += 1;
cbNormal->SetSelection(n);
n = Tree1->GetItemImage(current, wxTreeItemIcon_Selected);
n += 1;
cbSelected->SetSelection(n);
n = Tree1->GetItemImage(current, wxTreeItemIcon_Expanded);
n += 1;
cbExpanded->SetSelection(n);
n = Tree1->GetItemImage(current, wxTreeItemIcon_SelectedExpanded);
n += 1;
cbSelExpanded->SetSelection(n);
}
/*! \brief Parse tree item text.
*
* \param aSource wxString
* \param outLevel int&
* \param outColour wxColour&
* \param outBold bool&
* \param outImage1 int&
* \param outImage2 int&
* \param outImage3 int&
* \param outImage4 int&
* \param outText wxString&
* \return void
*
*/
void wxsImageTreeEditorDlg::ParseTreeItem(wxString aSource, int &outLevel, wxColour &outColour, bool &outBold, int &outImage1, int &outImage2, int &outImage3, int &outImage4, wxString &outText)
{
int i, n;
long ll;
wxString ss, tt;
// working copy
ss = aSource;
// the depth level
outLevel = 1;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)) outLevel = ll;
}
// the color
outColour.Set(wxT("?"));
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
outColour.Set(tt);
}
// bold or normal text
n = 0;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)){
n = ll;
}
}
outBold = (n != 0);
// 4 image indices
outImage1 = -1;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)){
outImage1 = ll;
}
}
outImage2 = -1;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)){
outImage2 = ll;
}
}
outImage3 = -1;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)){
outImage3 = ll;
}
}
outImage4 = -1;
i = ss.Find(wxT(","));
if(i != wxNOT_FOUND){
tt = ss.Left(i);
ss.erase(0, i + 1);
if(tt.ToLong(&ll)){
outImage4 = ll;
}
}
// everything else is the text
ss.Trim(true);
ss.Trim(false);
outText = ss;
}
/*! \brief Encode tree item text.
*
* \param inParent wxTreeItemId
* \param inLevel int
* \param outList wxArrayString&
* \return void
*
*/
void wxsImageTreeEditorDlg::EncodeTreeItems(wxTreeItemId inParent, int inLevel, wxArrayString &outList)
{
int n;
wxColour cc;
wxString ss, tt;
wxTreeItemId child;
wxTreeItemIdValue cookie;
// nothing yet
ss = wxEmptyString;
// start with this item
tt.Printf(wxT("%d,"), inLevel);
ss += tt;
cc = Tree1->GetItemTextColour(inParent);
tt = cc.GetAsString(wxC2S_HTML_SYNTAX);
tt += wxT(",");
ss += tt;
if(Tree1->IsBold(inParent)){
tt = wxT("1,");
}
else{
tt = wxT("0,");
}
ss += tt;
n = Tree1->GetItemImage(inParent, wxTreeItemIcon_Normal);
tt.Printf(wxT("%d,"), n);
ss += tt;
n = Tree1->GetItemImage(inParent, wxTreeItemIcon_Selected);
tt.Printf(wxT("%d,"), n);
ss += tt;
n = Tree1->GetItemImage(inParent, wxTreeItemIcon_Expanded);
tt.Printf(wxT("%d,"), n);
ss += tt;
n = Tree1->GetItemImage(inParent, wxTreeItemIcon_SelectedExpanded);
tt.Printf(wxT("%d,"), n);
ss += tt;
tt = Tree1->GetItemText(inParent);
ss += tt;
// save it
outList.Add(ss);
// and all the children
child = Tree1->GetFirstChild(inParent, cookie);
while(child.IsOk()){
EncodeTreeItems(child, inLevel + 1, outList);
child = Tree1->GetNextChild(inParent, cookie);
}
}
/*! \brief The OK button was clicked.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbOKClick(wxCommandEvent &event)
{
EndModal(wxOK);
}
/*! \brief The Cancel button was clicked.
*
* \param event wxCommandEvent&
* \return void
*
*/
void wxsImageTreeEditorDlg::OnbCancelClick(wxCommandEvent &event)
{
EndModal(wxCANCEL);
}<|fim▁end|>
|
const long wxsImageTreeEditorDlg::ID_STATICTEXT9 = wxNewId();
const long wxsImageTreeEditorDlg::ID_COMBOBOX4 = wxNewId();
const long wxsImageTreeEditorDlg::ID_STATICTEXT10 = wxNewId();
|
<|file_name|>collada.rs<|end_file_name|><|fim▁begin|>/// Implements the logic behind converting COLLADA documents to polygon-rs meshes.
extern crate parse_collada as collada;
use math::*;
use polygon::geometry::mesh::*;
pub use self::collada::{
AnyUri,
ArrayElement,
Collada,
GeometricElement,
Geometry,
Node,
PrimitiveElements,
UriFragment,
VisualScene
};
#[derive(Debug)]
pub enum Error {
/// Indicates an error that occurred when the MeshBuilder was validating the mesh data. If the
/// COLLADA document passed parsing this should not occur.
BuildMeshError(BuildMeshError),
IncorrectPrimitiveIndicesCount {
primitive_count: usize,
stride: usize,
index_count: usize,
},
/// Indicates an error in loading or parsing the original collada document (i.e. the error
/// ocurred within the parse-collada library).
ParseColladaError(collada::Error),
/// Indicates that there was an input with the "NORMAL" semantic but the associated source
/// was missing.
MissingNormalSource,
/// Indicates that an <input> element specified a <source> element that was missing.
MissingSourceData,
/// Indicates that the <source> element with the "POSITION" semantic was missing an
/// array element.
MissingPositionData,
/// Indicates that the <source> element with the "NORMAL" semantic was missing an array element.
MissingNormalData,
/// Indicates that a <vertices> element had and <input> element with no "POSITION" semantic.
///
/// NOTE: This error means that the COLLADA document is ill-formed and should have failed
/// parsing. This indicates that there is a bug in the parse-collada library that should be
/// fixed.
MissingPositionSemantic,
/// Indicates that the <mesh> had no primitive elements.
MissingPrimitiveElement,
    /// Indicates that one of the primitive elements (e.g. <triangles> et al.) was missing a <p>
    /// child element. While this is technically allowed by the standard, I'm not really sure what
    /// to do with that? Like how do you define a mesh without indices?
MissingPrimitiveIndices,
/// Indicates that a uri referenced an asset outside the document.
NonLocalUri(String),
UnsupportedGeometricElement,
UnsupportedPrimitiveType,
/// Indicates that a <source> element's array element was of a type other than <float_array>.
UnsupportedSourceData,
}
impl From<collada::Error> for Error {
fn from(from: collada::Error) -> Error {
Error::ParseColladaError(from)
}
}
pub type Result<T> = ::std::result::Result<T, Error>;
pub enum VertexSemantic {
Position,
Normal,
TexCoord,
}
/// Loads all resources from a COLLADA document and adds them to the resource manager.
pub fn load_resources<T: Into<String>>(source: T) -> Result<Mesh> {
let collada_data = Collada::parse(source)?;
// Load all meshes from the document and add them to the resource manager.
if let Some(library_geometries) = collada_data.library_geometries {
for geometry in library_geometries.geometry {
// // Retrieve the id for the geometry.
// // TODO: Generate an id for the geometry if it doesn't already have one.
// let id = match geometry.id {
// None => {
// println!("WARNING: COLLADA file contained a <geometry> element with no \"id\" attribute");
// println!("WARNING: This is unsupported because there is no way to reference that geometry to instantiate it");
// continue;
// },
// Some(id) => id,
// };
let mesh = match geometry.geometric_element {
GeometricElement::Mesh(ref mesh) => try!(collada_mesh_to_mesh(mesh)),
_ => return Err(Error::UnsupportedGeometricElement),
};
// TODO: Actually finish parsing all the other data from the file.
return Ok(mesh);
}
}<|fim▁hole|>
fn collada_mesh_to_mesh(mesh: &collada::Mesh) -> Result<Mesh> {
if mesh.primitive_elements.len() > 1 {
println!("WARNING: Mesh is composed of more than one geometric primitive, which is not currently supported, only part of the mesh will be loaded");
}
// Grab the first primitive element in the mesh.
// TODO: Handle all primitive elements in the mesh, not just one. This is dependent on polygon
// being able to support submeshes.
let primitive = try!(
mesh.primitive_elements.first()
.ok_or(Error::MissingPrimitiveElement));
let triangles = match *primitive {
PrimitiveElements::Triangles(ref triangles) => triangles,
_ => return Err(Error::UnsupportedPrimitiveType),
};
let primitive_indices =
triangles.p
.as_ref()
.ok_or(Error::MissingPrimitiveIndices)?;
// Iterate over the indices, rearranging the normal data to match the position data.
let stride = triangles.input.len(); // TODO: Do we have a better way of calculating stride? What if one of the sources isn't used? OR USED TWICE!?
let count = triangles.count;
let index_count = primitive_indices.len();
let vertex_count = count as u32 * 3;
// Verify we have the right number of indices to build the vertices.
if count * stride * 3 != index_count {
return Err(Error::IncorrectPrimitiveIndicesCount {
primitive_count: count,
stride: stride,
index_count: index_count,
});
}
    // The indices list is just a raw list of indices. They are implicitly grouped based on the
    // number of inputs for the primitive element (e.g. if there are 3 inputs for the primitive
    // then there are 3 indices per vertex). To handle this we use GroupBy to do a strided
    // iteration over the indices list and build each vertex one at a time. Internally the mesh
    // builder handles the details of how to assemble the vertex data in memory.
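    // For example (indices purely illustrative): with the three inputs
    // POSITION, NORMAL and TEXCOORD, the flat list [0, 5, 2, 1, 5, 3, ...] is
    // consumed three at a time, so vertex 0 uses position 0, normal 5 and
    // texcoord 2.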
// Build a mapping between the vertex indices and the source that they use.
let mut source_map = Vec::new();
for (offset, input) in triangles.input.iter().enumerate() {
        // Retrieve the appropriate source. If the semantic is "VERTEX" then the offset is
        // associated with all of the sources specified by the <vertices> element.
let source_ids = match &*input.semantic {
"VERTEX" => {
mesh.vertices.input
.iter()
.map(|input| (input.semantic.as_ref(), input.source.as_ref()))
.collect()
},
_ => vec![(input.semantic.as_ref(), input.source.as_ref())],
};
// For each of the semantics at the current offset, push their info into the source map.
for (semantic, source_id) in source_ids {
// Retrieve the <source> element for the input.
let source = try!(mesh.source
.iter()
.find(|source| source.id == source_id)
.ok_or(Error::MissingSourceData));
            // Retrieve its array_element, which is technically optional according to the spec but is
            // probably going to be there for the position data.
let array_element = try!(
source.array_element
.as_ref()
.ok_or(Error::MissingPositionData));
// Get float data. Raw mesh data should only be float data (the only one that even
// remotely makes sense is int data, and even then that seems unlikely), so emit an
// error if the data is in the wrong format.
let data = match *array_element {
ArrayElement::Float(ref float_array) => float_array.contents.as_ref(),
_ => return Err(Error::UnsupportedSourceData),
};
source_map.push(IndexMapper {
offset: offset,
semantic: semantic,
data: data,
});
}
}
let mut mesh_builder = MeshBuilder::new();
let mut unsupported_semantic_flag = false;
for vertex_indices in GroupBy::new(primitive_indices, stride).unwrap() { // TODO: This can't fail... right? I'm pretty sure the above checks make sure this is correct.
        // We iterate over each group of indices where each group represents the indices for a
        // single vertex. Within that vertex we need to match each index against the source
        // data registered for its offset in the source map.
let mut vertex = Vertex::new(Point::origin());
for (offset, index) in vertex_indices.iter().enumerate() {
for mapper in source_map.iter().filter(|mapper| mapper.offset == offset) {
match mapper.semantic {
"POSITION" => {
vertex.position = Point::new(
// TODO: Don't assume that the position data is encoded as 3 coordinate
// vectors. The <technique_common> element for the source should have
// an <accessor> describing how the data is laid out.
mapper.data[index * 3 + 0],
mapper.data[index * 3 + 1],
mapper.data[index * 3 + 2],
);
},
"NORMAL" => {
vertex.normal = Some(Vector3::new(
mapper.data[index * 3 + 0],
mapper.data[index * 3 + 1],
mapper.data[index * 3 + 2],
));
},
"TEXCOORD" => {
vertex.texcoord.push(Vector2::new(
mapper.data[index * 2 + 0],
mapper.data[index * 2 + 1],
));
},
_ => if !unsupported_semantic_flag {
unsupported_semantic_flag = true;
println!("WARNING: Unsupported vertex semantic {} in mesh will not be used", mapper.semantic);
},
}
}
}
mesh_builder.add_vertex(vertex);
}
let indices: Vec<u32> = (0..vertex_count).collect();
mesh_builder
.set_indices(&*indices)
.build()
.map_err(|err| Error::BuildMeshError(err))
}
struct IndexMapper<'a> {
offset: usize,
semantic: &'a str,
data: &'a [f32],
}
// TODO: Where even should this live? It's generally useful but I'm only using it here right now.
struct GroupBy<'a, T: 'a> {
next: *const T,
end: *const T,
stride: usize,
_phantom: ::std::marker::PhantomData<&'a T>,
}
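// Illustrative usage: GroupBy::new(&[1, 2, 3, 4, 5, 6], 3) yields &[1, 2, 3]
// and then &[4, 5, 6]; `new` returns Err(()) when the slice length is not a
// multiple of the stride.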
impl<'a, T: 'a> GroupBy<'a, T> {
fn new(slice: &'a [T], stride: usize) -> ::std::result::Result<GroupBy<'a, T>, ()> {
if slice.len() % stride != 0 {
return Err(());
}
Ok(GroupBy {
next: slice.as_ptr(),
end: unsafe { slice.as_ptr().offset(slice.len() as isize) },
stride: stride,
_phantom: ::std::marker::PhantomData,
})
}
}
impl<'a, T: 'a> Iterator for GroupBy<'a, T> {
type Item = &'a [T];
fn next(&mut self) -> Option<&'a [T]> {
if self.next == self.end {
return None;
}
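        // Sound because `new` guarantees the slice length is a multiple of the
        // stride, so advancing in whole strides lands exactly on `end`.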
let next = self.next;
self.next = unsafe { self.next.offset(self.stride as isize) };
Some(unsafe {
::std::slice::from_raw_parts(next, self.stride)
})
}
}<|fim▁end|>
|
unimplemented!();
}
|
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
@Component({
selector: 'my-app',
template:`
<h1>{{title}}</h1>
<ul class="heroes">
    <li *ngFor="let hero of heroes" [class.selected]="hero === selectedHero" (click)="onSelect(hero)">
<span class="badge">{{hero.id}}</span> {{hero.name}}
</li>
</ul>
<div *ngIf="selectedHero"><|fim▁hole|> <input [(ngModel)]="selectedHero.name" placeholder="name">
</div>
</div>
`,
styles: [`
.selected {
background-color: #CFD8DC !important;
color: white;
}
.heroes {
margin: 0 0 2em 0;
list-style-type: none;
padding: 0;
width: 15em;
}
.heroes li {
cursor: pointer;
position: relative;
left: 0;
background-color: #EEE;
margin: .5em;
padding: .3em 0;
height: 1.6em;
border-radius: 4px;
}
.heroes li.selected:hover {
background-color: #BBD8DC !important;
color: white;
}
.heroes li:hover {
color: #607D8B;
background-color: #DDD;
left: .1em;
}
.heroes .text {
position: relative;
top: -3px;
}
.heroes .badge {
display: inline-block;
font-size: small;
color: white;
padding: 0.8em 0.7em 0 0.7em;
background-color: #607D8B;
line-height: 1em;
position: relative;
left: -1px;
top: -4px;
height: 1.8em;
margin-right: .8em;
border-radius: 4px 0 0 4px;
}
`]
})
export class AppComponent {
title = 'Tour of Heroes';
heroes = HEROES;
selectedHero : Hero;
onSelect(hero: Hero): void {
this.selectedHero = hero;
}
}
export class Hero {
id : number;
name : string;
}
const HEROES: Hero[] = [
{ id: 11, name: 'Mr. Nice' },
{ id: 12, name: 'Narco' },
{ id: 13, name: 'Bombasto' },
{ id: 14, name: 'Celeritas' },
{ id: 15, name: 'Magneta' },
{ id: 16, name: 'RubberMan' },
{ id: 17, name: 'Dynama' },
{ id: 18, name: 'Dr IQ' },
{ id: 19, name: 'Magma' },
{ id: 20, name: 'Tornado' }
];<|fim▁end|>
|
<h2>{{selectedHero.name}} details</h2>
<div><label>id: </label>{{selectedHero.id}}</div>
<div>
<label>name: </label>
|
<|file_name|>nxos_vlan.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_vlan
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages VLAN resources and attributes.
description:
- Manages VLAN configurations on NX-OS switches.
author: Jason Edelman (@jedelman8)
options:
vlan_id:
description:
- Single VLAN ID.
required: false
default: null
vlan_range:
description:
- Range of VLANs such as 2-10 or 2,5,10-15, etc.
required: false
default: null
name:
description:
- Name of VLAN.
required: false
default: null
vlan_state:
description:
      - Manage the VLAN operational state of the VLAN
        (equivalent to the state {active | suspend} command).
required: false
default: active
choices: ['active','suspend']
admin_state:
description:
- Manage the VLAN administrative state of the VLAN equivalent
to shut/no shut in VLAN config mode.
required: false
default: up
choices: ['up','down']
mapped_vni:
description:
- The Virtual Network Identifier (VNI) ID that is mapped to the
VLAN. Valid values are integer and keyword 'default'.
required: false
default: null
version_added: "2.2"
state:
description:
- Manage the state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: Ensure a range of VLANs are not present on the switch
nxos_vlan:
vlan_range: "2-10,20,50,55-60,100-150"
host: 68.170.147.165
username: cisco
password: cisco
state: absent
transport: nxapi
- name: Ensure VLAN 50 exists with the name WEB and is in the shutdown state
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
admin_state: down
name: WEB
transport: nxapi
username: cisco
password: cisco
- name: Ensure VLAN is NOT on the device
nxos_vlan:
vlan_id: 50
host: 68.170.147.165
state: absent
transport: nxapi
username: cisco
password: cisco
'''
RETURN = '''
proposed_vlans_list:
description: list of VLANs being proposed
returned: when debug enabled
type: list
sample: ["100"]
existing_vlans_list:
description: list of existing VLANs on the switch prior to making changes
returned: when debug enabled
type: list
sample: ["1", "2", "3", "4", "5", "20"]
end_state_vlans_list:
description: list of VLANs after the module is executed
returned: when debug enabled
type: list
sample: ["1", "2", "3", "4", "5", "20", "100"]
proposed:
description: k/v pairs of parameters passed into module (does not include
vlan_id or vlan_range)
returned: when debug enabled
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_state": "suspend", "mapped_vni": "5000"}
existing:
description: k/v pairs of existing vlan or null when using vlan_range
returned: when debug enabled
type: dict
sample: {"admin_state": "down", "name": "app_vlan",
"vlan_id": "20", "vlan_state": "suspend", "mapped_vni": ""}
end_state:
description: k/v pairs of the VLAN after executing module or null
when using vlan_range
returned: when debug enabled
type: dict or null
sample: {"admin_state": "down", "name": "app_vlan", "vlan_id": "20",
"vlan_state": "suspend", "mapped_vni": "5000"}
updates:
    description: commands sent to the device
returned: always
type: list
sample: ["vlan 20", "vlan 55", "vn-segment 5000"]
commands:
    description: commands sent to the device
returned: always
type: list
sample: ["vlan 20", "vlan 55", "vn-segment 5000"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re

from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def vlan_range_to_list(vlans):
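    # Expands a range string into a sorted list of VLAN IDs, e.g. (illustrative)
    # '2-4,10' -> ['2', '3', '4', '10'].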
result = []
if vlans:
for part in vlans.split(','):
if part == 'none':
break
if '-' in part:
a, b = part.split('-')
a, b = int(a), int(b)
result.extend(range(a, b + 1))
else:
a = int(part)
result.append(a)
return numerical_sort(result)
return result
def numerical_sort(string_int_list):
"""Sort list of strings (VLAN IDs) that are digits in numerical order.
"""
as_int_list = []
as_str_list = []
for vlan in string_int_list:
as_int_list.append(int(vlan))
as_int_list.sort()
for vlan in as_int_list:
as_str_list.append(str(vlan))
return as_str_list
def build_commands(vlans, state):
commands = []
for vlan in vlans:
if state == 'present':
command = 'vlan {0}'.format(vlan)
commands.append(command)
elif state == 'absent':
command = 'no vlan {0}'.format(vlan)
commands.append(command)
return commands
def get_vlan_config_commands(vlan, vid):
"""Build command list required for VLAN configuration
"""
reverse_value_map = {
"admin_state": {
"down": "shutdown",
"up": "no shutdown"
}
}
if vlan.get('admin_state'):
# apply value map when making change to the admin state
# note: would need to be a loop or more in depth check if
# value map has more than 1 key
vlan = apply_value_map(reverse_value_map, vlan)
VLAN_ARGS = {
'name': 'name {0}',
'vlan_state': 'state {0}',
'admin_state': '{0}',
'mode': 'mode {0}',
'mapped_vni': 'vn-segment {0}'
}
commands = []
for param, value in vlan.items():
if param == 'mapped_vni' and value == 'default':
command = 'no vn-segment'
else:
command = VLAN_ARGS.get(param).format(vlan.get(param))
if command:
commands.append(command)
commands.insert(0, 'vlan ' + vid)
commands.append('exit')
return commands
def get_list_of_vlans(module):
body = run_commands(module, ['show vlan | json'])
vlan_list = []
vlan_table = body[0].get('TABLE_vlanbrief')['ROW_vlanbrief']
if isinstance(vlan_table, list):
for vlan in vlan_table:
vlan_list.append(str(vlan['vlanshowbr-vlanid-utf']))
else:
vlan_list.append('1')
return vlan_list
def get_vni(vlanid, module):
flags = str('all | section vlan.{0}'.format(vlanid)).split(' ')
body = get_config(module, flags=flags)
#command = 'show run all | section vlan.{0}'.format(vlanid)<|fim▁hole|> if body:
REGEX = re.compile(r'(?:vn-segment\s)(?P<value>.*)$', re.M)
if 'vn-segment' in body:
value = REGEX.search(body).group('value')
return value
def get_vlan(vlanid, module):
"""Get instance of VLAN as a dictionary
"""
command = 'show vlan id %s | json' % vlanid
body = run_commands(module, [command])
#command = 'show vlan id ' + vlanid
#body = execute_show_command(command, module)
try:
vlan_table = body[0]['TABLE_vlanbriefid']['ROW_vlanbriefid']
except (TypeError, IndexError):
return {}
key_map = {
"vlanshowbr-vlanid-utf": "vlan_id",
"vlanshowbr-vlanname": "name",
"vlanshowbr-vlanstate": "vlan_state",
"vlanshowbr-shutstate": "admin_state"
}
vlan = apply_key_map(key_map, vlan_table)
value_map = {
"admin_state": {
"shutdown": "down",
"noshutdown": "up"
}
}
vlan = apply_value_map(value_map, vlan)
vlan['mapped_vni'] = get_vni(vlanid, module)
return vlan
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
new_dict[new_key] = str(value)
return new_dict
def apply_value_map(value_map, resource):
for key, value in value_map.items():
resource[key] = value[resource.get(key)]
return resource
def main():
argument_spec = dict(
vlan_id=dict(required=False, type='str'),
vlan_range=dict(required=False),
name=dict(required=False),
vlan_state=dict(choices=['active', 'suspend'], required=False),
mapped_vni=dict(required=False, type='str'),
state=dict(choices=['present', 'absent'], default='present',
required=False),
admin_state=dict(choices=['up', 'down'], required=False),
include_defaults=dict(default=False),
config=dict(),
save=dict(type='bool', default=False)
)
    argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=[['vlan_range', 'name'],
['vlan_id', 'vlan_range']],
supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
vlan_range = module.params['vlan_range']
vlan_id = module.params['vlan_id']
name = module.params['name']
vlan_state = module.params['vlan_state']
admin_state = module.params['admin_state']
mapped_vni = module.params['mapped_vni']
state = module.params['state']
changed = False
if vlan_id:
if not vlan_id.isdigit():
module.fail_json(msg='vlan_id must be a valid VLAN ID')
args = dict(name=name, vlan_state=vlan_state,
admin_state=admin_state, mapped_vni=mapped_vni)
proposed = dict((k, v) for k, v in args.items() if v is not None)
proposed_vlans_list = numerical_sort(vlan_range_to_list(
vlan_id or vlan_range))
existing_vlans_list = numerical_sort(get_list_of_vlans(module))
commands = []
existing = {}
if vlan_range:
if state == 'present':
# These are all of the VLANs being proposed that don't
# already exist on the switch
vlans_delta = list(
set(proposed_vlans_list).difference(existing_vlans_list))
commands = build_commands(vlans_delta, state)
elif state == 'absent':
# VLANs that are common between what is being proposed and
# what is on the switch
vlans_common = list(
set(proposed_vlans_list).intersection(existing_vlans_list))
commands = build_commands(vlans_common, state)
else:
existing = get_vlan(vlan_id, module)
if state == 'absent':
if existing:
commands = ['no vlan ' + vlan_id]
elif state == 'present':
if (existing.get('mapped_vni') == '0' and
proposed.get('mapped_vni') == 'default'):
proposed.pop('mapped_vni')
delta = dict(set(
proposed.items()).difference(existing.items()))
if delta or not existing:
commands = get_vlan_config_commands(delta, vlan_id)
end_state = existing
end_state_vlans_list = existing_vlans_list
if commands:
if existing.get('mapped_vni') and state != 'absent':
if (existing.get('mapped_vni') != proposed.get('mapped_vni') and
existing.get('mapped_vni') != '0' and proposed.get('mapped_vni') != 'default'):
commands.insert(1, 'no vn-segment')
if module.check_mode:
module.exit_json(changed=True,
commands=commands)
else:
load_config(module, commands)
changed = True
end_state_vlans_list = numerical_sort(get_list_of_vlans(module))
if 'configure' in commands:
commands.pop(0)
if vlan_id:
end_state = get_vlan(vlan_id, module)
results = {
'commands': commands,
'updates': commands,
'changed': changed,
'warnings': warnings
}
if module._debug:
results.update({
'proposed_vlans_list': proposed_vlans_list,
'existing_vlans_list': existing_vlans_list,
'proposed': proposed,
'existing': existing,
'end_state': end_state,
'end_state_vlans_list': end_state_vlans_list
})
module.exit_json(**results)
if __name__ == '__main__':
main()<|fim▁end|>
|
#body = execute_show_command(command, module, command_type='cli_show_ascii')[0]
value = ''
|
<|file_name|>MonitorPlatformServerDao.java<|end_file_name|><|fim▁begin|>package com.asura.monitor.platform.dao;
import com.asura.framework.base.paging.PagingResult;
import com.asura.framework.base.paging.SearchMap;
import com.asura.framework.dao.mybatis.base.MybatisDaoContext;
import com.asura.framework.dao.mybatis.paginator.domain.PageBounds;
import com.asura.common.dao.BaseDao;
import com.asura.monitor.platform.entity.MonitorPlatformServerEntity;
import org.springframework.stereotype.Repository;
import javax.annotation.Resource;
/**
* <p></p>
* <p/>
* <PRE>
* <BR>
* <BR>-----------------------------------------------
* <BR>
* </PRE>
*
* @author zhaozq14<|fim▁hole|> */
@Repository("com.asura.monitor.configure.dao.MonitorPlatformServerDao")
public class MonitorPlatformServerDao extends BaseDao<MonitorPlatformServerEntity>{
@Resource(name="monitor.MybatisDaoContext")
private MybatisDaoContext mybatisDaoContext;
    /**
     * Paged query executed through the given MyBatis statement.
     *
     * @param searchMap  query conditions
     * @param pageBounds paging parameters
     * @param sqlId      id of the MyBatis statement to run
     * @return a page of MonitorPlatformServerEntity results
     */
public PagingResult<MonitorPlatformServerEntity> findAll(SearchMap searchMap, PageBounds pageBounds, String sqlId){
return mybatisDaoContext.findForPage(this.getClass().getName()+"."+sqlId,MonitorPlatformServerEntity.class,searchMap,pageBounds);
}
}<|fim▁end|>
|
* @version 1.0
* @date 2016-11-07 11:35:05
* @since 1.0
|
<|file_name|>keyword-mod-as-identifier.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>
fn main() {
let mod = "foo"; //~ error: ident
}<|fim▁end|>
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-keyword-tests.py mod'
|
<|file_name|>search.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# vim: sw=4:ts=4:sts=4:fdm=indent:fdl=0:
# -*- coding: UTF8 -*-
#
# A sword KJV indexed search module.
# Copyright (C) 2012 Josiah Gordon <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
copying_str = \
'''
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
'''
warranty_str = \
'''
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
'''
""" KJV indexer and search modules.
BibleSearch: Can index and search the 'KJV' sword module using different types
of searches, including the following:
Strongs number search - Searches for all verses containing either
the phrase strongs phrase, any strongs
number or a superset of the strongs
numbers.
Morphological tags search - Same as the strongs...
Word or phrase search - Same as the strongs...
Regular expression search - Searches the whole Bible using the provided
regular expression.
"""
from sys import argv, exit
from cmd import Cmd
from difflib import get_close_matches
from functools import wraps
from time import strftime
from textwrap import fill
from collections import defaultdict
from itertools import product
import os
import sys
import json
import re
from .utils import *
try:
    import Sword
from .sword_verses import *
except ImportError:
Sword = None
from .verses import *
COLOR_LEVEL = 3
# Highlight colors.
highlight_color = '\033[7m'
highlight_text = '%s\\1\033[m' % highlight_color
word_regx = re.compile(r'\b([\w-]+)\b')
# Strip previous color.
strip_color_regx = re.compile(r'\033\[[\d;]*m')
def render_raw2(verse_text, strongs=False, morph=False):
""" Render raw verse text.
"""
strong_regx = re.compile(r'strong:([GH]\d+)', re.I)
morph_regx = re.compile(r'(?:Morph|robinson):([\w-]*)', re.I)
test_regx = re.compile(r'''
([^<]*)
<(?P<tag>seg|q|w|transChange|note)([^>]*)>
([\w\W]*?)
</(?P=tag)>
([^<]*)
''', re.I | re.X)
divname_regx = re.compile(r'''
<(?:divineName)>
([^<]*?)
([\'s]*)
</(?:divineName)>
''', re.I | re.X)
div_upper = lambda m: m.group(1).upper() + m.group(2)
marker_regx = re.compile(r'.*marker="(.)".*', re.I)
info_print(verse_text, tag=4)
def recurse_tag(text):
""" Recursively parse raw verse text using regular expressions, and
        return the correctly formatted text.
"""
v_text = ''
for match in test_regx.finditer(text):
opt, tag_name, tag_attr, tag_text, punct = match.groups()
strongs_str = ''
morph_str = ''
italic_str = '<i>%s</i>' if 'added' in tag_attr.lower() else '%s'
if 'note' in tag_name.lower() or 'study' in tag_attr.lower():
note_str = ' <n>%s</n>'
else:
note_str = '%s'
if strongs and strong_regx.search(tag_attr):
strongs_list = strong_regx.findall(tag_attr)
strongs_str = ' <%s>' % '> <'.join(strongs_list)
if morph and morph_regx.search(tag_attr):
morph_list = morph_regx.findall(tag_attr)
morph_str = ' {%s}' % '} {'.join(morph_list)
if match.re.search(tag_text):
temp_text = recurse_tag(tag_text) + strongs_str + morph_str
v_text += note_str % italic_str % (temp_text)
else:
info_print((opt, tag_name, tag_attr, tag_text, punct), tag=4)
opt = marker_regx.sub('<p>\\1</p> ', opt)
tag_text = divname_regx.sub(div_upper, tag_text)
tag_text = note_str % italic_str % tag_text
v_text += opt + tag_text + strongs_str + morph_str
v_text += punct
return v_text
return recurse_tag(verse_text)
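# Illustrative sketch of what render_raw2 produces (hypothetical OSIS
# fragment; the real markup depends on the installed module):
#     >>> render_raw2('<w lemma="strong:G2316">God</w>', strongs=True)
#     'God <G2316>'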
def render_raw(verse_text, strongs=False, morph=False):
""" Render raw verse text.
"""
strong_regx = re.compile(r'strong:([GH]\d+)', re.I)
morph_regx = re.compile(r'(?:Morph|robinson):([\w-]*)', re.I)
test_regx = re.compile(r'''
([^<]*)
<(?P<tag>q|w|transChange|note)([^>]*)>
([\w\W]*?)
</(?P=tag)>
([^<]*)
''', re.I | re.X)
divname_regx = re.compile(r'''
(?:<seg>)?
<(?:divineName)>+
([^<]*?)
([\'s]*)
</(?:divineName)>
(?:</seg>)?
''', re.I | re.X)
xadded_regx = re.compile(r'<seg subType="x-added"[^>]*>([^<]*)</seg>',
re.I)
div_upper = lambda m: m.group(1).upper() + m.group(2)
marker_regx = re.compile(r'.*marker="(.)".*', re.I)
v_text = ''
info_print(verse_text, tag=4)
for match in test_regx.finditer(verse_text):
opt, tag_name, tag_attr, tag_text, punct = match.groups()
italic_str = '%s'
if match.re.search(tag_text):
if 'added' in tag_attr.lower():
italic_str = '<i>%s</i>' + punct
punct = ''
match_list = match.re.findall(tag_text + punct)
else:
match_list = [match.groups()]
temp_text = ''
for opt, tag_name, tag_attr, tag_text, punct in match_list:
info_print((opt, tag_name, tag_attr, tag_text, punct), tag=4)
tag_text = divname_regx.sub(div_upper, tag_text)
tag_text = xadded_regx.sub('<i>\\1</i>', tag_text)
if 'marker' in opt.lower():
temp_text += '<p>%s</p> ' % marker_regx.sub('\\1', opt)
opt = ''
if 'note' in tag_name.lower() or 'study' in tag_attr.lower():
temp_text += ' <n>%s</n>' % tag_text
tag_text = ''
temp_italic = '<i>%s</i>' if 'added' in tag_attr.lower() else '%s'
temp_text += temp_italic % (opt + tag_text)
if tag_name.strip().lower() in ['transchange', 'w', 'seg']:
if strong_regx.search(tag_attr) and strongs:
temp_text += \
' <%s>' % '> <'.join(strong_regx.findall(tag_attr))
if morph_regx.search(tag_attr) and morph:
temp_text += \
' {%s}' % '} {'.join(morph_regx.findall(tag_attr))
temp_text += punct
v_text += italic_str % temp_text
continue
opt, tag_name, tag_attr, tag_text, punct = match.groups()
tag_text = divname_regx.sub(
lambda m: m.group(1).upper() + m.group(2), tag_text)
if 'marker' in opt.lower():
v_text += '<p>%s</p> ' % marker_regx.sub('\\1', opt)
if 'added' in tag_attr.lower():
v_text += '<i>'
elif 'note' in tag_name.lower() or 'study' in tag_attr.lower():
v_text += ' <n>%s</n>' % tag_text
if match.re.search(tag_text):
for i in match.re.finditer(tag_text):
info_print(i.groups(), tag=4)
o, t_n, t_a, t_t, p = i.groups()
if t_n.strip().lower() in ['transchange', 'w']:
v_text += o + t_t
if strong_regx.search(t_a) and strongs:
v_text += \
' <%s>' % '> <'.join(strong_regx.findall(t_a))
if morph_regx.search(t_a) and morph:
v_text += \
' {%s}' % '} {'.join(morph_regx.findall(t_a))
v_text += p
else:
if tag_name.strip().lower() in ['transchange', 'w']:
v_text += tag_text
if strong_regx.search(tag_attr) and strongs:
v_text += \
' <%s>' % '> <'.join(strong_regx.findall(tag_attr))
if morph_regx.search(tag_attr) and morph:
v_text += \
' {%s}' % '} {'.join(morph_regx.findall(tag_attr))
if 'added' in tag_attr.lower():
v_text += '</i>'
v_text += punct
info_print('%s: %s: %s: %s: %s' % (opt, tag_name, tag_attr,
tag_text, punct), tag=4)
return v_text
def render_verses_with_italics(ref_list, wrap=True, strongs=False,
morph=False, added=True, notes=False,
highlight_func=None, module='KJV', *args):
""" Renders a the verse text at verse_ref with italics highlighted.
Returns a strong "verse_ref: verse_text"
ref_list - List of references to render
wrap - Whether to wrap the text.
strongs - Include Strong's Numbers in the output.
morph - Include Morphological Tags in the output.
added - Include added text (i.e. italics) in the output.
notes - Include study notes at the end of the text.
highlight_func - A function to highlight anything else
(i.e. search terms.)
module - Sword module to render from.
*args - Any additional arguments to pass to
                         highlight_func
highlight_func should take at least three arguments, verse_text,
strongs, and morph.
"""
# Set the colors of different items.
end_color = '\033[m'
# Build replacement strings that highlight Strong's Numbers and
# Morphological Tags.
if COLOR_LEVEL >= 2:
# The Strong's and Morphology matching regular expressions.
# Match strongs numbers.
strongs_regx = re.compile(r'''
<((?:\033\[[\d;]*m)*?[GH]?\d+?(?:\033\[[\d;]*m)*?)>
''', re.I | re.X)
# It needs to match with braces or it will catch all capitalized
    # words and words with '-'s in them.
info_print("Rendering results, please wait...\n", tag=0)
morph_regx = re.compile(r'''
\{((?:\033\[[\d+;]*m)*?[\w-]*?(?:\033\[[\d+;]*m)*?)\}
''', re.X)
strongs_color = '\033[36m'
morph_color = '\033[35m'
strongs_highlight = '<%s\\1%s>' % (strongs_color, end_color)
morph_highlight = '{%s\\1%s}' % (morph_color, end_color)
if COLOR_LEVEL >= 0:
ref_color = '\033[32m'
ref_highlight = '%s\\1%s' % (ref_color, end_color)
if COLOR_LEVEL >= 1 and added:
italic_color = '\033[4m'
italic_regx = re.compile(r'<i>\s?(.*?)\s?</i>', re.S)
italic_highlight = '%s\\1%s' % (italic_color, end_color)
# Get the local text encoding.
encoding = get_encoding()
# A substitution replacement function for highlighting italics.
def italic_color(match):
""" Color italic text, but first remove any previous color.
"""
# Strip any previous colors.
match_text = strip_color_regx.sub('', match.groups()[0])
# Color the italics.
return word_regx.sub(italic_highlight, match_text)
# Get an iterator over all the requested verses.
verse_iter = IndexedVerseTextIter(iter(ref_list), strongs, morph,
italic_markers=(COLOR_LEVEL >= 1),
added=added, paragraph=added,
notes=notes, module=module)
if VERBOSE_LEVEL == 20:
verse_iter = VerseTextIter(iter(ref_list), strongs, morph,
module=module, markup=1, #Sword.FMT_PLAIN,
render='render_raw')
if VERBOSE_LEVEL >= 30:
verse_iter = RawDict(iter(ref_list), module=module)
for verse_ref, verse_text in verse_iter:
if VERBOSE_LEVEL >= 30:
len_longest_key = len(max(verse_text[1].keys(), key=len))
for key, value in verse_text[1].items():
print('\033[33m{0:{1}}\033[m: {2}'.format(key,
len_longest_key,
value))
verse_text = verse_text[1]['_verse_text'][0]
        # Encode then decode the verse text to make it compatible with
        # the locale.
verse_text = verse_text.strip().encode(encoding, 'replace')
verse_text = verse_text.decode(encoding, 'replace')
verse_text = '%s: %s' % (verse_ref, verse_text)
# The text has to be word wrapped before adding any color, or else the
# color will add to the line length and the line will wrap too soon.
if wrap:
verse_text = fill(verse_text, screen_size()[1],
break_on_hyphens=False)
if COLOR_LEVEL >= 0:
# Color the verse reference.
colored_ref = word_regx.sub(ref_highlight, verse_ref)
verse_text = re.sub(verse_ref, colored_ref, verse_text)
if COLOR_LEVEL >= 1 and added:
# Highlight the italic text we previously pulled out.
verse_text = italic_regx.sub(italic_color, verse_text)
if COLOR_LEVEL >= 2:
# Highlight Strong's and Morphology if they are visible.
if strongs:
verse_text = strongs_regx.sub(strongs_highlight, verse_text)
if morph:
verse_text = morph_regx.sub(morph_highlight, verse_text)
if COLOR_LEVEL >= 3:
# Highlight the different elements.
if highlight_func:
verse_text = highlight_func(verse_text, *args)
        # Finally produce the formatted text.
yield verse_text
def highlight_search_terms(verse_text, regx_list, highlight_text,
color_tag='\033\[[\d+;]*m', *args):
""" Highlight search terms in the verse text.
"""
def highlight_group(match):
""" Highlight each word/Strong's Number/Morphological Tag in the
match.
"""
match_text = match.group()
for word in set(match.groups()):
if word: # and word != match_text:
# if word.lower() == 'strong' and word == match_text:
# continue
info_print(word, tag=20)
try:
match_text = re.sub('''
(
(?:{0}|\\b)+
{1}
(?:{0}|\\b)+
)
'''.format(color_tag, re.escape(word)),
highlight_text, match_text, flags=re.X)
except Exception as err:
info_print("Error with highlighting word %s: %s" % \
(word, err), tag=4)
#match_text = match_text.replace(word, '\033[7m%s\033[m' % word)
# print(match_text)
return match_text
# Strip any previous colors.
# match_text = strip_color_regx.sub('', match.group())
# return word_regx.sub(highlight_text, match_text)
verse_text = verse_text.strip()
# Apply each highlighting regular expression to the text.
for regx in regx_list:
verse_text = regx.sub(highlight_group, verse_text)
return verse_text
def build_highlight_regx(search_list, case_sensitive, sloppy=False,
color_tag='\033\[[\\\\d+;]*m', extra_tag='\033'):
""" Build a regular expression and highlight string to colorize the
items in search_list as they appear in a verse.
"""
if not search_list:
return []
regx_list = []
    # Extra word boundary to catch ansi color escape sequences.
escaped_word_bound = '(?:{0}|\\\\b)+'.format(color_tag)
word_bound = '(?:{0}|\\b)+'.format(color_tag)
# Extra space filler to pass over ansi color escape sequences.
extra_space = '|{0}|{1}'.format(color_tag, extra_tag)
# print(word_bound, extra_space, '(?:\033\[[\d+;]*m|\\b)+')
for item in search_list:
item = item.strip()
is_regex = (('*' in item and ' ' not in item) or item.startswith('&'))
if ('*' in item and ' ' not in item) and not item.startswith('&'):
# Build a little regular expression to highlight partial words.
item = item[1:] if item[0] in '!^+|' else item
item = item.replace('*', '\w*')
item = r'{0}({1}){0}'.format(word_bound, item)
if item.startswith('&'):
# Just use a regular expression. ('&' marks the term as a regular
# expression.)
item = item[1:]
regx_list.append(Search.search_terms_to_regex(item, case_sensitive,
word_bound=escaped_word_bound, extra_space=extra_space,
sloppy=(sloppy or '~' in item), is_regex=is_regex))
return regx_list
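# Sketch of how build_highlight_regx and highlight_search_terms are meant
# to be used together ('light' is a made-up example term):
#     regx_list = build_highlight_regx(['light'], case_sensitive=False)
#     colored = highlight_search_terms(verse_text, regx_list, highlight_text)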
def mod_lookup(mod, items):
""" Looks up items in a module and returns the formated text.
"""
item_lookup = Lookup(mod)
    # Separate all elements by a comma.
item_list = ','.join(items.split()).split(',')
text_list = []
for item in item_list:
item_text = item_lookup.get_formatted_text(item)
text_list.append('\033[1m%s\033[m:\n%s' % (item, item_text))
return '\n\n'.join(text_list)
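# Example usage (assumes a lookup module such as 'StrongsGreek' is
# installed; items may be separated by spaces or commas):
#     print(mod_lookup('StrongsGreek', 'G25 G26'))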
class StdoutRedirect(object):
""" Redirect stdout to a specified output function.
"""
def __init__(self, output_func, *args):
""" Set the output function and get the extra arguments to pass to it.
"""
self._output_func = output_func
self._args = args
self._old_stdout = sys.stdout
def write(self, data):
""" Write data to the output function.
"""
if data.strip():
self._output_func(data, *self._args)
def __enter__(self):
""" Change sys.stdout to this class.
"""
try:
sys.stdout = self
return self
except Exception as err:
print("Error in __enter__: %s" % err, file=sys.stderr)
return None
def __exit__(self, exc_type, exc_value, traceback):
""" Change sys.stdout back to its old value.
"""
try:
sys.stdout = self._old_stdout
if exc_type:
return False
return True
except Exception as err:
print("Error in __exit__: %s" % err, file=sys.stderr)
return False
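# Minimal usage sketch: while the block runs, anything written to stdout
# is routed through the given output function instead.
#     with StdoutRedirect(info_print):
#         print('This text goes through info_print, not stdout.')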
class IndexedVerseTextIter(object):
""" An iterable object for accessing verses in the Bible. Maybe it will
be easier maybe not.
"""
def __init__(self, reference_iter, strongs=False, morph=False,
module='KJV', italic_markers=False, added=True,
paragraph=True, notes=False, path=''):
""" Initialize.
"""
reg_list = []
if not strongs:
reg_list.append(r'\s*<([GH]\d+)>')
if not morph:
reg_list.append(r'\s*\{([\w-]+)\}')
if not added:
reg_list.append(r'\s?<i>\s?(.*?)\s?</i>')
if not italic_markers:
reg_list.append(r'(<i>\s?|\s?</i>)')
if not paragraph:
reg_list.append(r'\s?<p>\s?(.*?)\s?</p>')
else:
reg_list.append(r'(<p>\s?|\s?</p>)')
reg_str = r'(?:%s)' % r'|'.join(reg_list)
self._clean_regex = re.compile(reg_str, re.S)
self._notes_regex = re.compile(r'\s?<n>\s?(.*?)\s?</n>', re.S)
self._notes_str = ' (Notes: \\1)' if notes else ''
self._index_dict = IndexDict('%s' % module, path)
self._ref_iter = reference_iter
def next(self):
""" Returns the next verse reference and text.
"""
return self.__next__()
def __next__(self):
""" Returns a tuple of the next verse reference and text.
"""
# Retrieve the next reference.
verse_ref = next(self._ref_iter)
# Set the verse and render the text.
verse_text = self._get_text(verse_ref)
return (verse_ref, verse_text.strip())
def __iter__(self):
""" Returns an iterator of self.
"""
return self
def _get_text(self, verse_ref):
""" Returns the verse text. Override this to produce formatted verse
text.
"""
        verse_text = self._index_dict[verse_ref]
        # Strip any markup that was not requested, and render the notes.
        # (Reconstructed from the regexes built in __init__, which are
        # otherwise unused.)
        verse_text = self._clean_regex.sub('', verse_text)
        verse_text = self._notes_regex.sub(self._notes_str, verse_text)
        return verse_text
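# Minimal usage sketch (assumes an index was previously built for the
# module):
#     for ref, text in IndexedVerseTextIter(iter(['John 3:16'])):
#         print('%s: %s' % (ref, text))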
class CombinedParse(object):
""" A parser for simple combined search parsing.
((in OR tree) AND the) AND (house OR bush) =>
['in the house', 'in the bush', 'tree the house', 'tree the bush']
Also it has a NOT word list.
created NOT (and OR but) => ['created'] ['and', 'but']
"""
def __init__(self, arg_str):
""" Initialize the parser and parse the arg string.
"""
self._arg_str = arg_str
self._arg_list = arg_str.split()
parsed_list = self.parse_string(list(arg_str))
self._word_list, self._not_list = self.parse_list(parsed_list)
    # Make the results accessible via read-only properties.
word_list = property(lambda self: self._word_list)
not_list = property(lambda self: self._not_list)
def parse_list(self, arg_list):
""" Parse a list such as ['created', 'NOT', ['and', 'OR', 'but']] into
search_args = ['created'] not_list = ['and', 'but']
"""
# The list we're working on building.
working_list = []
# The list of words not to include.
not_list = []
for i in arg_list:
# Skip 'OR's
if i == 'OR':
continue
if isinstance(i, list):
# A list was found so parse it and get the results.
temp_list, temp_not_list = self.parse_list(i)
# Add the returned not list to the current not list.
not_list.extend(temp_not_list)
if working_list:
if working_list[-1] == 'AND':
# Pop the 'AND' off the end of the list.
working_list.pop()
                        # Combine each element of the working list with
                        # each element of the returned list, and replace
                        # the working list with those combinations.
# (i.e. working_list = ['this', 'that']
# temp_list = ['tree', 'house']
# result = ['this tree', 'this house',
# 'that tree', 'that house']
working_list = ['%s %s' % j \
for j in product(working_list, temp_list)]
elif working_list[-1] == 'NOT':
# Take the 'NOT' off to show we've processed it.
working_list.pop()
# Add the returned list to the NOT list.
not_list.extend(temp_list)
else:
                        # Just extend the working list with the returned list.
working_list.extend(temp_list)
else:
                    # Just extend the working list with the returned list.
working_list.extend(temp_list)
else:
if i == 'AND':
# Put the 'AND' on the list for later processing.
working_list.append(i)
elif working_list:
if working_list[-1] == 'AND':
# Take the 'AND' off the list.
working_list.pop()
# Combine all the elements of working_list with i, and
# replace working list with the resulting list.
# (i.e. working_list = ['he', 'it'] i = 'said'
# result = ['he said', 'it said']
working_list = ['%s %s' % (j, i) for j in working_list]
elif working_list[-1] == 'NOT':
# Remove the 'NOT'.
working_list.pop()
# Add the word to the not list.
not_list.append(i)
else:
# Add the word to the working list.
working_list.append(i)
else:
# Add the word to the working list.
working_list.append(i)
# Split and then combine all the strings in working_list.
# Basically removes runs of whitespace.
working_list = [' '.join(i.split()) for i in working_list]
# Return the final list and not list.
return working_list, not_list
def parse_parenthesis(self, arg_list):
""" Recursively processes strings in parenthesis converting them
to nested lists of strings.
"""
# The return list.
return_list = []
        # Temporary string.
temp_str = ''
while arg_list:
# Get the next character.
c = arg_list.pop(0)
if c == '(':
                # An opening parenthesis was found, so split the current
                # string at the spaces, put the words in the return list,
                # and reset the string.
if temp_str:
return_list.extend(temp_str.split())
temp_str = ''
# Process from here to the closing parenthesis.
return_list.append(self.parse_parenthesis(arg_list))
elif c == ')':
# The parenthesis is closed so return back to the calling
# function.
break
else:
                # Append the current non-parenthesis character to the string.
temp_str += c
if temp_str:
# Split and add the string to the return list.
return_list.extend(temp_str.split())
# Return what we found.
return return_list
def parse_string(self, arg_list):
""" Parse a combined search arg string. Convert a string such as:
'created NOT (and OR but)' => ['created', 'NOT', ['and', 'OR', 'but']]
"""
# This does the same thing only using json.
#
# Regular expression to group all words.
#word_regx = re.compile(r'\b(\w*)\b')
        # Put quotes around all words, replace opening parentheses with
        # brackets, and put all of that in brackets.
#temp_str = '[%s]' % word_regx.sub('"\\1"', arg_str).replace('(', '[')
# Replace closing parenthesis with brackets and replace a '" ' with
# '", '.
#temp_str = temp_str.replace(')', ']').replace('" ', '",')
# finally replace '] ' with '], '. The end result should be a valid
# json string that can be converted to a list.
#temp_str = temp_str.replace('] ', '],')
# Convert the string to a list.
#return_list = json.loads(temp_str)
#return return_list
# The return list.
return_list = []
# Temporary string.
temp_str = ''
while arg_list:
# Pop the next character.
c = arg_list.pop(0)
if c == '(':
                # A parenthesis was found, so store and reset the string,
                # and parse what is inside the parentheses.
if temp_str:
return_list.extend(temp_str.split())
temp_str = ''
return_list.append(self.parse_parenthesis(arg_list))
else:
                # Append the non-parenthesis character to the string.
temp_str += c
if temp_str:
# Store the final string in the list.
return_list.extend(temp_str.split())
#info_print(return_list)
# Return the list.
return return_list
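# Behavior sketch restating the docstring examples above (not executed):
#     >>> CombinedParse('created NOT (and OR but)').word_list
#     ['created']
#     >>> CombinedParse('created NOT (and OR but)').not_list
#     ['and', 'but']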
class Search(object):
""" Provides a simple way of searching an IndexDict for verses.
"""
# To check for spaces.
_whitespace_regx = re.compile(r'\s')
# Cleanup regular expressions.
_non_alnum_regx = re.compile(r'[^\w\*<>\{\}\(\)-]')
_fix_regx = re.compile(r'\s+')
# Match strongs numbers.
_strongs_regx = re.compile(r'[<]?(\b[GH]\d+\b)[>]?', re.I)
# It needs to match with braces or it will catch all capitalized
    # words and words with '-'s in them.
_morph_regx = re.compile(r'[\(\{](\b[\w-]+\b)[\}\)]', re.I)
_word_regx = re.compile(r'\b([\w\\-]+)\b')
_space_regx = re.compile(r'\s+')
_non_word_regx = re.compile(r'[<>\(\)]')
_fix_strongs = classmethod(lambda c, m: '<%s>' % m.groups()[0].upper())
_fix_morph = classmethod(lambda c, m: '{%s}' % m.groups()[0].upper())
# Escape the morphological tags.
_escape_morph = classmethod(lambda c, m: \
'\{%s\}' % re.escape(m.groups()[0]).upper())
def __init__(self, module='KJV', path='', multiword=False):
""" Initialize the search.
"""
# The index dictionary.
self._index_dict = IndexDict(module, path)
self._module_name = module
self._multi = multiword
@classmethod
def search_terms_to_regex(cls, search_terms, case_sensitive,
word_bound='\\\\b', extra_space='',
sloppy=False, is_regex=False):
""" Build a regular expression from the search_terms to match a verse
in the Bible.
"""
# Set the flags for the regular expression.
flags = re.I if not case_sensitive else 0
if is_regex:
reg_str = search_terms
info_print('\nUsing regular expression: %s\n' % reg_str, tag=2)
try:
return re.compile(reg_str, flags)
except Exception as err:
print("An error occured while compiling the highlight "
"regular expression %s: %s." % (reg_str, err),
" There will be no highlighting.\n", file=sys.stderr)
return re.compile(r'')
# This will skip words.
not_words_str = r'\b\w+\b'
# This will skip Strong's Numbers.
not_strongs_str = r'<[^>]*>'
        # This will skip Morphological Tags.
not_morph_str = r'\{[^\}]*\}'
# This will skip all punctuation. Skipping ()'s is a problem for
# searching Morphological Tags, but it is necessary for the
# parenthesized words. May break highlighting.
not_punct_str = r'[\s,\?\!\.;:\\/_\(\)\[\]"\'-]'
# This will skip ansi color.
not_color_str = r'\033\[[\d;]*m'
# Match all *'s
star_regx = re.compile(r'\*')
# Hold the string that fills space between search terms.
space_str = ''
        # Set the *'s aside so we can replace them with '\w*' later.
temp_str, word_count = star_regx.subn(r'_star_', search_terms)
# Hack to get rid of unwanted characters.
temp_str = cls._non_alnum_regx.sub(' ', temp_str).split()
temp_str = ' '.join(temp_str)
# Phrases will have spaces in them
phrase = bool(cls._whitespace_regx.search(temp_str))
# Escape the morphological tags, and also find how many there are.
temp_str, morph_count = cls._morph_regx.subn(cls._escape_morph,
temp_str)
# Make all Strong's Numbers uppercase, also find how many there are.
temp_str, strongs_count = cls._strongs_regx.subn(cls._fix_strongs,
temp_str)
# Select all words.
#repl = '(\\\\b\\1\\\\b)'
# This works:
# temp_str, word_count = \
# cls._word_regx.subn('{0}(\\1){0}'.format(word_bound), temp_str)
repl = '(?:{0}(\\1){0})'.format(word_bound)
temp_str, word_count = cls._word_regx.subn(repl, temp_str)
# Replace what used to be *'s with '\w*'.
temp_str = temp_str.replace('_star_', '\w*')
# All the Strong's and Morphology were changed in the previous
# substitution, so if that number is greater than the number of
# Strong's plus Morphology then there were words in the search terms.
# I do this because I don't know how to only find words.
words_found = (strongs_count + morph_count) < word_count
if phrase:
# Build the string that is inserted between the items in the
# search string.
space_str = r'(?:%s%s' % (not_punct_str, extra_space)
if not bool(strongs_count) or sloppy:
# Skip over all Strong's Numbers.
space_str = r'%s|%s' % (space_str, not_strongs_str)
if not bool(morph_count) or sloppy:
# Skip all Morphological Tags.
space_str = r'%s|%s' % (space_str, not_morph_str)
if not words_found or bool(morph_count) or bool(strongs_count) or \
sloppy:
# Skip words. If word attributes are in the search we can
# skip over words and still keep it a phrase.
space_str = r'%s|%s' % (space_str, not_words_str)
# Finally make it not greedy.
space_str = r'%s)*?' % space_str
else:
space_str = ''
# Re-combine the search terms with the regular expression string
# between each element.
reg_str = space_str.join(temp_str.split())
info_print('\nUsing regular expression: %s\n' % reg_str, tag=2)
try:
return re.compile(reg_str, flags)
except Exception as err:
print("An error occured while compiling the highlight "
"regular expression %s: %s." % (reg_str, err),
" There will be no highlighting.\n", file=sys.stderr)
return re.compile(r'')
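    # For example (illustrative): a plain phrase such as 'in the' compiles
    # to a pattern matching both words in order with punctuation (and, when
    # sloppy=True, Strong's Numbers and Morphological Tags) allowed between
    # them.  The exact pattern is printed when the verbosity tag is >= 2.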
def _sorted_iter(self, verse_ref_set):
""" Returns an iterator over a sorted version of verse_ref_set.
"""
# Speed up the iteration by first sorting the range.
return iter(sorted(verse_ref_set, key=sort_key))
def _clean_text(self, text):
""" Return a clean (only alphanumeric) text of the provided string.
"""
        # Do we have to use two regular expressions to do this?
# Replace all non-alphanumeric characters with a space.
temp_text = self._non_alnum_regx.sub(' ', text)
# Replace one or more spaces with one space.
clean_text = self._fix_regx.sub(' ', temp_text)
return clean_text.strip()
def _fix_strongs_morph(self, search_terms):
""" Make any Strong's or Morphology uppercase, put parenthesis around
the Morphological Tags, and put <>'s around the Strong's Numbers.
"""
        # Capitalize all strongs numbers and make sure they are in <>'s.
temp_str = self._strongs_regx.sub(self._fix_strongs, search_terms)
        # Capitalize all morphological tags and make sure they are in
        # braces.
temp_str = self._morph_regx.sub(self._fix_morph, temp_str)
return temp_str
def _process_search(func):
""" Returns a wrapper function that processes the search terms, calls
the wrapped function, and, if applicable, confines the resulting verse
set to a range.
"""
@wraps(func)
def wrapper(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" Process the search terms according to the wrapped functions
requirements, then apply the range, if given, to the returned set
of verses.
"""
if func.__name__ in ['sword_search']:
if not Sword:
print("Sword library not found.")
return
if not isinstance(search_terms, str):
# Combine the terms for use by the different methods.
search_terms = ' '.join(search_terms)
# Get a valid set of verse references that conform to the passed
# range.
range_set = parse_verse_range(range_str)
if func.__name__ not in ['regex_search', 'partial_word_search']:
# Try to catch and fix any Strong's Numbers or Morphological
# Tags.
search_terms = self._fix_strongs_morph(search_terms)
# Regular expression and combined searches get the search terms as
# they were passed.
if func.__name__ in ['multiword_search', 'anyword_search',
'phrase_search', 'mixed_phrase_search']:
# Get rid of any non-alphanumeric or '-' characters from
# the search string.
search_str = self._clean_text(search_terms).strip()
if strongs or morph:
# Strong's numbers and Morphological tags are all
# uppercase. This is only required if the Morphological
# Tags were not surrounded by parenthesis.
search_str = search_str.upper().strip()
else:
search_str = search_terms
# Get the set of found verses.
found_set = func(self, search_str, strongs, morph, added,
case_sensitive, range_set)
# The phrase, regular expression, and combined searches apply the
            # range before searching, so only the multi-word, any-word, and
            # partial-word searches have it applied here.
if func.__name__ in ['multiword_search', 'anyword_search',
'partial_word_search']:
if range_set:
found_set.intersection_update(range_set)
return found_set
# Return wrapper function.
return wrapper
@_process_search
def combined_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" combined_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str=''): ->
Perform a combined search. Search terms could be
'created NOT (and OR but)' and it would find all verses with the word
'created' in them and remove any verse that had either 'and' or 'but.'
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for '%s'..." % search_terms, tag=1)
# Process the search_terms.
arg_parser = CombinedParse(search_terms)
# Get the list of words and/or phrases to include.
word_list = arg_parser.word_list
# Get the list of words and/or phrases to NOT include.
not_list = arg_parser.not_list
phrase_search = self.phrase_search
multiword_search = self.multiword_search
def combine_proc(str_list):
""" Performs combined search on the strings in str_list, and
returns a set of references that match.
"""
and_it = False
temp_set = set()
for word in str_list:
# A '+' before or after a word means it should have a phrase
# search done on it and the words with it.
if '+' in word:
# Do a phrase search on the word string.
result_set = phrase_search(word.replace('+', ' '), strongs,
morph, case_sensitive,
range_str)
elif word == '&':
# Combine the next search results with this one.
and_it = True
continue
else:
# Do a multi-word search on the word string.
result_set = multiword_search(word, strongs, morph,
case_sensitive, range_str)
if and_it:
# The previous word said to find verses that match both.
temp_set.intersection_update(result_set)
and_it = False
else:
# Only keep the verses that have either one group or the
# other but not both.
temp_set.symmetric_difference_update(result_set)
return temp_set
# Remove any verses that have the NOT words in them.
found_set = combine_proc(word_list).difference(combine_proc(not_list))
return found_set
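    # Usage sketch (assumed interactive use, restating the docstring
    # example above):
    #     s = Search()
    #     refs = s.combined_search('created NOT (and OR but)')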
@_process_search
def combined_phrase_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" combined_phrase_search(self, search_terms, strongs=False,
morph=False, case_sensitive=False, range_str=''): ->
Perform a combined phrase search. Search terms could be
'created NOT (and AND but)' and it would find all verses with the word
'created' in them and remove any verse that had the phrase 'and but.'
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for '%s'..." % search_terms, tag=1)
# Process the search_terms.
arg_parser = CombinedParse(search_terms)
# Get the list of words and/or phrases to include.
word_list = arg_parser.word_list
# Get the list of words and/or phrases to NOT include.
not_list = arg_parser.not_list
phrase_search = self.phrase_search
def combine_proc(str_list):
""" Performs combined phrase search on the strings in str_list, and
returns a set of references that match.
"""
temp_set = set()
for word in str_list:
# Do a phrase search on the word string.
result_set = phrase_search(word.replace('+', ' '), strongs,
morph, case_sensitive,
range_str)
# Include all the verses that have any of the word groups.
temp_set.update(result_set)
return temp_set
# Remove any verses that have the NOT words in them.
found_set = combine_proc(word_list).difference(combine_proc(not_list))
return found_set
@_process_search
def multiword_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" multiword_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a multiword search using the search_terms.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with all these words "
"'%s'..." % ', '.join(search_terms.split()), tag=1)
# All that needs to be done is find all references with all the
# searched words in them.
found_set = self._index_dict.value_intersect(search_terms.split(),
case_sensitive)
return found_set
@_process_search
def eitheror_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" eitheror_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a search returning any verse with one and only one of the terms
searched for.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with one and not all of these words "
"'%s'..." % ', '.join(search_terms.split()), tag=1)
# Any verse with one and only one of the searched words.
found_set = self._index_dict.value_sym_diff(search_terms.split(),
case_sensitive)
return found_set
@_process_search
def anyword_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" anyword_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a search returning any verse with one or more of the search
terms.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with any of these words "
"'%s'..." % ', '.join(search_terms.split()), tag=1)
# Any verse with one or more of the searched words.
found_set = self._index_dict.value_union(search_terms.split(),
case_sensitive)
return found_set
@_process_search
def partial_word_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" partial_word_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a search returning any verse with one or more words matching
        the partial words given in the search terms.  Partial words are marked
        with *'s (e.g. '*guil*' will match any word with 'guil' in it such as
        'guilt' or 'beguile.')
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with any of these partial words "
"'%s'..." % ', '.join(search_terms.split()), tag=1)
#found_set = self._index_dict.value_union(
#self._words_from_partial(search_terms, case_sensitive),
#case_sensitive)
search_list = search_terms.split()
found_set = self._index_dict.from_partial(search_list, case_sensitive)
return found_set
def _words_from_partial(self, partial_word_list, case_sensitive=False):
""" Search through a list of partial words and yield words that match.
"""
flags = re.I if not case_sensitive else 0
# Split the search terms and search through each word key in the index
# for any word that contains the partial word.
word_list = partial_word_list.split()
for word in self._index_dict['_words_']:
for partial_word in word_list:
# A Regular expression that matches any number of word
# characters for every '*' in the term.
reg_str = '\\b%s\\b' % partial_word.replace('*', '\w*')
try:
word_regx = re.compile(reg_str, flags)
except Exception as err:
print('There is a problem with the regular expression '
'%s: %s' % (reg_str, err), file=sys.stderr)
exit()
if word_regx.match(word):
yield word
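    # For example (illustrative): the partial term '*guil*' becomes the
    # pattern r'\b\w*guil\w*\b', which matches words such as 'guilt',
    # 'guile', and 'beguiled'.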
def _process_phrase(func):
""" Returns a wrapper function for wrapping phrase like searches.
"""
@wraps(func)
def wrapper(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" Gets a regular expression from the wrapped function, then
builds a set of verse references to search, finally it calls the
searching function with the regular expression and the verse
reference iterator, and returns the resulting set of references.
"""
search_regx = func(self, search_terms, strongs, morph, added,
case_sensitive, range_str)
# First make sure we are only searching verses that have all the
# search terms in them.
search_list = search_terms.split()
if '*' in search_terms:
ref_set = self._index_dict.from_partial(search_list,
case_sensitive,
common_limit=5000)
else:
ref_set = self._index_dict.value_intersect(search_list,
case_sensitive)
if range_str:
# Only search through the supplied range.
ref_set.intersection_update(range_str)
# No need to search for a single word phrase.
if len(search_terms.split()) == 1:
return ref_set
# Sort the list so it may be a little faster. Only needed if we're
# using the sword module to look them up.
ref_iter = self._sorted_iter(ref_set)
# Disable Strong's and Morphological if only words are used.
strongs = bool(self._strongs_regx.search(search_terms))
morph = bool(self._morph_regx.search(search_terms))
return self.find_from_regex(ref_iter, search_regx, strongs, morph)
return wrapper
@_process_search
@_process_phrase
def ordered_multiword_search(self, search_terms, strongs=False,
morph=False, added=True, case_sensitive=False,
range_str=''):
""" ordered_multiword_search(self, search_terms, strongs=False,
morph=False, case_sensitive=False, range_str='') ->
Perform an ordered multiword search. Like a multiword search, but all
the words have to be in order.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with these words in order "
"'%s'..." % search_terms, tag=1)
return self.search_terms_to_regex(search_terms, case_sensitive,
sloppy=True)
@_process_search
@_process_phrase
def phrase_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" phrase_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a phrase search.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with this phrase "
"'%s'..." % search_terms, tag=1)
# Make all the terms the same case if case doesn't matter.
flags = re.I if not case_sensitive else 0
if strongs:
# Match strongs phrases.
search_reg_str = search_terms.replace(' ', r'[^<]*')
elif morph:
# Match morphological phrases.
search_reg_str = search_terms.replace(' ', r'[^\{]*')
else:
# Match word phrases
search_reg_str = '\\b%s\\b' % search_terms.replace(' ',
r'\b(<[^>]*>|\{[^\}]*\}|\W)*\b')
# Make a regular expression from the search terms.
return re.compile(search_reg_str, flags)
@_process_search
@_process_phrase
def mixed_phrase_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" mixed_phrase_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a phrase search.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for verses with this phrase "
"'%s'..." % search_terms, tag=1)
# Make a regular expression from the search terms.
return self.search_terms_to_regex(search_terms, case_sensitive)
@_process_search
def regex_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" regex_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a regular expression search.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
info_print("Searching for regular expression '%s'..." % search_terms,
tag=1)
# re.I is case insensitive.
flags = re.I if not case_sensitive else 0
try:
# Make a regular expression from the search_terms.
search_regx = re.compile(r'%s' % search_terms, flags)
except Exception as err:
print('There is a problem with the regular expression "%s": %s' % \
(search_terms, err), file=sys.stderr)
exit()
if range_str:
# Only search through the supplied range.
ref_iter = self._sorted_iter(range_str)
else:
# Search the entire Bible.
ref_iter = VerseIter('Genesis 1:1')
return self.find_from_regex(ref_iter, search_regx, strongs, morph,
tag=1, try_clean=True)
def find_from_regex(self, ref_iter, search_regex, strongs=False,
morph=False, added=True, tag=3, try_clean=False):
""" Iterates through all the verses in the ref iter iterator and
returns a list of verses whose text matches search_regx.
"""
# Get an iterator that will return tuples
# (verse_reference, verse_text).
verse_iter = IndexedVerseTextIter(ref_iter, strongs=strongs,
morph=morph, added=added,
module=self._module_name)
found_set = set()
for verse_ref, verse_text in verse_iter:
info_print('\033[%dD\033[KSearching...%s' % \
(len(verse_ref) + 20, verse_ref), end='', tag=tag)
# Search for matches in the verse text.
if search_regex.search(verse_text):
found_set.add(verse_ref)
elif try_clean and not strongs and not morph:
                # Should we do this or should we trust the user knows what
                # punctuation is in the verses?
clean_verse_text = self._clean_text(verse_text)
if search_regex.search(clean_verse_text):
found_set.add(verse_ref)
info_print("...Done.", tag=tag)
return found_set
def mixed_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" mixed_search(self, search_terms, strongs=False, morph=False,
case_sensitive=False, range_str='') ->
Perform a mixed search.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
added - Search in the added text (i.e. italics).
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
"""
found_set = set()
not_set = set()
and_set = set()
or_set = set()
xor_set = set()
combine_dict = {
'!': not_set.update,
'+': and_set.intersection_update,
'|': or_set.update,
'^': xor_set.symmetric_difference_update,
}
for term in search_terms:
if term[0] in '!+^|':
# Set the correct combining function, and cleanup the item.
if term[0] == '+' and not and_set:
# All of these verses go in the output.
combine_func = and_set.update
else:
combine_func = combine_dict[term[0]]
term = term[1:]
else:
if self._multi and found_set:
# If multiword is default and found_set is not empty
# make all search terms appear in the output.
combine_func = found_set.intersection_update
else:
# Any of these verses could be in the output
combine_func = found_set.update
if term.startswith('&'):
# Allow regular expression searching.
term = term[1:]
search_func = self.regex_search
elif ' ' in term:
# Search term is a quoted string, so treat it like a phrase.
if term.startswith('~'):
# ~'s trigger ordered multiword or sloppy phrase search.
term = term[1:]
search_func = self.ordered_multiword_search
else:
search_func = self.mixed_phrase_search
elif '*' in term:
# Search for partial words.
search_func = self.partial_word_search
else:
# A single word should be (multi/any)-word.
search_func = self.multiword_search
# Perform a strongs search.
strongs = bool(self._strongs_regx.match(term.upper()))
            # Perform a morphological search.
morph = bool(self._morph_regx.match(term.upper()))
# Search for words or phrases.
temp_set = search_func(term, strongs, morph, added, case_sensitive,
range_str)
# Add the results to the correct set.
combine_func(temp_set)
# Update the result set.
found_set.update(or_set)
found_set.update(xor_set)
if and_set and found_set:
# Make sure all the verses that are in the output have the words
            # or phrases that had a '+' in front of them.
found_set = and_set.union(found_set.intersection(and_set))
elif and_set:
# Found set must be empty to fill it with and_set's contents.
found_set.update(and_set)
# Finally remove all the verses that are in the not_set.
found_set.difference_update(not_set)
return found_set
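    # Sketch of the term prefixes handled above (assumed interactive use):
    #     s = Search()
    #     s.mixed_search(['light', '!dark'])     # 'light' but not 'dark'
    #     s.mixed_search(['~in the beginning'])  # '~' -> ordered/sloppy phrase
    #     s.mixed_search(['&\\bG\\d+\\b'])       # '&' marks a regex term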
def sword_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str='',
search_type='lucene'):
""" sword_search(self, search_terms, strongs=False, morph=False,
        case_sensitive=False, range_str='', search_type='lucene') ->
Use the sword module to search for the terms.
search_terms - Terms to search for.
strongs - Search for Strong's Number phrases.
morph - Search for Morphological Tag phrases.
case_sensitive - Perform a case sensitive search.
range_str - A verse range to limit the search to.
search_type - What search type to use.
"""
search_terms = ' '.join(search_terms)
info_print("Searching using the Sword library for "
"'%s'..." % search_terms, tag=1)
found_set = set()
search_type_dict = {
'regex': 0,
'phrase': -1,
'multiword': -2,
'entryattrib': -3, # (e.g. Word//Lemma//G1234)
'lucene': -4
}
try:
# Render the text as plain.
markup = Sword.MarkupFilterMgr(Sword.FMT_PLAIN)
# Don't own this or it will crash.
markup.thisown = False
mgr = Sword.SWMgr(markup)
# Load the module.
module = mgr.getModule(self._module_name)
# Set the search type based on the search_type argument.
search_type = search_type_dict.get(search_type.lower(), -4)
# Make sure we can search like this.
if not module.isSearchSupported(search_terms, search_type):
print("Search not supported", file=sys.stderr)
                return found_set
# Get the range key.
if not range_str:
range_str = 'Genesis-Revelation'
range_k = Sword.VerseKey().parseVerseList(range_str, 'Genesis 1:1',
True)
flags = re.I if not case_sensitive else 0
if strongs:
# Search for strongs numbers.
# I don't know how to search for morphological tags using
                # Sword's search function.
prefix = 'lemma:'
for term in ','.join(search_terms.split()).split(','):
if not term.startswith('lemma:'):
# Make the term start with lemma: so sword will find
# it.
term = '%s%s' % (prefix, term)
# Perform the search.
resource = module.doSearch(term, search_type, flags,
range_k)
# Get the list of references from the range text.
found_set.update(resource.getRangeText().split('; '))
else:
# Perform the search.
resource = module.doSearch(search_terms, search_type, flags,
range_k)
# Get the list of references from the range text.
found_set.update(resource.getRangeText().strip().split('; '))
except Exception as err:
print("There was a problem while searching: %s" % err,
file=sys.stderr)
found_set.discard('')
return found_set
@_process_search
def test_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" A Test.
"""
ref_set = self._index_dict.value_union(search_terms.split(),
case_sensitive)
if range_str:
# Only search through the supplied range.
ref_set.intersection_update(range_str)
ref_list = sorted(ref_set, key=sort_key)
term_dict = defaultdict(list)
raw_dict = RawDict(iter(ref_list), self._module_name)
words_len = 0
for verse_ref, (verse_text, verse_dict) in raw_dict:
for term in search_terms.split():
if self._strongs_regx.match(term):
num = self._strongs_regx.sub('\\1', term)
words = set(verse_dict[num.upper()])
if words:
term_dict[num.upper()].append({verse_ref: words})
elif self._morph_regx.match(term):
tag = self._morph_regx.sub('\\1', term)
words = set(verse_dict[tag.upper()])
if words:
term_dict[tag.upper()].append({verse_ref: words})
else:
for key, value in verse_dict['_words'][0].items():
if ' %s ' % term.lower() in ' %s ' % key.lower():
attr_dict = value[0]
if strongs and 'strongs' in attr_dict:
attr_list = attr_dict['strongs']
attr_list.append(key)
term_dict[term].append({verse_ref: attr_list})
if morph and 'morph' in attr_dict:
attr_list = attr_dict['morph']
attr_list.append(key)
words_len = max(len(attr_list), words_len)
term_dict[term].append({verse_ref: attr_list})
len_longest_ref = len(max(ref_set, key=len))
for key, value in term_dict.items():
words_len = max([len(i) for d in value for i, v in d.items()])
print('%s:' % key)
for dic in value:
ref, words = tuple(dic.items())[0]
if isinstance(words, list):
w_str = '"%s"' % '", "'.join(words[:-1])
l_str = '"%s"' % words[-1]
words_str = '{0:{2}}: {1}'.format(w_str, l_str, words_len)
else:
words_str = '"%s"' % '", "'.join(words)
print('\t{0:{1}}: {2}'.format(ref, len_longest_ref, words_str))
#print('\t{0:{1}}: "{2}"'.format(ref, len_longest_ref,
# '", "'.join(words)))
exit()
@_process_search
def test2_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" A Test.
"""
ref_set = self._index_dict.value_union(search_terms.split(),
case_sensitive)
if range_str:
# Only search through the supplied range.
ref_set.intersection_update(range_str)
ref_iter = iter(sorted(ref_set, key=sort_key))
# Get an iterator that will return tuples
# (verse_reference, verse_text).
verse_iter = IndexedVerseTextIter(ref_iter, strongs=True,
morph=morph, added=added,
module=self._module_name)
# This will skip words.
not_words_str = r'\b\w+\b'
# This will skip Strong's Numbers.
not_strongs_str = r'<[^>]*>'
        # This will skip Morphological Tags.
not_morph_str = r'\{[^\}]*\}'
# This will skip all punctuation. Skipping ()'s is a problem for
# searching Morphological Tags, but it is necessary for the
# parenthesized words. May break highlighting.
not_punct_str = r'[\s,\?\!\.;:\\/_\(\)\[\]"\'-]'
max_ref_len = len(max(ref_set, key=len))
found_set = set()
term_dict = defaultdict(list)
for verse_ref, verse_text in verse_iter:
for term in search_terms.split():
if self._strongs_regx.match(term):
test_regx = re.compile(r'''
\s
((?:\b\w+\b|[\s,\?\!\.;:\\/_\(\)\[\]"\'-])+)
\s
((?:%s)+)
''' % term, re.I | re.X)
elif self._morph_regx.match(term):
test_regx = re.compile(r'''
\s((?:\b\w+\b|[\s,\?\!\.;:\\/_\(\)\[\]"\'-])+)
(?:<[^>]*>|\s)+
((?:%s)+)
''' % term, re.I | re.X)
else:
test_regx = re.compile(r'''
((?:\b\w+\b|[\s,\?\!\.;:\\/_\(\)\[\]"\'-])*?
%s
(?:\b\w+\b|[\s,\?\!\.;:\\/_\(\)\[\]"\'-])+)+
((?:<[^>]*>|\{[^\}]*\}|\s)+)
''' % term, re.I | re.X)
for match in test_regx.finditer(verse_text):
phrase, num = match.groups()
phrase = phrase.strip(',').strip('.').strip()
phrase = phrase.strip(';').strip('?').strip(':').strip()
num = num.replace('<', '').replace('>', '')
num = num.replace('{', '').replace('}', '')
if not phrase or not num.strip():
if not strongs:
break
print(verse_ref, verse_text)
print(match.group(), match.groups())
exit()
num = '"%s"' % '", "'.join(num.split())
term_dict[term].append(
'\t{0:{1}}: {2:{4}}: "{3}"'.format(verse_ref,
max_ref_len,
num, phrase,
18)
)
for term, lst in term_dict.items():
term = term.replace('<', '').replace('>', '')
term = term.replace('{', '').replace('}', '')
print('%s:\n%s' % (term, '\n'.join(lst)))
exit()
@_process_search
def test3_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" A Test.
"""
ref_set = self._index_dict.value_union(search_terms.split(),
case_sensitive)
if range_str:
# Only search through the supplied range.
ref_set.intersection_update(range_str)
if not ref_set:
exit()
ref_iter = iter(sorted(ref_set, key=sort_key))
# Get an iterator that will return tuples
# (verse_reference, verse_text).
verse_iter = VerseTextIter(ref_iter, strongs=strongs,
morph=morph, render='raw',
module=self._module_name)
found_set = set()
strong_regx = re.compile(r'strong:([GH]\d+)', re.I)
morph_regx = re.compile(r'(?:Morph|robinson):([\w-]*)', re.I)
tag_regx = re.compile(r'''
([^<]*) # Before tag.
<(?P<tag>q|w|transChange|note) # Tag name.
([^>]*)> # Tag attributes.
([\w\W]*?)</(?P=tag)> # Tag text and end.
([^<]*) # Between tags.
''', re.I | re.X)
divname_regx = re.compile(r'''
(?:<seg>)?
<(?:divineName)>+
([^<]*?)
([\'s]*)
</(?:divineName)>
(?:</seg>)?
''', re.I | re.X)
xadded_regx = re.compile(r'<seg subType="x-added"[^>]*>([^<]*)</seg>',
re.I)
div_upper = lambda m: m.group(1).upper() + m.group(2)
marker_regx = re.compile(r'.*marker="(.)".*', re.I)
term_dict = defaultdict(list)
len_attrs = 0
for verse_ref, verse_text in verse_iter:
#print(render_raw(verse_text, strongs, morph))
#print(render_raw2(verse_text, strongs, morph))
#continue
for term in search_terms.split():
term = term.replace('<', '').replace('>', '')
term = term.replace('{', '').replace('}', '')
v_text = ''
info_print('%s\n' % verse_text, tag=4)
term_regx = re.compile('\\b%s\\b' % term, re.I)
for match in tag_regx.finditer(verse_text):
opt, tag_name, tag_attr, tag_text, punct = match.groups()
tag_text = xadded_regx.sub('\\1', tag_text)
if match.re.search(tag_text):
match_list = match.re.findall(tag_text + punct)
else:
match_list = [match.groups()]
for tag_tup in match_list:
opt, tag_name, tag_attr, tag_text, punct = tag_tup
info_print(tag_tup, tag=4)
value_list = []
attr_list = []
strongs_list = []
morph_list = []
tag_text = divname_regx.sub(div_upper, tag_text)
v_text += marker_regx.sub('\\1 ', opt) + tag_text + \
punct
if term.upper() in tag_attr:
attr_list = [term.upper()]
elif term_regx.search(tag_text):
if strongs or not morph:
strongs_list = strong_regx.findall(tag_attr)
if morph:
morph_list = morph_regx.findall(tag_attr)
for lst in (strongs_list, morph_list, attr_list):
if lst:
attr_str = '%s"' % '", "'.join(lst)
value_list = [attr_str, tag_text.strip()]
term_dict[term].append({verse_ref: value_list})
len_attrs = max(len(attr_str), len_attrs)
info_print(v_text, tag=4)
max_len_ref = len(max(ref_set, key=len))
for term, lst in term_dict.items():
print('%s:' % term)
for dic in lst:
ref, (attrs, s) = list(dic.items())[0]
s_l = '{1:{0}}: "{2}'.format(len_attrs, attrs, s)
print('\t{0:{1}}: "{2}"'.format(ref, max_len_ref, s_l))
exit()
@_process_search
def test4_search(self, search_terms, strongs=False, morph=False,
added=True, case_sensitive=False, range_str=''):
""" A Test.
"""
ref_set = self._index_dict.value_union(search_terms.split(),
case_sensitive)
if range_str:
# Only search through the supplied range.
ref_set.intersection_update(range_str)
if not ref_set:
exit()
ref_iter = iter(sorted(ref_set, key=sort_key))
# Get an iterator that will return tuples
# (verse_reference, verse_text).
verse_iter = VerseTextIter(ref_iter, strongs=strongs,
morph=morph, render='raw',
module=self._module_name)
found_set = set()
strong_regx = re.compile(r'strong:([GH]\d+)', re.I)
morph_regx = re.compile(r'(?:Morph|robinson):([\w-]*)', re.I)
tag_regx = re.compile(r'''
([^<>]*) # Before tag.
<(?P<tag>seg|q|w|transChange|note|title)# Tag name.
([^>]*)> # Tag attributes.
([\w\W]*?)</(?P=tag)> # Tag text and end.
([^<]*) # Between tags.
''', re.I | re.X)
divname_regx = re.compile(r'''
<(?:divineName)>
([^<]*?)
([\'s]*)
</(?:divineName)>
''', re.I | re.X)
div_upper = lambda m: m.group(1).upper() + m.group(2)
marker_regx = re.compile(r'.*marker="(.)".*', re.I)
term_dict = defaultdict(list)
len_attrs = 0
def recurse_tag(text, term, verse_ref, ctag_attr=''):
""" Recursively parses raw verse text using regular expressions,
and a list of dictionaries of the search term and any attributes
with its text.
"""
term_list = []
for match in tag_regx.finditer(text):
value_list = []
attr_list = []
strongs_list = []
morph_list = []
opt, tag_name, tag_attr, tag_text, punct = match.groups()
if match.re.search(tag_text):
term_list.extend(recurse_tag(tag_text, term, verse_ref,
tag_attr))
else:
info_print((opt, tag_name, tag_attr, tag_text, punct),
tag=4)
if marker_regx.match(opt):
opt = ''
tag_text = opt + divname_regx.sub(div_upper,
tag_text) + punct
if term.upper() in tag_attr or term.upper() in ctag_attr:
attr_list = [term.upper()]
elif term_regx.search(tag_text):
if strongs or not morph:
strongs_list.extend(strong_regx.findall(tag_attr))
strongs_list.extend(strong_regx.findall(ctag_attr))
if morph:
morph_list.extend(morph_regx.findall(tag_attr))
morph_list.extend(morph_regx.findall(ctag_attr))
for lst in (strongs_list, morph_list, attr_list):
if lst:
a_str = '%s"' % '", "'.join(lst)
value_list = [a_str, tag_text.strip()]
term_list.append({verse_ref: value_list})
return term_list
for verse_ref, verse_text in verse_iter:
#print(render_raw(verse_text, strongs, morph))
#print(render_raw2(verse_text, strongs, morph))
#continue
for term in search_terms.split():
term = term.replace('<', '').replace('>', '')
term = term.replace('{', '').replace('}', '')
v_text = ''
info_print('%s\n' % verse_text, tag=4)
term_regx = re.compile('\\b%s\\b' % term, re.I)
value_list = recurse_tag(verse_text, term, verse_ref)
if value_list:
for i in value_list:
len_attrs = max(len(i[verse_ref][0]), len_attrs)
term_dict[term].extend(value_list)
max_len_ref = len(max(ref_set, key=len))
for term, lst in term_dict.items():
print('%s:' % term)
for dic in lst:
ref, (attrs, s) = list(dic.items())[0]
s_l = '{1:{0}}: "{2}'.format(len_attrs, attrs, s)
print('\t{0:{1}}: "{2}"'.format(ref, max_len_ref, s_l))
return set()
concordance_search = test4_search
class SearchCmd(Cmd):
""" A Command line interface for searching the Bible.
"""
def __init__(self, module='KJV'):
""" Initialize the settings.
"""
        self.prompt = '\001\033[33m\002search\001\033[m\002> '
self.intro = '''
%s Copyright (C) 2011 Josiah Gordon <[email protected]>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
This is a Bible search program that searches the KJV
sword module. If you need help type 'help' to display a list of valid
commands. For help on a specific command type 'help <command>.'
Examples:
mixed 'jordan h03383' (Finds all verses with Strong's number 'H03383'
translated 'Jordan')
concordance live (Lists the references of all the verses with
the word 'live' in them, the Strong's number
that was used, and what the phrase is that
that Strong's number is translated as.)
concordance h02418 (Lists the references of all the verses with
the Strong's number 'H02418' and how it was
                            translated. It only occurs six times and all
of them are in Daniel.)
strongs h02418 (Looks up and gives the definition of the
Strong's number 'H02418.')
set range gen-mal (Sets the range to the Old Testament.)
Just about everything has tab-completion, so you can hit tab a couple
of times to see all the completions to what you are typing.
If you want to see this intro again type: 'intro'
To find out more type 'help'
(example: 'help search' will list the help for the search command.)
To exit type 'quit' or hit 'CTRL+D'
''' % os.path.basename(argv[0])
super(SearchCmd, self).__init__()
self._quoted_regex = re.compile('''
((?P<quote>'|")
.*?
(?P=quote)|[^'"]*)
''', re.X)
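        # Note: findall() alternates quoted phrases with unquoted runs, so
        # e.g. "jordan 'in the'" keeps the phrase 'in the' intact while the
        # bare words are split on whitespace later in _get_list().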
# Perform the specified search.
self._search = Search(module=module)
self._results = set()
self._search_list = []
self._highlight_list = []
self._words = self._search._index_dict['_words_']
self._strongs = self._search._index_dict['_strongs_']
self._morph = self._search._index_dict['_morph_']
self._book_list = list(book_gen())
self._setting_dict = {
'search_type': 'mixed',
'search_strongs': False,
'search_morph': False,
'case_sensitive': False,
'context': 0,
'one_line': False,
'show_notes': False,
'show_strongs': False,
'show_morph': False,
'added': True,
'range': '',
'extras': (),
'module': module,
}
self._search_types = ['mixed', 'mixed_phrase', 'multiword', 'anyword',
'combined', 'partial_word', 'ordered_multiword',
'regex', 'eitheror', 'sword_lucene',
'sword_phrase', 'sword_multiword',
'sword_entryattrib']
def _complete(self, text, line, begidx, endidx, complete_list):
""" Return a list of matching text.
"""
retlist = [i for i in complete_list if i.startswith(text)]
if not retlist:
# If nothing was found try words that contain the text.
retlist = [i for i in complete_list if text in i]
if not retlist:
# Finally try matching misspelled words.
retlist = get_close_matches(text, complete_list, cutoff=0.7)
return retlist
def _get_list(self, args):
""" Split the args into quoted strings and seperate words.
"""
arg_list = []
# Split the arg string into quoted phrases and single words.
for i, c in self._quoted_regex.findall(args):
if c in ['"', "'"]:
arg_list.append(i.strip(c))
else:
arg_list.extend(i.split())
return arg_list
def do_test(self, args):
""" A Test.
"""
quoted_regex = re.compile('''((?P<quote>'|").*?(?P=quote)|[^'"]*)''')
print(quoted_regex.findall(args))
print(self._get_list(args))
def _print(self, text_iter):
""" Print all the text breaking it and screens so the user can read it
all.
"""
count = 0
for verse in text_iter:
count += len(verse.splitlines()) if '\n' in verse else 1
print(verse)
if count >= screen_size()[0] - 4:
count = 0
try:
input('[Press enter to see more, or CTRL+D to end.]')
                    print('\033[1A\033[K', end='')
                except EOFError:
                    print('\033[G\033[K', end='')
break
def precmd(self, line):
""" Set the correct settings before running the line.
"""
if not line:
return line
cmd = line.split()[0]
if cmd in self._search_types:
search_type = cmd
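            # e.g. 'sword_lucene' -> extras = ('lucene',) and search_type
            # 'sword' (the [:5] slice keeps just the 'sword' prefix).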
if search_type.startswith('sword_'):
self._setting_dict['extras'] = (search_type[6:],)
search_type = search_type[:5]
else:
self._setting_dict['extras'] = ()
self._setting_dict['search_type'] = search_type
return line
def postcmd(self, stop, line):
""" If lookup was called then show the results.
"""
if not line:
return stop
cmd = line.split()[0]
if cmd == 'lookup':
self.onecmd('show_results')
return stop
def completedefault(self, text, line, begidx, endidx):
""" By default complete words in the Bible.
"""
words_list = self._words
return self._complete(text, line, begidx, endidx, words_list)
def do_shell(self, args):
""" Execute shell commands.
"""
os.system(args)
def do_concordance(self, args):
""" Perform a concordance like search.
"""
if not args:
return
arg_list = self._get_list(args)
# Search.
strongs_search = self._setting_dict['search_strongs']
morph_search = self._setting_dict['search_morph']
search_range = self._setting_dict['range']
case_sensitive = self._setting_dict['case_sensitive']
search_added = self._setting_dict['added']
self._search.test4_search(arg_list, strongs_search, morph_search,
search_added, case_sensitive, search_range)
def do_show(self, args):
""" Show relevent parts of the GPL.
"""
if args.lower() in ['c', 'copying']:
# Show the conditions.
print(copying_str)
elif args.lower() in ['w', 'warranty']:
# Show the warranty.
print(warranty_str)
else:
# Show the entire license.
print('%s%s' % (copying_str, warranty_str))
def do_EOF(self, args):
""" Exit when eof is recieved.
"""
return True
def do_quit(self, args):
""" Exit.
"""
return True
def do_help(self, args):
""" Print the help.
"""
if args:
try:
self._print(getattr(self, 'do_%s' % args).__doc__.splitlines())
return
            except AttributeError:
pass
super(SearchCmd, self).do_help(args)
def do_intro(self, args):
""" Re-print the intro screen.
"""
self._print(self.intro.splitlines())
def complete_show_results(self, text, line, begidx, endidx):
""" Tab completion for the show_results command.
"""
cmd_list = ['strongs', 'morph', 'notes', 'one_line']
return self._complete(text, line, begidx, endidx, cmd_list)
def do_show_results(self, args):
""" Output the results.
Print out all the verses that were either found by searching or by
lookup.
Extra arguments:
+/-strongs - Enable/disable strongs in the output.
+/-morph - Enable/disable morphology in the output
+/-notes - Enable/disable foot notes in the output.
+/-added - Enable/disable added text in the output.
+/-one_line - Enable/disable one line output.
anything else - If the output is from looking up verses with
the lookup command, then any other words or
quoted phrases given as arguments will be
highlighted in the output.
"""
search_type = self._setting_dict['search_type']
strongs_search = self._setting_dict['search_strongs']
morph_search = self._setting_dict['search_morph']
search_range = self._setting_dict['range']
case_sensitive = self._setting_dict['case_sensitive']
search_added = self._setting_dict['added']
module_name = self._setting_dict['module']
highlight_list = self._highlight_list
kwargs = self._setting_dict
results = self._results
# Get the output arguments.
show_strongs = self._setting_dict['show_strongs'] or strongs_search
show_morph = self._setting_dict['show_morph'] or morph_search
show_notes = self._setting_dict['show_notes']
one_line = self._setting_dict['one_line']
arg_list = self._get_list(args)
if '+strongs' in arg_list:
show_strongs = True
arg_list.remove('+strongs')
        if '+morph' in arg_list:
            show_morph = True
            arg_list.remove('+morph')
        if '-strongs' in arg_list:
            show_strongs = False
            arg_list.remove('-strongs')
        if '-morph' in arg_list:
            show_morph = False
            arg_list.remove('-morph')
        if '+notes' in arg_list:
            show_notes = True
            arg_list.remove('+notes')
        if '-notes' in arg_list:
            show_notes = False
            arg_list.remove('-notes')
        if '+one_line' in arg_list:
            one_line = True
            arg_list.remove('+one_line')
        if '-one_line' in arg_list:
            one_line = False
            arg_list.remove('-one_line')
        if '+added' in arg_list:
            search_added = True
            arg_list.remove('+added')
        if '-added' in arg_list:
            search_added = False
            arg_list.remove('-added')
if search_range:
results.intersection_update(parse_verse_range(search_range))
if not highlight_list:
# Highlight anything else the user typed in.
highlight_list = arg_list
# Don't modify regular expression searches.
if search_type != 'regex':
regx_list = build_highlight_regx(highlight_list, case_sensitive,
(search_type == 'ordered_multiword'))
if kwargs['context']:
regx_list.extend(build_highlight_regx(results, case_sensitive))
else:
arg_str = ' '.join(arg_list)
regx_list = [re.compile(arg_str, re.I if case_sensitive else 0)]
# Flags for the highlight string.
flags = re.I if not case_sensitive else 0
# Add the specified number of verses before and after to provide
# context.
context_results = sorted(add_context(results, kwargs['context']),
key=sort_key)
        # Get a formatted verse string generator.
verse_gen = render_verses_with_italics(context_results,
not one_line,
show_strongs, show_morph,
search_added,
show_notes,
highlight_search_terms,
module_name, regx_list,
highlight_text, flags)
if one_line:
# Print it all on one line.
print(' '.join(verse_gen))
else:
            # Print the verses on separate lines.
self._print(verse_gen)
#print('\n'.join(verse_gen))
def complete_lookup(self, text, line, begidx, endidx):
""" Try to complete Verse references.
"""
name_list = self._book_list
text = text.capitalize()
return self._complete(text, line, begidx, endidx, name_list)
def do_lookup(self, args):
""" Lookup the verses by references.
Example: lookup gen1:3-5;mal3 (Look up Genesis chapter 1 verses
3-5 and Malachi chapter 3.)
"""
self._results = parse_verse_range(args)
self._highlight_list = []
def complete_strongs(self, text, line, begidx, endidx):
""" Tabe complete Strong's numbers.
"""
text = text.capitalize()
return self._complete(text, line, begidx, endidx, self._strongs)
def do_strongs(self, numbers):
""" Lookup one or more Strong's Numbers.
strongs number,number,number....
"""
# Lookup all the Strong's Numbers in the argument list.
        # Normalize the list so space- and comma-separated numbers both work.
strongs_list = ','.join(numbers.upper().split()).split(',')
#TODO: Find what Strong's Modules are available and use the best,
# or let the user decide.
greek_strongs_lookup = Lookup('StrongsRealGreek')
hebrew_strongs_lookup = Lookup('StrongsRealHebrew')
for strongs_num in strongs_list:
# Greek Strong's Numbers start with a 'G' and Hebrew ones start
# with an 'H.'
if strongs_num.upper().startswith('G'):
mod_name = 'StrongsRealGreek'
else:
mod_name = 'StrongsRealHebrew'
print('%s\n' % mod_lookup(mod_name, strongs_num[1:]))
def complete_morph(self, text, line, begidx, endidx):
""" Tabe complete Morphological Tags.
"""
text = text.capitalize()
return self._complete(text, line, begidx, endidx, self._morph)
def do_morph(self, tags):
""" Lookup one or more Morphological Tags.
morph tag,tag,tag....
"""
# Lookup all the Morphological Tags in the argument list.
# I don't know how to lookup Hebrew morphological tags, so I
# only lookup Greek ones in 'Robinson.'
print('%s\n' % mod_lookup('Robinson', tags.upper()))
def do_websters(self, words):
""" Lookup one or more words in Websters Dictionary.
websters word,word,word...
"""
# Lookup words in the dictionary.
print('%s\n' % mod_lookup('WebstersDict', words))
def do_kjvd(self, words):
""" Lookup one or more words in the KJV Dictionary.
kjvd word,word,word...
"""
# Lookup words in the KJV dictionary.
print('%s\n' % mod_lookup('KJVD', words))
def do_daily(self, daily):
""" Display a daily devotional from 'Bagsters Daily light.'
daily date/today
Dates are given in the format Month.Day. The word 'today' is an alias
to today's date. The default is to lookup today's devotional.
"""
daily = 'today' if not daily else daily
# Lookup the specified daily devotional.
if daily.lower() == 'today':
# Today is an alias for today's date.
daily = strftime('%m.%d')
daily_lookup = Lookup('Daily')
# Try to make the output nicer.
print(daily_lookup.get_formatted_text(daily))
def complete_set(self, text, line, begidx, endidx):
""" Complete setting options.
"""
setting_list = self._setting_dict.keys()
return self._complete(text, line, begidx, endidx, setting_list)
def do_set(self, args):
""" Set settings.
Run without arguments to see the current settings.
set show_strongs = True/False - Enable strongs numbers in the
output.
set show_morph = True/False - Enable morphology in the output.
set context = <number> - Show <number> verses of context.
set case_sensitive = True/False - Set the search to case sensitive.
set range = <range> - Confine search/output to <range>.
set one_line = True/False - Don't break output at verses.
set added = True/False - Show/search added text.
set show_notes = True/False - Show foot-notes in output.
set search_type = <type> - Use <type> for searching.
set search_strongs = True/False - Search Strong's numbers
(deprecated).
set search_morph = True/False - Search Morphological Tags
(deprecated).
"""
if not args:
print("Current settings:\n")
max_len = len(max(self._setting_dict.keys(), key=len))
for setting, value in self._setting_dict.items():
if setting.lower() == 'range':
if not Sword:
value = VerseRange.parse_range(value)
value = '; '.join(str(i) for i in value)
else:
key = Sword.VerseKey()
range_list = key.parseVerseList(value, 'Genesis 1:1',
True, False)
value = range_list.getRangeText()
print('{1:{0}} = {2}'.format(max_len, setting, value))
print()
else:
for setting in args.split(';'):
if '=' in setting:
k, v = setting.split('=')
elif ' ' in setting:
k, v = setting.split()
else:
print(self._setting_dict.get(setting, ''))
continue
k = k.strip()
v = v.strip()
if isinstance(v, str):
if v.lower() == 'false':
v = False
elif v.lower() == 'true':
v = True
elif v.isdigit():
v = int(v)
self._setting_dict[k] = v
def complete_search(self, text, line, begidx, endidx):
""" Bible word completion to make searching easier.
"""
words_list = self._words
return self._complete(text, line, begidx, endidx, words_list)
complete_mixed = complete_search
complete_mixed_phrase = complete_search
complete_multiword = complete_search
complete_anyword = complete_search
complete_combined = complete_search
complete_partial_word = complete_search
complete_ordered_multiword = complete_search
complete_regex = complete_search
complete_eitheror = complete_search
complete_sword_lucene = complete_search
complete_sword_phrase = complete_search
complete_sword_multiword = complete_search
complete_sword_entryattrib = complete_search
def do_search(self, args):
""" Search the Bible.
Search types are:
mixed - A search made up of a mix of most of the
other search types. Put an '!' in front of
words/phrases that you don't want in any of
the results.
mixed_phrase - A phrase search that can include words,
Strong's, and Morphology. Can be used in
the mixed search by including words in
quotes.
multiword - Search for verses containing each word at
least once. Use in the mixed search by
putting a '+' in front of any word/phrase
you want to be in all the results.
anyword - Search for verses containing one or more of
any of the words. Use in the mixed search
by putting a '|' in front of any
word/phrase you want in any but not
necessarily all the results.
eitheror - Search for verses containing one and only
one of the words. In the mixed search put
a '^' in front of two or more words/phrases
to make the results contain one and only
one of the marked search terms.
combined - Search using a phrase like ('in' AND ('the'
OR 'it')) finding verses that have both
'in' and 'the' or both 'in' and 'it'.
To do the same thing with the mixed search
use a phrase like this:
(mixed '+in' '^the' '^it').
partial_word - Search for partial words (e.g. a search for
'begin*' would find all the words starting
with 'begin'.) Use in the mixed search to
make partial words in a phrase.
ordered_multiword - Search for words in order, but not
necessarily in a phrase. In the mixed
search put a '~' in front of any quoted
group of words you want to be in that
order, but you don't mind if they have
other words between them.
regex - A regular expression search (slow).
Examples:
mixed - (mixed '+~in the beg*' '!was') finds any
verse that has the words 'in', 'the', and
any word starting with 'beg', in order, but
not the word 'was.'
mixed_phrase - (mixed_phrase 'h011121 of gomer') finds any
verse with that phrase.
mixed search flags first column prefix (these should come first):
----------------------------------------------------------------
! = not (not in any of the results)
+ = all (in all the results)
| = or (in at least one result)
^ = exclusive or (only one in any of the results)
not example: (mixed 'in the beginning' !was) results will have the
phrase 'in the beginning' but will not have the word
'was.'
all example: (mixed 'in the beginning' +was) results may have the
phrase 'in the beginning' but all of them will have
                     the word 'was.' (Note: this finds all verses containing
                     the word 'was'; if the results must also contain the
                     phrase 'in the beginning', prefix that phrase with a
                     '+' as well.)
or example: (mixed 'in the beginning' |was) results will be all the
verses with the phrase 'in the beginning' and all the
verses with the word 'was.' This is the default way
the mixed search operates, so the '|' can be excluded
in this case.
exclusive or example: (mixed '^in the beginning' '^was') results
will either have the phrase 'in the
beginning' or the word 'was', but not both.
To be effective you must have at least two
search terms prefixed with '^.'
mixed search flags second column prefix (these come after the first
column flags):
-------------------------------------------------------------------
~ = sloppy phrase or ordered multiword
& = regular expression search.
sloppy phrase example: (mixed '~in the beginning') results will
have all the words 'in', 'the', and
'beginning,' but they may have other words
between them.
regular expression example:
            (mixed '&\\b[iI]n\\b\s+\\b[tT][hH][eE]\\b\s+\\b[bB]eginning\\b')
results will be all the verses with the phrase 'in the beginning.'
"""
if not args:
return
arg_list = self._get_list(args)
arg_str = ' '.join(arg_list)
self._search_list = arg_list
extras = self._setting_dict['extras']
search_type = self._setting_dict['search_type']
try:
# Get the search function asked for.
search_func = getattr(self._search, '%s_search' % search_type)
except AttributeError as err:
# An invalid search type was specified.
print("Invalid search type: %s" % search_type, file=sys.stderr)
exit()
# Search.
strongs_search = self._setting_dict['search_strongs']
morph_search = self._setting_dict['search_morph']
search_range = self._setting_dict['range']
case_sensitive = self._setting_dict['case_sensitive']
search_added = self._setting_dict['added']
self._results = search_func(arg_list, strongs_search, morph_search,
search_added, case_sensitive, search_range,
*extras)
count = len(self._results)
info_print("\nFound %s verse%s.\n" % \
(count, 's' if count != 1 else ''),
tag=-10)
print("To view the verses type 'show_results.'")
if search_type in ['combined', 'combined_phrase']:
# Combined searches are complicated.
# Parse the search argument and build a highlight string from the
# result.
arg_parser = CombinedParse(arg_str)
parsed_args = arg_parser.word_list
not_l = arg_parser.not_list
# Remove any stray '+'s.
#highlight_str = highlight_str.replace('|+', ' ')
if search_type == 'combined_phrase':
# A phrase search needs to highlight phrases.
highlight_list = parsed_args
else:
highlight_list = ' '.join(parsed_args).split()
# Build the highlight string for the other searches.
elif search_type in ['anyword', 'multiword', 'eitheror',
'partial_word']:
# Highlight each word separately.
highlight_list = arg_str.split()
elif search_type == 'mixed':
# In mixed search phrases are in quotes so the arg_list should be
# what we want, but don't include any !'ed words.
highlight_list = [i for i in arg_list if not i.startswith('!')]
elif search_type in ['phrase', 'mixed_phrase', 'ordered_multiword']:
# Phrases should highlight phrases.
highlight_list = [arg_str]
elif search_type == 'sword':
highlight_list = arg_list
self._highlight_list = highlight_list
do_mixed = do_search
do_mixed_phrase = do_search
do_multiword = do_search
do_anyword = do_search
do_combined = do_search
do_partial_word = do_search
do_ordered_multiword = do_search
do_regex = do_search
do_eitheror = do_search
do_sword_lucene = do_search
do_sword_phrase = do_search
do_sword_multiword = do_search
do_sword_entryattrib = do_search<|fim▁end|>
|
verse_text = self._clean_regex.sub('', verse_text)
verse_text = self._notes_regex.sub(self._notes_str, verse_text)
|
<|file_name|>true.rs<|end_file_name|><|fim▁begin|>// rustfmt-reorder_impl_items: true<|fim▁hole|>struct Dummy;
impl Iterator for Dummy {
type Item = i32;
fn next(&mut self) -> Option<Self::Item> {
None
}
}<|fim▁end|>
| |
<|file_name|>plugin.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*
* Version: 5.3.1 (2020-05-27)
*/
(function () {
'use strict';
var global = tinymce.util.Tools.resolve('tinymce.PluginManager');
var global$1 = tinymce.util.Tools.resolve('tinymce.util.Tools');
var getNonEditableClass = function (editor) {
return editor.getParam('noneditable_noneditable_class', 'mceNonEditable');
};
var getEditableClass = function (editor) {
return editor.getParam('noneditable_editable_class', 'mceEditable');
};
var getNonEditableRegExps = function (editor) {
var nonEditableRegExps = editor.getParam('noneditable_regexp', []);
if (nonEditableRegExps && nonEditableRegExps.constructor === RegExp) {
return [nonEditableRegExps];
} else {
return nonEditableRegExps;
}
};
var hasClass = function (checkClassName) {
return function (node) {
return (' ' + node.attr('class') + ' ').indexOf(checkClassName) !== -1;
};
};
var replaceMatchWithSpan = function (editor, content, cls) {
return function (match) {
var args = arguments, index = args[args.length - 2];
var prevChar = index > 0 ? content.charAt(index - 1) : '';
if (prevChar === '"') {
return match;
}
if (prevChar === '>') {
var findStartTagIndex = content.lastIndexOf('<', index);
if (findStartTagIndex !== -1) {
var tagHtml = content.substring(findStartTagIndex, index);
if (tagHtml.indexOf('contenteditable="false"') !== -1) {
return match;
}
}
}
return '<span class="' + cls + '" data-mce-content="' + editor.dom.encode(args[0]) + '">' + editor.dom.encode(typeof args[1] === 'string' ? args[1] : args[0]) + '</span>';
};
};
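    // Note: in the callback above, a preceding '"' means the match sits in
    // an attribute value, and a preceding '>' triggers a check of the
    // enclosing start tag so content already marked contenteditable="false"
    // is left untouched.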
var convertRegExpsToNonEditable = function (editor, nonEditableRegExps, e) {
var i = nonEditableRegExps.length, content = e.content;
if (e.format === 'raw') {<|fim▁hole|> }
e.content = content;
};
var setup = function (editor) {
var editClass, nonEditClass;
var contentEditableAttrName = 'contenteditable';
editClass = ' ' + global$1.trim(getEditableClass(editor)) + ' ';
nonEditClass = ' ' + global$1.trim(getNonEditableClass(editor)) + ' ';
var hasEditClass = hasClass(editClass);
var hasNonEditClass = hasClass(nonEditClass);
var nonEditableRegExps = getNonEditableRegExps(editor);
editor.on('PreInit', function () {
if (nonEditableRegExps.length > 0) {
editor.on('BeforeSetContent', function (e) {
convertRegExpsToNonEditable(editor, nonEditableRegExps, e);
});
}
editor.parser.addAttributeFilter('class', function (nodes) {
var i = nodes.length, node;
while (i--) {
node = nodes[i];
if (hasEditClass(node)) {
node.attr(contentEditableAttrName, 'true');
} else if (hasNonEditClass(node)) {
node.attr(contentEditableAttrName, 'false');
}
}
});
editor.serializer.addAttributeFilter(contentEditableAttrName, function (nodes) {
var i = nodes.length, node;
while (i--) {
node = nodes[i];
if (!hasEditClass(node) && !hasNonEditClass(node)) {
continue;
}
if (nonEditableRegExps.length > 0 && node.attr('data-mce-content')) {
node.name = '#text';
node.type = 3;
node.raw = true;
node.value = node.attr('data-mce-content');
} else {
node.attr(contentEditableAttrName, null);
}
}
});
});
};
function Plugin () {
global.add('noneditable', function (editor) {
setup(editor);
});
}
Plugin();
}());<|fim▁end|>
|
return;
}
while (i--) {
content = content.replace(nonEditableRegExps[i], replaceMatchWithSpan(editor, content, getNonEditableClass(editor)));
|
<|file_name|>numbers_international.py<|end_file_name|><|fim▁begin|>"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
class NumbersInternational(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'account_id': (str,), # noqa: E501
'iban': (str,), # noqa: E501
'bic': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'account_id': 'account_id', # noqa: E501
'iban': 'iban', # noqa: E501
'bic': 'bic', # noqa: E501
}
_composed_schemas = {}
<|fim▁hole|> '_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, account_id, iban, bic, *args, **kwargs): # noqa: E501
"""NumbersInternational - a model defined in OpenAPI
Args:
account_id (str): The Plaid account ID associated with the account numbers
iban (str): The International Bank Account Number (IBAN) for the account
bic (str): The Bank Identifier Code (BIC) for the account
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.account_id = account_id
self.iban = iban
self.bic = bic
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)<|fim▁end|>
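        # Hypothetical usage sketch (the values below are illustrative only):
        #   NumbersInternational(account_id='acc-123',
        #                        iban='GB33BUKB20201555555555',
        #                        bic='BUKBGB22')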
|
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
|
<|file_name|>test_run_script.py<|end_file_name|><|fim▁begin|>import subprocess
import pytest
from utils import *
@all_available_simulators()
def test_filter(tmp_path, simulator):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing test should block the fail')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'some_ut.some_passing_test'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
print('No explicit filter should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit', '-s', simulator], cwd=tmp_path)
assert 'FAILED' in log.read_text()
@all_available_simulators()
def test_filter_wildcards(tmp_path, simulator):
failing_unit_test = tmp_path.joinpath('some_failing_unit_test.sv')
failing_unit_test.write_text('''
module some_failing_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_failing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_test)
`FAIL_IF(1)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
passing_unit_test = tmp_path.joinpath('some_passing_unit_test.sv')
passing_unit_test.write_text('''
module some_passing_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing testcase should block the fail')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'some_passing_ut.*'], cwd=tmp_path)
assert 'FAILED' not in log.read_text()
assert 'some_test' in log.read_text()
print('Filtering only for the test should cause both tests to run, hence trigger the fail')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', "*.some_test"], cwd=tmp_path)
assert 'FAILED' in log.read_text()
@all_available_simulators()
def test_filter_without_dot(tmp_path, simulator):
dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
dummy_unit_test.write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'some_string'], cwd=tmp_path)
log = tmp_path.joinpath('run.log')
assert 'fatal' in log.read_text().lower()
@all_available_simulators()
def test_filter_with_extra_dot(tmp_path, simulator):
dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
dummy_unit_test.write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'a.b.c'], cwd=tmp_path)
log = tmp_path.joinpath('run.log')
assert 'fatal' in log.read_text().lower()
@all_available_simulators()
def test_filter_with_partial_wildcard(tmp_path, simulator):
dummy_unit_test = tmp_path.joinpath('dummy_unit_test.sv')
dummy_unit_test.write_text('''
module dummy_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_passing_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVUNIT_TESTS_END
endmodule
''')
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'foo*.bar'], cwd=tmp_path)
log = tmp_path.joinpath('run.log')
assert 'fatal' in log.read_text().lower()
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', 'foo.bar*'], cwd=tmp_path)
log = tmp_path.joinpath('run.log')
assert 'fatal' in log.read_text().lower()
subprocess.check_call(['runSVUnit', '-s', simulator, '--filter', '*foo.bar'], cwd=tmp_path)
log = tmp_path.joinpath('run.log')
assert 'fatal' in log.read_text().lower()
@all_available_simulators()
def test_multiple_filter_expressions(tmp_path, simulator):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END<|fim▁hole|> `FAIL_IF(0)
`SVTEST_END
`SVTEST(some_other_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVTEST(yet_another_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only the passing testcases should block the fail')
subprocess.check_call(
[
'runSVUnit',
'-s', simulator,
'--filter', '*.some_passing_test:*.some_other_passing_test:*.yet_another_passing_test',
],
cwd=tmp_path)
assert 'FAILED' not in log.read_text()
assert 'some_passing_test' in log.read_text()
assert 'some_other_passing_test' in log.read_text()
assert 'yet_another_passing_test' in log.read_text()
@all_available_simulators()
def test_negative_filter(tmp_path, simulator):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_other_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
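    # A leading '-' turns the whole ':'-separated list into exclusions
    # (presumably modeled on gtest's --gtest_filter syntax).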
print('Filtering out the failing tests should block the fail')
subprocess.check_call(
['runSVUnit',
'-s', simulator,
'--filter', '-some_ut.some_failing_test:some_ut.some_other_failing_test',
],
cwd=tmp_path)
assert 'FAILED' not in log.read_text()
assert 'some_passing_test' in log.read_text()
@all_available_simulators()
def test_positive_and_negative_filter(tmp_path, simulator):
unit_test = tmp_path.joinpath('some_unit_test.sv')
unit_test.write_text('''
module some_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVTEST(some_passing_test)
`FAIL_IF(0)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
other_unit_test = tmp_path.joinpath('some_other_unit_test.sv')
other_unit_test.write_text('''
module some_other_unit_test;
import svunit_pkg::*;
`include "svunit_defines.svh"
string name = "some_other_ut";
svunit_testcase svunit_ut;
function void build();
svunit_ut = new(name);
endfunction
task setup();
svunit_ut.setup();
endtask
task teardown();
svunit_ut.teardown();
endtask
`SVUNIT_TESTS_BEGIN
`SVTEST(some_other_failing_test)
`FAIL_IF(1)
`SVTEST_END
`SVUNIT_TESTS_END
endmodule
''')
log = tmp_path.joinpath('run.log')
print('Filtering only tests from the first unit test'
+ ' and then filtering out the failing test should block the fail')
subprocess.check_call(
['runSVUnit',
'-s', simulator,
'--filter', 'some_ut.*-some_ut.some_failing_test',
],
cwd=tmp_path)
assert 'FAILED' not in log.read_text()
assert 'some_passing_test' in log.read_text()<|fim▁end|>
|
`SVTEST(some_passing_test)
|
<|file_name|>movie.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import allure
from selenium.webdriver.common.by import By
from .base import BasePage
from .elements import SimpleInput, SimpleText
from .blocks.nav import NavBlock
class BrowseMoviePageLocators(object):
"""Локаторы страницы просмотра информации о фильме"""
TITLE_LOCATOR = (By.CSS_SELECTOR, '#movie h2')
COUNTRY_LOCATOR = (By.NAME, 'country')
DIRECTOR_LOCATOR = (By.NAME, 'director')
WRITER_LOCATOR = (By.NAME, 'writer')
PRODUCER_LOCATOR = (By.NAME, 'producer')
EDIT_BUTTON_LOCATOR = (By.CSS_SELECTOR, 'img[title="Edit"]')
REMOVE_BUTTON_LOCATOR = (By.CSS_SELECTOR, 'img[title="Remove"]')
class BrowseMoviePage(BasePage):
"""Страница просмотра информации о фильме"""
def __init__(self, driver):
super(BrowseMoviePage, self).__init__(driver)
self.nav = NavBlock(driver)
title = SimpleText(BrowseMoviePageLocators.TITLE_LOCATOR)
director = SimpleText(BrowseMoviePageLocators.DIRECTOR_LOCATOR)
writer = SimpleText(BrowseMoviePageLocators.WRITER_LOCATOR)
producer = SimpleText(BrowseMoviePageLocators.PRODUCER_LOCATOR)
@allure.step('Нажмем на кноку "Edit"')
def click_edit_button(self):
"""
:rtype: EditMoviePage
"""
self._click(BrowseMoviePageLocators.EDIT_BUTTON_LOCATOR)
return EditMoviePage(self._driver)
@allure.step('Нажмем на кноку "Remove"')<|fim▁hole|> """
self._click(BrowseMoviePageLocators.REMOVE_BUTTON_LOCATOR)
self.alert_accept()
from .home import HomePage
return HomePage(self._driver)
class AddMoviePageLocators(object):
"""Локаторы страницы создания описания фильма"""
TITLE_INPUT_LOCATOR = (By.NAME, 'name')
TITLE_INPUT_ERROR_LOCATOR = (By.CSS_SELECTOR, 'input[name="name"].error')
ALSO_KNOWN_AS_INPUT_LOCATOR = (By.NAME, 'aka')
YEAR_INPUT_LOCATOR = (By.NAME, 'year')
YEAR_INPUT_ERROR_LOCATOR = (By.CSS_SELECTOR, 'input[name="year"].error')
DURATION_INPUT_LOCATOR = (By.NAME, 'duration')
TRAILER_URL_INPUT_LOCATOR = (By.NAME, 'trailer')
FORMAT_INPUT_LOCATOR = (By.NAME, 'format')
COUNTRY_INPUT_LOCATOR = (By.NAME, 'country')
DIRECTOR_INPUT_LOCATOR = (By.NAME, 'director')
WRITER_INPUT_LOCATOR = (By.NAME, 'writer')
PRODUCER_INPUT_LOCATOR = (By.NAME, 'producer')
SAVE_BUTTON_LOCATOR = (By.CSS_SELECTOR, 'img[title="Save"]')
class AddMoviePage(BasePage):
"""Страница создания описания фильма"""
def __init__(self, driver):
super(AddMoviePage, self).__init__(driver)
self.nav = NavBlock(driver)
    title = SimpleInput(AddMoviePageLocators.TITLE_INPUT_LOCATOR, 'movie title')
    also_know_as = SimpleInput(AddMoviePageLocators.ALSO_KNOWN_AS_INPUT_LOCATOR, 'original movie title')
    year = SimpleInput(AddMoviePageLocators.YEAR_INPUT_LOCATOR, 'year')
    duration = SimpleInput(AddMoviePageLocators.DURATION_INPUT_LOCATOR, 'duration')
    trailer_url = SimpleInput(AddMoviePageLocators.TRAILER_URL_INPUT_LOCATOR, 'trailer URL')
    format = SimpleInput(AddMoviePageLocators.FORMAT_INPUT_LOCATOR, 'format')
    country = SimpleInput(AddMoviePageLocators.COUNTRY_INPUT_LOCATOR, 'country')
    director = SimpleInput(AddMoviePageLocators.DIRECTOR_INPUT_LOCATOR, 'director')
    writer = SimpleInput(AddMoviePageLocators.WRITER_INPUT_LOCATOR, 'writer')
    producer = SimpleInput(AddMoviePageLocators.PRODUCER_INPUT_LOCATOR, 'producer')
@allure.step('Нажмем на кноку "Save"')
def click_save_button(self):
"""
:rtype: BrowseMoviePage
"""
self._click(AddMoviePageLocators.SAVE_BUTTON_LOCATOR)
return BrowseMoviePage(self._driver)
def title_field_is_required_present(self):
"""
:rtype: bool
"""
return self._is_element_present(AddMoviePageLocators.TITLE_INPUT_ERROR_LOCATOR)
def year_field_is_required_present(self):
"""
:rtype: bool
"""
return self._is_element_present(AddMoviePageLocators.YEAR_INPUT_ERROR_LOCATOR)
class EditMoviePageLocators(object):
"""Локаторы для страницы редактирования описания фильма"""
REMOVE_BUTTON_LOCATOR = (By.CSS_SELECTOR, 'img[title="Remove"]')
class EditMoviePage(AddMoviePage):
"""Страница редактирования описания фильма"""
@allure.step('Нажмем на кноку "Remove"')
def click_remove_button(self):
"""
:rtype: HomePage
"""
self._click(EditMoviePageLocators.REMOVE_BUTTON_LOCATOR)
self.alert_accept()
from .home import HomePage
return HomePage(self._driver)<|fim▁end|>
|
def click_remove_button(self):
"""
:rtype: HomePage
|
<|file_name|>gecko.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified types for legacy Gecko-only properties.
use crate::parser::{Parse, ParserContext};
use crate::values::computed::length::CSSPixelLength;
use crate::values::computed::{self, LengthPercentage};
use crate::values::generics::gecko::ScrollSnapPoint as GenericScrollSnapPoint;
use crate::values::generics::rect::Rect;
use crate::values::specified::length::LengthPercentage as SpecifiedLengthPercentage;
use cssparser::{Parser, Token};
use std::fmt;
use style_traits::values::SequenceWriter;
use style_traits::{CssWriter, ParseError, StyleParseErrorKind, ToCss};
/// A specified type for scroll snap points.
pub type ScrollSnapPoint = GenericScrollSnapPoint<SpecifiedLengthPercentage>;
impl Parse for ScrollSnapPoint {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
if input.try(|i| i.expect_ident_matching("none")).is_ok() {
return Ok(GenericScrollSnapPoint::None);
}
input.expect_function_matching("repeat")?;
// FIXME(emilio): This won't clamp properly when animating.
let length = input
.parse_nested_block(|i| SpecifiedLengthPercentage::parse_non_negative(context, i))?;
Ok(GenericScrollSnapPoint::Repeat(length))
}
}
fn parse_pixel_or_percent<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<LengthPercentage, ParseError<'i>> {
let location = input.current_source_location();
let token = input.next()?;
let value = match *token {
Token::Dimension {
value, ref unit, ..
} => {
match_ignore_ascii_case! { unit,
"px" => Ok(LengthPercentage::new(CSSPixelLength::new(value), None)),
_ => Err(()),
}
},
Token::Percentage { unit_value, .. } => Ok(LengthPercentage::new_percent(
computed::Percentage(unit_value),
)),
_ => Err(()),
};
value.map_err(|()| location.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
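// For example, "5px 10% 5px 10%" parses, while "5em" or "calc(5px + 10%)"
// is rejected, matching the root-margin grammar referenced below.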
<|fim▁hole|>/// Only bare px or percentage values are allowed. Other length units and
/// calc() values are not allowed.
///
/// <https://w3c.github.io/IntersectionObserver/#parse-a-root-margin>
#[repr(transparent)]
pub struct IntersectionObserverRootMargin(pub Rect<LengthPercentage>);
impl Parse for IntersectionObserverRootMargin {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let rect = Rect::parse_with(context, input, parse_pixel_or_percent)?;
Ok(IntersectionObserverRootMargin(rect))
}
}
// Strictly speaking this is not ToCss. It's serializing for DOM. But
// we can just reuse the infrastructure of this.
//
// <https://w3c.github.io/IntersectionObserver/#dom-intersectionobserver-rootmargin>
impl ToCss for IntersectionObserverRootMargin {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: fmt::Write,
{
// We cannot use the ToCss impl of Rect, because that would
// merge items when they are equal. We want to list them all.
let mut writer = SequenceWriter::new(dest, " ");
let rect = &self.0;
writer.item(&rect.0)?;
writer.item(&rect.1)?;
writer.item(&rect.2)?;
writer.item(&rect.3)
}
}<|fim▁end|>
|
/// The value of an IntersectionObserver's rootMargin property.
///
|
<|file_name|>tar.py<|end_file_name|><|fim▁begin|>"""SCons.Tool.tar
Tool-specific initialization for tar.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.<|fim▁hole|># WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/tar.py rel_2.4.0:3365:9259ea1c13d7 2015/09/21 14:03:43 bdbaddog"
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Node.FS
import SCons.Util
tars = ['tar', 'gtar']
TarAction = SCons.Action.Action('$TARCOM', '$TARCOMSTR')
TarBuilder = SCons.Builder.Builder(action = TarAction,
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$TARSUFFIX',
multi = 1)
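# Typical SConstruct usage (a sketch): env = Environment(tools=['tar']);
# env.Tar('archive.tar', ['src', 'doc']). Because the builder is declared
# multi, repeated Tar() calls on the same target append their sources.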
def generate(env):
"""Add Builders and construction variables for tar to an Environment."""
try:
bld = env['BUILDERS']['Tar']
except KeyError:
bld = TarBuilder
env['BUILDERS']['Tar'] = bld
env['TAR'] = env.Detect(tars) or 'gtar'
env['TARFLAGS'] = SCons.Util.CLVar('-c')
env['TARCOM'] = '$TAR $TARFLAGS -f $TARGET $SOURCES'
env['TARSUFFIX'] = '.tar'
def exists(env):
return env.Detect(tars)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
|
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
<|file_name|>mnist.py<|end_file_name|><|fim▁begin|># Copyright 2016 Anonymous researcher(s)
# This file is part of BinaryNet.
# BinaryNet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# BinaryNet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with BinaryNet. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import sys
import os
import time
import numpy as np
np.random.seed(1234) # for reproducibility
# specifying the gpu to use
# import theano.sandbox.cuda
# theano.sandbox.cuda.use('gpu1')
import theano
import theano.tensor as T
import lasagne
import cPickle as pickle
import gzip
import binary_net
from pylearn2.datasets.mnist import MNIST
from pylearn2.utils import serial
from collections import OrderedDict
if __name__ == "__main__":
# BN parameters
batch_size = 100
print("batch_size = "+str(batch_size))
# alpha is the exponential moving average factor
# alpha = .15
alpha = .1
print("alpha = "+str(alpha))
epsilon = 1e-4
print("epsilon = "+str(epsilon))
# MLP parameters
num_units = 4096
print("num_units = "+str(num_units))
n_hidden_layers = 3
print("n_hidden_layers = "+str(n_hidden_layers))
# Training parameters
num_epochs = 1000
print("num_epochs = "+str(num_epochs))
# Dropout parameters
dropout_in = .2 # 0. means no dropout
print("dropout_in = "+str(dropout_in))
dropout_hidden = .5
print("dropout_hidden = "+str(dropout_hidden))
# BinaryOut
activation = binary_net.binary_tanh_unit
print("activation = binary_net.binary_tanh_unit")
# activation = binary_net.binary_sigmoid_unit
# print("activation = binary_net.binary_sigmoid_unit")
# BinaryConnect
binary = True
print("binary = "+str(binary))
stochastic = False
print("stochastic = "+str(stochastic))
# (-H,+H) are the two binary values
# H = "Glorot"
H = 1.
print("H = "+str(H))
# W_LR_scale = 1.
W_LR_scale = "Glorot" # "Glorot" means we are using the coefficients from Glorot's paper
print("W_LR_scale = "+str(W_LR_scale))
# Decaying LR
LR_start = .003
print("LR_start = "+str(LR_start))
LR_fin = 0.0000003
print("LR_fin = "+str(LR_fin))
LR_decay = (LR_fin/LR_start)**(1./num_epochs)
print("LR_decay = "+str(LR_decay))
    # BTW, LR decay might be good for the BN moving average...
save_path = "mnist_parameters.npz"
print("save_path = "+str(save_path))
shuffle_parts = 1
print("shuffle_parts = "+str(shuffle_parts))
print('Loading MNIST dataset...')
train_set = MNIST(which_set= 'train', start=0, stop = 50000, center = False)
valid_set = MNIST(which_set= 'train', start=50000, stop = 60000, center = False)
test_set = MNIST(which_set= 'test', center = False)
# bc01 format
# Inputs in the range [-1,+1]
# print("Inputs in the range [-1,+1]")
train_set.X = 2* train_set.X.reshape(-1, 1, 28, 28) - 1.
valid_set.X = 2* valid_set.X.reshape(-1, 1, 28, 28) - 1.
test_set.X = 2* test_set.X.reshape(-1, 1, 28, 28) - 1.
# flatten targets
train_set.y = np.hstack(train_set.y)
valid_set.y = np.hstack(valid_set.y)
test_set.y = np.hstack(test_set.y)
# Onehot the targets
train_set.y = np.float32(np.eye(10)[train_set.y])
valid_set.y = np.float32(np.eye(10)[valid_set.y])
test_set.y = np.float32(np.eye(10)[test_set.y])
# for hinge loss
train_set.y = 2* train_set.y - 1.
valid_set.y = 2* valid_set.y - 1.
test_set.y = 2* test_set.y - 1.
print('Building the MLP...')
# Prepare Theano variables for inputs and targets
input = T.tensor4('inputs')
target = T.matrix('targets')
LR = T.scalar('LR', dtype=theano.config.floatX)
mlp = lasagne.layers.InputLayer(
shape=(None, 1, 28, 28),
input_var=input)
mlp = lasagne.layers.DropoutLayer(
mlp,
p=dropout_in)
for k in range(n_hidden_layers):
mlp = binary_net.DenseLayer(
mlp,
binary=binary,
stochastic=stochastic,
H=H,
W_LR_scale=W_LR_scale,
nonlinearity=lasagne.nonlinearities.identity,
num_units=num_units)
mlp = lasagne.layers.BatchNormLayer(
mlp,
epsilon=epsilon,
alpha=alpha)
mlp = lasagne.layers.NonlinearityLayer(
mlp,
nonlinearity=activation)
mlp = lasagne.layers.DropoutLayer(
mlp,
p=dropout_hidden)
mlp = binary_net.DenseLayer(
mlp,
binary=binary,
stochastic=stochastic,
H=H,
W_LR_scale=W_LR_scale,
nonlinearity=lasagne.nonlinearities.identity,
num_units=10)
mlp = lasagne.layers.BatchNormLayer(
mlp,
epsilon=epsilon,
alpha=alpha)
train_output = lasagne.layers.get_output(mlp, deterministic=False)
# squared hinge loss
loss = T.mean(T.sqr(T.maximum(0.,1.-target*train_output)))
if binary:
# W updates
W = lasagne.layers.get_all_params(mlp, binary=True)
W_grads = binary_net.compute_grads(loss,mlp)
updates = lasagne.updates.adam(loss_or_grads=W_grads, params=W, learning_rate=LR)
updates = binary_net.clipping_scaling(updates,mlp)
# other parameters updates
params = lasagne.layers.get_all_params(mlp, trainable=True, binary=False)
updates = OrderedDict(updates.items() + lasagne.updates.adam(loss_or_grads=loss, params=params, learning_rate=LR).items())
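        # Note: the binary weights get the clipped/scaled Adam updates above,
        # while the remaining real-valued parameters (e.g. batch-norm gains
        # and biases) fall back to plain Adam.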
else:
params = lasagne.layers.get_all_params(mlp, trainable=True)
updates = lasagne.updates.adam(loss_or_grads=loss, params=params, learning_rate=LR)
test_output = lasagne.layers.get_output(mlp, deterministic=True)
<|fim▁hole|> # Compile a function performing a training step on a mini-batch (by giving the updates dictionary)
# and returning the corresponding training loss:
train_fn = theano.function([input, target, LR], loss, updates=updates)
# Compile a second function computing the validation loss and accuracy:
val_fn = theano.function([input, target], [test_loss, test_err])
print('Training...')
binary_net.train(
train_fn,val_fn,
mlp,
batch_size,
LR_start,LR_decay,
num_epochs,
train_set.X,train_set.y,
valid_set.X,valid_set.y,
test_set.X,test_set.y,
save_path,
shuffle_parts)<|fim▁end|>
|
test_loss = T.mean(T.sqr(T.maximum(0.,1.-target*test_output)))
test_err = T.mean(T.neq(T.argmax(test_output, axis=1), T.argmax(target, axis=1)),dtype=theano.config.floatX)
|
<|file_name|>video.hpp<|end_file_name|><|fim▁begin|>/* $Id$ */
/*
Copyright (C) 2003 - 2013 by David White <[email protected]>
Part of the Battle for Wesnoth Project http://www.wesnoth.org/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.
See the COPYING file for more details.
*/
#ifndef VIDEO_HPP_INCLUDED
#define VIDEO_HPP_INCLUDED
#include "events.hpp"
#include "exceptions.hpp"
#include "lua_jailbreak_exception.hpp"
#include <boost/utility.hpp>
struct surface;
//possible flags when setting video modes
#define FULL_SCREEN SDL_FULLSCREEN
surface display_format_alpha(surface surf);
surface get_video_surface();
SDL_Rect screen_area();
bool non_interactive();
//which areas of the screen will be updated when the buffer is flipped?
void update_rect(size_t x, size_t y, size_t w, size_t h);
void update_rect(const SDL_Rect& rect);
void update_whole_screen();
class CVideo : private boost::noncopyable {
public:
enum FAKE_TYPES {
NO_FAKE,
FAKE,
FAKE_TEST
};
CVideo(FAKE_TYPES type = NO_FAKE);
~CVideo();
int bppForMode( int x, int y, int flags);
int modePossible( int x, int y, int bits_per_pixel, int flags, bool current_screen_optimal=false);
int setMode( int x, int y, int bits_per_pixel, int flags );
//did the mode change, since the last call to the modeChanged() method?
bool modeChanged();
//functions to get the dimensions of the current video-mode
int getx() const;
int gety() const;
//blits a surface with black as alpha
void blit_surface(int x, int y, surface surf, SDL_Rect* srcrect=NULL, SDL_Rect* clip_rect=NULL);
void flip();
surface& getSurface();
bool isFullScreen() const;
struct error : public game::error
{
error() : game::error("Video initialization failed") {}
};
class quit
: public tlua_jailbreak_exception
{
public:
quit()
: tlua_jailbreak_exception()
{
}
private:
IMPLEMENT_LUA_JAILBREAK_EXCEPTION(quit)
};
//functions to allow changing video modes when 16BPP is emulated
void setBpp( int bpp );
int getBpp();
void make_fake();
/**
* Creates a fake frame buffer for the unit tests.
*
* @param width The width of the buffer.
* @param height The height of the buffer.
* @param bpp The bpp of the buffer.
*/
void make_test_fake(const unsigned width = 1024,
const unsigned height = 768, const unsigned bpp = 32);
bool faked() const { return fake_screen_; }
//functions to set and clear 'help strings'. A 'help string' is like a tooltip, but it appears
//at the bottom of the screen, so as to not be intrusive. Setting a help string sets what
//is currently displayed there.
int set_help_string(const std::string& str);
void clear_help_string(int handle);
void clear_all_help_strings();
//function to stop the screen being redrawn. Anything that happens while
//the update is locked will be hidden from the user's view.
//note that this function is re-entrant, meaning that if lock_updates(true)
//is called twice, lock_updates(false) must be called twice to unlock
//updates.
void lock_updates(bool value);
bool update_locked() const;
private:
void initSDL();
bool mode_changed_;
int bpp_; // Store real bits per pixel
//if there is no display at all, but we 'fake' it for clients
bool fake_screen_;
//variables for help strings
int help_string_;
int updatesLocked_;
};
//an object which will lock the display for the duration of its lifetime.
struct update_locker
{
update_locker(CVideo& v, bool lock=true) : video(v), unlock(lock) {
if(lock) {
video.lock_updates(true);
}
}
~update_locker() {
unlock_update();
}
void unlock_update() {
if(unlock) {
video.lock_updates(false);
unlock = false;
}
}
private:
CVideo& video;
bool unlock;
};
class resize_monitor : public events::pump_monitor {
void process(events::pump_info &info);
};
<|fim▁hole|> resize_lock();
~resize_lock();
};
#endif<|fim▁end|>
|
//an object which prevents resizing of the screen occurring during
//its lifetime.
struct resize_lock {
|
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package kinesisvideo
const (
// ErrCodeAccountStreamLimitExceededException for service response error code
// "AccountStreamLimitExceededException".
//
// The number of streams created for the account is too high.
ErrCodeAccountStreamLimitExceededException = "AccountStreamLimitExceededException"
// ErrCodeClientLimitExceededException for service response error code
// "ClientLimitExceededException".
//
// Kinesis Video Streams has throttled the request because you have exceeded
// the limit of allowed client calls. Try making the call later.
ErrCodeClientLimitExceededException = "ClientLimitExceededException"
// ErrCodeDeviceStreamLimitExceededException for service response error code
// "DeviceStreamLimitExceededException".
//
// Not implemented.
ErrCodeDeviceStreamLimitExceededException = "DeviceStreamLimitExceededException"
// ErrCodeInvalidArgumentException for service response error code
// "InvalidArgumentException".
//
// The value for this input parameter is invalid.
ErrCodeInvalidArgumentException = "InvalidArgumentException"
// ErrCodeInvalidDeviceException for service response error code
// "InvalidDeviceException".
//
// Not implemented.
ErrCodeInvalidDeviceException = "InvalidDeviceException"
// ErrCodeInvalidResourceFormatException for service response error code
// "InvalidResourceFormatException".
//
// The format of the StreamARN is invalid.<|fim▁hole|> // "NotAuthorizedException".
//
// The caller is not authorized to perform this operation.
ErrCodeNotAuthorizedException = "NotAuthorizedException"
// ErrCodeResourceInUseException for service response error code
// "ResourceInUseException".
//
// The stream is currently not available for this operation.
ErrCodeResourceInUseException = "ResourceInUseException"
// ErrCodeResourceNotFoundException for service response error code
// "ResourceNotFoundException".
//
// Amazon Kinesis Video Streams can't find the stream that you specified.
ErrCodeResourceNotFoundException = "ResourceNotFoundException"
// ErrCodeTagsPerResourceExceededLimitException for service response error code
// "TagsPerResourceExceededLimitException".
//
// You have exceeded the limit of tags that you can associate with the resource.
// Kinesis video streams support up to 50 tags.
ErrCodeTagsPerResourceExceededLimitException = "TagsPerResourceExceededLimitException"
// ErrCodeVersionMismatchException for service response error code
// "VersionMismatchException".
//
// The stream version that you specified is not the latest version. To get the
// latest version, use the DescribeStream (https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_DescribeStream.html)
// API.
ErrCodeVersionMismatchException = "VersionMismatchException"
)<|fim▁end|>
|
ErrCodeInvalidResourceFormatException = "InvalidResourceFormatException"
// ErrCodeNotAuthorizedException for service response error code
|
<|file_name|>SmsAction.java<|end_file_name|><|fim▁begin|>package com.metrink.action;
import org.apache.commons.mail.EmailException;
import org.apache.commons.mail.SimpleEmail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Provider;
import com.metrink.alert.ActionBean;
import com.metrink.alert.AlertBean;
import com.metrink.metric.Metric;
/**
* The base action for all SMS actions.
*
* A list of gateways can be found here: http://www.emailtextmessages.com/
*
*/<|fim▁hole|>
public SmsAction(final Provider<SimpleEmail> emailProvider) {
this.emailProvider = emailProvider;
}
@Override
public void triggerAction(Metric metric, AlertBean alertBean, ActionBean actionBean) {
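// constructAddress maps the phone number to an email-to-SMS gateway
// address (roughly "<number>@<gateway-domain>"); the gateway domain is
// carrier-specific and supplied by each concrete subclass.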
final String toAddr = constructAddress(actionBean.getValue());
final String alertQuery = alertBean.getAlertQuery().substring(0, alertBean.getAlertQuery().lastIndexOf(" do "));
final StringBuilder sb = new StringBuilder();
sb.append(metric.getId());
sb.append(" ");
sb.append(metric.getValue());
sb.append(" triggered ");
sb.append(alertQuery);
try {
final SimpleEmail email = emailProvider.get();
email.addTo(toAddr);
email.setSubject("METRINK Alert");
email.setMsg(sb.toString());
final String messageId = email.send();
LOG.info("Sent message {} to {}", messageId, toAddr);
} catch (final EmailException e) {
LOG.error("Error sending email: {}", e.getMessage());
}
}
/**
* Given a phone number, create the address for the gateway.
* @param phoneNumber the phone number.
* @return the email address to use.
*/
protected abstract String constructAddress(String phoneNumber);
}<|fim▁end|>
|
public abstract class SmsAction implements Action {
private static final Logger LOG = LoggerFactory.getLogger(SmsAction.class);
private final Provider<SimpleEmail> emailProvider;
|
<|file_name|>Overview.cpp<|end_file_name|><|fim▁begin|>/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */
/*
Sonic Visualiser
An audio file viewer and annotation editor.
Centre for Digital Music, Queen Mary, University of London.
This file copyright 2006 Chris Cannam and QMUL.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version. See the file
COPYING included with this distribution for more information.
*/
#include "Overview.h"
#include "layer/Layer.h"
#include "data/model/Model.h"
#include "base/ZoomConstraint.h"
#include <QPaintEvent>
#include <QPainter>
#include <QPainterPath>
#include <iostream>
//#define DEBUG_OVERVIEW 1
Overview::Overview(QWidget *w) :
View(w, false),
m_clickedInRange(false),
m_dragCentreFrame(0)
{
setObjectName(tr("Overview"));
m_followPan = false;
m_followZoom = false;
setPlaybackFollow(PlaybackIgnore);
m_modelTestTime.start();
}
void
Overview::modelChangedWithin(sv_frame_t startFrame, sv_frame_t endFrame)
{
bool zoomChanged = false;
sv_frame_t frameCount = getModelsEndFrame() - getModelsStartFrame();
int zoomLevel = int(frameCount / width());
if (zoomLevel < 1) zoomLevel = 1;
zoomLevel = getZoomConstraintBlockSize(zoomLevel,
ZoomConstraint::RoundUp);
if (zoomLevel != m_zoomLevel) {
zoomChanged = true;
}
if (!zoomChanged) {
if (m_modelTestTime.elapsed() < 1000) {
for (LayerList::const_iterator i = m_layerStack.begin();
i != m_layerStack.end(); ++i) {
if ((*i)->getModel() &&
(!(*i)->getModel()->isOK() ||
!(*i)->getModel()->isReady())) {
return;
}
}
} else {
m_modelTestTime.restart();
}
}
View::modelChangedWithin(startFrame, endFrame);
}
void
Overview::modelReplaced()
{
m_playPointerFrame = getAlignedPlaybackFrame();
View::modelReplaced();
}
void
Overview::registerView(View *view)
{
m_views.insert(view);
update();
}
void
Overview::unregisterView(View *view)
{
m_views.erase(view);
update();
}
void
Overview::globalCentreFrameChanged(sv_frame_t
#ifdef DEBUG_OVERVIEW
f
#endif
)
{
#ifdef DEBUG_OVERVIEW
cerr << "Overview::globalCentreFrameChanged: " << f << endl;
#endif
update();
}
void
Overview::viewCentreFrameChanged(View *v, sv_frame_t
#ifdef DEBUG_OVERVIEW
f
#endif
)
{
#ifdef DEBUG_OVERVIEW
cerr << "Overview[" << this << "]::viewCentreFrameChanged(" << v << "): " << f << endl;
#endif
if (m_views.find(v) != m_views.end()) {
update();
}
}
void
Overview::viewZoomLevelChanged(View *v, int, bool)
{
if (v == this) return;
if (m_views.find(v) != m_views.end()) {
update();
}
}
void
Overview::viewManagerPlaybackFrameChanged(sv_frame_t f)
{
#ifdef DEBUG_OVERVIEW
cerr << "Overview[" << this << "]::viewManagerPlaybackFrameChanged(" << f << "): " << f << endl;
#endif
bool changed = false;
f = getAlignedPlaybackFrame();<|fim▁hole|> if (changed) update();
}
QColor
Overview::getFillWithin() const
{
return Qt::transparent;
}
QColor
Overview::getFillWithout() const
{
QColor c = palette().window().color();
c.setAlpha(100);
return c;
}
void
Overview::paintEvent(QPaintEvent *e)
{
// Recalculate zoom in case the size of the widget has changed.
#ifdef DEBUG_OVERVIEW
cerr << "Overview::paintEvent: width is " << width() << ", centre frame " << m_centreFrame << endl;
#endif
sv_frame_t startFrame = getModelsStartFrame();
sv_frame_t frameCount = getModelsEndFrame() - getModelsStartFrame();
int zoomLevel = int(frameCount / width());
if (zoomLevel < 1) zoomLevel = 1;
zoomLevel = getZoomConstraintBlockSize(zoomLevel,
ZoomConstraint::RoundUp);
if (zoomLevel != m_zoomLevel) {
m_zoomLevel = zoomLevel;
emit zoomLevelChanged(m_zoomLevel, m_followZoom);
}
sv_frame_t centreFrame = startFrame + m_zoomLevel * (width() / 2);
if (centreFrame > (startFrame + getModelsEndFrame())/2) {
centreFrame = (startFrame + getModelsEndFrame())/2;
}
if (centreFrame != m_centreFrame) {
#ifdef DEBUG_OVERVIEW
cerr << "Overview::paintEvent: Centre frame changed from "
<< m_centreFrame << " to " << centreFrame << " and thus start frame from " << getStartFrame();
#endif
m_centreFrame = centreFrame;
#ifdef DEBUG_OVERVIEW
cerr << " to " << getStartFrame() << endl;
#endif
emit centreFrameChanged(m_centreFrame, false, PlaybackIgnore);
}
View::paintEvent(e);
QPainter paint;
paint.begin(this);
paint.setClipRegion(e->region());
paint.setRenderHints(QPainter::Antialiasing);
QRect r(rect());
// We paint a rounded rect for each distinct set of view extents,
// and we colour in the inside and outside of the rect that
// corresponds to the current view. (One small caveat -- we don't
// know which rect that is yet. We'll have to figure it out
// somehow...)
std::set<std::pair<int, int> > extents;
std::vector<QRect> rects;
QRect primary;
int y = 0;
for (ViewSet::iterator i = m_views.begin(); i != m_views.end(); ++i) {
if (!*i) continue;
View *w = (View *)*i;
sv_frame_t f0 = w->getFrameForX(0);
sv_frame_t f1 = w->getFrameForX(w->width());
if (f0 >= 0) {
sv_frame_t rf0 = w->alignToReference(f0);
f0 = alignFromReference(rf0);
}
if (f1 >= 0) {
sv_frame_t rf1 = w->alignToReference(f1);
f1 = alignFromReference(rf1);
}
int x0 = getXForFrame(f0);
int x1 = getXForFrame(f1);
if (x1 <= x0) x1 = x0 + 1;
std::pair<int, int> extent(x0, x1);
if (extents.find(extent) == extents.end()) {
y += height() / 10 + 1;
extents.insert(extent);
QRect vr(x0, y, x1 - x0, height() - 2 * y);
rects.push_back(vr);
primary = vr; //!!! for now
}
}
QPainterPath without;
without.addRoundedRect(primary, 4, 4);
without.addRect(rect());
paint.setPen(Qt::NoPen);
paint.setBrush(getFillWithout());
paint.drawPath(without);
paint.setBrush(getFillWithin());
paint.drawRoundedRect(primary, 4, 4);
foreach (QRect vr, rects) {
paint.setBrush(Qt::NoBrush);
paint.setPen(QPen(Qt::gray, 2));
paint.drawRoundedRect(vr, 4, 4);
}
paint.end();
}
void
Overview::mousePressEvent(QMouseEvent *e)
{
m_clickPos = e->pos();
sv_frame_t clickFrame = getFrameForX(m_clickPos.x());
if (clickFrame > 0) m_dragCentreFrame = clickFrame;
else m_dragCentreFrame = 0;
m_clickedInRange = true;
for (ViewSet::iterator i = m_views.begin(); i != m_views.end(); ++i) {
if (*i && (*i)->getAligningModel() == getAligningModel()) {
m_dragCentreFrame = (*i)->getCentreFrame();
break;
}
}
}
void
Overview::mouseReleaseEvent(QMouseEvent *e)
{
if (m_clickedInRange) {
mouseMoveEvent(e);
}
m_clickedInRange = false;
}
void
Overview::mouseMoveEvent(QMouseEvent *e)
{
if (!m_clickedInRange) return;
int xoff = int(e->x()) - int(m_clickPos.x());
sv_frame_t frameOff = xoff * m_zoomLevel;
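// One pixel of horizontal drag corresponds to m_zoomLevel frames here,
// i.e. the drag pans at the overview's own fully zoomed-out scale.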
sv_frame_t newCentreFrame = m_dragCentreFrame;
if (frameOff > 0) {
newCentreFrame += frameOff;
} else if (newCentreFrame >= -frameOff) {
newCentreFrame += frameOff;
} else {
newCentreFrame = 0;
}
if (newCentreFrame >= getModelsEndFrame()) {
newCentreFrame = getModelsEndFrame();
if (newCentreFrame > 0) --newCentreFrame;
}
if (std::max(m_centreFrame, newCentreFrame) -
std::min(m_centreFrame, newCentreFrame) > m_zoomLevel) {
sv_frame_t rf = alignToReference(newCentreFrame);
#ifdef DEBUG_OVERVIEW
cerr << "Overview::mouseMoveEvent: x " << e->x() << " and click x " << m_clickPos.x() << " -> frame " << newCentreFrame << " -> rf " << rf << endl;
#endif
if (m_followPlay == PlaybackScrollContinuous ||
m_followPlay == PlaybackScrollPageWithCentre) {
emit centreFrameChanged(rf, true, PlaybackScrollContinuous);
} else {
emit centreFrameChanged(rf, true, PlaybackIgnore);
}
}
}
void
Overview::mouseDoubleClickEvent(QMouseEvent *e)
{
sv_frame_t frame = getFrameForX(e->x());
sv_frame_t rf = 0;
if (frame > 0) rf = alignToReference(frame);
#ifdef DEBUG_OVERVIEW
cerr << "Overview::mouseDoubleClickEvent: frame " << frame << " -> rf " << rf << endl;
#endif
m_clickedInRange = false; // we're not starting a drag with the second click
emit centreFrameChanged(rf, true, PlaybackScrollContinuous);
}
void
Overview::enterEvent(QEvent *)
{
emit contextHelpChanged(tr("Click and drag to navigate; double-click to jump"));
}
void
Overview::leaveEvent(QEvent *)
{
emit contextHelpChanged("");
}<|fim▁end|>
|
if (getXForFrame(m_playPointerFrame) != getXForFrame(f)) changed = true;
m_playPointerFrame = f;
|
<|file_name|>ninja_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import sys
import unittest
import gyp.generator.ninja as ninja
class TestPrefixesAndSuffixes(unittest.TestCase):
    def test_BinaryNamesWindows(self):
        # These cannot run on non-Windows as they require a VS installation to
        # correctly handle variable expansion.
        if sys.platform.startswith("win"):
            writer = ninja.NinjaWriter(
                "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
            )
            spec = {"target_name": "wee"}
            self.assertTrue(
                writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
            )
            self.assertTrue(
                writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
            )
            self.assertTrue(
                writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
            )
    def test_BinaryNamesLinux(self):
        writer = ninja.NinjaWriter(
            "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
        )
        spec = {"target_name": "wee"}
        self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
        self.assertTrue(
            writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
        )
)<|fim▁hole|> self.assertTrue(
            writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
        )
        self.assertTrue(
            writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
        )
        self.assertTrue(
            writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
        )
if __name__ == "__main__":
    unittest.main()<|fim▁end|>
| |
<|file_name|>widgettoolbarrepository.d.ts<|end_file_name|><|fim▁begin|>import { Plugin } from '@ckeditor/ckeditor5-core';
import { DocumentSelection } from '@ckeditor/ckeditor5-engine';
import Selection from '@ckeditor/ckeditor5-engine/src/view/selection';
import EngineView from '@ckeditor/ckeditor5-engine/src/view/view';
import ContextualBalloon from '@ckeditor/ckeditor5-ui/src/panel/balloon/contextualballoon';
import View from '@ckeditor/ckeditor5-ui/src/view';
export default class WidgetToolbarRepository extends Plugin {
static readonly requires: [typeof ContextualBalloon];
static readonly pluginName: 'WidgetToolbarRepository';
init(): void;
destroy(): void;
register(
toolbarId: string,
options?: {
ariaLabel?: string | undefined;
items: string[];
getRelatedElement: (el: Selection | DocumentSelection) => EngineView;
balloonClassName?: string | undefined;
},
): void;
}
export interface WidgetRepositoryToolbarDefinition {
balloonClassName: string;
getRelatedElement: (el: Selection | DocumentSelection) => EngineView;
view: View;<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>archive.py<|end_file_name|><|fim▁begin|>import os
import six
from aleph.util import checksum
class Archive(object):
    def _get_file_path(self, meta):
        ch = meta.content_hash
        if ch is None:
            raise ValueError("No content hash available.")
        # Shard stored files into a three-level directory tree keyed by the
        # content-hash prefix, which keeps any single directory small.
        path = os.path.join(ch[:2], ch[2:4], ch[4:6], ch)
        file_name = 'data'
        if meta.file_name is not None:
            file_name = meta.file_name
        else:
            if meta.extension is not None:
                file_name = '%s.%s' % (file_name, meta.extension)
        return os.path.join(six.text_type(path), six.text_type(file_name))
    def _update_metadata(self, filename, meta):
        meta.content_hash = checksum(filename)
        return meta
    def upgrade(self):
        """Run maintenance on the store."""
        pass
    def archive_file(self, filename, meta, move=False):
"""Import the given file into the archive.<|fim▁hole|> """
        pass
    def load_file(self, meta):
        pass
    def cleanup_file(self, meta):
        pass
    def generate_url(self, meta):
        return<|fim▁end|>
|
Return an updated metadata object. If ``move`` is given, the
original file will not exist afterwards.
|
<|file_name|>newick_git.py<|end_file_name|><|fim▁begin|>import string
class Tree:
    def __init__(self,u,vs):
        self.u = u
        self.vs = vs
    def distance(self,w):
        if w == self.u:
            return 0,[w]
        for v in self.vs:
            d,path = v.distance(w)
            if d != -1:
                return d+1,[self.u]+path
        return -1,[]
    def taxa(self):
        ret = set()
        if type(self.u) == type(""):
            ret.add(self.u)
        for v in self.vs:
            ret = ret.union(v.taxa())
        return ret
    def nodes(self):
        ret = set([self])
        for v in self.vs:
            ret = ret.union(v.nodes())
        return ret
    def level_traverse(self,ret=None):
        if ret == None:
            ret = []
        for v in self.vs:
            ret = v.level_traverse(ret)
        ret.append(self.u)
        return ret
    def splits(self):
        # Each subtree edge induces a bipartition (split) of the taxa:
        # the taxa below the edge versus all the rest.
        if len(self.vs) == 0:
            return []
        taxa = self.taxa()
        ret = []
        for v in self.vs:
            vt = v.taxa()
            delta = taxa.difference(vt)
            r = v.splits() #the split happen in subtrees
            ret += [(L,R.union(delta)) for L,R in r]
            ret.append((vt,delta))
        return ret
    def adj_list(self,father=None,cur=None,children=None):
        if cur == None:
            cur = {}
        if children == None:
            children = {}
        cur[self.u] = set()
        children[self.u] = set()
        if father != None:
            cur[self.u].add(father)
        for v in self.vs:
            cur,children = v.adj_list(father = self.u, cur = cur, children = children)
            cur[self.u].add(v.u)
            children[self.u].add(v.u)
        return cur, children
    def find_rev(self, dnas, pos, pre = None, mid = None):
        if pre == None:
            ret = []
            for v in self.vs:
                ret += v.find_rev(dnas,pos,dnas[self.u][pos], None)
            return ret
        elif mid == None:
            if dnas[self.u][pos] != pre:
                ret = []
                for v in self.vs:
                    ret += [[self] + path for path in v.find_rev(dnas, pos, pre, dnas[self.u][pos])]
                return ret
            else:
                return []
        else:
            if dnas[self.u][pos] == pre:
                #print("")
                return [[self]]
            elif dnas[self.u][pos] == mid:
                ret = []
                for v in self.vs:
                    ret += [[self] + path for path in v.find_rev(dnas,pos,pre,mid)]
                return ret
            else:
                #print("")
                return []
def newick_parse(s):
    def S():
        ret = None
        if s[S.pos] == "(":
            S.pos += 1
            label = S.N
            S.N += 1
            ret = Tree(label,[])
<|fim▁hole|> ret.vs.append(S())
            while s[S.pos] == ",":
                S.pos += 1
                ret.vs.append(S())
            assert s[S.pos] == ")"
            S.pos += 1
            if s[S.pos] in string.ascii_letters or s[S.pos] == "_": # has label
                label = s[S.pos]
                S.pos += 1
                while s[S.pos] in string.ascii_letters or s[S.pos] == "_":
                    label += s[S.pos]
                    S.pos += 1
                ret.u = label
        elif s[S.pos] in string.ascii_letters or s[S.pos] == "_":
            label = s[S.pos]
            S.pos += 1
            while s[S.pos] in string.ascii_letters or s[S.pos] == "_":
                label += s[S.pos]
                S.pos += 1
            ret = Tree(label,[])
        else:
            label = S.N
            S.N += 1
            ret = Tree(label,[])
        return ret
    S.N = 1
    S.pos = 0
    return S()
def edge_splits(t,taxa):
    # Encode each internal split as a 0/1 string over the given taxon
    # ordering; trivial splits (a single taxon on one side) are dropped.
    splits = t.splits()
    splits = filter(lambda x:len(x[0]) != 1 and len(x[1]) != 1, splits)
    ret = []
    for split in splits:
        s = ""
        for i in range(len(taxa)):
            if taxa[i] in split[0]:
                s += "1"
            else:
                s += "0"
        ret.append(s)
    return ret<|fim▁end|>
| |
<|file_name|>mouth.ts<|end_file_name|><|fim▁begin|>import type {<|fim▁hole|> ComponentPickCollection,
ColorPickCollection,
} from '../static-types';
export const mouth: ComponentGroup = {
default: (components: ComponentPickCollection, colors: ColorPickCollection) =>
`<path d="M27.93 46a1 1 0 0 1 1-1h9.14a1 1 0 0 1 1 1 5 5 0 0 1-5 5h-1.14a5 5 0 0 1-5-5Z" fill="#66253C"/><path d="M35.76 50.7a5 5 0 0 1-1.69.3h-1.14a5 5 0 0 1-5-4.8c.77-.29 1.9-.25 3.02-.22L32 46c2.21 0 4 1.57 4 3.5 0 .42-.09.83-.24 1.2Z" fill="#B03E67"/><path d="M29 45h10v1a1 1 0 0 1-1 1h-8a1 1 0 0 1-1-1v-1Z" fill="#fff"/>`,
missingTooth: (
components: ComponentPickCollection,
colors: ColorPickCollection
) =>
`<path d="M27.93 46a1 1 0 0 1 1-1h9.14a1 1 0 0 1 1 1 5 5 0 0 1-5 5h-1.14a5 5 0 0 1-5-5Z" fill="#66253C"/><path d="M35.76 50.7a5 5 0 0 1-1.69.3h-1.14a5 5 0 0 1-5-4.8c.77-.29 1.9-.25 3.02-.22L32 46c2.21 0 4 1.57 4 3.5 0 .42-.09.83-.24 1.2Z" fill="#B03E67"/><path d="M29 45h10v1a1 1 0 0 1-1 1h-8a1 1 0 0 1-1-1v-1Z" fill="#fff"/><path d="M31 45.3c0-.17.13-.3.3-.3h1.4c.17 0 .3.13.3.3v2.4a.3.3 0 0 1-.3.3h-1.4a.3.3 0 0 1-.3-.3v-2.4Z" fill="#B03E67"/>`,
};<|fim▁end|>
|
ComponentGroup,
|
<|file_name|>ellipse.js<|end_file_name|><|fim▁begin|>'use strict';
QUnit.module('ellipse', function() {
var boundaryOnAngle = function(ellipse, angle) {
var a = ellipse.a;
var b = ellipse.b;
var rad = angle * Math.PI / 180;
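// Note: this is the parametric (eccentric) angle, which for a !== b matches
// the true polar angle only at multiples of 90 degrees; any value still
// lands on the boundary, which is all these tests need.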
return g.Point(ellipse.x + a * Math.cos(rad), ellipse.y + b * Math.sin(rad)).round();
};
QUnit.test('validate helper boundaryOnAngle', function(assert) {
var a = 150;
var b = 50;
var c = g.Point(0, 0);
var ellipse = g.Ellipse(c, a, b);
assert.propEqual(boundaryOnAngle(ellipse, 0), g.Point(150, 0));
assert.propEqual(boundaryOnAngle(ellipse, 90), (g.Point(0, 50)));
assert.propEqual(boundaryOnAngle(ellipse, 180), (g.Point(-150, 0)));
assert.propEqual(boundaryOnAngle(ellipse, 270), (g.Point(0, -50)));
});
QUnit.module('constructor', function() {
QUnit.test('creates a new Ellipse object', function(assert) {
assert.ok(g.ellipse() instanceof g.ellipse);
assert.ok(g.ellipse({ x: 1, y: 2 }, 3, 4) instanceof g.ellipse);
assert.equal(g.ellipse({ x: 1, y: 2 }, 3, 4).x, 1);
assert.equal(g.ellipse({ x: 1, y: 2 }, 3, 4).y, 2);
assert.equal(g.ellipse({ x: 1, y: 2 }, 3, 4).a, 3);
assert.equal(g.ellipse({ x: 1, y: 2 }, 3, 4).b, 4);
assert.ok(g.ellipse(g.ellipse({ x: 1, y: 2 }, 3, 4)).equals(g.ellipse({ x: 1, y: 2 }, 3, 4)));
// default values
assert.ok(g.ellipse().equals(g.rect({ x: 0, y: 0 }, 0, 0)));
});
});
QUnit.module('fromRect(rect)', function() {
QUnit.test('creates a new Ellipse object', function(assert) {
assert.ok(g.ellipse.fromRect(g.rect()) instanceof g.ellipse);
var r = g.rect(100, 50, 150, 70);
assert.ok(g.rect.fromEllipse(g.ellipse.fromRect(r)).equals(r));
});
});
QUnit.module('tangentTheta', function(hooks) {
var radiusTangentAngle = function(ellipse, angle) {
var theta = ellipse.tangentTheta(boundaryOnAngle(ellipse, angle), angle);
return Math.round((theta + angle) % 180);
};
QUnit.test('validate on circle', function(assert) {
var a = 50;
var b = 50;
var c = g.Point(0, 0);
var ellipse = g.Ellipse(c, a, b);
for (var angle = 0; angle <= 360; angle += 10) {
var tangentAngle = radiusTangentAngle(ellipse, angle);
var tolerance = 2;
assert.ok(tangentAngle - 90 < tolerance && tangentAngle - 90 > -tolerance, angle + 'deg, should be 90deg, actual: ' + tangentAngle);
}
});
QUnit.test('validate helper boundaryOnAngle', function(assert) {
function checkTangentThetaOnEllipse(ellipse, message) {
assert.equal(ellipse.tangentTheta(boundaryOnAngle(ellipse, 0)), 270, '0 on ' + message);
assert.equal(ellipse.tangentTheta(boundaryOnAngle(ellipse, 180)), 90, '180 on ' + message);
assert.equal(ellipse.tangentTheta(boundaryOnAngle(ellipse, 90)), 180, '90 on ' + message);
assert.equal(ellipse.tangentTheta(boundaryOnAngle(ellipse, 270)), 0, '270 on ' + message);
<|fim▁hole|> var theta = ellipse.tangentTheta(boundaryOnAngle(ellipse, angle), angle);
assert.ok(theta >= 0, 'tangent theta is numeric on ' + message);
}
}
checkTangentThetaOnEllipse(g.Ellipse(g.Point(11, 22), 50, 100), 'wide ellipse');
checkTangentThetaOnEllipse(g.Ellipse(g.Point(11, 22), 100, 50), 'tall ellipse');
});
});
QUnit.module('Where is point in space with ellipse', function(hooks) {
QUnit.test('normalizedDistance', function(assert) {
var tolerance = 0.009;
var ellipse = g.Ellipse(g.Point(111, 111), 150, 150);
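// normalizedDistance is expected to be < 1 inside the ellipse, ~1 on the
// boundary and > 1 outside; the assertions below cover all three cases.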
var r1 = ellipse.normalizedDistance(ellipse.center());
assert.ok(r1 < 1 && r1 >= 0);
assert.ok(ellipse.normalizedDistance(ellipse.center().offset(500, 500)) > 1);
for (var angle = 0; angle < 360; angle += 1) {
var b = boundaryOnAngle(ellipse, angle);
var x = ellipse.normalizedDistance(b);
assert.ok(x - 1 < tolerance && x - 1 > -tolerance, 'point on angle: ' + angle + ' result:' + x);
}
});
});
QUnit.module('inflate()', function() {
QUnit.test('inflate ellipse', function(assert) {
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate().equals(g.ellipse({ x: 0, y: 0 }, 1, 1)));
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(2, 1).equals(g.ellipse({ x: 0, y: 0 }, 5, 3)));
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(0, 1).equals(g.ellipse({ x: 0, y: 0 }, 1, 3)));
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(2, 0).equals(g.ellipse({ x: 0, y: 0 }, 5, 1)));
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(5).equals(g.ellipse({ x: 0, y: 0 }, 11, 11)));
assert.ok(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(2).equals(
g.ellipse.fromRect(g.rect.fromEllipse(g.ellipse({ x: 0, y: 0 }, 1, 1).inflate(2)))
));
});
});
QUnit.module('prototype', function() {
QUnit.module('bbox()', function() {
});
QUnit.module('clone()', function() {
});
QUnit.module('equals(ellipse)', function() {
});
QUnit.module('intersectionWithLineFromCenterToPoint(point, angle)', function() {
});
QUnit.module('toString()', function() {
});
});
});<|fim▁end|>
|
for (var angle = 0; angle <= 360; angle += 5) {
|
<|file_name|>core_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.layers.core."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class DenseTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testDenseProperties(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
self.assertEqual(dense.units, 2)
self.assertEqual(dense.activation, nn_ops.relu)
self.assertEqual(dense.kernel_regularizer, None)
self.assertEqual(dense.bias_regularizer, None)
self.assertEqual(dense.activity_regularizer, None)
self.assertEqual(dense.use_bias, True)
# Test auto-naming
dense = core_layers.Dense(2, activation=nn_ops.relu)
dense.apply(random_ops.random_uniform((5, 2)))
self.assertEqual(dense.name, 'dense_1')
dense = core_layers.Dense(2, activation=nn_ops.relu)
dense.apply(random_ops.random_uniform((5, 2)))
self.assertEqual(dense.name, 'dense_2')
def testVariableInput(self):
with self.test_session():
v = variable_scope.get_variable(
'X', initializer=init_ops.zeros_initializer(), shape=(1, 1))
x = core_layers.Dense(1)(v)
variables.global_variables_initializer().run()
self.assertAllEqual(x.eval(), [[0.0]])
@test_util.run_in_graph_and_eager_modes()
def testCall(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 4), seed=1)
outputs = dense(inputs)
self.assertListEqual([5, 2], outputs.get_shape().as_list())
self.assertListEqual(dense.variables, [dense.kernel, dense.bias])
self.assertListEqual(dense.trainable_variables,
[dense.kernel, dense.bias])
self.assertListEqual(dense.non_trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 2)
self.assertEqual(dense.kernel.name, 'my_dense/kernel:0')
self.assertEqual(dense.bias.name, 'my_dense/bias:0')
@test_util.run_in_graph_and_eager_modes()
def testCallTensorDot(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 4, 3), seed=1)
outputs = dense(inputs)
self.assertListEqual([5, 4, 2], outputs.get_shape().as_list())
@test_util.run_in_graph_and_eager_modes()
def testNoBias(self):
dense = core_layers.Dense(2, use_bias=False, name='my_dense')
inputs = random_ops.random_uniform((5, 2), seed=1)
_ = dense(inputs)
self.assertListEqual(dense.variables, [dense.kernel])
self.assertListEqual(dense.trainable_variables, [dense.kernel])
self.assertListEqual(dense.non_trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 1)
self.assertEqual(dense.kernel.name, 'my_dense/kernel:0')
self.assertEqual(dense.bias, None)
@test_util.run_in_graph_and_eager_modes()
def testNonTrainable(self):
dense = core_layers.Dense(2, trainable=False, name='my_dense')
inputs = random_ops.random_uniform((5, 2), seed=1)
_ = dense(inputs)
self.assertListEqual(dense.variables, [dense.kernel, dense.bias])
self.assertListEqual(dense.non_trainable_variables,
[dense.kernel, dense.bias])
self.assertListEqual(dense.trainable_variables, [])
if context.in_graph_mode():
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 0)
@test_util.run_in_graph_and_eager_modes()
def testOutputShape(self):
dense = core_layers.Dense(7, activation=nn_ops.relu, name='my_dense')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense.apply(inputs)<|fim▁hole|> self.assertEqual(outputs.get_shape().as_list(), [5, 2, 7])
inputs = random_ops.random_uniform((1, 2, 4, 3), seed=1)
outputs = dense.apply(inputs)
self.assertEqual(outputs.get_shape().as_list(), [1, 2, 4, 7])
def testCallOnPlaceHolder(self):
inputs = array_ops.placeholder(dtype=dtypes.float32)
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, None])
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(
dtype=dtypes.float32, shape=[None, None, None])
dense = core_layers.Dense(4, name='my_dense')
with self.assertRaises(ValueError):
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, 3])
dense = core_layers.Dense(4, name='my_dense')
dense(inputs)
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=[None, None, 3])
dense = core_layers.Dense(4, name='my_dense')
dense(inputs)
@test_util.run_in_graph_and_eager_modes()
def testActivation(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='dense1')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense(inputs)
if context.in_graph_mode():
self.assertEqual(outputs.op.name, 'dense1/Relu')
dense = core_layers.Dense(2, name='dense2')
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = dense(inputs)
if context.in_graph_mode():
self.assertEqual(outputs.op.name, 'dense2/BiasAdd')
def testActivityRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(
2, name='my_dense', activity_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testKernelRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(
2, name='my_dense', kernel_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testKernelRegularizerWithReuse(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = core_layers.dense(
inputs, 2, name='my_dense', kernel_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
_ = core_layers.dense(
inputs, 2, name='my_dense', kernel_regularizer=regularizer, reuse=True)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
def testBiasRegularizer(self):
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
dense = core_layers.Dense(2, name='my_dense', bias_regularizer=regularizer)
inputs = random_ops.random_uniform((5, 3), seed=1)
_ = dense(inputs)
loss_keys = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)
self.assertEqual(len(loss_keys), 1)
self.assertListEqual(dense.losses, loss_keys)
def testFunctionalDense(self):
with self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
outputs = core_layers.dense(
inputs, 2, activation=nn_ops.relu, name='my_dense')
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)), 2)
self.assertEqual(outputs.op.name, 'my_dense/Relu')
def testFunctionalDenseTwice(self):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
vars1 = _get_variable_dict_from_varstore().values()
core_layers.dense(inputs, 2)
vars2 = _get_variable_dict_from_varstore().values()
self.assertEqual(len(vars1), 2)
self.assertEqual(len(vars2), 4)
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuse(self):
with self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
vars1 = variables.trainable_variables()
core_layers.dense(inputs, 2, name='my_dense', reuse=True)
vars2 = variables.trainable_variables()
self.assertEqual(vars1, vars2)
# TODO(alive): get this to work in eager mode.
def testFunctionalDenseTwiceReuseFromScope(self):
with self.test_session():
with variable_scope.variable_scope('scope'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
vars1 = variables.trainable_variables()
with variable_scope.variable_scope('scope', reuse=True):
core_layers.dense(inputs, 2, name='my_dense')
vars2 = variables.trainable_variables()
self.assertEqual(vars1, vars2)
def testFunctionalDenseInitializerFromScope(self):
with variable_scope.variable_scope(
'scope', initializer=init_ops.ones_initializer()), self.test_session():
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
variables.global_variables_initializer().run()
weights = _get_variable_dict_from_varstore()
self.assertEqual(len(weights), 2)
# Check that the matrix weights got initialized to ones (from scope).
self.assertAllClose(weights['scope/dense/kernel'].read_value().eval(),
np.ones((3, 2)))
# Check that the bias still got initialized to zeros.
self.assertAllClose(weights['scope/dense/bias'].read_value().eval(),
np.zeros((2)))
def testEagerExecution(self):
with context.eager_mode():
container = variable_scope.EagerVariableStore()
x = constant_op.constant([[2.0]])
with container.as_default():
y = core_layers.dense(
x, 1, name='my_dense',
kernel_initializer=init_ops.ones_initializer())
self.assertAllEqual(y, [[2.0]])
self.assertEqual(len(container.variables()), 2)
# Recreate the layer to test reuse.
with container.as_default():
core_layers.dense(
x, 1, name='my_dense',
kernel_initializer=init_ops.ones_initializer())
self.assertEqual(len(container.variables()), 2)
def testFunctionalDenseWithCustomGetter(self):
called = [0]
def custom_getter(getter, *args, **kwargs):
called[0] += 1
return getter(*args, **kwargs)
with variable_scope.variable_scope('test', custom_getter=custom_getter):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
self.assertEqual(called[0], 2)
def testFunctionalDenseInScope(self):
with self.test_session():
with variable_scope.variable_scope('test'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name='my_dense')
var_dict = _get_variable_dict_from_varstore()
var_key = 'test/my_dense/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
with variable_scope.variable_scope('test1') as scope:
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2, name=scope)
var_dict = _get_variable_dict_from_varstore()
var_key = 'test1/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
with variable_scope.variable_scope('test2'):
inputs = random_ops.random_uniform((5, 3), seed=1)
core_layers.dense(inputs, 2)
var_dict = _get_variable_dict_from_varstore()
var_key = 'test2/dense/kernel'
self.assertEqual(var_dict[var_key].name, '%s:0' % var_key)
@test_util.run_in_graph_and_eager_modes()
def testComputeOutputShape(self):
dense = core_layers.Dense(2, activation=nn_ops.relu, name='dense1')
ts = tensor_shape.TensorShape
# pylint: disable=protected-access
with self.assertRaises(ValueError):
dense._compute_output_shape(ts(None))
with self.assertRaises(ValueError):
dense._compute_output_shape(ts([]))
with self.assertRaises(ValueError):
dense._compute_output_shape(ts([1]))
self.assertEqual(
[None, 2],
dense._compute_output_shape((None, 3)).as_list())
self.assertEqual(
[None, 2],
dense._compute_output_shape(ts([None, 3])).as_list())
self.assertEqual(
[None, 4, 2],
dense._compute_output_shape(ts([None, 4, 3])).as_list())
# pylint: enable=protected-access
@test_util.run_in_graph_and_eager_modes()
def testConstraints(self):
k_constraint = lambda x: x / math_ops.reduce_sum(x)
b_constraint = lambda x: x / math_ops.reduce_max(x)
dense = core_layers.Dense(2,
kernel_constraint=k_constraint,
bias_constraint=b_constraint)
inputs = random_ops.random_uniform((5, 3), seed=1)
dense(inputs)
self.assertEqual(dense.kernel_constraint, k_constraint)
self.assertEqual(dense.bias_constraint, b_constraint)
def _get_variable_dict_from_varstore():
var_dict = variable_scope._get_default_variable_store()._vars # pylint: disable=protected-access
sorted_var_dict = collections.OrderedDict(
sorted(var_dict.items(), key=lambda t: t[0]))
return sorted_var_dict
class DropoutTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testDropoutProperties(self):
dp = core_layers.Dropout(0.5, name='dropout')
self.assertEqual(dp.rate, 0.5)
self.assertEqual(dp.noise_shape, None)
dp.apply(array_ops.ones(()))
self.assertEqual(dp.name, 'dropout')
@test_util.run_in_graph_and_eager_modes()
def testBooleanLearningPhase(self):
dp = core_layers.Dropout(0.5)
inputs = array_ops.ones((5, 3))
dropped = dp.apply(inputs, training=True)
if context.in_graph_mode():
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
dropped = dp.apply(inputs, training=False)
np_output = self.evaluate(dropped)
self.assertAllClose(np.ones((5, 3)), np_output)
def testDynamicLearningPhase(self):
with self.test_session() as sess:
dp = core_layers.Dropout(0.5, seed=1)
inputs = array_ops.ones((5, 5))
training = array_ops.placeholder(dtype='bool')
dropped = dp.apply(inputs, training=training)
self.evaluate(variables.global_variables_initializer())
np_output = sess.run(dropped, feed_dict={training: True})
self.assertAlmostEqual(0., np_output.min())
np_output = sess.run(dropped, feed_dict={training: False})
self.assertAllClose(np.ones((5, 5)), np_output)
@test_util.run_in_graph_and_eager_modes()
def testDynamicNoiseShape(self):
inputs = array_ops.ones((5, 3, 2))
noise_shape = [None, 1, None]
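# A 1 in noise_shape broadcasts the dropout mask along that axis (None falls
# back to the input's dimension), so slices 0 and 1 of axis 1 share one mask,
# which is what the assertAllClose below verifies.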
dp = core_layers.Dropout(0.5, noise_shape=noise_shape, seed=1)
dropped = dp.apply(inputs, training=True)
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
self.assertAllClose(np_output[:, 0, :], np_output[:, 1, :])
def testCustomNoiseShape(self):
inputs = array_ops.ones((5, 3, 2))
noise_shape = [5, 1, 2]
dp = core_layers.Dropout(0.5, noise_shape=noise_shape, seed=1)
dropped = dp.apply(inputs, training=True)
self.evaluate(variables.global_variables_initializer())
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
self.assertAllClose(np_output[:, 0, :], np_output[:, 1, :])
def testFunctionalDropout(self):
with self.test_session():
inputs = array_ops.ones((5, 5))
dropped = core_layers.dropout(inputs, 0.5, training=True, seed=1)
variables.global_variables_initializer().run()
np_output = self.evaluate(dropped)
self.assertAlmostEqual(0., np_output.min())
dropped = core_layers.dropout(inputs, 0.5, training=False, seed=1)
np_output = self.evaluate(dropped)
self.assertAllClose(np.ones((5, 5)), np_output)
def testDynamicRate(self):
with self.test_session() as sess:
rate = array_ops.placeholder(dtype='float32', name='rate')
dp = core_layers.Dropout(rate, name='dropout')
inputs = array_ops.ones((5, 5))
dropped = dp.apply(inputs, training=True)
sess.run(variables.global_variables_initializer())
np_output = sess.run(dropped, feed_dict={rate: 0.5})
self.assertAlmostEqual(0., np_output.min())
np_output = sess.run(dropped, feed_dict={rate: 0.0})
self.assertAllClose(np.ones((5, 5)), np_output)
class FlattenTest(test.TestCase):
def testCreateFlatten(self):
with self.test_session() as sess:
x = array_ops.placeholder(shape=(None, 2, 3), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((3, 2, 3))})
self.assertEqual(list(np_output.shape), [3, 6])
self.assertEqual(y.get_shape().as_list(), [None, 6])
x = array_ops.placeholder(shape=(1, 2, 3, 2), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((1, 2, 3, 2))})
self.assertEqual(list(np_output.shape), [1, 12])
self.assertEqual(y.get_shape().as_list(), [1, 12])
def testComputeShape(self):
shape = core_layers.Flatten()._compute_output_shape((1, 2, 3, 2))
self.assertEqual(shape.as_list(), [1, 12])
shape = core_layers.Flatten()._compute_output_shape((None, 3, 2))
self.assertEqual(shape.as_list(), [None, 6])
shape = core_layers.Flatten()._compute_output_shape((None, 3, None))
self.assertEqual(shape.as_list(), [None, None])
def testFunctionalFlatten(self):
x = array_ops.placeholder(shape=(None, 2, 3), dtype='float32')
y = core_layers.flatten(x, name='flatten')
self.assertEqual(y.get_shape().as_list(), [None, 6])
def testFlattenValueError(self):
x = array_ops.placeholder(shape=(None,), dtype='float32')
with self.assertRaises(ValueError):
core_layers.Flatten()(x)
def testFlattenUnknownAxes(self):
with self.test_session() as sess:
x = array_ops.placeholder(shape=(5, None, None), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((5, 2, 3))})
self.assertEqual(list(np_output.shape), [5, 6])
self.assertEqual(y.get_shape().as_list(), [5, None])
x = array_ops.placeholder(shape=(5, None, 2), dtype='float32')
y = core_layers.Flatten()(x)
np_output = sess.run(y, feed_dict={x: np.zeros((5, 3, 2))})
self.assertEqual(list(np_output.shape), [5, 6])
self.assertEqual(y.get_shape().as_list(), [5, None])
if __name__ == '__main__':
test.main()<|fim▁end|>
|
self.assertEqual(outputs.get_shape().as_list(), [5, 7])
inputs = random_ops.random_uniform((5, 2, 3), seed=1)
outputs = dense(inputs)
|
<|file_name|>mtfn.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2015 Michael Hamilton.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
#include <string>
#include <cassert>
#include "mtfn.h"
using namespace std;
using namespace mtfn;
#define is_vowel(a) (is_one_of( (a), "AEIOUY" ))
const int padding_len = 5;
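// The input is padded with '_' on both sides (see the sound constructor), so
// lookbehind/lookahead such as *(c-4) or string( c, c+5 ) can never run off
// the ends of the buffer.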
// Define the allowed special characters in iso-8859-1 / UCS-2
const char sm_c_cedilla = 0xe7;
const char cap_c_cedilla = sm_c_cedilla - 0x20;
const char sm_n_tilde = 0xf1;
const char cap_n_tilde = sm_n_tilde - 0x20;
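// Maps each metaphone output symbol to a 4-bit code so a whole key can be
// packed into a single integer, four bits per symbol (see the m_prim_int
// and m_alt_int loops at the end of the sound constructor).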
static int char_value( char c )
{
switch ( c )
{
case '0':
return 0x01;
case 'A':
return 0x02;
case 'F':
return 0x03;
case 'H':
return 0x04;
case 'J':
return 0x05;
case 'K':
return 0x06;
case 'L':
return 0x07;
case 'M':
return 0x08;
case 'N':
return 0x09;
case 'P':
return 0x0A;
case 'R':
return 0x0B;
case 'S':
return 0x0C;
case 'T':
return 0x0D;
case 'X':
return 0x0E;
default:
return 0x00;
}
}
sound::sound( const string& str, bool limit_length )
: m_name( string( padding_len, '_' ) + str + string( padding_len, '_' ) ),
m_first( m_name.begin() + padding_len ),
m_last( m_name.end() - padding_len - 1 ),
m_cursor( m_first ),
m_has_alternate( false ),
m_primary( "" ),
m_alternate( "" ),
m_prim_int( 0 ),
m_alt_int( 0 ),
m_length_limited( limit_length )
{
// Convert to upper case, remove any unexpected characters
for( string::iterator i = m_name.begin() + padding_len;
i != m_name.end() - padding_len;
i++ )
{
if ( ( 'A' <= *i && *i <= 'Z' ) || *i == ' ' ||
*i == cap_c_cedilla || *i == cap_n_tilde )
{
continue;
}
else if ( ( 'a' <= *i && *i <= 'z' ) ||
*i == sm_c_cedilla || *i == sm_n_tilde )
{
*i = *i - 0x20;
}
else
{
i = m_name.erase( i );
}
}
// Skip silent letters at the start of a word.
if ( is_one_of( m_cursor, 2, "GN", "KN", "PN", "WR", "PS", NULL ) )
{
m_cursor += 1;
}
while ( !is_ready() )
{
switch ( *m_cursor )
{
case 'A':
case 'E':
case 'I':
case 'O':
case 'U':
case 'Y':
vowel();
break;
case 'B':
letter_b();
break;
case cap_c_cedilla:
letter_c_cedilla();
break;
case 'C':
letter_c();
break;
case 'D':
letter_d();
break;
case 'F':
letter_f();
break;
case 'G':
letter_g();
break;
case 'H':
letter_h();
break;
case 'J':
letter_j();
break;
case 'K':
letter_k();
break;
case 'L':
letter_l();
break;
case 'M':
letter_m();
break;
case 'N':
letter_n();
break;
case cap_n_tilde:
letter_n_tilde();
break;
case 'P':
letter_p();
break;
case 'Q':
letter_q();
break;
case 'R':
letter_r();
break;
case 'S':
letter_s();
break;
case 'T':
letter_t();
break;
case 'V':
letter_v();
break;
case 'W':
letter_w();
break;
case 'X':
letter_x();
break;
case 'Z':
letter_z();
break;
default:
m_cursor++;
break;
}
}
if ( limit_length && m_primary.size() > stop_len )
{
m_primary = string( m_primary.begin(), m_primary.begin() + stop_len );
}
if ( !m_has_alternate )
{
m_alternate = string();
}
if ( limit_length && m_alternate.size() > stop_len )
{
m_alternate =
string( m_alternate.begin(), m_alternate.begin() + stop_len );
}
string::const_iterator j = m_primary.begin();
while ( j != m_primary.end() )
{
m_prim_int <<= 4;
m_prim_int += char_value( *j );
j++;
}
j = m_alternate.begin();
while ( j != m_alternate.end() )
{
m_alt_int <<= 4;
m_alt_int += char_value( *j );
j++;
}
}
sound::sound( const wstring& wstr, bool limit_length )
{
string str( "" );
for ( wstring::const_iterator i = wstr.begin();
i != wstr.end();
i++ )
{
if ( ( L'A' <= *i && *i <= L'Z' ) ||
( L'a' <= *i && *i <= L'z' ) ||
( *i == L' ' ) ||
( *i == cap_c_cedilla ) ||
( *i == sm_c_cedilla ) ||
( *i == cap_n_tilde ) ||
( *i == sm_n_tilde ) )
{
str += (char)*i;
}
}
*this = sound( str, limit_length );
}
bool sound::is_slavo_germanic( void )
{
return ( m_name.find_first_of( 'W' ) != string::npos ||
m_name.find_first_of( 'K' ) != string::npos ||
m_name.find( "CZ" ) != string::npos ||
m_name.find( "WITZ" ) != string::npos );
}
bool sound::is_spanish_ll( void )
{
string::const_iterator& c( m_cursor );
if ( c == m_last - 2 && is_one_of( c-1, 4, "ILLO", "ILLA", "ALLE", NULL ) )
{
return true;
}
else if ( ( is_one_of( m_last - 1, 2, "AS", "OS", NULL ) ||
is_one_of( *m_last, "AO" ) )
&& string( c-1, c+3 ) == "ALLE" )
{
return true;
}
else
{
return false;
}
}
bool sound::starts_german( void )
{
return is_one_of( m_first, 4, "VAN ", "VON ", NULL ) ||
string( m_first, m_first+3 ) == "SCH";
}
bool sound::is_germanic_c( void )
{
string::const_iterator& c( m_cursor );
return ( c > m_first+1 &&
!is_vowel( *(c-2) ) &&
string( c-1, c+2 ) == "ACH" &&
!is_one_of( *(c+2), "IE" ) ) ||
is_one_of( c-2, 6, "BACHER", "MACHER", NULL );
}
//:TRICKY va_start on const string& is undefined, so the first
// haystack has to be a named parameter.
bool sound::is_one_of( const string& needle, const char* haystack, ... )
{
va_list ap;
va_start( ap, haystack );
bool found = false;
do
{
if ( !found && needle == haystack )
{
found = true;
}
} while ( ( haystack = (char*)va_arg( ap, char* ) ) != NULL );
va_end( ap );
return found;
}
//:TRICKY same trick as for the std::string version
bool sound::is_one_of( const string::const_iterator& beg, int count,
const char* haystack, ... )
{
string needle( beg, beg + count );
va_list ap;
va_start( ap, haystack );
bool found = false;
do
{
if ( !found && needle == haystack )
{
found = true;
}
assert( strlen( haystack ) == count );
} while ( ( haystack = (char*)va_arg( ap, char* ) ) != NULL );
va_end( ap );
return found;
}
bool sound::is_one_of( char needle, const string& haystack )
{
return ( haystack.find_first_of( needle ) != string::npos );
}
<|fim▁hole|> if ( c == m_first )
{
add( 'A' );
}
c++;
}
void sound::letter_b( void )
{
string::const_iterator& c( m_cursor );
// "-mb", e.g., "dumb" already skipped over...
add( 'P' );
// 'BB' sounds the same as 'B'
if ( *(c+1) == 'B' )
{
c += 2;
}
else
{
c += 1;
}
}
void sound::letter_c_cedilla( void )
{
string::const_iterator& c( m_cursor );
// Ç sounds like 'S'
add( "", "S" );
c++;
}
void sound::letter_c( void )
{
string::const_iterator& c( m_cursor );
if ( is_germanic_c() )
{
add( 'K' );
c += 2;
}
else if ( c == m_first && string( c, c+6) == "CAESAR" )
{
add( 'S' );
c += 2;
}
else if ( string( c, c+4 ) == "CHIA" )
{
add( 'K' );
c += 2;
}
else if ( string( c, c+2 ) == "CH" )
{
letter_combo_ch();
}
else if ( string( c, c+2 ) == "CZ" &&
string( c-2, c+2 ) != "WICZ" )
{
// 'czar'
add( 'S', 'X' );
c += 2;
}
else if ( string( c+1, c+4 ) == "CIA" )
{
// italian like 'focaccia'
add( 'X' );
c += 3;
}
else if ( string( c, c+2 ) == "CC" && string( c-1, c+2 ) != "MCC" )
{
// double "cc" but not "McClelland"
return letter_combo_cc();
}
else if ( is_one_of( c, 2, "CK", "CG", "CQ", NULL ) )
{
add( 'K' );
c += 2;
}
else if ( is_one_of( c, 2, "CI", "CE", "CY", NULL ) )
{
//-- Italian vs. English --//
if ( is_one_of( c, 3, "CIO", "CIE", "CIA", NULL ) )
{
add('S', 'X');
}
else
{
add('S');
}
c += 2;
}
else
{
add( 'K' );
if ( is_one_of( c+1, 2, " C", " Q", " G", NULL ))
{
//-- Mac Caffrey, Mac Gregor --//
c += 3;
}
else if ( is_one_of( *(c+1), "CKQ") &&
!is_one_of( c+1, 2, "CE", "CI", NULL ) )
{
c += 2;
}
else
{
c += 1;
}
}
}
void sound::letter_combo_ch( void )
{
string::const_iterator& c( m_cursor );
if ( c > m_first && string( c, c+4 ) == "CHAE" )
{
// michael
add( 'K', 'X' );
c += 2;
}
else if ( c == m_first && string( c, c+5 ) != "CHORE" &&
( is_one_of( c+1, 5, "HARAC", "HARIS", NULL ) ||
is_one_of( c+1, 3, "HOR", "HYM", "HIA", "HEM", NULL ) ) )
{
// words with greek roots, e.g. 'chemistry', 'chorus'
add( 'K' );
c += 2;
}
else if ( ( is_one_of( m_first, 4, "VAN ", "VON ", NULL ) ||
string( m_first, m_first+3 ) == "SCH" ) ||
is_one_of( c-2, 6, "ORCHES", "ARCHIT", "ORCHID", NULL ) ||
is_one_of( *(c+2), "TS" ) ||
( is_one_of( *(c-1), "AOUE_" ) &&
is_one_of( *(c+2), "LRNMBHFVW _" ) ) )
{
// germanic, greek, or otherwise 'ch' for 'kh'
add( 'K' );
c += 2;
}
else
{
if ( c > m_first )
{
if ( string( m_first, m_first + 2 ) == "MC" )
{
// 'mchugh'
add('K');
}
else
{
add('X', 'K');
}
}
else
{
add ( 'X' );
}
c += 2;
}
}
void sound::letter_combo_cc( void )
{
string::const_iterator& c( m_cursor );
// 'bellocchio' but not 'bacchus'
if ( is_one_of( *(c+2), "IEH" ) && string( c+2, c+4 ) != "HU" )
{
//'accident', 'accede' 'succeed'
if ( ( c == m_first + 1 && *(c-1) == 'A' ) ||
is_one_of( c-1, 5, "UCCEE", "UCCES", NULL ) )
{
add( "KS" );
}
//'bacci', 'bertucci', other italian
else
{
add( 'X' );
}
c += 3;
}
else
{
add( 'K' );
c+= 2;
}
}
void sound::letter_d( void )
{
string::const_iterator& c( m_cursor );
if ( string( c, c+2 ) == "DG" )
{
if ( is_one_of( *(c+2), "IEY" ) )
{
//e.g. 'edge'
add( 'J' );
c += 3;
}
else
{
//e.g. 'edgar'
add( "TK" );
c += 2;
}
}
else
{
// 'DT' and 'DD' sound the same as 'D'
if ( is_one_of( c, 2, "DT", "DD", NULL ) )
{
c += 2;
}
else
{
c += 1;
}
add( 'T' );
}
}
void sound::letter_f( void )
{
string::const_iterator& c( m_cursor );
// 'FF' sounds the same as 'F'
if ( *(c+1) == 'F' )
{
c += 2;
}
else
{
c += 1;
}
add( 'F' );
}
void sound::letter_g( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'H' )
{
letter_combo_gh();
}
else if ( *(c+1) == 'N' )
{
if ( c == m_first+1 && is_vowel( *m_first ) && !is_slavo_germanic() )
{
add( "KN", "N" );
}
else if ( string( c+2, c+4 ) != "EY" && *(c+1) != 'Y' &&
!is_slavo_germanic() )
{
//not e.g. 'cagney'
add( "N", "KN" );
}
else
{
add( "KN" );
}
c+= 2;
}
else if ( string( c+1, c+3 ) == "LI" && !is_slavo_germanic() )
{
//'tagliaro'
add( "KL", "L" );
c += 2;
}
else if ( c == m_first &&
( *(c+1) == 'Y' ||
is_one_of( c+1, 2, "ES", "EP", "EB", "EL", "EY", "IB", "IL", "IN", "IE", "EI", "ER", NULL ) ) )
{
// -ges-,-gep-,-gel-, -gie- at beginning
add( 'K', 'J' );
c += 2;
}
else if ( ( string( c+1, c+3 ) == "ER" || *(c+1) == 'Y' ) &&
!is_one_of( m_first, 6, "DANGER", "RANGER", "MANGER", NULL ) &&
!is_one_of( *(c-1), "EI" ) &&
!is_one_of( c-1, 3, "RGY", "OGY", NULL ) )
{
// -ger-, -gy-
add( 'K', 'J' );
c += 2;
return;
}
else if ( is_one_of( *(c+1), "EIY" ) ||
is_one_of( c-1, 4, "AGGI", "OGGI", NULL ) )
{
// italian e.g, 'biaggi'
//obvious germanic
if ( is_one_of( m_first, 4, "VAN ", "VON ", NULL ) ||
string( m_first, m_first+3 ) == "SCH" ||
string( c+1, c+3 ) == "ET" )
{
add( 'K' );
}
else
{
//always soft if french ending
if ( string( c+1, c+5) == "IER_" )
{
add( 'J' );
}
else
{
add( 'J', 'K' );
}
}
c += 2;
}
else if ( *(c+1) == 'G')
{
add( 'K' );
c += 2;
}
else
{
add( 'K' );
c += 1;
}
}
void sound::letter_combo_gh( void )
{
string::const_iterator& c( m_cursor );
if ( c > m_first && !is_vowel( *(c-1) ) )
{
add( 'K' );
c += 2;
}
else if ( c == m_first )
{
if ( *(c+2) == 'I' )
{
add( 'J' );
}
else
{
add( 'K' );
}
c += 2;
}
else if ( is_one_of( *(c-2), "BHD" ) || is_one_of( *(c-3), "BHD" ) ||
is_one_of( *(c-4), "BH" ) )
{
// Parker's rule (with some further refinements) - e.g., 'hugh'
c += 2;
}
else
{
//e.g., 'laugh', 'McLaughlin', 'cough', 'gough', 'rough', 'tough'
if ( c > m_first + 2 &&
*(c-1) == 'U' &&
is_one_of( *(c-3), "CGLRT" ) )
{
add( 'F' );
}
else if ( c > m_first && *(c-1) != 'I' )
{
add( 'K' );
}
c += 2;
}
}
void sound::letter_h( void )
{
string::const_iterator& c( m_cursor );
if ( ( c == m_first || is_vowel( *(c-1) ) ) && is_vowel( *(c+1) ) )
{
// keep any h that looks like '^h[aeiouy]' or '[aeiouy]h[aeiouy]'
add( 'H' );
c += 2;
}
else
{
c += 1;
}
}
void sound::letter_j( void )
{
string::const_iterator& c( m_cursor );
if ( string( c, c+4 ) == "JOSE" ||
string( m_first, m_first+4 ) == "SAN " )
{
// obvious spanish, 'jose', 'san jacinto'
if ( ( ( c == m_first && *(c+4) == ' ' ) ||
m_last - m_first == 3 ) ||
string( m_first, m_first + 4 ) == "SAN " )
{
add( 'H' );
}
else
{
add( 'J', 'H' );
}
c += 1;
}
else if ( c == m_first && string( c, c+4 ) != "JOSE" )
{
add( 'J', 'A' );
}
else if ( is_vowel( *(c-1) ) && !is_slavo_germanic() &&
is_one_of( *(c+1), "AO" ) )
{
// spanish pron. of e.g. 'bajador'
add( 'J', 'H' );
}
else if ( c == m_last )
{
add( "J", "" );
}
else if ( !is_one_of( *(c+1), "LTKSNMBZ" ) &&
!is_one_of( *(c-1), "SKL" ) )
{
add( 'J' );
}
if ( *(c+1) == 'J' ) //it could happen!
{
c += 2;
}
else
{
c += 1;
}
}
void sound::letter_k( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'K' )
{
c += 2;
}
else
{
c += 1;
}
add( 'K' );
}
void sound::letter_l( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'L' )
{
//spanish e.g. 'cabrillo', 'gallegos'
if ( is_spanish_ll() )
{
add( "L", "" );
}
else
{
add( 'L' );
}
c += 2;
}
else
{
c += 1;
add( 'L' );
}
}
void sound::letter_m( void )
{
string::const_iterator& c( m_cursor );
    // 'dumb', 'thumb', 'dumber', 'dummy', but not 'thumbelina'
if ( ( string( c-1, c+2 ) == "UMB" &&
( c+1 == m_last || string( c+2, c+4 ) == "ER" ) ) ||
*(c+1) == 'M' )
{
c += 2;
}
else
{
c += 1;
}
add( 'M' );
}
void sound::letter_n( void )
{
string::const_iterator& c( m_cursor );
// Double 'n' sounds like 'n'
if ( *(c+1) == 'N' )
{
c += 2;
}
else
{
c += 1;
}
add( 'N' );
}
void sound::letter_n_tilde( void )
{
string::const_iterator& c( m_cursor );
    c += 1;
add( 'N' );
}
void sound::letter_p( void )
{
string::const_iterator& c( m_cursor );
// 'phyllis'
if ( *(c+1) == 'H' )
{
add( 'F' );
c += 2;
return;
}
if ( is_one_of( *(c+1), "PB" ) )
{
// 'campbell', 'steppenwolf'
c += 2;
}
else
{
// 'peter'
c += 1;
}
add( 'P' );
}
void sound::letter_q( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'Q' )
{
// 'sadiqqi'
c += 2;
}
else
{
// 'qadaffi'
c += 1;
}
add( 'K' );
}
void sound::letter_r( void )
{
string::const_iterator& c( m_cursor );
if ( c == m_last &&
!is_slavo_germanic() &&
string( c-2, c ) == "IE" &&
!is_one_of( c-4, 2, "ME", "MA", NULL ) )
{
// french 'rogier' but not germanic or 'hochmeier'
add( "", "R" );
}
else
{
add( 'R' );
}
if ( *(c+1) == 'R' )
{
c += 2;
}
else
{
c += 1;
}
}
void sound::letter_s( void )
{
string::const_iterator& c( m_cursor );
if ( is_one_of( c-1, 3, "ISL", "YSL", NULL ) )
{
// special cases 'island', 'isle', 'carlisle', 'carlysle'
c += 1;
}
else if ( c == m_first && string( c, c+5 ) == "SUGAR" )
{
// special case 'sugar-'
add( 'X', 'S' );
c += 1;
}
else if ( string( c, c+2 ) == "SH" )
{
if ( is_one_of( c+1, 4, "HEIM", "HOEK", "HOLM", "HOLZ", NULL ) )
{
// 'rudesheim'
add( 'S' );
}
else
{
add( 'X' );
}
c += 2;
}
else if ( is_one_of( c, 3, "SIO", "SIA", NULL ) )
{
// italian & armenian
if ( is_slavo_germanic() )
{
add( 'S' );
}
else
{
add( 'S', 'X' );
}
c += 3;
}
else if ( ( c == m_first && is_one_of( *(c+1), "MNLW" ) ) || *(c+1) == 'Z' )
{
// german & anglicisations, e.g. 'smith' match 'schmidt',
// 'snider' match 'schneider'
        // also, -sz- in Slavic languages, although in Hungarian it is pronounced 's'
add( 'S', 'X' );
if ( *(c+1) == 'Z' )
{
c += 2;
}
else
{
c += 1;
}
}
else if ( string( c, c+2 ) == "SC" )
{
if ( *(c+2) == 'H' )
{
// Schlesinger's rule
if ( is_one_of( c+3, 2, "OO", "ER", "EN", "UY", "ED", "EM", NULL ) )
{
// dutch origin, e.g. 'school', 'schooner'
if ( is_one_of( c+3, 2, "ER", "EN", NULL ) )
{
// 'schermerhorn', 'schenker'
add( "X", "SK" );
}
else
{
add( "SK" );
}
}
else
{
if ( c == m_first && !is_vowel( *(c+3) ) && *(c+3) != 'W' )
{
add( 'X', 'S' );
}
else
{
add( 'X' );
}
}
c += 3;
}
else if ( is_one_of( *(c+2), "IEY" ) )
{
add( 'S' );
c += 3;
}
else
{
add( "SK" );
c += 3;
}
}
else if ( c == m_last && is_one_of( c-2, 2, "AI", "OI", NULL ) )
{
// french e.g. 'resnais', 'artois'
add( "", "S" );
c += 1;
}
else
{
add( 'S' );
if ( is_one_of( *(c+1), "SZ" ) )
{
c += 2;
}
else
{
c += 1;
}
}
}
void sound::letter_t( void )
{
string::const_iterator& c( m_cursor );
if ( string( c, c+4 ) == "TION" || is_one_of( c, 3, "TIA", "TCH", NULL ) )
{
add( 'X' );
c += 3;
return;
}
if ( string( c, c+2 ) == "TH" || string( c, c+3 ) == "TTH" )
{
if ( is_one_of( c+2, 2, "OM", "AM", NULL ) ||
is_one_of( m_first, 4, "VAN ", "VON ", NULL ) ||
string( m_first, m_first + 3 ) == "SCH" )
{
// special case 'thomas', 'thames' or germanic
add( 'T' );
}
else
{
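            // '0' (zero) is the code used here for the 'th' sound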
add( '0', 'T' );
}
c += 2;
return;
}
if ( is_one_of( *(c+1), "TD" ) )
{
c += 2;
}
else
{
c += 1;
}
add( 'T' );
return;
}
void sound::letter_v( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'V' )
{
c += 2;
}
else
{
c += 1;
}
add( 'F' );
return;
}
void sound::letter_w( void )
{
string::const_iterator& c( m_cursor );
// can also be in middle of word
if ( string( c, c+2 ) == "WR" )
{
add( 'R' );
c += 2;
return;
}
if ( c == m_first && ( is_vowel( *(c+1) ) || string( c, c+2 ) == "WH" ) )
{
// 'wasserman' should match 'vasserman'
        if ( is_vowel( *(c+1) ) )
{
add( "A", "F" );
}
else
{
// need Uomo to match Womo
add( 'A' );
}
}
// 'arnow' should match 'arnoff'
if ( ( c == m_last && is_vowel( *(c-1) ) ) ||
is_one_of( c-1, 5, "EWSKI", "EWSKY", "OWSKI", "OWSKY", NULL ) ||
string( m_first, m_first + 3 ) == "SCH" )
{
add( "", "F" );
c += 1;
return;
}
// polish e.g. 'filipowicz'
if ( is_one_of( c, 4, "WICZ", "WITZ", NULL ) )
{
add( "TS", "FX" );
c += 4;
return;
}
// else skip it
c += 1;
}
void sound::letter_x( void )
{
string::const_iterator& c( m_cursor );
if ( c == m_first )
{
        // Initial 'X' is pronounced 'Z', which is encoded here as 'S'
add( 'S' );
}
else if ( c != m_last || !( is_one_of( c-3, 3, "IAU", "EAU", NULL ) ||
is_one_of( c-2, 2, "AU", "OU", NULL ) ) )
{
// exclude french trailing 'x' e.g. 'breaux'
add( "KS" );
}
if ( is_one_of( *(c+1), "CX" ) )
{
c += 2;
}
else
{
c += 1;
}
}
void sound::letter_z( void )
{
string::const_iterator& c( m_cursor );
if ( *(c+1) == 'H' )
{
// chinese pinyin e.g. 'zhao'
add( 'J' );
c += 2;
return;
}
if ( is_one_of( c+1, 2, "ZO", "ZI", "ZA", NULL ) ||
( is_slavo_germanic() && c > m_first && *(c-1) != 'T' ) )
{
add( "S", "TS" );
}
else
{
add( 'S' );
}
if ( *(c+1) == 'Z' )
{
c += 2;
}
else
{
c += 1;
}
}<|fim▁end|>
|
void sound::vowel( void )
{
string::const_iterator& c( m_cursor );
|
<|file_name|>globals.d.ts<|end_file_name|><|fim▁begin|>declare interface KernelOpts {
//
}
declare interface StonehengeOpts {
//
}
declare namespace NodeJS {
interface Global {
kernel: StonehengeKernel
opts: StonehengeOpts<|fim▁hole|> hardReset: () => void
}
}
// Extended Screeps globals
interface Memory {
pidCounter: number
kernel: KernelMemory
stats: any
}<|fim▁end|>
| |
<|file_name|>loglisteners.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
angular
.module('blocks.logger')
.factory('logListeners', logListeners);
/**
* @ngdoc service
* @name spaghetto.logger:logListeners
*
* @description
* Manage different log listeners so that log messages can have various
* destinations.
*
*
* The default behaviour is to send log messages to :
*
* * '$log' : Angular simple logging service, writing into the browser's console
     * * 'toastr' : Toaster screen notifications
*
* You can change this behaviour by installing new log listeners and/or removing
* the default ones
* ## Log listener definition
* <pre>
*
     // a log listener implements one function per log level,
     // each called as fn(msg, data, title):
     var aLogListener = {
         error:   function (msg, data, title) { /* ... */ },
         info:    function (msg, data, title) { /* ... */ },
         success: function (msg, data, title) { /* ... */ },
         warning: function (msg, data, title) { /* ... */ }
     };
* </pre>
*
*/
/* @ngInject */
function logListeners() {
var listeners = {};
var service = {
addListener: addListener,
getListeners: getListeners,
removeListener: removeListener
};
return service;
///////////////
/**
* @ngdoc method
* @name addListener
* @methodOf spaghetto.logger:logListeners
* @kind function
*
* @description
* Add log listener
*
* ## Add a Log listener
* <pre>
// define my Log Listener
var myLogListener = {
error : errorLog,
info : infoLog,
success : successLog,
warning : warningLog
}
function errorLog(msg, data, title) {
console.log('Error: ' + title + '\n' + data);
}
function infoLog(msg, data, title) {
console.log('Info: ' + title + '\n' + data);
}
function successLog(msg, data, title) {
console.log('Success: ' + title + '\n' + data);
}
function warningLog(msg, data, title) {
console.log('Warning: ' + title + '\n' + data);
}
logListeners.addListener('mylog', myLogListener);
* </pre>
* @param {string} name log listener name<|fim▁hole|> * @param {Function} logListener.success log a success message
* @param {Function} logListener.warning log a warning message
*/
function addListener(name, logListener) {
listeners[name] = logListener;
}
/**
* @ngdoc method
* @name removeListener
* @methodOf spaghetto.logger:logListeners
* @kind function
*
* @description
* Remove a log listener
*
* ## Remove a log listener
* <pre>
// 'toastr' log listener is installed by default
// if you want to remove it, you can do:
logListeners.removeListener('toastr');
* </pre>
* @param {string} name log listener name
*/
function removeListener(name) {
delete listeners[name];
}
/**
* @ngdoc method
* @name getListeners
* @methodOf spaghetto.logger:logListeners
* @kind function
*
* @description
* returns all installed log listeners
*
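     * ## List installed listeners
     * <pre>
     // with the default setup this logs e.g. '$log' and 'toastr'
     var all = logListeners.getListeners();
     Object.keys(all).forEach(function (name) {
         console.log('installed log listener: ' + name);
     });
     * </pre>
     *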
     * @return {Object} map whose keys are the log listener names
     * and whose values are the log listener objects
**/
function getListeners() {
return listeners;
}
}
}());<|fim▁end|>
|
* @param {Object} logListener log listener object
* @param {Function} logListener.error log an error message
* @param {Function} logListener.info log an info message
|
<|file_name|>drive_integration.py<|end_file_name|><|fim▁begin|>__author__ = "Wren J. R. (uberfastman)"
__email__ = "[email protected]"
# code snippets taken from: http://stackoverflow.com/questions/24419188/automating-pydrive-verification-process
import datetime
import logging
from pathlib import Path
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from report.logger import get_logger
from utils.app_config_parser import AppConfigParser
logger = get_logger(__name__, propagate=False)
# Suppress verbose googleapiclient info/warning logging
logging.getLogger("googleapiclient").setLevel(level=logging.ERROR)
logging.getLogger("googleapiclient.discovery").setLevel(level=logging.ERROR)
logging.getLogger("googleapiclient.discovery_cache").setLevel(level=logging.ERROR)
logging.getLogger("googleapiclient.discovery_cache.file_cache").setLevel(level=logging.ERROR)
class GoogleDriveUploader(object):
def __init__(self, filename, config):
logger.debug("Initializing Google Drive uploader.")
project_dir = Path(__file__).parents[1]
logger.debug("Authenticating with Google Drive.")
self.filename = Path(project_dir) / filename
self.config = config
self.gauth = GoogleAuth()
auth_token = Path(project_dir) / Path(self.config.get("Drive", "google_drive_auth_token"))
# Try to load saved client credentials
self.gauth.LoadCredentialsFile(auth_token)
if self.gauth.credentials is None:
# Authenticate if they're not there
self.gauth.LocalWebserverAuth()
elif self.gauth.access_token_expired:
# Refresh them if expired
self.gauth.Refresh()
else:
# Initialize the saved creds
self.gauth.Authorize()
# Save the current credentials to a file
self.gauth.SaveCredentialsFile(auth_token)
def upload_file(self, test=False):
logger.debug("Uploading file to Google Drive.")
# Create GoogleDrive instance with authenticated GoogleAuth instance.
drive = GoogleDrive(self.gauth)
# Get lists of folders
root_folders = drive.ListFile(
{"q": "'root' in parents and mimeType='application/vnd.google-apps.folder' and trashed=false"}).GetList()
google_drive_folder_path_default = self.config.get("Drive", "google_drive_folder_path_default")
google_drive_folder_path = Path(self.config.get(
"Drive", "google_drive_folder_path", fallback=google_drive_folder_path_default)).parts
google_drive_root_folder_id = self.make_root_folder(
drive,
self.check_file_existence(google_drive_folder_path[0], root_folders, "root"),
google_drive_folder_path[0]
)
if not test:
parent_folder_id = google_drive_root_folder_id
parent_folder_content_folders = drive.ListFile({
"q": (
f"'{parent_folder_id}' in parents and "<|fim▁hole|> f"trashed=false"
)
}).GetList()
for folder in google_drive_folder_path[1:]:
# create folder chain in Google Drive
parent_folder_id = self.make_parent_folder(
drive,
self.check_file_existence(folder, parent_folder_content_folders, parent_folder_id),
folder,
parent_folder_id
)
parent_folder_content_folders = drive.ListFile({
"q": (
f"'{parent_folder_id}' in parents and "
f"mimeType='application/vnd.google-apps.folder' and "
f"trashed=false"
)
}).GetList()
# Check for season folder and create it if it does not exist
season_folder_name = Path(self.filename).parts[-3]
season_folder_id = self.make_parent_folder(
drive,
self.check_file_existence(season_folder_name, parent_folder_content_folders, parent_folder_id),
season_folder_name,
parent_folder_id
)
season_folder_content_folders = drive.ListFile({
"q": (
f"'{season_folder_id}' in parents and "
f"mimeType='application/vnd.google-apps.folder' and "
f"trashed=false"
)
}).GetList()
# Check for league folder and create it if it does not exist
league_folder_name = Path(self.filename).parts[-2].replace("-", "_")
league_folder_id = self.make_parent_folder(
drive,
self.check_file_existence(league_folder_name, season_folder_content_folders, season_folder_id),
league_folder_name, season_folder_id
)
league_folder_content_pdfs = drive.ListFile({
"q": (
f"'{league_folder_id}' in parents and "
f"mimeType='application/pdf' and "
f"trashed=false"
)
}).GetList()
# Check for league report and create if if it does not exist
report_file_name = Path(self.filename).parts[-1]
report_file = self.check_file_existence(report_file_name, league_folder_content_pdfs, league_folder_id)
else:
all_pdfs = drive.ListFile({"q": "mimeType='application/pdf' and trashed=false"}).GetList()
report_file_name = self.filename
report_file = self.check_file_existence(report_file_name, all_pdfs, "root")
league_folder_id = "root"
if report_file:
report_file.Delete()
upload_file = drive.CreateFile(
{
"title": report_file_name,
"mimeType": "application/pdf",
"parents": [
{
"kind": "drive#fileLink",
"id": league_folder_id
}
]
}
)
upload_file.SetContentFile(self.filename)
# Upload the file.
upload_file.Upload()
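        # share the report so that anyone with the link can read it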
upload_file.InsertPermission(
{
"type": "anyone",
"role": "reader",
"withLink": True
}
)
return "\nFantasy Football Report\nGenerated %s\n*%s*\n\n_Google Drive Link:_\n%s" % (
"{:%Y-%b-%d %H:%M:%S}".format(datetime.datetime.now()), upload_file['title'], upload_file["alternateLink"])
@staticmethod
def check_file_existence(file_name, file_list, parent_id):
drive_file_name = file_name
google_drive_file = None
for drive_file in file_list:
if drive_file["title"] == drive_file_name:
for parent_folder in drive_file["parents"]:
if parent_folder["id"] == parent_id or parent_folder["isRoot"]:
google_drive_file = drive_file
return google_drive_file
@staticmethod
def make_root_folder(drive, folder, folder_name):
if not folder:
new_root_folder = drive.CreateFile(
{
"title": folder_name,
"parents": [
{
"kind": "drive#fileLink",
"isRoot": True,
"id": "root"
}
],
"mimeType": "application/vnd.google-apps.folder"
}
)
new_root_folder.Upload()
root_folder_id = new_root_folder["id"]
else:
root_folder_id = folder["id"]
return root_folder_id
@staticmethod
def make_parent_folder(drive, folder, folder_name, parent_folder_id):
if not folder:
new_parent_folder = drive.CreateFile(
{
"title": folder_name,
"parents": [
{
"kind": "drive#fileLink",
"id": parent_folder_id
}
],
"mimeType": "application/vnd.google-apps.folder"
}
)
new_parent_folder.Upload()
parent_folder_id = new_parent_folder["id"]
else:
parent_folder_id = folder["id"]
return parent_folder_id
if __name__ == "__main__":
local_config = AppConfigParser()
local_config.read(Path(__file__).parents[1] / "config.ini")
reupload_file = local_config.get("Drive", "google_drive_reupload_file")
google_drive_uploader = GoogleDriveUploader(reupload_file, local_config)
upload_message = google_drive_uploader.upload_file()
print(upload_message)<|fim▁end|>
|
f"mimeType='application/vnd.google-apps.folder' and "
|
<|file_name|>logic.py<|end_file_name|><|fim▁begin|>from ..workspace import Block
from twisted.internet import defer
from .variables import lexical_variable
import operator
class logic_null (Block):
def eval (self):
return defer.succeed(None)
class logic_boolean (Block):
def eval (self):
return defer.succeed(self.fields['BOOL'] == 'TRUE')
class logic_negate (Block):
outputType = bool
def eval (self):
def negate (result):
if result is None:
return None
return result == False
self._complete = self.getInputValue('BOOL').addCallback(negate)
return self._complete
_operators_map = {<|fim▁hole|> "LT": operator.lt,
"LTE": operator.le,
"GT": operator.gt,
"GTE": operator.ge
}
def _compare (lhs, rhs, op_id):
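	# tri-state comparison: an unknown operand (None) yields None
	# instead of raising, matching how blocks report missing values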
if lhs is None or rhs is None:
return None
op = _operators_map[op_id]
return op(lhs, rhs)
# Emit a warning if bad op given
class logic_compare (Block):
outputType = bool
def eval (self):
lhs = self.getInputValue('A')
rhs = self.getInputValue('B')
op_id = self.fields['OP']
def _eval (results):
lhs, rhs = results
return _compare(lhs, rhs, op_id)
self._complete = defer.gatherResults([lhs, rhs]).addCallback(_eval)
return self._complete
class lexical_variable_compare (lexical_variable):
outputType = bool
def eval (self):
variable = self._getVariable()
if variable is None:
self.emitLogMessage(
"Unknown variable: " + str(self.getFieldValue('VAR')),
"error"
)
return defer.succeed(None)
value = self.getFieldValue('VALUE')
op_id = self.getFieldValue('OP')
unit = self.getFieldValue('UNIT', None)
if isinstance(unit, (int, float)):
value *= unit
return defer.succeed(_compare(variable.value, value, op_id))
class logic_operation (Block):
outputType = bool
def eval (self):
@defer.inlineCallbacks
def _run ():
op = self.fields['OP']
lhs = yield self.getInputValue('A')
if lhs is None:
return
if op == "AND":
if bool(lhs):
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
else:
defer.returnValue(False)
elif op == "OR":
if bool(lhs):
defer.returnValue(True)
else:
rhs = yield self.getInputValue('B')
if rhs is None:
return
defer.returnValue(bool(rhs))
# Emit a warning
return
self._complete = _run()
return self._complete
class logic_ternary (Block):
# TODO: outputType of then and else should be the same.
# this is then the outputType of the logic_ternary block.
def eval (self):
@defer.inlineCallbacks
def _run ():
test = yield self.getInputValue('IF')
if test is None:
return
if bool(test):
result = yield self.getInputValue('THEN')
defer.returnValue(result)
else:
result = yield self.getInputValue('ELSE')
defer.returnValue(result)
self._complete = _run()
return self._complete<|fim▁end|>
|
"EQ": operator.eq,
"NEQ": operator.ne,
|
<|file_name|>restyle_damage.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The restyle damage is a hint that tells layout which kind of operations may
//! be needed in presence of incremental style changes.
#![deny(missing_docs)]
use computed_values::display;
use heapsize::HeapSizeOf;
use matching::{StyleChange, StyleDifference};
use properties::ComputedValues;
use std::fmt;
bitflags! {
#[doc = "Individual layout actions that may be necessary after restyling."]
pub flags ServoRestyleDamage: u8 {
#[doc = "Repaint the node itself."]
#[doc = "Currently unused; need to decide how this propagates."]
const REPAINT = 0x01,
#[doc = "The stacking-context-relative position of this node or its descendants has \
changed."]
#[doc = "Propagates both up and down the flow tree."]
const REPOSITION = 0x02,
#[doc = "Recompute the overflow regions (bounding box of object and all descendants)."]
#[doc = "Propagates down the flow tree because the computation is bottom-up."]
const STORE_OVERFLOW = 0x04,
#[doc = "Recompute intrinsic inline_sizes (minimum and preferred)."]
#[doc = "Propagates down the flow tree because the computation is"]
#[doc = "bottom-up."]
const BUBBLE_ISIZES = 0x08,
#[doc = "Recompute actual inline-sizes and block-sizes, only taking out-of-flow children \
into account. \
Propagates up the flow tree because the computation is top-down."]
const REFLOW_OUT_OF_FLOW = 0x10,
#[doc = "Recompute actual inline_sizes and block_sizes."]
#[doc = "Propagates up the flow tree because the computation is"]
#[doc = "top-down."]
const REFLOW = 0x20,
#[doc = "Re-resolve generated content. \
Propagates up the flow tree because the computation is inorder."]
const RESOLVE_GENERATED_CONTENT = 0x40,
#[doc = "The entire flow needs to be reconstructed."]
const RECONSTRUCT_FLOW = 0x80
}
}
impl HeapSizeOf for ServoRestyleDamage {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl ServoRestyleDamage {
/// Compute the `StyleDifference` (including the appropriate restyle damage)
/// for a given style change between `old` and `new`.
pub fn compute_style_difference(_source: &ComputedValues,
old: &ComputedValues,
new: &ComputedValues)
-> StyleDifference {
let damage = compute_damage(old, new);
let change = if damage.is_empty() { StyleChange::Unchanged } else { StyleChange::Changed };
StyleDifference::new(damage, change)
}
/// Computes the `StyleDifference` between the two `ComputedValues` objects
/// for the case where the old and new style are both `display: none`.
///
/// For Servo we never need to generate any damage for such elements.
pub fn compute_undisplayed_style_difference(
_old_style: &ComputedValues,
_new_style: &ComputedValues,
) -> StyleDifference {
StyleDifference::new(Self::empty(), StyleChange::Unchanged)
}
/// Returns a bitmask that represents a flow that needs to be rebuilt and
/// reflowed.
///
/// FIXME(bholley): Do we ever actually need this? Shouldn't
/// RECONSTRUCT_FLOW imply everything else?
pub fn rebuild_and_reflow() -> ServoRestyleDamage {
REPAINT | REPOSITION | STORE_OVERFLOW | BUBBLE_ISIZES | REFLOW_OUT_OF_FLOW | REFLOW |
RECONSTRUCT_FLOW
}
/// Returns a bitmask indicating that the frame needs to be reconstructed.
pub fn reconstruct() -> ServoRestyleDamage {
RECONSTRUCT_FLOW
}
/// Supposing a flow has the given `position` property and this damage,
/// returns the damage that we should add to the *parent* of this flow.
pub fn damage_for_parent(self, child_is_absolutely_positioned: bool) -> ServoRestyleDamage {
if child_is_absolutely_positioned {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
} else {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
}
}
/// Supposing the *parent* of a flow with the given `position` property has
/// this damage, returns the damage that we should add to this flow.
pub fn damage_for_child(self,
parent_is_absolutely_positioned: bool,
child_is_absolutely_positioned: bool)
-> ServoRestyleDamage {
match (parent_is_absolutely_positioned, child_is_absolutely_positioned) {
(false, true) => {<|fim▁hole|> // Absolute children are out-of-flow and therefore insulated from changes.
//
// FIXME(pcwalton): Au contraire, if the containing block dimensions change!
self & (REPAINT | REPOSITION)
}
(true, false) => {
// Changing the position of an absolutely-positioned block requires us to reflow
// its kids.
if self.contains(REFLOW_OUT_OF_FLOW) {
self | REFLOW
} else {
self
}
}
_ => {
// TODO(pcwalton): Take floatedness into account.
self & (REPAINT | REPOSITION | REFLOW)
}
}
}
/// Servo doesn't implement this optimization.
pub fn handled_for_descendants(self) -> Self {
Self::empty()
}
}
impl Default for ServoRestyleDamage {
fn default() -> Self {
Self::empty()
}
}
impl fmt::Display for ServoRestyleDamage {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut first_elem = true;
let to_iter =
[ (REPAINT, "Repaint")
, (REPOSITION, "Reposition")
, (STORE_OVERFLOW, "StoreOverflow")
, (BUBBLE_ISIZES, "BubbleISizes")
, (REFLOW_OUT_OF_FLOW, "ReflowOutOfFlow")
, (REFLOW, "Reflow")
, (RESOLVE_GENERATED_CONTENT, "ResolveGeneratedContent")
, (RECONSTRUCT_FLOW, "ReconstructFlow")
];
for &(damage, damage_str) in &to_iter {
if self.contains(damage) {
if !first_elem { write!(f, " | ")?; }
write!(f, "{}", damage_str)?;
first_elem = false;
}
}
if first_elem {
write!(f, "NoDamage")?;
}
Ok(())
}
}
// NB: We need the braces inside the RHS due to Rust #8012. This particular
// version of this macro might be safe anyway, but we want to avoid silent
// breakage on modifications.
macro_rules! add_if_not_equal(
($old:ident, $new:ident, $damage:ident,
[ $($effect:ident),* ], [ $($style_struct_getter:ident.$name:ident),* ]) => ({
if $( ($old.$style_struct_getter().$name != $new.$style_struct_getter().$name) )||* {
$damage.insert($($effect)|*);
true
} else {
false
}
})
);
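/// Walks the style structs property by property and accumulates the
/// restyle damage implied by each difference, from full flow
/// reconstruction (most expensive) down to repaint-only changes.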
fn compute_damage(old: &ComputedValues, new: &ComputedValues) -> ServoRestyleDamage {
let mut damage = ServoRestyleDamage::empty();
// This should check every CSS property, as enumerated in the fields of
// http://doc.servo.org/style/properties/struct.ComputedValues.html
// FIXME: Test somehow that every property is included.
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW,
REFLOW, RECONSTRUCT_FLOW], [
get_box.clear, get_box.float, get_box.display, get_box.position, get_counters.content,
get_counters.counter_reset, get_counters.counter_increment,
get_inheritedbox._servo_under_display_none,
get_list.quotes, get_list.list_style_type,
// If these text or font properties change, we need to reconstruct the flow so that
// text shaping is re-run.
get_inheritedtext.letter_spacing, get_inheritedtext.text_rendering,
get_inheritedtext.text_transform, get_inheritedtext.word_spacing,
get_inheritedtext.overflow_wrap, get_inheritedtext.text_justify,
get_inheritedtext.white_space, get_inheritedtext.word_break, get_text.text_overflow,
get_font.font_family, get_font.font_style, get_font.font_variant_caps, get_font.font_weight,
get_font.font_size, get_font.font_stretch,
get_inheritedbox.direction, get_inheritedbox.writing_mode,
get_text.text_decoration_line, get_text.unicode_bidi,
get_inheritedtable.empty_cells, get_inheritedtable.caption_side,
get_column.column_width, get_column.column_count
]) || (new.get_box().display == display::T::inline &&
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW, RECONSTRUCT_FLOW], [
// For inline boxes only, border/padding styles are used in flow construction (to decide
// whether to create fragments for empty flows).
get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left
])) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW],
[get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_margin.margin_top, get_margin.margin_right,
get_margin.margin_bottom, get_margin.margin_left,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left,
get_position.width, get_position.height,
get_inheritedtext.line_height,
get_inheritedtext.text_align, get_inheritedtext.text_indent,
get_table.table_layout,
get_inheritedtable.border_collapse,
get_inheritedtable.border_spacing,
get_column.column_gap,
get_position.flex_direction,
get_position.flex_wrap,
get_position.justify_content,
get_position.align_items,
get_position.align_content,
get_position.order,
get_position.flex_basis,
get_position.flex_grow,
get_position.flex_shrink,
get_position.align_self
]) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW], [
get_position.top, get_position.left,
get_position.right, get_position.bottom,
get_effects.opacity,
get_box.transform, get_box.transform_style, get_box.transform_origin,
get_box.perspective, get_box.perspective_origin
]) || add_if_not_equal!(old, new, damage,
[REPAINT], [
get_color.color, get_background.background_color,
get_background.background_image, get_background.background_position_x,
get_background.background_position_y, get_background.background_repeat,
get_background.background_attachment, get_background.background_clip,
get_background.background_origin, get_background.background_size,
get_border.border_top_color, get_border.border_right_color,
get_border.border_bottom_color, get_border.border_left_color,
get_border.border_top_style, get_border.border_right_style,
get_border.border_bottom_style, get_border.border_left_style,
get_border.border_top_left_radius, get_border.border_top_right_radius,
get_border.border_bottom_left_radius, get_border.border_bottom_right_radius,
get_position.z_index, get_box._servo_overflow_clip_box,
get_inheritedtext._servo_text_decorations_in_effect,
get_pointing.cursor, get_pointing.pointer_events,
get_effects.box_shadow, get_effects.clip, get_inheritedtext.text_shadow, get_effects.filter,
get_effects.mix_blend_mode, get_inheritedbox.image_rendering,
// Note: May require REFLOW et al. if `visibility: collapse` is implemented.
get_inheritedbox.visibility
]);
// Paint worklets may depend on custom properties,
// so if they have changed we should repaint.
if old.get_custom_properties() != new.get_custom_properties() {
damage.insert(REPAINT);
}
// If the layer requirements of this flow have changed due to the value
// of the transform, then reflow is required to rebuild the layers.
if old.transform_requires_layer() != new.transform_requires_layer() {
damage.insert(ServoRestyleDamage::rebuild_and_reflow());
}
damage
}<|fim▁end|>
| |
<|file_name|>hw.api.homewatch.jsonlist.js<|end_file_name|><|fim▁begin|>/**
* Created by RSC on 05.04.2016.
*/
myApp.factory('HomeWatchFactory', function ($http, $q, $rootScope, $log) {
return {
getFhemJsonList: function (name, type) {
var url = '';
if ($rootScope.config.connection.isDebug) {
url = 'json/homewatch/data/' + name + '.json';
} else {
url = $rootScope.MetaDatafhemweb_url + $rootScope.config.globals.cmd + type + '=' + name + $rootScope.config.globals.param;
}
$log.debug('HomeWatchFactory: ' + url);
var deferred = $q.defer();
$http({method: "GET", url: url})
.success(function (data, status, headers, config) {
deferred.resolve(data);
}).error(function (data, status, headers, config) {
deferred.reject(status);
});
return deferred.promise;
},
getJson: function (name) {
var url = 'json/homewatch/' + name + '.json';
var deferred = $q.defer();
$http({method: "GET", url: url})
.success(function (data, status, headers, config) {
deferred.resolve(data);
}).error(function (data, status, headers, config) {
deferred.reject(status);
});
return deferred.promise;
},
getLocationWidgets: function (location) {
// no values
if (angular.isUndefined(location) || location == '') {
$log.debug('HomeWatchFactory.getLocationWidgets: location isUndefined');
return;
}
var widget = $rootScope.config.home;
if (widget.length == 0)
return;
var deferred = $q.defer();<|fim▁hole|>
var len = widget.length;
for (var i = 0; i < len; i++) {
if (widget[i].location == location) {
data = widget[i];
deferred.resolve(data);
break;
}
}
return deferred.promise;
}
};
});<|fim▁end|>

            var data;

<|file_name|>harvester.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import traceback
from ckan.lib.helpers import json
from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
from ckanext.harvest.harvesters import HarvesterBase
from ckanext.geocat.utils import search_utils, csw_processor, ogdch_map_utils, csw_mapping # noqa
from ckanext.geocat.utils.vocabulary_utils import \
(VALID_TERMS_OF_USE, DEFAULT_TERMS_OF_USE)
from ckan.logic.schema import default_update_package_schema,\
default_create_package_schema
from ckan.lib.navl.validators import ignore<|fim▁hole|>
import logging
log = logging.getLogger(__name__)
DEFAULT_PERMA_LINK_URL = 'https://www.geocat.ch/geonetwork/srv/ger/md.viewer#/full_view/' # noqa
DEFAULT_PERMA_LINK_LABEL = 'geocat.ch Permalink'
HARVEST_USER = 'harvest'
class GeocatHarvester(HarvesterBase):
'''
The harvester for geocat
'''
def info(self):
return {
'name': 'geocat_harvester',
'title': 'Geocat harvester',
'description': (
'Harvests metadata from geocat (CSW)'
),
'form_config_interface': 'Text'
}
def validate_config(self, config):
if not config:
return config
try:
config_obj = json.loads(config)
except Exception as e:
raise ValueError(
                'Configuration could not be parsed. An error {} occurred'
.format(e)
)
if 'delete_missing_datasets' in config_obj:
if not isinstance(config_obj['delete_missing_datasets'], bool):
                raise ValueError('delete_missing_datasets must be boolean')
if 'rights' in config_obj:
if not config_obj['rights'] in VALID_TERMS_OF_USE:
raise ValueError('{} is not valid as terms of use'
.format(config_obj['rights']))
return config
def _set_config(self, config_str, harvest_source_id):
if config_str:
self.config = json.loads(config_str)
else:
self.config = {}
self.config['rights'] = self.config.get('rights', DEFAULT_TERMS_OF_USE)
if not self.config['rights'] in VALID_TERMS_OF_USE:
self.config['rights'] = DEFAULT_TERMS_OF_USE
self.config['delete_missing_datasets'] = \
self.config.get('delete_missing_datasets', False)
self.config['geocat_perma_link_label'] = \
tk.config.get('ckanext.geocat.permalink_title',
DEFAULT_PERMA_LINK_LABEL)
self.config['geocat_perma_link_url'] = \
self.config.get('geocat_perma_link_url',
tk.config.get('geocat_perma_link_url',
DEFAULT_PERMA_LINK_URL))
self.config['legal_basis_url'] = \
self.config.get('legal_basis_url', None)
organization_slug = \
search_utils.get_organization_slug_for_harvest_source(
harvest_source_id)
self.config['organization'] = organization_slug
log.debug('Using config: %r' % self.config)
def gather_stage(self, harvest_job):
log.debug('In GeocatHarvester gather_stage')
self._set_config(harvest_job.source.config, harvest_job.source.id)
csw_url = harvest_job.source.url
try:
csw_data = csw_processor.GeocatCatalogueServiceWeb(url=csw_url)
gathered_geocat_identifiers = csw_data.get_geocat_id_from_csw()
except Exception as e:
self._save_gather_error(
'Unable to get content for URL: %s: %s / %s'
% (csw_url, str(e), traceback.format_exc()),
harvest_job
)
return []
existing_dataset_infos = \
search_utils.get_dataset_infos_for_organization(
organization_name=self.config['organization'],
harvest_source_id=harvest_job.source_id,
)
gathered_ogdch_identifiers = \
[ogdch_map_utils.map_geocat_to_ogdch_identifier(
geocat_identifier=geocat_identifier,
organization_slug=self.config['organization'])
for geocat_identifier in gathered_geocat_identifiers]
all_ogdch_identifiers = \
set(gathered_ogdch_identifiers + existing_dataset_infos.keys())
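        # datasets that exist locally but were not gathered from the CSW
        # source are deletion candidates, acted on only when the
        # delete_missing_datasets option is enabled below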
packages_to_delete = search_utils.get_packages_to_delete(
existing_dataset_infos=existing_dataset_infos,
gathered_ogdch_identifiers=gathered_ogdch_identifiers,
)
csw_map = csw_mapping.GeoMetadataMapping(
organization_slug=self.config['organization'],
geocat_perma_link=self.config['geocat_perma_link_url'],
geocat_perma_label=self.config['geocat_perma_link_label'],
legal_basis_url=self.config['legal_basis_url'],
default_rights=self.config['rights'],
valid_identifiers=all_ogdch_identifiers,
)
harvest_obj_ids = self.map_geocat_dataset(
csw_data,
csw_map,
gathered_geocat_identifiers,
gathered_ogdch_identifiers,
harvest_job)
log.debug('IDs: %r' % harvest_obj_ids)
if self.config['delete_missing_datasets']:
delete_harvest_object_ids = \
self.delete_geocat_ids(
harvest_job,
harvest_obj_ids,
packages_to_delete
)
harvest_obj_ids.extend(delete_harvest_object_ids)
return harvest_obj_ids
def delete_geocat_ids(self,
harvest_job,
harvest_obj_ids,
packages_to_delete):
delete_harvest_obj_ids = []
for package_info in packages_to_delete:
obj = HarvestObject(
guid=package_info[1].name,
job=harvest_job,
extras=[HarvestObjectExtra(key='import_action',
value='delete')])
obj.save()
delete_harvest_obj_ids.append(obj.id)
return delete_harvest_obj_ids
def map_geocat_dataset(self,
csw_data,
csw_map,
gathered_geocat_identifiers,
gathered_ogdch_identifiers,
harvest_job):
mapped_harvest_obj_ids = []
for geocat_id in gathered_geocat_identifiers:
ogdch_identifier = ogdch_map_utils.map_geocat_to_ogdch_identifier(
geocat_identifier=geocat_id,
organization_slug=self.config['organization'])
if ogdch_identifier in gathered_ogdch_identifiers:
try:
csw_record_as_string = csw_data.get_record_by_id(geocat_id)
except Exception as e:
self._save_gather_error(
'Error when reading csw record form source: %s %r / %s'
% (ogdch_identifier, e, traceback.format_exc()),
harvest_job)
continue
try:
dataset_dict = csw_map.get_metadata(csw_record_as_string,
geocat_id)
except Exception as e:
self._save_gather_error(
'Error when mapping csw data to dcat: %s %r / %s'
% (ogdch_identifier, e, traceback.format_exc()),
harvest_job)
continue
try:
harvest_obj = \
HarvestObject(guid=ogdch_identifier,
job=harvest_job,
content=json.dumps(dataset_dict))
harvest_obj.save()
except Exception as e:
self._save_gather_error(
'Error when processsing dataset: %s %r / %s'
% (ogdch_identifier, e, traceback.format_exc()),
harvest_job)
continue
else:
mapped_harvest_obj_ids.append(harvest_obj.id)
return mapped_harvest_obj_ids
def fetch_stage(self, harvest_object):
return True
def import_stage(self, harvest_object): # noqa
log.debug('In GeocatHarvester import_stage')
if not harvest_object:
log.error('No harvest object received')
self._save_object_error(
'No harvest object received',
harvest_object
)
return False
import_action = \
search_utils.get_value_from_object_extra(harvest_object.extras,
'import_action')
if import_action and import_action == 'delete':
log.debug('import action: %s' % import_action)
harvest_object.current = False
return self._delete_dataset({'id': harvest_object.guid})
if harvest_object.content is None:
self._save_object_error('Empty content for object %s' %
harvest_object.id,
harvest_object, 'Import')
return False
try:
pkg_dict = json.loads(harvest_object.content)
except ValueError:
self._save_object_error('Could not parse content for object {0}'
.format(harvest_object.id), harvest_object, 'Import') # noqa
return False
pkg_info = \
search_utils.find_package_for_identifier(harvest_object.guid)
context = {
'ignore_auth': True,
'user': HARVEST_USER,
}
try:
if pkg_info:
# Change default schema to ignore lists of dicts, which
# are stored in the '__junk' field
schema = default_update_package_schema()
context['schema'] = schema
schema['__junk'] = [ignore]
pkg_dict['name'] = pkg_info.name
pkg_dict['id'] = pkg_info.package_id
search_utils.map_resources_to_ids(pkg_dict, pkg_info)
updated_pkg = \
tk.get_action('package_update')(context, pkg_dict)
harvest_object.current = True
harvest_object.package_id = updated_pkg['id']
harvest_object.save()
log.debug("Updated PKG: %s" % updated_pkg)
else:
flat_title = _derive_flat_title(pkg_dict['title'])
if not flat_title:
self._save_object_error(
'Unable to derive name from title %s'
% pkg_dict['title'], harvest_object, 'Import')
return False
pkg_dict['name'] = self._gen_new_name(flat_title)
schema = default_create_package_schema()
context['schema'] = schema
schema['__junk'] = [ignore]
log.debug("No package found, create a new one!")
# generate an id to reference it in the harvest_object
pkg_dict['id'] = unicode(uuid.uuid4())
log.info('Package with GUID %s does not exist, '
'let\'s create it' % harvest_object.guid)
harvest_object.current = True
harvest_object.package_id = pkg_dict['id']
harvest_object.add()
model.Session.execute(
'SET CONSTRAINTS harvest_object_package_id_fkey DEFERRED')
model.Session.flush()
created_pkg = \
tk.get_action('package_create')(context, pkg_dict)
log.debug("Created PKG: %s" % created_pkg)
Session.commit()
return True
except Exception as e:
self._save_object_error(
('Exception in import stage: %r / %s'
% (e, traceback.format_exc())), harvest_object)
return False
def _create_new_context(self):
# get the site user
site_user = tk.get_action('get_site_user')(
{'model': model, 'ignore_auth': True}, {})
context = {
'model': model,
'session': Session,
'user': site_user['name'],
}
return context
def _delete_dataset(self, package_dict):
log.debug('deleting dataset %s' % package_dict['id'])
context = self._create_new_context()
tk.get_action('dataset_purge')(
context.copy(),
package_dict
)
return True
def _get_geocat_permalink_relation(self, geocat_pkg_id):
return {'url': self.config['geocat_perma_link_url'] + geocat_pkg_id,
'label': self.config['geocat_perma_link_label']}
class GeocatConfigError(Exception):
pass
def _derive_flat_title(title_dict):
"""localizes language dict if no language is specified"""
return title_dict.get('de') or title_dict.get('fr') or title_dict.get('en') or title_dict.get('it') or "" # noqa<|fim▁end|>
|
import ckan.plugins.toolkit as tk
from ckan import model
from ckan.model import Session
import uuid
|
<|file_name|>CauseOfDeath.java<|end_file_name|><|fim▁begin|>package org.mafagafogigante.dungeon.stats;
import org.mafagafogigante.dungeon.game.Id;
import org.jetbrains.annotations.NotNull;
import java.io.Serializable;
/**
* CauseOfDeath class that defines what kind of death happened and the ID of the related Item or Spell.
*/
public class CauseOfDeath implements Serializable {
private static final CauseOfDeath UNARMED = new CauseOfDeath(TypeOfCauseOfDeath.UNARMED, new Id("UNARMED"));
private final TypeOfCauseOfDeath type;
private final Id id;
/**
* Constructs a CauseOfDeath with the specified TypeOfCauseOfDeath and ID.
*
* @param type a TypeOfCauseOfDeath
* @param id an ID
*/
public CauseOfDeath(@NotNull TypeOfCauseOfDeath type, @NotNull Id id) {
this.type = type;
this.id = id;
}
/**
* Convenience method that returns a CauseOfDeath that represents an unarmed kill.
*/
public static CauseOfDeath getUnarmedCauseOfDeath() {
return UNARMED;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
CauseOfDeath that = (CauseOfDeath) object;
return id.equals(that.id) && type == that.type;
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + id.hashCode();
return result;
}
@Override
public String toString() {
return String.format("%s : %s", type, id);
}
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var jsVars = {
appBaseUrl: null,
appBasePath: null,
init: function ()
{
var jsVarsAttributes = angular.element('#js-vars')[0].attributes;
jsVars.appBaseUrl = jsVarsAttributes['data-base-url'].value;
jsVars.appBasePath = jsVarsAttributes['data-basepath'].value;
}
};
jsVars.init();
var config = {
basePath: jsVars.appBasePath+'/ng-front/',
restServer: jsVars.appBaseUrl+'/api'
};
var Question =
{
TYPE_QCM: 1,
TYPE_FREE: 2
};
function getUTCTimestamp() {<|fim▁hole|> now.getUTCMonth(),
now.getUTCDate(),
now.getUTCHours(),
now.getUTCMinutes(),
now.getUTCSeconds(),
now.getUTCMilliseconds()
);
return utc_now.getTime();
}
/**
* Filter to create a Javascript date
*/
angular.module('zcpeFilters', []).filter('jsDate', function () {
return function (sDate) {
return new Date(sDate);
}
});
var zcpe = angular.module('zcpe', [
'ngRoute',
'pascalprecht.translate',
'ngCookies',
'ngStorage',
'angular-locker',
'controllers-quizz',
'hljs',
'timer',
'zcpeFilters'
]);<|fim▁end|>
|
var now = new Date();
var utc_now = new Date(
now.getUTCFullYear(),
|
<|file_name|>Cache2.java<|end_file_name|><|fim▁begin|>package qj.util.cache;
import java.util.ArrayList;
import qj.util.funct.F2;
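/**
 * Memoizes the results of a two-argument function, keyed by both
 * arguments. Minimal usage sketch (assuming F2 declares a single
 * abstract method e, so a Java 8 lambda can implement it):
 *
 *   Cache2<Integer, Integer, Integer> sum = new Cache2<>((a, b) -> a + b);
 *   sum.get(1, 2); // computed once and cached
 *   sum.get(1, 2); // served from the cache
 */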
public class Cache2<A, B, T> {
private final F2<A, B, T> func;
ArrayList<Holder> holders = new ArrayList<Holder>();
public Cache2(F2<A, B, T> func) {
this.func = func;
}
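	/**
	 * Returns the value for the key pair (a, b), computing and memoizing
	 * it on a miss. Lookup is a linear scan over all cached holders, so
	 * this suits caches with few distinct key pairs.
	 */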
public T get(A a, B b) {
for (Holder holder : holders) {
if (holder.a.equals(a)
&& holder.b.equals(b)) {
return holder.t;
}<|fim▁hole|> return t;
}
private class Holder {
A a;
B b;
T t;
public Holder(A a, B b, T t) {
this.a = a;
this.b = b;
this.t = t;
}
}
}<|fim▁end|>
|
}
T t = func.e(a, b);
holders.add(new Holder(a, b, t));
|
<|file_name|>screenshot.rs<|end_file_name|><|fim▁begin|>extern crate kiss3d;
extern crate nalgebra as na;
use std::path::Path;
use kiss3d::light::Light;
use kiss3d::window::Window;
use na::{UnitQuaternion, Vector3};
// Based on cube example.
fn main() {
let mut window = Window::new("Kiss3d: screenshot");
let mut c = window.add_cube(0.2, 0.2, 0.2);
c.set_color(1.0, 0.0, 0.0);
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.785));
c.prepend_to_local_rotation(&UnitQuaternion::from_axis_angle(
&Vector3::x_axis(),
-0.6f32,<|fim▁hole|> while window.render() {
let img = window.snap_image();
let img_path = Path::new("screenshot.png");
img.save(img_path).unwrap();
println!("Screeshot saved to `screenshot.png`");
break;
}
}<|fim▁end|>
|
));
window.set_light(Light::StickToCamera);
|
<|file_name|>print_course_duplicates_yaml.py<|end_file_name|><|fim▁begin|>"""Management command for printing the course duplicates YAML"""
from django.core.management import BaseCommand
<|fim▁hole|> """Print course duplicates yaml"""
help = "Print course duplicates yaml"
def handle(self, *args, **options):
self.stdout.write(generate_duplicates_yaml())<|fim▁end|>
|
from course_catalog.etl.deduplication import generate_duplicates_yaml
class Command(BaseCommand):
|
<|file_name|>xml-parser.js<|end_file_name|><|fim▁begin|>var parseString = require('xml2js').parseString;
module.exports = function(req, res, next){
if (req.is('xml')){
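		// buffer the XML body as it streams in, then parse it once 'end' fires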
var data = '';
req.setEncoding('utf8');<|fim▁hole|> req.on('end', function(){
if (!data){
return next();
}
parseString(data, {
trim: true,
explicitArray: false
}, function(err, result){
if (!err){
req.body = result || {};
} else {
return res.error('BAD_REQUEST');
}
next();
});
});
} else {
next();
}
};<|fim▁end|>
|
req.on('data', function(chunk){
data += chunk;
});
|
<|file_name|>assertions_on_constants.rs<|end_file_name|><|fim▁begin|>//FIXME: suggestions are wrongly expanded, this should be fixed along with #7843
#![allow(non_fmt_panics)]
macro_rules! assert_const {
($len:expr) => {
assert!($len > 0);
debug_assert!($len < 0);
};
}
fn main() {
assert!(true);
assert!(false);
assert!(true, "true message");
assert!(false, "false message");
let msg = "panic message";
assert!(false, "{}", msg.to_uppercase());
const B: bool = true;
assert!(B);
const C: bool = false;
assert!(C);
assert!(C, "C message");
debug_assert!(true);<|fim▁hole|> // Don't lint this, since there is no better way for expressing "Only panic in debug mode".
debug_assert!(false); // #3948
assert_const!(3);
assert_const!(-1);
// Don't lint on this:
assert!(cfg!(feature = "hey") || cfg!(not(feature = "asdf")));
}<|fim▁end|>
| |
<|file_name|>storage.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from images.models import S3Connection
from shutil import copyfileobj
import tinys3
import os
import urllib
class LocalStorage(object):
def __init__(self, filename):
self.filename = filename
def get_file_data(self):
"""
Returns the raw data for the specified file
"""
image_path = os.path.join(settings.MEDIA_ROOT, self.filename)
        # read the image in binary mode; the with-block closes the handle<|fim▁hole|>
def get_remote_path(self):
"""
Builds a relative remote path by combining the MEDIA_URL setting and the filename
"""
return '%s%s' % (settings.MEDIA_URL, self.filename)
def store(self, file_instance, content_type=None):
"""
Copy over the `file_instance` to the local storage
"""
image_path = os.path.join(settings.MEDIA_ROOT, self.filename)
        with open(image_path, 'wb') as fw:
copyfileobj(file_instance, fw)
@staticmethod
def create_argument_slug(arguments_dict):
"""
Converts an arguments dictionary into a string that can be stored in a filename
"""
# TODO: is there a possible bug if an invalid key/value is presented?
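        # e.g. {'width': 100, 'crop': 'fit'} -> 'width-100--crop-fit'
        # (pair order follows the dict's iteration order)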
args_list = ['%s-%s' % (key, value) for key, value in arguments_dict.items()]
return '--'.join(args_list)
class S3Storage(LocalStorage):
def __init__(self, *args, **kwargs):
"""
Overrides the LocalStorage and initializes a shared S3 connection
"""
super(S3Storage, self).__init__(*args, **kwargs)
self.conn = tinys3.Connection(self.S3_ACCESS_KEY, self.S3_SECRET_KEY, default_bucket=self.S3_BUCKET, tls=True)
def get_remote_path(self):
"""
Returns an absolute remote path for the filename from the S3 bucket
"""
return 'https://%s.%s/%s' % (self.conn.default_bucket, self.conn.endpoint, self.filename)
def get_file_data(self):
"""
Returns the raw data for the specific file, downloading it from S3
"""
path = self.get_remote_path()
data = urllib.urlopen(path).read()
return data
def store(self, file_instance, content_type=None):
"""
Copy over the `file_instance` from memory to S3
"""
self.conn.upload(self.filename, file_instance, content_type=content_type)
@property
def S3_BUCKET(self):
"""
Returns the S3_BUCKET. Checks local environment variables first, database-stored settings second
"""
return os.environ.get('S3_BUCKET', self.database_settings.bucket)
@property
def S3_ACCESS_KEY(self):
"""
Returns the S3_ACCESS_KEY. Checks local environment variables first, database-stored settings second
"""
return os.environ.get('S3_ACCESS_KEY', self.database_settings.access_key)
@property
def S3_SECRET_KEY(self):
"""
Returns the S3_SECRET_KEY. Checks local environment variables first, database-stored settings second
"""
return os.environ.get('S3_SECRET_KEY', self.database_settings.secret_key)
@property
def database_settings(self):
"""
        Pulls an S3Connection instance, which contains S3 connection settings, from the database. The result is cached locally
"""
        # a double-underscore attribute would be name-mangled to
        # _S3Storage__database_settings and never found by getattr,
        # so a single leading underscore is used for the cache slot
        if not getattr(self, '_database_settings', None):
            self._database_settings = S3Connection.objects.get()
        return self._database_settings<|fim▁end|>
|
        with open(image_path, 'rb') as image_file:
            data = image_file.read()
return data
|
<|file_name|>kepoutlier.py<|end_file_name|><|fim▁begin|>from .utils import PyKEArgumentHelpFormatter
import numpy as np
from astropy.io import fits as pyfits
from matplotlib import pyplot as plt
from tqdm import tqdm
from . import kepio, kepmsg, kepkey, kepfit, kepstat, kepfunc
__all__ = ['kepoutlier']
def kepoutlier(infile, outfile=None, datacol='SAP_FLUX', nsig=3.0, stepsize=1.0,
npoly=3, niter=1, operation='remove', ranges='0,0', plot=False,
plotfit=False, overwrite=False, verbose=False,
logfile='kepoutlier.log'):
"""
kepoutlier -- Remove or replace statistical outliers from time series data
kepoutlier identifies data outliers relative to piecemeal best-fit
polynomials. Outliers are either removed from the output time series or
replaced by a noise-treated value defined by the polynomial fit. Identified
outliers and the best fit functions are optionally plotted for inspection
purposes.
Parameters
----------
infile : str
The name of a MAST standard format FITS file containing a Kepler light
curve within the first data extension.
outfile : str
        The name of the output FITS file. ``outfile`` will be a direct copy of
infile with either data outliers removed (i.e. the table will have
fewer rows) or the outliers will be corrected according to a best-fit
function and a noise model.
datacol : str
The column name containing data stored within extension 1 of infile.
This data will be searched for outliers. Typically this name is
SAP_FLUX (Simple Aperture Photometry fluxes) or PDCSAP_FLUX (Pre-search
Data Conditioning fluxes).
nsig : float
The sigma clipping threshold. Data deviating from a best fit function
by more than the threshold will be either removed or corrected
according to the user selection of operation.
stepsize : float
The data within datacol is unlikely to be well represented by a single
polynomial function. stepsize splits the data up into a series of time
blocks, each is fit independently by a separate function. The user can
provide an informed choice of stepsize after inspecting the data with
the kepdraw tool. Units are days.
npoly : int<|fim▁hole|> removed temporarily and the time series fit again. This will be
iterated niter times before freezing upon the best available fit.
operation : str
* ``remove`` throws away outliers. The output data table will smaller
or equal in size to the input table.
* ``replace`` replaces outliers with a value that is consistent with
the best-fit polynomial function and a random component defined by the
rms of the data relative to the fit and calculated using the inverse
normal cumulative function and a random number generator.
ranges : str
The user can choose specific time ranges of data on which to work. This
could, for example, avoid removing known stellar flares from a dataset.
Time ranges are supplied as comma-separated pairs of Barycentric Julian
Dates (BJDs). Multiple ranges are separated by a semi-colon. An example
containing two time ranges is::
'2455012.48517,2455014.50072;2455022.63487,2455025.08231'
If the user wants to correct the entire time series then providing
``ranges = '0,0'`` will tell the task to operate on the whole time series.
plot : bool
Plot the data and outliers?
plotfit : bool
Overlay the polynomial fits upon the plot?
overwrite : bool
Overwrite the output file?
verbose : bool
Print informative messages and warnings to the shell and logfile?
logfile : str
Name of the logfile containing error and warning messages.
Examples
--------
.. code-block:: bash
$ kepoutlier kplr002437329-2010355172524_llc.fits --datacol SAP_FLUX
--nsig 4 --stepsize 5 --npoly 2 --niter 10 --operation replace
--verbose --plot --plotfit
.. image:: ../_static/images/api/kepoutlier.png
:align: center
"""
if outfile is None:
outfile = infile.split('.')[0] + "-{}.fits".format(__all__[0])
# log the call
hashline = '--------------------------------------------------------------'
kepmsg.log(logfile, hashline, verbose)
call = ('KEPOUTLIER -- '
+ ' infile={}'.format(infile)
+ ' outfile={}'.format(outfile)
+ ' datacol={}'.format(datacol)
+ ' nsig={}'.format(nsig)
+ ' stepsize={}'.format(stepsize)
+ ' npoly={}'.format(npoly)
+ ' niter={}'.format(niter)
+ ' operation={}'.format(operation)
+ ' ranges={}'.format(ranges)
+ ' plot={}'.format(plot)
+ ' plotfit={}'.format(plotfit)
+ ' overwrite={}'.format(overwrite)
+ ' verbose={}'.format(verbose)
+ ' logfile={}'.format(logfile))
kepmsg.log(logfile, call+'\n', verbose)
# start time
kepmsg.clock('KEPOUTLIER started at', logfile, verbose)
# overwrite output file
if overwrite:
kepio.overwrite(outfile, logfile, verbose)
if kepio.fileexists(outfile):
errmsg = ('ERROR -- KEPOUTLIER: {} exists. Use overwrite=True'
.format(outfile))
kepmsg.err(logfile, errmsg, verbose)
# open input file
instr = pyfits.open(infile)
tstart, tstop, bjdref, cadence = kepio.timekeys(instr, infile, logfile,
verbose)
try:
work = instr[0].header['FILEVER']
cadenom = 1.0
except:
cadenom = cadence
# fudge non-compliant FITS keywords with no values
instr = kepkey.emptykeys(instr, infile, logfile, verbose)
# read table structure
table = kepio.readfitstab(infile, instr[1], logfile, verbose)
# filter input data table
try:
nanclean = instr[1].header['NANCLEAN']
except:
time = kepio.readtimecol(infile, table, logfile, verbose)
flux = kepio.readfitscol(infile, table, datacol, logfile, verbose)
finite_data_mask = np.isfinite(time) & np.isfinite(flux) & (flux != 0)
table = table[finite_data_mask]
instr[1].data = table
comment = 'NaN cadences removed from data'
kepkey.new('NANCLEAN', True, comment, instr[1], outfile, logfile,
verbose)
# read table columns
try:
intime = instr[1].data.field('barytime') + 2.4e6
except:
intime = kepio.readfitscol(infile, instr[1].data, 'time', logfile,
verbose)
indata = kepio.readfitscol(infile, instr[1].data, datacol, logfile,
verbose)
intime = intime + bjdref
indata = indata / cadenom
# time ranges for region to be corrected
t1, t2 = kepio.timeranges(ranges, logfile, verbose)
cadencelis = kepstat.filterOnRange(intime, t1, t2)
# find limits of each time step
tstep1, tstep2 = [], []
work = intime[0]
while work < intime[-1]:
tstep1.append(work)
tstep2.append(np.array([work + stepsize, intime[-1]],
dtype='float64').min())
work += stepsize
# find cadence limits of each time step
cstep1, cstep2 = [], []
work1 = 0
work2 = 0
for i in range(len(intime)):
if intime[i] >= intime[work1] and intime[i] < intime[work1] + stepsize:
work2 = i
else:
cstep1.append(work1)
cstep2.append(work2)
work1 = i
work2 = i
cstep1.append(work1)
cstep2.append(work2)
outdata = indata * 1.0
# comment keyword in output file
kepkey.history(call, instr[0], outfile, logfile, verbose)
# clean up x-axis unit
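    # quote times relative to the whole-100-day BJD boundary below the start
    # time so the tick labels stay short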
intime0 = (tstart // 100) * 100.0
ptime = intime - intime0
xlab = 'BJD $-$ {}'.format(intime0)
# clean up y-axis units
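    # rescale the flux by its decimal order of magnitude so the y-axis label
    # reads 10^nrm e-/s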
pout = indata * 1.0
nrm = len(str(int(pout.max())))-1
pout = pout / 10**nrm
    ylab = '10$^{%d}$ e$^-$ s$^{-1}$' % nrm
# data limits
xmin = ptime.min()
xmax = ptime.max()
ymin = pout.min()
ymax = pout.max()
xr = xmax - xmin
yr = ymax - ymin
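    # pad both ends with zero-flux points so that plt.fill() below closes the
    # shaded polygon at the baseline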
ptime = np.insert(ptime, [0], [ptime[0]])
ptime = np.append(ptime, [ptime[-1]])
pout = np.insert(pout, [0], [0.0])
pout = np.append(pout, 0.0)
# plot light curve
if plot:
plt.figure()
plt.clf()
# plot data
ax = plt.axes([0.06, 0.1, 0.93, 0.87])
# force tick labels to be absolute rather than relative
plt.gca().xaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
plt.gca().yaxis.set_major_formatter(plt.ScalarFormatter(useOffset=False))
plt.plot(ptime, pout, color='#0000ff', linestyle='-', linewidth=1.0)
plt.fill(ptime, pout, color='#ffff00', linewidth=0.0, alpha=0.2)
plt.xlabel(xlab, {'color' : 'k'})
plt.ylabel(ylab, {'color' : 'k'})
plt.grid()
# loop over each time step, fit data, determine rms
masterfit = indata * 0.0
mastersigma = np.zeros(len(masterfit))
functype = getattr(kepfunc, 'poly' + str(npoly))
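    # masterfit holds the piecewise polynomial model evaluated at every
    # cadence; mastersigma holds the fit sigma of each window, used as the
    # per-point rejection threshold below; `functype` resolves to e.g.
    # kepfunc.poly3 when npoly=3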
for i in range(len(cstep1)):
pinit = [indata[cstep1[i]:cstep2[i]+1].mean()]
if npoly > 0:
for j in range(npoly):
pinit.append(0.0)
pinit = np.array(pinit, dtype='float32')
try:
coeffs, errors, covar, iiter, sigma, chi2, dof, fit, plotx, ploty = \
kepfit.lsqclip(functype, pinit,
intime[cstep1[i]:cstep2[i]+1] - intime[cstep1[i]],
indata[cstep1[i]:cstep2[i]+1], None, nsig,
nsig, niter, logfile, verbose)
for j in range(len(coeffs)):
masterfit[cstep1[i]: cstep2[i] + 1] += (coeffs[j]
* (intime[cstep1[i]:cstep2[i]+1] - intime[cstep1[i]]) ** j)
for j in range(cstep1[i], cstep2[i] + 1):
mastersigma[j] = sigma
if plotfit:
plt.plot(plotx + intime[cstep1[i]] - intime0, ploty / 10 ** nrm,
'g', lw=3)
        except Exception:
for j in range(cstep1[i], cstep2[i] + 1):
masterfit[j] = indata[j]
mastersigma[j] = 1.0e10
            message = ('WARNING -- KEPOUTLIER: could not fit range '
                       '{}-{}'.format(intime[cstep1[i]], intime[cstep2[i]]))
kepmsg.warn(logfile, message, verbose)
# reject outliers
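    # 'replace' keeps every cadence but substitutes each outlier with a
    # random draw about the local fit value; 'remove' skips outlier rows,
    # compacting the table to its first naxis2 rows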
rejtime, rejdata = [], []
naxis2 = 0
for i in tqdm(range(len(masterfit))):
if (abs(indata[i] - masterfit[i]) > nsig * mastersigma[i]
and i in cadencelis):
rejtime.append(intime[i])
rejdata.append(indata[i])
if operation == 'replace':
[rnd] = kepstat.randarray([masterfit[i]], [mastersigma[i]])
table[naxis2] = table[i]
table.field(datacol)[naxis2] = rnd
naxis2 += 1
else:
table[naxis2] = table[i]
naxis2 += 1
instr[1].data = table[:naxis2]
if plot:
rejtime = np.array(rejtime, dtype='float64')
rejdata = np.array(rejdata, dtype='float32')
plt.plot(rejtime - intime0, rejdata / 10 ** nrm, 'ro')
# plot ranges
plt.xlim(xmin - xr * 0.01, xmax + xr * 0.01)
if ymin >= 0.0:
plt.ylim(ymin - yr * 0.01, ymax + yr * 0.01)
else:
plt.ylim(1.0e-10, ymax + yr * 0.01)
# render plot
plt.show()
# write output file
print("Writing output file {}...".format(outfile))
instr.writeto(outfile)
# close input file
instr.close()
kepmsg.clock('KEPOUTLIER completed at', logfile, verbose)
def kepoutlier_main():
import argparse
parser = argparse.ArgumentParser(
description='Remove or replace data outliers from a time series',
formatter_class=PyKEArgumentHelpFormatter)
parser.add_argument('infile', help='Name of input file', type=str)
parser.add_argument('--outfile',
help=('Name of FITS file to output.'
' If None, outfile is infile-kepoutlier.'),
default=None)
parser.add_argument('--datacol', default='SAP_FLUX',
                        help='Name of the data column to filter for outliers',
                        type=str)
parser.add_argument('--nsig', default=3.,
help='Sigma clipping threshold for outliers',
type=float)
parser.add_argument('--stepsize', default=1.0,
help='Stepsize on which to fit data [days]',
type=float)
parser.add_argument('--npoly', default=3,
help='Polynomial order for each fit', type=int)
parser.add_argument('--niter', default=1,
help='Maximum number of clipping iterations', type=int)
parser.add_argument('--operation', default='remove',
help='Remove or replace outliers?', type=str,
choices=['replace','remove'])
parser.add_argument('--ranges', default='0,0',
help='Time ranges of regions to filter', type=str)
parser.add_argument('--plot', action='store_true', help='Plot result?')
parser.add_argument('--plotfit', action='store_true',
help='Plot fit over results?')
parser.add_argument('--overwrite', action='store_true',
help='Overwrite output file?')
parser.add_argument('--verbose', action='store_true',
                        help='Print informative messages and warnings?')
parser.add_argument('--logfile', '-l', help='Name of ascii log file',
default='kepoutlier.log', dest='logfile', type=str)
args = parser.parse_args()
kepoutlier(args.infile, args.outfile, args.datacol, args.nsig,
               args.stepsize, args.npoly, args.niter, args.operation,
args.ranges, args.plot, args.plotfit, args.overwrite,
args.verbose, args.logfile)<|fim▁end|>
|
The polynomial order of each best-fit function.
niter : int
If outliers are found in a particular data section, that data will be
|