| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>EG_LineJoinProperties_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Baliance. All rights reserved.
//
// DO NOT EDIT: generated by gooxml ECMA-376 generator
//
// Use of this source code is governed by the terms of the Affero GNU General
// Public License version 3.0 as published by the Free Software Foundation and
// appearing in the file LICENSE included in the packaging of this file. A
// commercial license can be purchased by contacting [email protected].
package dml_test
import (<|fim▁hole|> "testing"
"baliance.com/gooxml/schema/soo/dml"
)
func TestEG_LineJoinPropertiesConstructor(t *testing.T) {
v := dml.NewEG_LineJoinProperties()
if v == nil {
t.Errorf("dml.NewEG_LineJoinProperties must return a non-nil value")
}
if err := v.Validate(); err != nil {
t.Errorf("newly constructed dml.EG_LineJoinProperties should validate: %s", err)
}
}
func TestEG_LineJoinPropertiesMarshalUnmarshal(t *testing.T) {
v := dml.NewEG_LineJoinProperties()
buf, _ := xml.Marshal(v)
v2 := dml.NewEG_LineJoinProperties()
xml.Unmarshal(buf, v2)
}<|fim▁end|> | "encoding/xml" |
<|file_name|>resources.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.conf.urls.defaults import patterns, url
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.urlresolvers import NoReverseMatch, reverse, resolve, Resolver404
from django.db.models.sql.constants import QUERY_TERMS, LOOKUP_SEP
from django.http import HttpResponse
from django.utils.cache import patch_cache_control
from tastypie.authentication import Authentication
from tastypie.authorization import ReadOnlyAuthorization
from tastypie.bundle import Bundle
from tastypie.cache import NoCache
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.exceptions import NotFound, BadRequest, InvalidFilterError, HydrationError, InvalidSortError, ImmediateHttpResponse
from tastypie.fields import *
from tastypie.http import *
from tastypie.paginator import Paginator
from tastypie.serializers import Serializer
from tastypie.throttle import BaseThrottle
from tastypie.utils import is_valid_jsonp_callback_value, dict_strip_unicode_keys, trailing_slash
from tastypie.utils.mime import determine_format, build_content_type
from tastypie.validation import Validation
try:
set
except NameError:
from sets import Set as set
# The ``copy`` module became function-friendly in Python 2.5 and
# ``copycompat`` was added in post-1.1.1 Django (r11901).
try:
from django.utils.copycompat import deepcopy
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from copy import deepcopy
def csrf_exempt(func):
return func
class ResourceOptions(object):
"""
A configuration class for ``Resource``.
Provides sane defaults and the logic needed to augment these settings with
the internal ``class Meta`` used on ``Resource`` subclasses.
"""
serializer = Serializer()
authentication = Authentication()
authorization = ReadOnlyAuthorization()
cache = NoCache()
throttle = BaseThrottle()
validation = Validation()
allowed_methods = ['get', 'post', 'put', 'delete']
list_allowed_methods = None
detail_allowed_methods = None
limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20)
api_name = None
resource_name = None
urlconf_namespace = None
default_format = 'application/json'
filtering = {}
ordering = []
object_class = None
queryset = None
fields = []
excludes = []
include_resource_uri = True
include_absolute_url = False
def __new__(cls, meta=None):
overrides = {}
# Handle overrides.
if meta:
for override_name in dir(meta):
# No internals please.
if not override_name.startswith('_'):
overrides[override_name] = getattr(meta, override_name)
allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete'])
if overrides.get('list_allowed_methods', None) is None:
overrides['list_allowed_methods'] = allowed_methods
if overrides.get('detail_allowed_methods', None) is None:
overrides['detail_allowed_methods'] = allowed_methods
if not overrides.get('queryset', None) is None:
overrides['object_class'] = overrides['queryset'].model
return object.__new__(type('ResourceOptions', (cls,), overrides))
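# A minimal sketch (hypothetical ``NoteResource``, not part of this module) of
# how a ``class Meta`` flows through ``ResourceOptions.__new__`` above:
#
#     class NoteResource(Resource):
#         class Meta:
#             resource_name = 'note'
#             allowed_methods = ['get']
#
#     # NoteResource._meta is a dynamically built ``ResourceOptions`` subclass:
#     # NoteResource._meta.resource_name          == 'note'
#     # NoteResource._meta.list_allowed_methods   == ['get']  (falls back to allowed_methods)
#     # NoteResource._meta.detail_allowed_methods == ['get']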
class DeclarativeMetaclass(type):
def __new__(cls, name, bases, attrs):
attrs['base_fields'] = {}
declared_fields = {}
# Inherit any fields from parent(s).
try:
parents = [b for b in bases if issubclass(b, Resource)]
for p in parents:
fields = getattr(p, 'base_fields', {})
for field_name, field_object in fields.items():
attrs['base_fields'][field_name] = deepcopy(field_object)
except NameError:
pass
for field_name, obj in attrs.items():
if isinstance(obj, ApiField):
field = attrs.pop(field_name)
declared_fields[field_name] = field
attrs['base_fields'].update(declared_fields)
attrs['declared_fields'] = declared_fields
new_class = super(DeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
opts = getattr(new_class, 'Meta', None)
new_class._meta = ResourceOptions(opts)
if not getattr(new_class._meta, 'resource_name', None):
# No ``resource_name`` provided. Attempt to auto-name the resource.
class_name = new_class.__name__
name_bits = [bit for bit in class_name.split('Resource') if bit]
resource_name = ''.join(name_bits).lower()
new_class._meta.resource_name = resource_name
if getattr(new_class._meta, 'include_resource_uri', True):
if not 'resource_uri' in new_class.base_fields:
new_class.base_fields['resource_uri'] = CharField(readonly=True)
elif 'resource_uri' in new_class.base_fields and not 'resource_uri' in attrs:
del(new_class.base_fields['resource_uri'])
for field_name, field_object in new_class.base_fields.items():
if hasattr(field_object, 'contribute_to_class'):
field_object.contribute_to_class(new_class, field_name)
return new_class
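# Auto-naming sketch (hypothetical class, for illustration only): the
# metaclass strips 'Resource' out of the class name and lowercases the rest.
#
#     class UserResource(Resource):   # no Meta.resource_name given
#         pass
#
#     # UserResource._meta.resource_name == 'user'
#
# An explicit ``Meta.resource_name`` always wins over this derivation.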
class Resource(object):
"""
Handles the data, request dispatch and responding to requests.
Serialization/deserialization is handled "at the edges" (i.e. at the
beginning/end of the request/response cycle) so that everything internally
is Python data structures.
This class tries to be non-model specific, so it can be hooked up to other
data sources, such as search results, files, other data, etc.
"""
__metaclass__ = DeclarativeMetaclass
def __init__(self, api_name=None):
self.fields = deepcopy(self.base_fields)
if not api_name is None:
self._meta.api_name = api_name
def __getattr__(self, name):
if name in self.fields:
return self.fields[name]
def wrap_view(self, view):
"""<|fim▁hole|> Wraps methods so they can be called in a more functional way as well
as handling exceptions better.
Note that if ``BadRequest`` or an exception with a ``response`` attr
are seen, there is special handling to either present a message back
to the user or return the response traveling with the exception.
"""
@csrf_exempt
def wrapper(request, *args, **kwargs):
try:
callback = getattr(self, view)
response = callback(request, *args, **kwargs)
if request.is_ajax():
# IE excessively caches XMLHttpRequests, so we're disabling
# the browser cache here.
# See http://www.enhanceie.com/ie/bugs.asp for details.
patch_cache_control(response, no_cache=True)
return response
except (BadRequest, ApiFieldError), e:
return HttpBadRequest(e.args[0])
except Exception, e:
if hasattr(e, 'response'):
return e.response
# A real, non-expected exception.
# Handle the case where the full traceback is more helpful
# than the serialized error.
if settings.DEBUG and getattr(settings, 'TASTYPIE_FULL_DEBUG', False):
raise
# Rather than re-raising, we're going to do things similar to
# what Django does. The difference is returning a serialized
# error message.
return self._handle_500(request, e)
return wrapper
def _handle_500(self, request, exception):
import traceback
import sys
the_trace = '\n'.join(traceback.format_exception(*(sys.exc_info())))
if settings.DEBUG:
data = {
"error_message": exception.message,
"traceback": the_trace,
}
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format))
# When DEBUG is False, send an error message to the admins.
from django.core.mail import mail_admins
subject = 'Error (%s IP): %s' % ((request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS and 'internal' or 'EXTERNAL'), request.path)
try:
request_repr = repr(request)
except:
request_repr = "Request repr() unavailable"
message = "%s\n\n%s" % (the_trace, request_repr)
mail_admins(subject, message, fail_silently=True)
# Prep the data going out.
data = {
"error_message": getattr(settings, 'TASTYPIE_CANNED_ERROR', "Sorry, this request could not be processed. Please try again later."),
}
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
return HttpApplicationError(content=serialized, content_type=build_content_type(desired_format))
def _build_reverse_url(self, name, args=None, kwargs=None):
"""
A convenience hook for overriding how URLs are built.
See ``NamespacedModelResource._build_reverse_url`` for an example.
"""
return reverse(name, args=args, kwargs=kwargs)
def base_urls(self):
"""
The standard URLs this ``Resource`` should respond to.
"""
# Due to the way Django parses URLs, ``get_multiple`` won't work without
# a trailing slash.
return [
url(r"^(?P<resource_name>%s)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_list'), name="api_dispatch_list"),
url(r"^(?P<resource_name>%s)/schema%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('get_schema'), name="api_get_schema"),
url(r"^(?P<resource_name>%s)/set/(?P<pk_list>\w[\w/;-]*)/$" % self._meta.resource_name, self.wrap_view('get_multiple'), name="api_get_multiple"),
url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('dispatch_detail'), name="api_dispatch_detail"),
]
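# For a resource named 'note' (hypothetical), base_urls() above yields
# patterns matching, in order:
#
#     note/              -> dispatch_list    (name='api_dispatch_list')
#     note/schema/       -> get_schema       (name='api_get_schema')
#     note/set/1;4;7/    -> get_multiple     (name='api_get_multiple')
#     note/1/            -> dispatch_detail  (name='api_dispatch_detail')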
def override_urls(self):
"""
A hook for adding your own URLs or overriding the default URLs.
"""
return []
@property
def urls(self):
"""
The endpoints this ``Resource`` responds to.
Mostly a standard URLconf, this is suitable for either automatic use
when registered with an ``Api`` class or for including directly in
a URLconf should you choose to.
"""
urls = self.override_urls() + self.base_urls()
urlpatterns = patterns('',
*urls
)
return urlpatterns
def determine_format(self, request):
"""
Used to determine the desired format.
Largely relies on ``tastypie.utils.mime.determine_format`` but here
as a point of extension.
"""
return determine_format(request, self._meta.serializer, default_format=self._meta.default_format)
def serialize(self, request, data, format, options=None):
"""
Given a request, data and a desired format, produces a serialized
version suitable for transfer over the wire.
Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
"""
options = options or {}
if 'text/javascript' in format:
# Get the JSONP callback name; default to "callback".
callback = request.GET.get('callback', 'callback')
if not is_valid_jsonp_callback_value(callback):
raise BadRequest('JSONP callback name is invalid.')
options['callback'] = callback
return self._meta.serializer.serialize(data, format, options)
def deserialize(self, request, data, format='application/json'):
"""
Given a request, data and a format, deserializes the given data.
It relies on the request properly sending a ``CONTENT_TYPE`` header,
falling back to ``application/json`` if not provided.
Mostly a hook, this uses the ``Serializer`` from ``Resource._meta``.
"""
return self._meta.serializer.deserialize(data, format=request.META.get('CONTENT_TYPE', 'application/json'))
def dispatch_list(self, request, **kwargs):
"""
A view for handling the various HTTP methods (GET/POST/PUT/DELETE) over
the entire list of resources.
Relies on ``Resource.dispatch`` for the heavy-lifting.
"""
return self.dispatch('list', request, **kwargs)
def dispatch_detail(self, request, **kwargs):
"""
A view for handling the various HTTP methods (GET/POST/PUT/DELETE) on
a single resource.
Relies on ``Resource.dispatch`` for the heavy-lifting.
"""
return self.dispatch('detail', request, **kwargs)
def dispatch(self, request_type, request, **kwargs):
"""
Handles the common operations (allowed HTTP method, authentication,
throttling, method lookup) surrounding most CRUD interactions.
"""
allowed_methods = getattr(self._meta, "%s_allowed_methods" % request_type, None)
request_method = self.method_check(request, allowed=allowed_methods)
method = getattr(self, "%s_%s" % (request_method, request_type), None)
if method is None:
raise ImmediateHttpResponse(response=HttpNotImplemented())
self.is_authenticated(request)
self.is_authorized(request)
self.throttle_check(request)
# All clear. Process the request.
request = convert_post_to_put(request)
response = method(request, **kwargs)
# Add the throttled request.
self.log_throttled_access(request)
# If what comes back isn't a ``HttpResponse``, assume that the
# request was accepted and that some action occurred. This also
# prevents Django from freaking out.
if not isinstance(response, HttpResponse):
return HttpAccepted()
return response
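# Rough dispatch sketch (hypothetical resource, assumes a ``Note`` model): a
# GET against the list endpoint travels dispatch_list -> dispatch('list', ...)
# -> method_check -> is_authenticated/is_authorized/throttle_check ->
# self.get_list(request, **kwargs).
#
#     class NoteResource(ModelResource):
#         class Meta:
#             queryset = Note.objects.all()
#
#     # GET /note/ -> NoteResource.get_list() -> 200 with a serialized page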
def remove_api_resource_names(self, url_dict):
"""
Given a dictionary of regex matches from a URLconf, removes
``api_name`` and/or ``resource_name`` if found.
This is useful for converting URLconf matches into something suitable
for data lookup. For example::
Model.objects.filter(**self.remove_api_resource_names(matches))
"""
kwargs_subset = url_dict.copy()
for key in ['api_name', 'resource_name']:
try:
del(kwargs_subset[key])
except KeyError:
pass
return kwargs_subset
def method_check(self, request, allowed=None):
"""
Ensures that the HTTP method used on the request is allowed to be
handled by the resource.
Takes an ``allowed`` parameter, which should be a list of lowercase
HTTP methods to check against. Usually, this looks like::
# The most generic lookup.
self.method_check(request, self._meta.allowed_methods)
# A lookup against what's allowed for list-type methods.
self.method_check(request, self._meta.list_allowed_methods)
# A useful check when creating a new endpoint that only handles
# GET.
self.method_check(request, ['get'])
"""
if allowed is None:
allowed = []
request_method = request.method.lower()
if not request_method in allowed:
raise ImmediateHttpResponse(response=HttpMethodNotAllowed())
return request_method
def is_authorized(self, request, object=None):
"""
Handles checking of permissions to see if the user has authorization
to GET, POST, PUT, or DELETE this resource. If ``object`` is provided,
the authorization backend can apply additional row-level permissions
checking.
"""
auth_result = self._meta.authorization.is_authorized(request, object)
if isinstance(auth_result, HttpResponse):
raise ImmediateHttpResponse(response=auth_result)
if not auth_result is True:
raise ImmediateHttpResponse(response=HttpUnauthorized())
def is_authenticated(self, request):
"""
Handles checking if the user is authenticated and dealing with
unauthenticated users.
Mostly a hook, this uses class assigned to ``authentication`` from
``Resource._meta``.
"""
# Authenticate the request as needed.
auth_result = self._meta.authentication.is_authenticated(request)
if isinstance(auth_result, HttpResponse):
raise ImmediateHttpResponse(response=auth_result)
if not auth_result is True:
raise ImmediateHttpResponse(response=HttpUnauthorized())
def throttle_check(self, request):
"""
Handles checking if the user should be throttled.
Mostly a hook, this uses class assigned to ``throttle`` from
``Resource._meta``.
"""
identifier = self._meta.authentication.get_identifier(request)
# Check to see if they should be throttled.
if self._meta.throttle.should_be_throttled(identifier):
# Throttle limit exceeded.
raise ImmediateHttpResponse(response=HttpForbidden())
def log_throttled_access(self, request):
"""
Handles the recording of the user's access for throttling purposes.
Mostly a hook, this uses class assigned to ``throttle`` from
``Resource._meta``.
"""
request_method = request.method.lower()
self._meta.throttle.accessed(self._meta.authentication.get_identifier(request), url=request.get_full_path(), request_method=request_method)
def build_bundle(self, obj=None, data=None):
"""
Given either an object, a data dictionary or both, builds a ``Bundle``
for use throughout the ``dehydrate/hydrate`` cycle.
If no object is provided, an empty object from
``Resource._meta.object_class`` is created so that attempts to access
``bundle.obj`` do not fail.
"""
if obj is None:
obj = self._meta.object_class()
return Bundle(obj, data)
def build_filters(self, filters=None):
"""
Allows for the filtering of applicable objects.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
return filters
def apply_sorting(self, obj_list, options=None):
"""
Allows for the sorting of objects being returned.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
return obj_list
# URL-related methods.
def get_resource_uri(self, bundle_or_obj):
"""
This needs to be implemented at the user level.
A ``return reverse("api_dispatch_detail", kwargs={'resource_name':
self.resource_name, 'pk': object.id})`` should be all that would
be needed.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def get_resource_list_uri(self):
"""
Returns a URL specific to this resource's list endpoint.
"""
kwargs = {
'resource_name': self._meta.resource_name,
}
if self._meta.api_name is not None:
kwargs['api_name'] = self._meta.api_name
try:
return self._build_reverse_url("api_dispatch_list", kwargs=kwargs)
except NoReverseMatch:
return None
def get_via_uri(self, uri):
"""
This pulls apart the salient bits of the URI and populates the
resource via a ``obj_get``.
If you need custom behavior based on other portions of the URI,
simply override this method.
"""
try:
view, args, kwargs = resolve(uri)
except Resolver404:
raise NotFound("The URL provided '%s' was not a link to a valid resource." % uri)
return self.obj_get(**self.remove_api_resource_names(kwargs))
# Data preparation.
def full_dehydrate(self, obj):
"""
Given an object instance, extract the information from it to populate
the resource.
"""
bundle = Bundle(obj=obj)
# Dehydrate each field.
for field_name, field_object in self.fields.items():
# A touch leaky but it makes URI resolution work.
if isinstance(field_object, RelatedField):
field_object.api_name = self._meta.api_name
field_object.resource_name = self._meta.resource_name
bundle.data[field_name] = field_object.dehydrate(bundle)
# Check for an optional method to do further dehydration.
method = getattr(self, "dehydrate_%s" % field_name, None)
if method:
bundle.data[field_name] = method(bundle)
bundle = self.dehydrate(bundle)
return bundle
def dehydrate(self, bundle):
"""
A hook to allow a final manipulation of data once all fields/methods
have built out the dehydrated data.
Useful if you need to access more than one dehydrated field or want
to annotate on additional data.
Must return the modified bundle.
"""
return bundle
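# Hook sketch (hypothetical field and resource names): per-field and
# whole-bundle dehydration hooks compose as in ``full_dehydrate`` above. Note
# that ``dehydrate_FOO`` returns a value, while ``dehydrate`` returns the
# bundle. Assumes ``import datetime``.
#
#     class NoteResource(ModelResource):
#         def dehydrate_title(self, bundle):   # runs for the 'title' field
#             return bundle.data['title'].strip()
#
#         def dehydrate(self, bundle):         # runs last, sees the whole bundle
#             bundle.data['server_time'] = datetime.datetime.now()
#             return bundle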
def full_hydrate(self, bundle):
"""
Given a populated bundle, distill it and turn it back into
a full-fledged object instance.
"""
if bundle.obj is None:
bundle.obj = self._meta.object_class()
for field_name, field_object in self.fields.items():
if field_object.attribute:
value = field_object.hydrate(bundle)
if value is not None:
# We need to avoid populating M2M data here as that will
# cause things to blow up.
if not getattr(field_object, 'is_related', False):
setattr(bundle.obj, field_object.attribute, value)
elif not getattr(field_object, 'is_m2m', False):
setattr(bundle.obj, field_object.attribute, value.obj)
# Check for an optional method to do further hydration.
method = getattr(self, "hydrate_%s" % field_name, None)
if method:
bundle = method(bundle)
bundle = self.hydrate(bundle)
return bundle
def hydrate(self, bundle):
"""
A hook to allow a final manipulation of data once all fields/methods
have built out the hydrated data.
Useful if you need to access more than one hydrated field or want
to annotate on additional data.
Must return the modified bundle.
"""
return bundle
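# Mirror-image hook sketch (hypothetical names): unlike ``dehydrate_FOO``,
# a ``hydrate_FOO`` method receives and must return the bundle itself.
#
#     class NoteResource(ModelResource):
#         def hydrate_slug(self, bundle):
#             bundle.data['slug'] = bundle.data['slug'].lower()
#             return bundle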
def hydrate_m2m(self, bundle):
"""
Populate the ManyToMany data on the instance.
"""
if bundle.obj is None:
raise HydrationError("You must call 'full_hydrate' before attempting to run 'hydrate_m2m' on %r." % self)
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_m2m', False):
continue
if field_object.attribute:
# Note that we only hydrate the data, leaving the instance
# unmodified. It's up to the user's code to handle this.
# The ``ModelResource`` provides a working baseline
# in this regard.
bundle.data[field_name] = field_object.hydrate_m2m(bundle)
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_m2m', False):
continue
method = getattr(self, "hydrate_%s" % field_name, None)
if method:
method(bundle)
return bundle
def build_schema(self):
"""
Returns a dictionary of all the fields on the resource and some
properties about those fields.
Used by the ``schema/`` endpoint to describe what will be available.
"""
data = {
'fields': {},
'default_format': self._meta.default_format,
}
if self._meta.ordering:
data['ordering'] = self._meta.ordering
if self._meta.filtering:
data['filtering'] = self._meta.filtering
for field_name, field_object in self.fields.items():
data['fields'][field_name] = {
'type': field_object.dehydrated_type,
'nullable': field_object.null,
'readonly': field_object.readonly,
'help_text': field_object.help_text,
}
return data
def dehydrate_resource_uri(self, bundle):
"""
For the automatically included ``resource_uri`` field, dehydrate
the URI for the given bundle.
Returns empty string if no URI can be generated.
"""
try:
return self.get_resource_uri(bundle)
except NotImplementedError:
return ''
except NoReverseMatch:
return ''
def generate_cache_key(self, *args, **kwargs):
"""
Creates a unique-enough cache key.
This is based off the current api_name/resource_name/args/kwargs.
"""
smooshed = []
for key, value in kwargs.items():
smooshed.append("%s=%s" % (key, value))
# Use a list plus a ``.join()`` because it's faster than concatenation.
return "%s:%s:%s:%s" % (self._meta.api_name, self._meta.resource_name, ':'.join(args), ':'.join(smooshed))
# Data access methods.
def get_object_list(self, request):
"""
A hook for returning the list of available objects.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def apply_authorization_limits(self, request, object_list):
"""
Allows the ``Authorization`` class to further limit the object list.
Also a hook to customize per ``Resource``.
"""
if hasattr(self._meta.authorization, 'apply_limits'):
object_list = self._meta.authorization.apply_limits(request, object_list)
return object_list
def obj_get_list(self, request=None, **kwargs):
"""
Fetches the list of objects available on the resource.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def cached_obj_get_list(self, request=None, **kwargs):
"""
A version of ``obj_get_list`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('list', **kwargs)
obj_list = self._meta.cache.get(cache_key)
if obj_list is None:
obj_list = self.obj_get_list(request=request, **kwargs)
self._meta.cache.set(cache_key, obj_list)
return obj_list
def obj_get(self, request=None, **kwargs):
"""
Fetches an individual object on the resource.
This needs to be implemented at the user level. If the object can not
be found, this should raise a ``NotFound`` exception.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def cached_obj_get(self, request=None, **kwargs):
"""
A version of ``obj_get`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('detail', **kwargs)
bundle = self._meta.cache.get(cache_key)
if bundle is None:
bundle = self.obj_get(request=request, **kwargs)
self._meta.cache.set(cache_key, bundle)
return bundle
def obj_create(self, bundle, request=None, **kwargs):
"""
Creates a new object based on the provided data.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def obj_update(self, bundle, request=None, **kwargs):
"""
Updates an existing object (or creates a new object) based on the
provided data.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def obj_delete_list(self, request=None, **kwargs):
"""
Deletes an entire list of objects.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def obj_delete(self, request=None, **kwargs):
"""
Deletes a single object.
This needs to be implemented at the user level.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
def create_response(self, request, data):
"""
Extracts the common "which-format/serialize/return-response" cycle.
Mostly a useful shortcut/hook.
"""
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
return HttpResponse(content=serialized, content_type=build_content_type(desired_format))
def is_valid(self, bundle, request=None):
"""
Handles checking if the data provided by the user is valid.
Mostly a hook, this uses class assigned to ``validation`` from
``Resource._meta``.
If validation fails, an error is raised with the error messages
serialized inside it.
"""
errors = self._meta.validation.is_valid(bundle, request)
if len(errors):
if request:
desired_format = self.determine_format(request)
else:
desired_format = self._meta.default_format
serialized = self.serialize(request, errors, desired_format)
response = HttpBadRequest(content=serialized, content_type=build_content_type(desired_format))
raise ImmediateHttpResponse(response=response)
def rollback(self, bundles):
"""
Given the list of bundles, delete all objects pertaining to those
bundles.
This needs to be implemented at the user level. No exceptions should
be raised if possible.
``ModelResource`` includes a full working version specific to Django's
``Models``.
"""
raise NotImplementedError()
# Views.
def get_list(self, request, **kwargs):
"""
Returns a serialized list of resources.
Calls ``obj_get_list`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
# TODO: Uncached for now. Invalidation that works for everyone may be
# impossible.
objects = self.obj_get_list(request=request, **self.remove_api_resource_names(kwargs))
sorted_objects = self.apply_sorting(objects, options=request.GET)
paginator = Paginator(request.GET, sorted_objects, resource_uri=self.get_resource_list_uri(),
limit=self._meta.limit)
to_be_serialized = paginator.page()
# Dehydrate the bundles in preparation for serialization.
to_be_serialized['objects'] = [self.full_dehydrate(obj=obj) for obj in to_be_serialized['objects']]
return self.create_response(request, to_be_serialized)
def get_detail(self, request, **kwargs):
"""
Returns a single serialized resource.
Calls ``cached_obj_get/obj_get`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
try:
obj = self.cached_obj_get(request=request, **self.remove_api_resource_names(kwargs))
except ObjectDoesNotExist:
return HttpGone()
except MultipleObjectsReturned:
return HttpMultipleChoices("More than one resource is found at this URI.")
bundle = self.full_dehydrate(obj)
return self.create_response(request, bundle)
def put_list(self, request, **kwargs):
"""
Replaces a collection of resources with another collection.
Calls ``delete_list`` to clear out the collection then ``obj_create``
with the provided data to create the new collection.
Return ``HttpAccepted`` (204 No Content).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
if not 'objects' in deserialized:
raise BadRequest("Invalid data sent.")
self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs))
bundles_seen = []
for object_data in deserialized['objects']:
bundle = self.build_bundle(data=dict_strip_unicode_keys(object_data))
# Attempt to be transactional, deleting any previously created
# objects if validation fails.
try:
self.is_valid(bundle, request)
except ImmediateHttpResponse:
self.rollback(bundles_seen)
raise
self.obj_create(bundle, request=request)
bundles_seen.append(bundle)
return HttpAccepted()
def put_detail(self, request, **kwargs):
"""
Either updates an existing resource or creates a new one with the
provided data.
Calls ``obj_update`` with the provided data first, but falls back to
``obj_create`` if the object does not already exist.
If a new resource is created, return ``HttpCreated`` (201 Created).
If an existing resource is modified, return ``HttpAccepted`` (204 No Content).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized))
self.is_valid(bundle, request)
try:
updated_bundle = self.obj_update(bundle, request=request, pk=kwargs.get('pk'))
return HttpAccepted()
except:
updated_bundle = self.obj_create(bundle, request=request, pk=kwargs.get('pk'))
return HttpCreated(location=self.get_resource_uri(updated_bundle))
def post_list(self, request, **kwargs):
"""
Creates a new resource/object with the provided data.
Calls ``obj_create`` with the provided data and returns a response
with the new resource's location.
If a new resource is created, return ``HttpCreated`` (201 Created).
"""
deserialized = self.deserialize(request, request.raw_post_data, format=request.META.get('CONTENT_TYPE', 'application/json'))
bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized))
self.is_valid(bundle, request)
updated_bundle = self.obj_create(bundle, request=request)
return HttpCreated(location=self.get_resource_uri(updated_bundle))
def post_detail(self, request, **kwargs):
"""
Creates a new subcollection of the resource under a resource.
This is not implemented by default because most people's data models
aren't self-referential.
If a new resource is created, return ``HttpCreated`` (201 Created).
"""
return HttpNotImplemented()
def delete_list(self, request, **kwargs):
"""
Destroys a collection of resources/objects.
Calls ``obj_delete_list``.
If the resources are deleted, return ``HttpAccepted`` (204 No Content).
"""
self.obj_delete_list(request=request, **self.remove_api_resource_names(kwargs))
return HttpAccepted()
def delete_detail(self, request, **kwargs):
"""
Destroys a single resource/object.
Calls ``obj_delete``.
If the resource is deleted, return ``HttpAccepted`` (204 No Content).
If the resource did not exist, return ``HttpGone`` (410 Gone).
"""
try:
self.obj_delete(request=request, **self.remove_api_resource_names(kwargs))
return HttpAccepted()
except NotFound:
return HttpGone()
def get_schema(self, request, **kwargs):
"""
Returns a serialized form of the schema of the resource.
Calls ``build_schema`` to generate the data. This method only responds
to HTTP GET.
Should return a HttpResponse (200 OK).
"""
self.method_check(request, allowed=['get'])
self.is_authenticated(request)
self.throttle_check(request)
self.log_throttled_access(request)
return self.create_response(request, self.build_schema())
def get_multiple(self, request, **kwargs):
"""
Returns a serialized list of resources based on the identifiers
from the URL.
Calls ``obj_get`` to fetch only the objects requested. This method
only responds to HTTP GET.
Should return a HttpResponse (200 OK).
"""
self.method_check(request, allowed=['get'])
self.is_authenticated(request)
self.throttle_check(request)
# Rip apart the list then iterate.
obj_pks = kwargs.get('pk_list', '').split(';')
objects = []
not_found = []
for pk in obj_pks:
try:
obj = self.obj_get(request, pk=pk)
bundle = self.full_dehydrate(obj)
objects.append(bundle)
except ObjectDoesNotExist:
not_found.append(pk)
object_list = {
'objects': objects,
}
if len(not_found):
object_list['not_found'] = not_found
self.log_throttled_access(request)
return self.create_response(request, object_list)
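# Sketch of a get_multiple() round-trip (hypothetical resource and data,
# assuming the resource is mounted under /api/v1/):
#
#     GET /api/v1/note/set/1;3;99/
#     -> {"objects": [<note 1>, <note 3>], "not_found": ["99"]}
#
# Semicolons separate the pks, per the 'pk_list' regex in base_urls().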
class ModelDeclarativeMetaclass(DeclarativeMetaclass):
def __new__(cls, name, bases, attrs):
new_class = super(ModelDeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
fields = getattr(new_class._meta, 'fields', [])
excludes = getattr(new_class._meta, 'excludes', [])
field_names = new_class.base_fields.keys()
for field_name in field_names:
if field_name == 'resource_uri':
continue
if field_name in new_class.declared_fields:
continue
if len(fields) and not field_name in fields:
del(new_class.base_fields[field_name])
if len(excludes) and field_name in excludes:
del(new_class.base_fields[field_name])
# Add in the new fields.
new_class.base_fields.update(new_class.get_fields(fields, excludes))
if getattr(new_class._meta, 'include_absolute_url', True):
if not 'absolute_url' in new_class.base_fields:
new_class.base_fields['absolute_url'] = CharField(attribute='get_absolute_url', readonly=True)
elif 'absolute_url' in new_class.base_fields and not 'absolute_url' in attrs:
del(new_class.base_fields['absolute_url'])
return new_class
class ModelResource(Resource):
"""
A subclass of ``Resource`` designed to work with Django's ``Models``.
This class will introspect a given ``Model`` and build a field list based
on the fields found on the model (excluding relational fields).
Given that it is aware of Django's ORM, it also handles the CRUD data
operations of the resource.
"""
__metaclass__ = ModelDeclarativeMetaclass
@classmethod
def should_skip_field(cls, field):
"""
Given a Django model field, return if it should be included in the
contributed ApiFields.
"""
# Ignore certain fields (related fields).
if getattr(field, 'rel'):
return True
return False
@classmethod
def api_field_from_django_field(cls, f, default=CharField):
"""
Returns the field type that would likely be associated with each
Django type.
"""
result = default
if f.get_internal_type() in ('DateField', 'DateTimeField'):
result = DateTimeField
elif f.get_internal_type() in ('BooleanField', 'NullBooleanField'):
result = BooleanField
elif f.get_internal_type() in ('DecimalField', 'FloatField'):
result = FloatField
elif f.get_internal_type() in ('IntegerField', 'PositiveIntegerField', 'PositiveSmallIntegerField', 'SmallIntegerField'):
result = IntegerField
elif f.get_internal_type() in ('FileField', 'ImageField'):
result = FileField
# TODO: Perhaps enable these via introspection. The reason they're not enabled
# by default is the very different ``__init__`` they have over
# the other fields.
# elif f.get_internal_type() == 'ForeignKey':
# result = ForeignKey
# elif f.get_internal_type() == 'ManyToManyField':
# result = ManyToManyField
return result
@classmethod
def get_fields(cls, fields=None, excludes=None):
"""
Given any explicit fields to include and fields to exclude, add
additional fields based on the associated model.
"""
final_fields = {}
fields = fields or []
excludes = excludes or []
if not cls._meta.object_class:
return final_fields
for f in cls._meta.object_class._meta.fields:
# If the field name is already present, skip
if f.name in cls.base_fields:
continue
# If field is not present in explicit field listing, skip
if fields and f.name not in fields:
continue
# If field is in exclude list, skip
if excludes and f.name in excludes:
continue
if cls.should_skip_field(f):
continue
api_field_class = cls.api_field_from_django_field(f)
kwargs = {
'attribute': f.name,
}
if f.null is True:
kwargs['null'] = True
kwargs['unique'] = f.unique
if not f.null and f.blank is True:
kwargs['default'] = ''
if f.get_internal_type() == 'TextField':
kwargs['default'] = ''
if f.has_default():
kwargs['default'] = f.default
final_fields[f.name] = api_field_class(**kwargs)
final_fields[f.name].instance_name = f.name
return final_fields
def build_filters(self, filters=None):
"""
Given a dictionary of filters, create the necessary ORM-level filters.
Keys should be resource fields, **NOT** model fields.
Valid values are either a list of Django filter types (i.e.
``['startswith', 'exact', 'lte']``), the ``ALL`` constant or the
``ALL_WITH_RELATIONS`` constant.
"""
# At the declarative level:
# filtering = {
# 'resource_field_name': ['exact', 'startswith', 'endswith', 'contains'],
# 'resource_field_name_2': ['exact', 'gt', 'gte', 'lt', 'lte', 'range'],
# 'resource_field_name_3': ALL,
# 'resource_field_name_4': ALL_WITH_RELATIONS,
# ...
# }
# Accepts the filters as a dict. None by default, meaning no filters.
if filters is None:
filters = {}
qs_filters = {}
for filter_expr, value in filters.items():
filter_bits = filter_expr.split(LOOKUP_SEP)
if not filter_bits[0] in self.fields:
# It's not a field we know about. Move along citizen.
continue
if not filter_bits[0] in self._meta.filtering:
raise InvalidFilterError("The '%s' field does not allow filtering." % filter_bits[0])
if filter_bits[-1] in QUERY_TERMS.keys():
filter_type = filter_bits.pop()
else:
filter_type = 'exact'
# Check to see if it's an allowed lookup type.
if not self._meta.filtering[filter_bits[0]] in (ALL, ALL_WITH_RELATIONS):
# Must be an explicit whitelist.
if not filter_type in self._meta.filtering[filter_bits[0]]:
raise InvalidFilterError("'%s' is not an allowed filter on the '%s' field." % (filter_expr, filter_bits[0]))
# Check to see if it's a relational lookup and if that's allowed.
if len(filter_bits) > 1:
if not self._meta.filtering[filter_bits[0]] == ALL_WITH_RELATIONS:
raise InvalidFilterError("Lookups are not allowed more than one level deep on the '%s' field." % filter_bits[0])
if self.fields[filter_bits[0]].attribute is None:
raise InvalidFilterError("The '%s' field has no 'attribute' for searching with." % filter_bits[0])
if value in ['true', 'True', True]:
value = True
elif value in ['false', 'False', False]:
value = False
elif value in ('nil', 'none', 'None', None):
value = None
db_field_name = LOOKUP_SEP.join([self.fields[filter_bits[0]].attribute] + filter_bits[1:])
qs_filter = "%s%s%s" % (db_field_name, LOOKUP_SEP, filter_type)
qs_filters[qs_filter] = value
return dict_strip_unicode_keys(qs_filters)
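# Worked example (hypothetical resource): given
#
#     filtering = {'title': ['startswith'], 'slug': ALL}
#
# a querystring of ?title__startswith=The&slug=first becomes
#
#     {'title__startswith': 'The', 'slug__exact': 'first'}
#
# ready for QuerySet.filter(**...). Unknown fields are silently skipped;
# known fields with disallowed lookups raise InvalidFilterError.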
def apply_sorting(self, obj_list, options=None):
"""
Given a dictionary of options, apply some ORM-level sorting to the
provided ``QuerySet``.
Looks for the ``sort_by`` key and handles either ascending (just the
field name) or descending (the field name with a ``-`` in front).
The field name should be the resource field, **NOT** model field.
"""
if options is None:
options = {}
if not 'sort_by' in options:
# Nothing to alter the sort order. Return what we've got.
return obj_list
order_by_args = []
if hasattr(options, 'getlist'):
sort_bits = options.getlist('sort_by')
else:
sort_bits = options.get('sort_by')
if not isinstance(sort_bits, (list, tuple)):
sort_bits = [sort_bits]
for sort_by in sort_bits:
sort_by_bits = sort_by.split(LOOKUP_SEP)
field_name = sort_by_bits[0]
order = ''
if sort_by_bits[0].startswith('-'):
field_name = sort_by_bits[0][1:]
order = '-'
if not field_name in self.fields:
# It's not a field we know about. Move along citizen.
raise InvalidSortError("No matching '%s' field for ordering on." % field_name)
if not field_name in self._meta.ordering:
raise InvalidSortError("The '%s' field does not allow ordering." % field_name)
if self.fields[field_name].attribute is None:
raise InvalidSortError("The '%s' field has no 'attribute' for ordering with." % field_name)
order_by_args.append("%s%s" % (order, LOOKUP_SEP.join([self.fields[field_name].attribute] + sort_by_bits[1:])))
return obj_list.order_by(*order_by_args)
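# Worked example (hypothetical resource): with ordering = ['added_on'] and a
# matching 'added_on' attribute, ?sort_by=-added_on resolves to
# obj_list.order_by('-added_on'); a field missing from ``ordering`` raises
# InvalidSortError.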
def get_object_list(self, request):
"""
An ORM-specific implementation of ``get_object_list``.
Returns a queryset that may have been limited by authorization or other
overrides.
"""
base_object_list = self._meta.queryset
# Limit it as needed.
authed_object_list = self.apply_authorization_limits(request, base_object_list)
return authed_object_list
def obj_get_list(self, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_get_list``.
Takes an optional ``request`` object, whose ``GET`` dictionary can be
used to narrow the query.
"""
filters = None
if hasattr(request, 'GET'):
filters = request.GET
applicable_filters = self.build_filters(filters=filters)
try:
return self.get_object_list(request).filter(**applicable_filters)
except ValueError, e:
raise NotFound("Invalid resource lookup data provided (mismatched type).")
def obj_get(self, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_get``.
Takes optional ``kwargs``, which are used to narrow the query to find
the instance.
"""
try:
return self.get_object_list(request).get(**kwargs)
except ValueError, e:
raise NotFound("Invalid resource lookup data provided (mismatched type).")
def obj_create(self, bundle, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_create``.
"""
bundle.obj = self._meta.object_class()
for key, value in kwargs.items():
setattr(bundle.obj, key, value)
bundle = self.full_hydrate(bundle)
bundle.obj.save()
# Now pick up the M2M bits.
m2m_bundle = self.hydrate_m2m(bundle)
self.save_m2m(m2m_bundle)
return bundle
def obj_update(self, bundle, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_update``.
"""
if not bundle.obj or not bundle.obj.pk:
# Attempt to hydrate data from kwargs before doing a lookup for the object.
# This step is needed so certain values (like datetime) will pass model validation.
try:
bundle.obj = self.get_object_list(request).model()
bundle.data.update(kwargs)
bundle = self.full_hydrate(bundle)
lookup_kwargs = kwargs.copy()
lookup_kwargs.update(dict(
(k, getattr(bundle.obj, k))
for k in kwargs.keys()
if getattr(bundle.obj, k) is not None))
except:
# if there is trouble hydrating the data, fall back to just
# using kwargs by itself (usually it only contains a "pk" key
# and this will work fine).
lookup_kwargs = kwargs
try:
bundle.obj = self.get_object_list(request).get(**lookup_kwargs)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.")
bundle = self.full_hydrate(bundle)
bundle.obj.save()
# Now pick up the M2M bits.
m2m_bundle = self.hydrate_m2m(bundle)
self.save_m2m(m2m_bundle)
return bundle
def obj_delete_list(self, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_delete_list``.
Takes optional ``kwargs``, which can be used to narrow the query.
"""
self.get_object_list(request).filter(**kwargs).delete()
def obj_delete(self, request=None, **kwargs):
"""
An ORM-specific implementation of ``obj_delete``.
Takes optional ``kwargs``, which are used to narrow the query to find
the instance.
"""
try:
obj = self.get_object_list(request).get(**kwargs)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.")
obj.delete()
def rollback(self, bundles):
"""
An ORM-specific implementation of ``rollback``.
Given the list of bundles, delete all models pertaining to those
bundles.
"""
for bundle in bundles:
if bundle.obj and getattr(bundle.obj, 'pk', None):
bundle.obj.delete()
def save_m2m(self, bundle):
"""
Handles the saving of related M2M data.
Due to the way Django works, the M2M data must be handled after the
main instance, which is why this isn't a part of the main ``save`` bits.
Currently slightly inefficient in that it will clear out the whole
relation and recreate the related data as needed.
"""
for field_name, field_object in self.fields.items():
if not getattr(field_object, 'is_m2m', False):
continue
if not field_object.attribute:
continue
# Get the manager.
related_mngr = getattr(bundle.obj, field_object.attribute)
if hasattr(related_mngr, 'clear'):
# Clear it out, just to be safe.
related_mngr.clear()
related_objs = []
for related_bundle in bundle.data[field_name]:
related_bundle.obj.save()
related_objs.append(related_bundle.obj)
related_mngr.add(*related_objs)
def get_resource_uri(self, bundle_or_obj):
"""
Handles generating a resource URI for a single resource.
Uses the model's ``pk`` in order to create the URI.
"""
kwargs = {
'resource_name': self._meta.resource_name,
}
if isinstance(bundle_or_obj, Bundle):
kwargs['pk'] = bundle_or_obj.obj.pk
else:
kwargs['pk'] = bundle_or_obj.id
if self._meta.api_name is not None:
kwargs['api_name'] = self._meta.api_name
return self._build_reverse_url("api_dispatch_detail", kwargs=kwargs)
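# Worked example (hypothetical values): with api_name='v1',
# resource_name='note' and an object whose pk is 1, get_resource_uri()
# reverses 'api_dispatch_detail' to something like '/api/v1/note/1/' (the
# exact prefix depends on how the Api is wired into the URLconf).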
class NamespacedModelResource(ModelResource):
"""
A ModelResource subclass that respects Django namespaces.
"""
def _build_reverse_url(self, name, args=None, kwargs=None):
namespaced = "%s:%s" % (self._meta.urlconf_namespace, name)
return reverse(namespaced, args=args, kwargs=kwargs)
# Based off of ``piston.utils.coerce_put_post``. Similarly BSD-licensed.
# And no, the irony is not lost on me.
def convert_post_to_put(request):
"""
Force Django to process the PUT.
"""
if request.method == "PUT":
if hasattr(request, '_post'):
del request._post
del request._files
try:
request.method = "POST"
request._load_post_and_files()
request.method = "PUT"
except AttributeError:
request.META['REQUEST_METHOD'] = 'POST'
request._load_post_and_files()
request.META['REQUEST_METHOD'] = 'PUT'
request.PUT = request.POST
return request<|fim▁end|> | |
<|file_name|>comp-1546.component.spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { Comp1546Component } from './comp-1546.component';
describe('Comp1546Component', () => {
let component: Comp1546Component;
let fixture: ComponentFixture<Comp1546Component>;
beforeEach(async(() => {
TestBed.configureTestingModule({<|fim▁hole|> }));
beforeEach(() => {
fixture = TestBed.createComponent(Comp1546Component);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | declarations: [ Comp1546Component ]
})
.compileComponents(); |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>#!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <[email protected]>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# Reads board information from boards/BOARDNAME.py - used by build_board_docs,
# build_pininfo, and build_platform_config
# ----------------------------------------------------------------------------------------
import subprocess;
import re;
import json;
import sys;
import os;
import importlib;
silent = os.getenv("SILENT");
if silent:
class Discarder(object):
def write(self, text):
pass # do nothing
# now discard everything coming out of stdout
sys.stdout = Discarder()
# http://stackoverflow.com/questions/4814970/subprocess-check-output-doesnt-seem-to-exist-python-2-6-5
if "check_output" not in dir( subprocess ):
def f(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd)
return output
subprocess.check_output = f
# Scans files for comments of the form /*JSON......*/
#
# Comments look like:
#
#/*JSON{ "type":"staticmethod|staticproperty|constructor|method|property|function|variable|class|library|idle|init|kill",
# // class = built-in class that does not require instantiation
# // library = built-in class that needs require('classname')
# // idle = function to run on idle regardless
# // init = function to run on initialisation
# // kill = function to run on deinitialisation
# "class" : "Double", "name" : "doubleToIntBits",
# "needs_parentName":true, // optional - if for a method, this makes the first 2 args parent+parentName (not just parent)
# "generate_full|generate|wrap" : "*(JsVarInt*)&x",
# "description" : " Convert the floating point value given into an integer representing the bits contained in it",
# "params" : [ [ "x" , "float|int|int32|bool|pin|JsVar|JsVarName|JsVarArray", "A floating point number"] ],
# // float - parses into a JsVarFloat which is passed to the function
# // int - parses into a JsVarInt which is passed to the function
# // int32 - parses into a 32 bit int
# // bool - parses into a boolean
# // pin - parses into a pin
# // JsVar - passes a JsVar* to the function (after skipping names)
# // JsVarArray - parses this AND ANY SUBSEQUENT ARGUMENTS into a JsVar of type JSV_ARRAY. THIS IS ALWAYS DEFINED, EVEN IF ZERO LENGTH. Currently it must be the only parameter
# "return" : ["int|float|JsVar", "The integer representation of x"],
# "return_object" : "ObjectName", // optional - used for tern's code analysis - so for example we can do hints for openFile(...).yyy
# "no_create_links":1 // optional - if this is set then hyperlinks are not created when this name is mentioned (good example = bit() )
# "not_real_object" : "anything", // optional - for classes, this means we shouldn't treat this as a built-in object, as internally it isn't stored in a JSV_OBJECT
# "prototype" : "Object", // optional - for classes, this is what their prototype is. It's particlarly helpful if not_real_object, because there is no prototype var in that case
# "check" : "jsvIsFoo(var)", // for classes - this is code that returns true if 'var' is of the given type
# "ifndef" : "SAVE_ON_FLASH", // if the given preprocessor macro is defined, don't implement this
# "ifdef" : "USE_LCD_FOO", // if the given preprocessor macro isn't defined, don't implement this
# "#if" : "A>2", // add a #if statement in the generated C file (ONLY if type==object)
#}*/
#
# description can be an array of strings as well as a simple string (in which case each element is separated by a newline),
# and adding ```sometext``` in the description surrounds it with HTML code tags
#
def get_jsondata(is_for_document, parseArgs = True, board = False):
scriptdir = os.path.dirname (os.path.realpath(__file__))
print("Script location "+scriptdir)
os.chdir(scriptdir+"/..")
jswraps = []
defines = []
if board and ("build" in board.info) and ("defines" in board.info["build"]):
for i in board.info["build"]["defines"]:
print("Got define from board: " + i);
defines.append(i)
if parseArgs and len(sys.argv)>1:
print("Using files from command line")
for i in range(1,len(sys.argv)):
arg = sys.argv[i]
if arg[0]=="-":
if arg[1]=="D":
defines.append(arg[2:])
elif arg[1]=="B":
board = importlib.import_module(arg[2:])
if "usart" in board.chip: defines.append("USART_COUNT="+str(board.chip["usart"]));
if "spi" in board.chip: defines.append("SPI_COUNT="+str(board.chip["spi"]));
if "i2c" in board.chip: defines.append("I2C_COUNT="+str(board.chip["i2c"]));
if "USB" in board.devices: defines.append("defined(USB)=True");
else: defines.append("defined(USB)=False");
elif arg[1]=="F":
"" # -Fxxx.yy in args is filename xxx.yy, which is mandatory for build_jswrapper.py
else:
print("Unknown command-line option")
exit(1)
else:
jswraps.append(arg)
else:
print("Scanning for jswrap.c files")
jswraps = subprocess.check_output(["find", ".", "-name", "jswrap*.c"]).strip().split("\n")
if len(defines)>1:
print("Got #DEFINES:")
for d in defines: print(" "+d)
jsondatas = []
for jswrap in jswraps:
# ignore anything from archives
if jswrap.startswith("./archives/"): continue
# now scan
print("Scanning "+jswrap)
code = open(jswrap, "r").read()
if is_for_document and "DO_NOT_INCLUDE_IN_DOCS" in code:
print("FOUND 'DO_NOT_INCLUDE_IN_DOCS' IN FILE "+jswrap)
continue
for comment in re.findall(r"/\*JSON.*?\*/", code, re.VERBOSE | re.MULTILINE | re.DOTALL):
charnumber = code.find(comment)
linenumber = 1+code.count("\n", 0, charnumber)
# Strip off /*JSON .. */ bit
comment = comment[6:-2]
endOfJson = comment.find("\n}")+2;
jsonstring = comment[0:endOfJson];
description = comment[endOfJson:].strip();
# print("Parsing "+jsonstring)
try:
jsondata = json.loads(jsonstring)
if len(description): jsondata["description"] = description;
jsondata["filename"] = jswrap
jsondata["include"] = jswrap[:-2]+".h"
jsondata["githublink"] = "https://github.com/espruino/Espruino/blob/master/"+jswrap+"#L"+str(linenumber)
dropped_prefix = "Dropped "
if "name" in jsondata: dropped_prefix += jsondata["name"]+" "
elif "class" in jsondata: dropped_prefix += jsondata["class"]+" "
drop = False
if not is_for_document:
if ("ifndef" in jsondata) and (jsondata["ifndef"] in defines):
print(dropped_prefix+" because of #ifndef "+jsondata["ifndef"])
drop = True
if ("ifdef" in jsondata) and not (jsondata["ifdef"] in defines):
print(dropped_prefix+" because of #ifdef "+jsondata["ifdef"])
drop = True
if ("#if" in jsondata):
expr = jsondata["#if"]
for defn in defines:
if defn.find('=')!=-1:
dname = defn[:defn.find('=')]
dkey = defn[defn.find('=')+1:]
expr = expr.replace(dname, dkey);
try:
r = eval(expr)
except:
print("WARNING: error evaluating '"+expr+"' - from '"+jsondata["#if"]+"'")
r = True
if not r:
print(dropped_prefix+" because of #if "+jsondata["#if"]+ " -> "+expr)
drop = True
if not drop:
jsondatas.append(jsondata)
except ValueError as e:
sys.stderr.write( "JSON PARSE FAILED for " + jsonstring + " - "+ str(e) + "\n")
exit(1)
except:
sys.stderr.write( "JSON PARSE FAILED for " + jsonstring + " - "+str(sys.exc_info()[0]) + "\n" )
exit(1)
print("Scanning finished.")
return jsondatas
# Takes the data from get_jsondata and restructures it in preparation for output as JS
#
# Results look like:
#{
# "Pin": {
# "desc": [
# "This is the built-in class for Pins, such as D0,D1,LED1, or BTN",
# "You can call the methods on Pin, or you can use Wiring-style functions such as digitalWrite"
# ],
# "methods": {
# "read": {
# "desc": "Returns the input state of the pin as a boolean",
# "params": [],
# "return": [
# "bool",
# "Whether pin is a logical 1 or 0"
# ]
# },
# "reset": {
# "desc": "Sets the output state of the pin to a 0",
# "params": [],
# "return": []
# },
# ...
# },
# "props": {},
# "staticmethods": {},
# "staticprops": {}
# },
# "print": {
# "desc": "Print the supplied string",
# "return": []
# },
# ...
#}
#
def get_struct_from_jsondata(jsondata):
context = {"modules": {}}
def checkClass(details):
cl = details["class"]
if not cl in context:
context[cl] = {"type": "class", "methods": {}, "props": {}, "staticmethods": {}, "staticprops": {}, "desc": details.get("description", "")}
return cl
def addConstructor(details):
cl = checkClass(details)
context[cl]["constructor"] = {"params": details.get("params", []), "return": details.get("return", []), "desc": details.get("description", "")}
def addMethod(details, type = ""):
cl = checkClass(details)
context[cl][type + "methods"][details["name"]] = {"params": details.get("params", []), "return": details.get("return", []), "desc": details.get("description", "")}
def addProp(details, type = ""):
cl = checkClass(details)
context[cl][type + "props"][details["name"]] = {"return": details.get("return", []), "desc": details.get("description", "")}
def addFunc(details):
context[details["name"]] = {"type": "function", "return": details.get("return", []), "desc": details.get("description", "")}
def addObj(details):
context[details["name"]] = {"type": "object", "instanceof": details.get("instanceof", ""), "desc": details.get("description", "")}
def addLib(details):
context["modules"][details["class"]] = {"desc": details.get("description", "")}
def addVar(details):
return
for data in jsondata:
type = data["type"]
if type=="class":
checkClass(data)
elif type=="constructor":
addConstructor(data)
elif type=="method":
addMethod(data)
elif type=="property":
addProp(data)
elif type=="staticmethod":
addMethod(data, "static")
elif type=="staticproperty":
addProp(data, "static")
elif type=="function":
addFunc(data)
elif type=="object":
addObj(data)
elif type=="library":
addLib(data)
elif type=="variable":
addVar(data)
else:
print(json.dumps(data, sort_keys=True, indent=2))
return context
def get_includes_from_jsondata(jsondatas):
includes = []
for jsondata in jsondatas:<|fim▁hole|> if not include in includes:
includes.append(include)
return includes
def is_property(jsondata):
return jsondata["type"]=="property" or jsondata["type"]=="staticproperty" or jsondata["type"]=="variable"
def is_function(jsondata):
return jsondata["type"]=="function" or jsondata["type"]=="method"
def get_prefix_name(jsondata):
if jsondata["type"]=="event": return "event"
if jsondata["type"]=="constructor": return "constructor"
if jsondata["type"]=="function": return "function"
if jsondata["type"]=="method": return "function"
if jsondata["type"]=="variable": return "variable"
if jsondata["type"]=="property": return "property"
return ""
def get_ifdef_description(d):
if d=="SAVE_ON_FLASH": return "devices with low flash memory"
if d=="STM32F1": return "STM32F1 devices (including Espruino Board)"
if d=="USE_LCD_SDL": return "Linux with SDL support compiled in"
if d=="USE_TLS": return "devices with TLS and SSL support (Espruino Pico only)"
if d=="RELEASE": return "release builds"
if d=="LINUX": return "Linux-based builds"
if d=="USE_USB_HID": return "devices that support USB HID (Espruino Pico)"
if d=="USE_AES": return "devices that support AES (Espruino Pico, Espruino Wifi or Linux)"
if d=="USE_CRYPTO": return "devices that support Crypto Functionality (Espruino Pico, Espruino Wifi, Linux or ESP8266)"
print("WARNING: Unknown ifdef '"+d+"' in common.get_ifdef_description")
return d
def get_script_dir():
return os.path.dirname(os.path.realpath(__file__))
def get_version():
# Warning: the same release label derivation is also in the Makefile
scriptdir = get_script_dir()
jsutils = scriptdir+"/../src/jsutils.h"
version = re.compile("^.*JS_VERSION.*\"(.*)\"");
alt_release = os.getenv("ALT_RELEASE")
if alt_release == None:
# Default release labeling based on commits since last release tag
latest_release = subprocess.check_output('git tag 2>nul | grep RELEASE_ | sort | tail -1', shell=True).strip()
commits_since_release = subprocess.check_output('git log --oneline 2>nul '+latest_release.decode("utf-8")+'..HEAD | wc -l', shell=True).decode("utf-8").strip()
else:
# Alternate release labeling with fork name (in ALT_RELEASE env var) plus branch
# name plus commit SHA
sha = subprocess.check_output('git rev-parse --short HEAD 2>nul', shell=True).strip()
branch = subprocess.check_output('git name-rev --name-only HEAD 2>nul', shell=True).strip()
commits_since_release = alt_release + '_' + branch + '_' + sha
for line in open(jsutils):
match = version.search(line);
if (match != None):
v = match.group(1);
if commits_since_release=="0": return v
else: return v+"."+commits_since_release
return "UNKNOWN"
def get_name_or_space(jsondata):
if "name" in jsondata: return jsondata["name"]
return ""
def get_bootloader_size(board):
if board.chip["family"]=="STM32F4": return 16*1024; # 16kb Pages, so we have no choice
return 10*1024;
# On normal chips this is 0x00000000
# On boards with bootloaders it's generally + 10240
# On F401, because of the setup of flash pages, we put the bootloader in the first 16k, then in the following 16+16+16k pages we put the saved code, and then finally we put the binary somewhere else
def get_espruino_binary_address(board):
if "place_text_section" in board.chip:
return board.chip["place_text_section"]
if "bootloader" in board.info and board.info["bootloader"]==1:
return get_bootloader_size(board);
return 0;
def get_board_binary_name(board):
return board.info["binary_name"].replace("%v", get_version());<|fim▁end|> | include = jsondata["include"] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Support for Waterfurnaces."""
from datetime import timedelta
import logging
import threading
import time
import voluptuous as vol
from waterfurnace.waterfurnace import WaterFurnace, WFCredentialError, WFException
from homeassistant.components import persistent_notification
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
EVENT_HOMEASSISTANT_STOP,
Platform,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, discovery
from homeassistant.helpers.typing import ConfigType
_LOGGER = logging.getLogger(__name__)
DOMAIN = "waterfurnace"
UPDATE_TOPIC = f"{DOMAIN}_update"
SCAN_INTERVAL = timedelta(seconds=10)
ERROR_INTERVAL = timedelta(seconds=300)
MAX_FAILS = 10
NOTIFICATION_ID = "waterfurnace_website_notification"
NOTIFICATION_TITLE = "WaterFurnace website status"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
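# A minimal configuration.yaml entry accepted by CONFIG_SCHEMA above
# (the credential values are hypothetical):
#
#   waterfurnace:
#     username: [email protected]
#     password: super-secret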
def setup(hass: HomeAssistant, base_config: ConfigType) -> bool:
"""Set up waterfurnace platform."""
config = base_config[DOMAIN]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
wfconn = WaterFurnace(username, password)
# NOTE(sdague): login will throw an exception if this doesn't
# work, which will abort the setup.
try:
wfconn.login()
except WFCredentialError:
_LOGGER.error("Invalid credentials for waterfurnace login")<|fim▁hole|>
discovery.load_platform(hass, Platform.SENSOR, DOMAIN, {}, config)
return True
class WaterFurnaceData(threading.Thread):
"""WaterFurnace Data collector.
This is implemented as a dedicated thread polling a websocket in a
tight loop. The websocket will shut itself from the server side if
a packet is not sent at least every 30 seconds. The reading is
cheap, the login is less cheap, so keeping this open and polling
on a very regular cadence is actually the least io intensive thing
to do.
"""
def __init__(self, hass, client):
"""Initialize the data object."""
super().__init__()
self.hass = hass
self.client = client
self.unit = self.client.gwid
self.data = None
self._shutdown = False
self._fails = 0
def _reconnect(self):
"""Reconnect on a failure."""
self._fails += 1
if self._fails > MAX_FAILS:
_LOGGER.error("Failed to refresh login credentials. Thread stopped")
persistent_notification.create(
self.hass,
"Error:<br/>Connection to waterfurnace website failed "
"the maximum number of times. Thread has stopped",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
self._shutdown = True
return
# sleep first before the reconnect attempt
_LOGGER.debug("Sleeping for fail # %s", self._fails)
time.sleep(self._fails * ERROR_INTERVAL.total_seconds())
try:
self.client.login()
self.data = self.client.read()
except WFException:
_LOGGER.exception("Failed to reconnect attempt %s", self._fails)
else:
_LOGGER.debug("Reconnected to furnace")
self._fails = 0
def run(self):
"""Thread run loop."""
@callback
def register():
"""Connect to hass for shutdown."""
def shutdown(event):
"""Shutdown the thread."""
_LOGGER.debug("Signaled to shutdown")
self._shutdown = True
self.join()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
self.hass.add_job(register)
# This does a tight loop in sending read calls to the
# websocket. That's a blocking call, which returns pretty
# quickly (1 second). It's important that we do this
# frequently though, because if we don't call the websocket at
# least every 30 seconds the server side closes the
# connection.
while True:
if self._shutdown:
_LOGGER.debug("Graceful shutdown")
return
try:
self.data = self.client.read()
except WFException:
# WFExceptions are things the WF library understands
# that pretty much can all be solved by logging in and
# back out again.
_LOGGER.exception("Failed to read data, attempting to recover")
self._reconnect()
else:
self.hass.helpers.dispatcher.dispatcher_send(UPDATE_TOPIC)
time.sleep(SCAN_INTERVAL.total_seconds())<|fim▁end|> | return False
hass.data[DOMAIN] = WaterFurnaceData(hass, wfconn)
hass.data[DOMAIN].start() |
<|file_name|>mpl_settings.py<|end_file_name|><|fim▁begin|>import logging
import matplotlib as mpl
from .tools import get_figure_size
_logger = logging.getLogger("mpl_settings")
orig_settings = {**mpl.rcParams}
latex_settings = {
# change this if using contex, xetex or lualatex
"pgf.texsystem": "pdflatex",
# use LaTeX to write all text
"text.usetex": True,
'font.family': 'lmodern',
# blank entries should cause plots to inherit fonts from the document
# "font.serif": [],
# "font.sans-serif": [],
# "font.monospace": [],
# "text.fontsize": 11,
"legend.fontsize": 9, # Make the legend/label fonts a little smaller
"xtick.labelsize": 9,
"ytick.labelsize": 9,
"figure.figsize": get_figure_size(1), # default fig size of 1\textwidth
"lines.linewidth": 0.5,
"axes.labelsize": 11, # LaTeX default is 10pt font.
"axes.linewidth": 0.5,
"axes.unicode_minus": False,
# subfig related
"figure.subplot.left": 0.1,<|fim▁hole|> "figure.subplot.top": 0.95,
# the amount of width reserved for blank space between subplots
"figure.subplot.wspace": 0.4,
# the amount of height reserved for white space between subplots
"figure.subplot.hspace": 0.4,
# Patches are graphical objects that fill 2D space, like polygons or circles
"patch.linewidth": 0.5,
}
def enable_latex():
_logger.info("LaTeX export enabled")
    mpl.rcParams['text.latex.preamble'].append(r'\usepackage{lmodern}')
    mpl.rcParams['text.latex.preamble'].append(r'\usepackage{siunitx}')
mpl.rcParams.update(latex_settings)
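# Illustrative usage (a sketch; the plotting calls are hypothetical):
#
#   enable_latex()
#   fig, ax = plt.subplots()
#   ax.plot(x, y)
#   fig.savefig("figure.pgf")
#   disable_latex()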
def disable_latex():
_logger.info("LaTeX export disabled")
mpl.rcParams = orig_settings<|fim▁end|> | "figure.subplot.right": 0.95,
"figure.subplot.bottom": 0.125, |
<|file_name|>toDoController.js<|end_file_name|><|fim▁begin|>angular.module('myApp.toDoController', []).
controller('ToDoCtrl', ['$scope', '$state', '$http', '$route', function ($scope, $state, $http, $route) {
$scope.$state = $state;
$scope.addToDo = function() {
// Just in case...
if ($scope.toDoList.length > 50) {
alert("Exceeded to-do limit!!!");
return;
}
if (!$scope.newToDoName || !$scope.newToDoDesc) {
alert("Please fill out both fields!");
return;
}
var newToDo = {
'todo': $scope.newToDoName,
'description': $scope.newToDoDesc
};
$http.post('/to-dos/add-to-do', newToDo).
success(function(data, status, headers, config) {
}).
then(function(answer){
$scope.newToDoName = '';
$scope.newToDoDesc = '';
getToDos();
});
};
$scope.editToDoId = '';
$scope.editToDo = function(toDo) {
// Set the ID of the todo being edited
$scope.editToDoId = toDo._id;
// Reset the to do list in case we were editing other to dos
getToDos();
};
$scope.confirmEditToDo = function() {
<|fim▁hole|> // Get the data from the ToDo of interest
var toDoToEdit = '';
for (var i=0; i<$scope.toDoList.length; i++) {
if ($scope.toDoList[i]._id === $scope.editToDoId){
toDoToEdit = {
"todo" : $scope.toDoList[i].todo,
"description" : $scope.toDoList[i].description
};
break;
}
}
if (!toDoToEdit) {
alert("Could not get edited to-do!");
return;
} else if (!toDoToEdit.todo || !toDoToEdit.description) {
alert("Please fill out both fields!");
return;
}
$http.put('/to-dos/update-to-do/' + $scope.editToDoId, toDoToEdit).
success(function(data, status, headers, config) {
$scope.editToDoId = '';
}).
then(function(answer){
getToDos();
});
};
$scope.deleteToDo = function(toDo) {
var confirmation = confirm('Are you sure you want to delete?');
if (!confirmation){
return;
}
$http.delete('/to-dos/delete-to-do/' + toDo._id).
success(function(data, status, headers, config) {
}).
then(function(answer){
getToDos();
});
};
$scope.cancelEditToDo = function() {
$scope.editToDoId = '';
getToDos();
};
var getToDos = function() {
$http.get('/to-dos/to-dos').success(function(data, status, headers, config) {
$scope.toDoList = data;
});
};
// Execute these functions on page load
angular.element(document).ready(function () {
getToDos();
});
}]);<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>__all__ = ["cc"]<|fim▁end|> | # -*- coding: utf-8 -*-
|
<|file_name|>classie.js<|end_file_name|><|fim▁begin|>/*!
* classie - class helper functions
* from bonzo https://github.com/ded/bonzo
*
* classie.has( elem, 'my-class' ) -> true/false
* classie.add( elem, 'my-new-class' )
* classie.remove( elem, 'my-unwanted-class' )
*/
<|fim▁hole|>'use strict';
// class helper functions from bonzo https://github.com/ded/bonzo
function classReg( className ) {
return new RegExp("(^|\\s+)" + className + "(\\s+|$)");
}
// classList support for class management
// altho to be fair, the api sucks because it won't accept multiple classes at once
var hasClass, addClass, removeClass;
if ( 'classList' in document.documentElement ) {
hasClass = function( elem, c ) {
return elem.classList.contains( c );
};
addClass = function( elem, c ) {
elem.classList.add( c );
};
removeClass = function( elem, c ) {
elem.classList.remove( c );
};
}
else {
hasClass = function( elem, c ) {
return classReg( c ).test( elem.className );
};
addClass = function( elem, c ) {
if ( !hasClass( elem, c ) ) {
elem.className = elem.className + ' ' + c;
}
};
removeClass = function( elem, c ) {
elem.className = elem.className.replace( classReg( c ), ' ' );
};
}
window.classie = {
// full names
hasClass: hasClass,
addClass: addClass,
removeClass: removeClass,
// short names
has: hasClass,
add: addClass,
remove: removeClass
};
})( window );<|fim▁end|> | /*jshint browser: true, strict: true, undef: true */
( function( window ) {
|
<|file_name|>item.go<|end_file_name|><|fim▁begin|>package goque
import (
"bytes"
"encoding/binary"
"encoding/gob"
)
// Item represents an entry in either a stack or queue.
type Item struct {
ID uint64
Key []byte
Value []byte
}
// ToString returns the item value as a string.
func (i *Item) ToString() string {
return string(i.Value)
}
// ToObject decodes the item value into the given value type using
// encoding/gob.
//
// The value passed to this method should be a pointer to a variable
// of the type you wish to decode into. The variable pointed to will
// hold the decoded object.
func (i *Item) ToObject(value interface{}) error {
buffer := bytes.NewBuffer(i.Value)
dec := gob.NewDecoder(buffer)
return dec.Decode(value)
}<|fim▁hole|> ID uint64
Priority uint8
Key []byte
Value []byte
}
// ToString returns the priority item value as a string.
func (pi *PriorityItem) ToString() string {
return string(pi.Value)
}
// ToObject decodes the item value into the given value type using
// encoding/gob.
//
// The value passed to this method should be a pointer to a variable
// of the type you wish to decode into. The variable pointed to will
// hold the decoded object.
func (pi *PriorityItem) ToObject(value interface{}) error {
buffer := bytes.NewBuffer(pi.Value)
dec := gob.NewDecoder(buffer)
return dec.Decode(value)
}
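// Illustrative use of ToObject on either item type (a sketch; the payload
// struct is hypothetical):
//
//	type payload struct{ Name string }
//	var p payload
//	if err := pi.ToObject(&p); err != nil {
//		// the stored value was not gob-encoded as a payload
//	}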
// idToKey converts and returns the given ID to a key.
func idToKey(id uint64) []byte {
key := make([]byte, 8)
binary.BigEndian.PutUint64(key, id)
return key
}
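// For example, idToKey(1) yields the big-endian key
// []byte{0, 0, 0, 0, 0, 0, 0, 1}; keyToID reverses the conversion. Big-endian
// encoding keeps byte-wise key order identical to numeric ID order.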
// keyToID converts and returns the given key to an ID.
func keyToID(key []byte) uint64 {
return binary.BigEndian.Uint64(key)
}<|fim▁end|> |
// PriorityItem represents an entry in a priority queue.
type PriorityItem struct { |
<|file_name|>WindowVisualViewport.ts<|end_file_name|><|fim▁begin|>import { Fun, Optional } from '@ephox/katamari';
import { PlatformDetection } from '@ephox/sand';
import { fromRawEvent } from '../../impl/FilteredEvent';
import { EventHandler, EventUnbinder } from '../events/Types';
import { SugarElement } from '../node/SugarElement';
import * as Scroll from './Scroll';
export interface Bounds {
readonly x: number;
readonly y: number;
readonly width: number;
readonly height: number;
readonly right: number;
readonly bottom: number;
}
const get = (_win?: Window): Optional<VisualViewport> => {
const win = _win === undefined ? window : _win;
if (PlatformDetection.detect().browser.isFirefox()) {
// TINY-7984: Firefox 91 is returning incorrect values for visualViewport.pageTop, so disable it for now
return Optional.none();
} else {
return Optional.from(win.visualViewport);
}
};
const bounds = (x: number, y: number, width: number, height: number): Bounds => ({
x,
y,
width,
height,
right: x + width,
bottom: y + height
});
const getBounds = (_win?: Window): Bounds => {
const win = _win === undefined ? window : _win;
const doc = win.document;
const scroll = Scroll.get(SugarElement.fromDom(doc));
return get(win).fold(
() => {
const html = win.document.documentElement;
// Don't use window.innerWidth/innerHeight here, as we don't want to include scrollbars
// since the right/bottom position is based on the edge of the scrollbar not the window
const width = html.clientWidth;
const height = html.clientHeight;
return bounds(scroll.left, scroll.top, width, height);
},
(visualViewport) =>
// iOS doesn't update the pageTop/pageLeft when element.scrollIntoView() is called, so we need to fallback to the
// scroll position which will always be less than the page top/left values when page top/left are accurate/correct.
bounds(Math.max(visualViewport.pageLeft, scroll.left), Math.max(visualViewport.pageTop, scroll.top), visualViewport.width, visualViewport.height)
);
};
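// Illustrative usage (a sketch): read the current viewport box of the active
// window, falling back to document metrics when visualViewport is unavailable.
//
//   const box = getBounds();
//   console.log(box.width, box.height, box.right, box.bottom);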
const bind = (name: string, callback: EventHandler, _win?: Window): EventUnbinder =>
get(_win).map((visualViewport) => {
const handler = (e: Event) => callback(fromRawEvent(e));<|fim▁hole|> return {
unbind: () => visualViewport.removeEventListener(name, handler)
};
}).getOrThunk(() => ({
unbind: Fun.noop
}));
export {
bind,
get,
getBounds
};<|fim▁end|> | visualViewport.addEventListener(name, handler);
|
<|file_name|>skill_queue.py<|end_file_name|><|fim▁begin|>import xml.etree.ElementTree as ElementTree
from model.dynamic.api import api
from model.dynamic.skills.skill_queue_item import SkillQueueItem
class SkillQueue(object):
def __init__(self, user_id, api_key, character_id):
api.fetch("char", "SkillQueue", user_id, api_key, character_id)
tree = ElementTree.parse("%s/SkillQueue.xml.aspx" % \
api.build_path("char", user_id, character_id))
root = tree.getroot()
rowset = root.find("result").find("rowset")
self.skill_queue = list()
if rowset.getchildren():
for element in rowset:
self.skill_queue.insert\
(int(element.get("queuePosition")),<|fim▁hole|> int(element.get("endSP")),
element.get("startTime"),
element.get("endTime")))<|fim▁end|> | SkillQueueItem(int(element.get("typeID")),
int(element.get("level")),
int(element.get("startSP")), |
<|file_name|>navigation_bars.py<|end_file_name|><|fim▁begin|>import basepage
class NavigationBars(basepage.BasePage):
def expand_project_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_admin_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_identity_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-identity"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_developer_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-developer"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
"""
Project > Compute > Resource
"""
def expand_project_compute(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-compute"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_compute_overview(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/"]').click()
def click_project_compute_instance(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/instances/"]').click()
def click_project_compute_volumes(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/volumes/"]').click()
def click_project_compute_images(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/images/"]').click()
def click_project_compute_access_and_security(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/access_and_security/"]').click()
"""
Project > Network > Resource
"""
def expand_project_network(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-network"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_network_network_topology(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/network_topology/"]').click()
def click_project_network_networks(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/networks/"]').click()
def click_project_network_routers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/routers/"]').click()
def click_project_network_loadbalancers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/ngloadbalancersv2/"]').click()
"""
Project > Orchestration > Resource
"""
def expand_project_orchestration(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-orchestration"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_orchestration_stacks(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/"]').click()
def click_project_orchestration_resource_types(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/resource_types/"]').click()
def click_project_orchestration_template_versions(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/template_versions/"]').click()
"""
Project > Object Store > Resource
"""
def expand_project_object_store(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-object_store"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_object_store_containers(self):
NavigationBars.expand_project_object_store(self)
self.driver.find_element_by_css_selector(
'a[href="/project/containers/"]').click()
"""
Admin > System > Resource
"""
def expand_admin_system(self):
NavigationBars.expand_admin_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_admin_system_overview(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/"]').click()
def click_admin_system_hypervisors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/hypervisors/"]').click()
def click_admin_system_host_aggregates(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/aggregates/"]').click()
def click_admin_system_instances(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/instances/"]').click()
def click_admin_system_volumes(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/volumes/"]').click()
def click_admin_system_flavors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/flavors/"]').click()
def click_admin_system_images(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/images/"]').click()
def click_admin_system_networks(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/networks/"]').click()
def click_admin_system_routers(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/routers/"]').click()
def click_admin_system_floating_ips(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/floating_ips/"]').click()
def click_admin_system_defaults(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/defaults/"]').click()
def click_admin_system_metadata_definitions(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/metadata_defs/"]').click()
def click_admin_system_info(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/info/"]').click()
"""
Identity > Resource
"""
def click_identity_projects(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/"]').click()
def click_identity_users(self):<|fim▁hole|>
def click_identity_groups(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/groups/"]').click()
def click_identity_roles(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/roles/"]').click()<|fim▁end|> | NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/users/"]').click() |
<|file_name|>emailer.py<|end_file_name|><|fim▁begin|># encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import smtplib
import sys
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from mo_logs import Log
from mo_dots import listwrap
from mo_dots import coalesce
from mo_kwargs import override
<|fim▁hole|>
class Emailer:
@override
def __init__(
self,
from_address,
to_address,
host,
username,
password,
subject="catchy title",
port=465,
use_ssl=1,
kwargs=None
):
self.settings = kwargs
self.server = None
def __enter__(self):
if self.server is not None:
Log.error("Got a problem")
if self.settings.use_ssl:
self.server = smtplib.SMTP_SSL(self.settings.host, self.settings.port)
else:
self.server = smtplib.SMTP(self.settings.host, self.settings.port)
if self.settings.username and self.settings.password:
self.server.login(self.settings.username, self.settings.password)
return self
def __exit__(self, type, value, traceback):
try:
self.server.quit()
except Exception as e:
Log.warning("Problem with smtp server quit(), ignoring problem", e)
self.server = None
def send_email(self,
from_address=None,
to_address=None,
subject=None,
text_data=None,
html_data=None
):
"""Sends an email.
from_addr is an email address; to_addrs is a list of email adresses.
Addresses can be plain (e.g. "[email protected]") or with real names
(e.g. "John Smith <[email protected]>").
text_data and html_data are both strings. You can specify one or both.
If you specify both, the email will be sent as a MIME multipart
alternative, i.e., the recipient will see the HTML content if his
viewer supports it; otherwise he'll see the text content.
"""
settings = self.settings
from_address = coalesce(from_address, settings["from"], settings.from_address)
to_address = listwrap(coalesce(to_address, settings.to_address, settings.to_addrs))
if not from_address or not to_address:
raise Exception("Both from_addr and to_addrs must be specified")
if not text_data and not html_data:
raise Exception("Must specify either text_data or html_data")
if not html_data:
msg = MIMEText(text_data)
elif not text_data:
msg = MIMEText(html_data, 'html')
else:
msg = MIMEMultipart('alternative')
msg.attach(MIMEText(text_data, 'plain'))
msg.attach(MIMEText(html_data, 'html'))
msg['Subject'] = coalesce(subject, settings.subject)
msg['From'] = from_address
msg['To'] = ', '.join(to_address)
if self.server:
# CALL AS PART OF A SMTP SESSION
self.server.sendmail(from_address, to_address, msg.as_string())
else:
# CALL AS STAND-ALONE
with self:
self.server.sendmail(from_address, to_address, msg.as_string())
if sys.hexversion < 0x020603f0:
# versions earlier than 2.6.3 have a bug in smtplib when sending over SSL:
# http://bugs.python.org/issue4066
# Unfortunately the stock version of Python in Snow Leopard is 2.6.1, so
# we patch it here to avoid having to install an updated Python version.
import socket
import ssl
def _get_socket_fixed(self, host, port, timeout):
if self.debuglevel > 0:
print>> sys.stderr, 'connect:', (host, port)
new_socket = socket.create_connection((host, port), timeout)
new_socket = ssl.wrap_socket(new_socket, self.keyfile, self.certfile)
self.file = smtplib.SSLFakeFile(new_socket)
return new_socket
smtplib.SMTP_SSL._get_socket = _get_socket_fixed<|fim▁end|> | |
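# Illustrative usage (a sketch; host and credentials are hypothetical):
#
#     emailer = Emailer(
#         from_address="[email protected]",
#         to_address=["[email protected]"],
#         host="smtp.example.com",
#         username="alerts",
#         password="secret",
#     )
#     with emailer:
#         emailer.send_email(subject="status", text_data="all good")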
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/**
* Test program.
*
* Copyright (c) 2015 Alex Jin ([email protected])
*/
#include "test/test.h"
#include <iostream>
namespace {
void help() {
std::cout << "Usage: testalgo -a" << std::endl;
std::cout << " testalgo <test-case-name>" << std::endl;
std::cout << std::endl;
std::cout << "Options:" << std::endl;
std::cout << " -a Run all test cases" << std::endl;
std::cout << std::endl;
std::cout << "Test Case Names:" << std::endl;
const auto names = test_manager_t::instance().get_names();
for (auto it = names.begin(); it != names.end(); ++it) {
std::cout << " " << *it << std::endl;
}
std::cout << std::endl;
}
} // unnamed namespace
int main(int argc, char* argv[]) {<|fim▁hole|> help();
exit(1);
}
bool result = false;
std::string arg1 = argv[1];
if (arg1 == "-a") {
result = test_manager_t::instance().run();
}
else {
result = test_manager_t::instance().run(arg1);
}
return result ? 0 : 2;
}<|fim▁end|> |
if (argc != 2) { |
<|file_name|>elapsed-time.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
google.charts.load('current', { packages: ['corechart', 'bar'] });
google.charts.setOnLoadCallback(drawBuildSummary);
function drawBuildSummary() {
var elem = $('#elapsed_time_chart');
var data = [['Elapsed Time', 'Count']];
var categories = [];
//Categorize elapsed time based on their range<|fim▁hole|> var eTime = ["0", "0", "0", "0", "0", "0"];
var i = 0;
        values.forEach(function (str) {
eTime[i] = str;
i = i + 1;
});
var digits = 1;
var lowerRange = '0 ~ '
for (var i in eTime) {
var upperRange = Math.pow(10, digits);
var strRange = lowerRange + upperRange + 's'
data.push([strRange, parseInt(eTime[i])]);
categories.push(strRange);
lowerRange = upperRange + ' ~ ';
digits = digits + 1;
}
var dataTable = google.visualization.arrayToDataTable(data);
var options = {
title: 'Elapsed Time',
curveType: 'function',
bar: { groupWidth: '75%' },
isStacked: true
};
var chart = new google.visualization.BarChart(elem.get(0));
chart.draw(dataTable, options);
google.visualization.events.addListener(chart, 'select', function () {
var selectedItem = chart.getSelection()[0];
if (selectedItem) {
var category = categories[selectedItem.row];
$('#category_form_kind').attr('value', category);
var form = $('#category_form').submit()
}
});
}
});<|fim▁end|> | var values = elem.attr('data-values').split(';'); |
<|file_name|>mainTest.py<|end_file_name|><|fim▁begin|>from netools import nextIpInPool, ping, aliveHost, hostsUnDone
def main():
aliveHosts = []
# pool IP
ipStart = "192.168.56.1"
ipEnd = "192.168.56.5"<|fim▁hole|> print"Scanning online Router on network..."
aliveHosts = aliveHost(ipStart, ipEnd)
print "online Router:"
print aliveHosts
# print"New Hosts Alive in Pools:",hostsUnDone(aliveHosts, aliveHost(ipStart,ipEnd))
if __name__ == '__main__':
main()<|fim▁end|> |
print"Pools: ", ipStart + " -> " + ipEnd
|
<|file_name|>analytics.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
The following examples are used to demonstrate how to get/record
analytics
The method signatures are:
Pushbots.get_analytics()
and
Pushbots.record_analytics(platform=None, data=None)
In which you must specify either platform or data.
"""
from pushbots import Pushbots
def example_get_analytics():
"""Get analytics by calling Pushbots.get_analytics()"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
code, message = pushbots.get_analytics()
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))
<|fim▁hole|>def example_record_analytics1():
"""Record analytics by passing platform directly to
Pushbots.record_analytics()
"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
# Define platform
platform = Pushbots.PLATFORM_IOS
code, message = pushbots.record_analytics(platform=platform)
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))
def example_record_analytics2():
"""Record analytics by passing data defined by you to
Pushbots.record_analytics()
"""
# Define app_id and secret
my_app_id = 'my_app_id'
my_secret = 'my_secret'
# Create a Pushbots instance
pushbots = Pushbots(app_id=my_app_id, secret=my_secret)
# Define data
data = {'platform': '0'} # '0' is Equivalent to Pushbots.PLATFORM_IOS
code, message = pushbots.record_analytics(data=data)
print('Returned code: {0}'.format(code))
print('Returned message: {0}'.format(message))<|fim▁end|> | |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/**
* Created by wQ on 2017/3/28.
*/
export * from './navbar/index';
export * from './footer/index';<|fim▁hole|><|fim▁end|> | export * from './sidenav/index'; |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | pub fn call() {} |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|>from sris import db, models
from messenger import Messenger
from service import SMSService
from datetime import datetime
class Manager:
"""
The middle-man of interaction between messenger and the SMS service.
"""
def __init__(self):
self.config = self.__load_config_file()
self.messenger = Messenger(self.config)
self.sms_service = SMSService()
def send_initial_greeting(self):
"""
Sends the initial SMS to new* patients at a pre-defined client time.
*New patients are those that have recently been added
to the clients database, which the service does not know.
Note: this is REQUIRED otherwise 'respond' & other services do not
function as database errors are thrown (understandably).
"""
current_time = str(datetime.now().time())[0:5]
# Send the message to new patients at the defined time.
if current_time == self.config['initialQuestion']['time']:
for number in self.__new_patients():
message = self.messenger.initial_message()
self.sms_service.send(number, message)
self.__create_new_patient(number)
self.__save_message(number, message, 'sent')
def respond(self, patient_response):
"""
Respond to new SMS when it is received via a POST request.
Args:
patient_message (dict): Contains the number, and message sent to
the service by a patient.
Returns:
response (XML): twilio formatted response.
"""
number = patient_response['number']
patient_message = patient_response['message']
# Generate a reflective summary based on the patient's response.
summary = self.messenger.summary(patient_message)
# TODO: Fix this with the system set time (i.e. UTC)
midnight = int(datetime.today().strftime("%s")) - 24*60*60
# The number of questions sent since last night.
_questions = db.session.query(models.Message).filter(
models.Message.mobile == number,
models.Message.status == 'sent',
models.Message.timestamp >= midnight).all()
all_sent = [item.message for item in _questions]
# The number of OEQ sent since last night.
num_oeq = len([i for i in self.config['questions'] if i in all_sent])
print 'Number OEQ sent since last night was: %s' % str(num_oeq)
response = None
# Do not send a response if initial daily conversation not started.
if num_oeq >= 1:
print 'The last sms sent was: %s' % all_sent[-1]
if all_sent[-1] in self.config['questions']:
print 'Last message sent was an OEQ. Sending a RS to patient.'
response = summary
else:
print 'Inside the else..'
if (num_oeq >= int(self.config['limit'])): # True: OEQ >= LIMIT
print 'Inside the else... in the if...'
if self.config['endQuestion'] not in all_sent:<|fim▁hole|> print 'Sending the conversation closer as limit met.'
response = self.config['endQuestion']
else:
print 'Message received was response to a RS. Sending OEQ.'
response = self.__select_question(number)
if response:
self.__save_message(number, patient_message, 'received')
self.__save_message(number, response, 'sent')
print 'The response (%s) has been saved to the database.' % response
return self.sms_service.reply(response)
else:
print 'No response was created.'
return '' # Prevents a 500 error code returned to POST.
def send_initial_question_to_all(self):
"""
Sends a question to all patients at a pre-defined day and time.
"""
known_patients = [item.mobile for item in
db.session.query(models.Patient.mobile).all()]
print "Checking to see if open-ended question should be sent."
isDay = datetime.now().strftime("%A") in self.config["daysToSend"]
isTime = str(datetime.now().time())[0:5] == self.config["sendTime"]
if isDay and isTime:
for number in known_patients:
message = self.__select_question(number)
print "OEQ (%s) to patient (%s)." % (message, number)
self.__save_message(number, message, 'sent')
self.sms_service.send(number, message)
def __select_question(self, number):
"""
        Select, at random, a client-defined open-ended question that has not
        been sent before. If all have been sent, then select any one at random.
Args:
number (str): The mobile number of the patient.
Returns:
str: An open-ended question to ask the patient.
"""
questions = self.config['questions']
sent_questions = [item.message for item in db.session.query(
models.Message).filter(models.Message.mobile == number).all()]
unsent_questions = list(set(questions).difference(sent_questions))
# TODO: Select most important question based on client's situation
import random
if unsent_questions:
print "Sending a message that HAS NOT been previously sent."
message = random.choice(unsent_questions)
else:
print "Sending a message that HAS been previously sent."
message = random.choice(questions)
return message
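    # Illustrative behaviour (not from the source): with
    # config['questions'] = ["Q1", "Q2"] and only "Q1" sent so far, this
    # returns "Q2"; once every question has been sent, it falls back to a
    # random choice from the full list.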
def __load_config_file(self):
"""
Stores the contents of the client-defined config file to a json object.
Returns:
json: A json object of the user-defined config file.
"""
import json
from flask import current_app
config_file = current_app.config['PROJECT_ROOT'] + '/sris/config/' + \
current_app.config['CLIENT_NAME'] + '.json'
with open(config_file) as json_settings:
return json.load(json_settings)
def __new_patients(self):
"""
Checks to see if any new patients have been added to the client DB.
Returns:
list: Mobile numbers the client knows & the service does not.
"""
# ALL numbers obtained from the client.
client_numbers = db.session.query(models.Patient.mobile).all()
# The numbers the service has to date.
service_numbers = db.session.query(models.User.mobile).all()
# The numbers the client has, but the service does not.
numbers = set(client_numbers).difference(service_numbers)
print 'There was %s new patients' % str(len(numbers))
# Convert SQLAlchemy KeyedTuple to ordinary list.
return [item.mobile for item in numbers]
def __create_new_patient(self, number):
"""
Adds the patient to the service database.
Args:
number (str): The mobile number of the patient.
"""
db.session.add(models.User(mobile=number))
db.session.commit()
def __save_message(self, number, message, status):
"""
Save the SMS message (sent or received) to the service database.
Args:
number (str): The mobile number of the patient.
message (str): The SMS message content.
status (str): The status of the message, e.g. 'sent' or 'received'.
"""
db.session.add(models.Message(mobile=number, message=message,
status=status))
db.session.commit()<|fim▁end|> | |
<|file_name|>test_sourcepoint_interval.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import glob
import os
import sys
sys.path.insert(0, os.pardir)
from testing_harness import TestHarness
from openmc.statepoint import StatePoint
class SourcepointTestHarness(TestHarness):
def _test_output_created(self):
"""Make sure statepoint.* files have been created."""
statepoint = glob.glob(os.path.join(os.getcwd(), 'statepoint.*'))
assert len(statepoint) == 5, '5 statepoint files must exist.'
assert statepoint[0].endswith('h5'), \
'Statepoint file is not a HDF5 file.'
def _get_results(self):
"""Digest info in the statepoint and return as a string."""
# Read the statepoint file.
statepoint = glob.glob(os.path.join(os.getcwd(), self._sp_name))[0]
sp = StatePoint(statepoint)<|fim▁hole|> # Add the source information.
xyz = sp.source[0]['xyz']
outstr += ' '.join(['{0:12.6E}'.format(x) for x in xyz])
outstr += "\n"
return outstr
if __name__ == '__main__':
harness = SourcepointTestHarness('statepoint.08.*')
harness.main()<|fim▁end|> |
# Get the eigenvalue information.
outstr = TestHarness._get_results(self)
|
<|file_name|>toolbutton.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! A ToolItem subclass that displays buttons
use std::ptr;
use gtk::{mod, ffi};
/// ToolButton — A ToolItem subclass that displays buttons
struct_Widget!(ToolButton)
impl ToolButton {
pub fn new<T: gtk::WidgetTrait>(icon_widget: Option<&T>, label: Option<&str>) -> Option<ToolButton> {
let tmp_pointer = unsafe {
match label {
Some(l) => {
l.with_c_str(|c_str| {
match icon_widget {
Some(i) => ffi::gtk_tool_button_new(i.get_widget(), c_str),
None => ffi::gtk_tool_button_new(ptr::null_mut(), c_str)
}
})
},
None => {
match icon_widget {
Some(i) => ffi::gtk_tool_button_new(i.get_widget(), ptr::null()),
None => ffi::gtk_tool_button_new(ptr::null_mut(), ptr::null())
}
}
}
};
check_pointer!(tmp_pointer, ToolButton)
}
pub fn new_from_stock(stock_id: &str) -> Option<ToolButton> {
let tmp_pointer = stock_id.with_c_str(|c_str| {
unsafe { ffi::gtk_tool_button_new_from_stock(c_str) }
});
check_pointer!(tmp_pointer, ToolButton)<|fim▁hole|>
impl_drop!(ToolButton)
impl_TraitWidget!(ToolButton)
impl gtk::ContainerTrait for ToolButton {}
impl gtk::BinTrait for ToolButton {}
impl gtk::ToolItemTrait for ToolButton {}
impl gtk::ToolButtonTrait for ToolButton {}
impl_widget_events!(ToolButton)
impl_button_events!(ToolButton)<|fim▁end|> | }
} |
<|file_name|>string_eval.py<|end_file_name|><|fim▁begin|>import sys
from pathlib import Path
from analysis.PluginBase import AnalysisBasePlugin
from plugins.mime_blacklists import MIME_BLACKLIST_COMPRESSED
try:
from ..internal.string_eval import eval_strings
except ImportError:
sys.path.append(str(Path(__file__).parent.parent / 'internal'))
from string_eval import eval_strings
class AnalysisPlugin(AnalysisBasePlugin):
'''<|fim▁hole|> Credits:
Original version by Paul Schiffer created during Firmware Bootcamp WT16/17 at University of Bonn
Refactored and improved by Fraunhofer FKIE
'''
NAME = 'string_evaluator'
DEPENDENCIES = ['printable_strings']
MIME_BLACKLIST = MIME_BLACKLIST_COMPRESSED
DESCRIPTION = 'Tries to sort strings based on usefulness'
VERSION = '0.2.1'
def __init__(self, plugin_administrator, config=None, recursive=True, timeout=300):
super().__init__(plugin_administrator, config=config, recursive=recursive, timeout=timeout, plugin_path=__file__)
def process_object(self, file_object):
list_of_printable_strings = file_object.processed_analysis['printable_strings']['strings']
file_object.processed_analysis[self.NAME] = dict(string_eval=eval_strings(list_of_printable_strings))
return file_object<|fim▁end|> | Sort strings by relevance
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
import setuptools
setuptools.setup(
name='vimdecrypt',
version='2.0',
author='Gertjan van Zwieten',<|fim▁hole|><|fim▁end|> | py_modules=['vimdecrypt'],
scripts=['vimdecrypt'],
test_suite='vimdecrypt',
) |
<|file_name|>backend.go<|end_file_name|><|fim▁begin|>package mysql
import (
"database/sql"
"fmt"
"strings"
"sync"
_ "github.com/go-sql-driver/mysql"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
)
func Factory(conf *logical.BackendConfig) (logical.Backend, error) {
return Backend().Setup(conf)
}
func Backend() *backend {
var b backend
b.Backend = &framework.Backend{
Help: strings.TrimSpace(backendHelp),
Paths: []*framework.Path{
pathConfigConnection(&b),
pathConfigLease(&b),
pathListRoles(&b),
pathRoles(&b),
pathRoleCreate(&b),
},
Secrets: []*framework.Secret{
secretCreds(&b),
},
}
return &b
}
<|fim▁hole|>type backend struct {
*framework.Backend
db *sql.DB
lock sync.Mutex
}
// DB returns the database connection.
func (b *backend) DB(s logical.Storage) (*sql.DB, error) {
b.lock.Lock()
defer b.lock.Unlock()
// If we already have a DB, we got it!
if b.db != nil {
if err := b.db.Ping(); err == nil {
return b.db, nil
}
// If the ping was unsuccessful, close it and ignore errors as we'll be
// reestablishing anyways
b.db.Close()
}
// Otherwise, attempt to make connection
entry, err := s.Get("config/connection")
if err != nil {
return nil, err
}
if entry == nil {
return nil,
fmt.Errorf("configure the DB connection with config/connection first")
}
var connConfig connectionConfig
if err := entry.DecodeJSON(&connConfig); err != nil {
return nil, err
}
conn := connConfig.ConnectionURL
if len(conn) == 0 {
conn = connConfig.ConnectionString
}
b.db, err = sql.Open("mysql", conn)
if err != nil {
return nil, err
}
// Set some connection pool settings. We don't need much of this,
// since the request rate shouldn't be high.
b.db.SetMaxOpenConns(connConfig.MaxOpenConnections)
b.db.SetMaxIdleConns(connConfig.MaxIdleConnections)
return b.db, nil
}
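// Note: the string handed to sql.Open above follows the go-sql-driver/mysql
// DSN format, e.g. "user:password@tcp(127.0.0.1:3306)/dbname" (the values
// here are illustrative).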
// ResetDB forces a connection next time DB() is called.
func (b *backend) ResetDB() {
b.lock.Lock()
defer b.lock.Unlock()
if b.db != nil {
b.db.Close()
}
b.db = nil
}
// Lease returns the lease information
func (b *backend) Lease(s logical.Storage) (*configLease, error) {
entry, err := s.Get("config/lease")
if err != nil {
return nil, err
}
if entry == nil {
return nil, nil
}
var result configLease
if err := entry.DecodeJSON(&result); err != nil {
return nil, err
}
return &result, nil
}
const backendHelp = `
The MySQL backend dynamically generates database users.
After mounting this backend, configure it using the endpoints within
the "config/" path.
`<|fim▁end|> | |
<|file_name|>unix.rs<|end_file_name|><|fim▁begin|>use libloading::Library;
use std::fs::ReadDir;
use types::Identifier;
/// Grabs all `Library` entries found within a given directory
pub(crate) struct LibraryIterator {
directory: ReadDir,
}
impl LibraryIterator {
pub(crate) fn new(directory: ReadDir) -> LibraryIterator { LibraryIterator { directory } }
}
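// Illustrative usage (a sketch; the plugin directory path is hypothetical):
//
//     if let Ok(dir) = std::fs::read_dir("/usr/lib/ion/namespaces") {
//         for (namespace, library) in LibraryIterator::new(dir) {
//             // `library` is a loaded shared object providing values
//             // for the `namespace` identifier
//         }
//     }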
impl Iterator for LibraryIterator {
// The `Identifier` is the name of the namespace for which values may be pulled.
// The `Library` is a handle to dynamic library loaded into memory.
type Item = (Identifier, Library);
fn next(&mut self) -> Option<(Identifier, Library)> {
while let Some(entry) = self.directory.next() {
let entry = if let Ok(entry) = entry { entry } else { continue };
let path = entry.path();
// An entry is a library if it is a file with a 'so' extension.<|fim▁hole|> // The identifier will be the file name of that file, without the extension.
let identifier = match path.file_stem().unwrap().to_str() {
Some(filename) => Identifier::from(filename),
None => {
eprintln!("ion: namespace plugin has invalid filename");
continue;
}
};
// This will attempt to load the library into memory.
match Library::new(path.as_os_str()) {
Ok(library) => return Some((identifier, library)),
Err(why) => {
eprintln!("ion: failed to load library: {:?}, {:?}", path, why);
continue;
}
}
} else {
continue;
}
}
None
}
}<|fim▁end|> | if path.is_file() && path.extension().map_or(false, |ext| ext == "so") { |
<|file_name|>AbstractTextStreamFormatGenerator.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.dmdl.directio.text;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import org.apache.hadoop.io.compress.CompressionCodec;
import com.asakusafw.dmdl.directio.util.CharsetUtil;
import com.asakusafw.dmdl.directio.util.ClassName;
import com.asakusafw.dmdl.directio.util.Value;
import com.asakusafw.dmdl.java.emitter.EmitContext;
import com.asakusafw.dmdl.java.util.JavaName;
import com.asakusafw.dmdl.model.BasicTypeKind;
import com.asakusafw.dmdl.semantics.ModelDeclaration;
import com.asakusafw.dmdl.semantics.PropertyDeclaration;
import com.asakusafw.dmdl.semantics.type.BasicType;
import com.asakusafw.dmdl.util.AttributeUtil;
import com.asakusafw.runtime.io.text.TextFormat;
import com.asakusafw.runtime.io.text.TextInput;
import com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormat;
import com.asakusafw.runtime.io.text.driver.FieldDefinition;
import com.asakusafw.runtime.io.text.driver.RecordDefinition;
import com.asakusafw.runtime.io.text.value.BooleanOptionFieldAdapter;<|fim▁hole|>import com.asakusafw.runtime.io.text.value.DoubleOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.FloatOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.IntOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.LongOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ShortOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.StringOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.ValueOptionFieldAdapter;
import com.asakusafw.runtime.io.util.InputSplitter;
import com.asakusafw.runtime.io.util.InputSplitters;
import com.asakusafw.runtime.value.StringOption;
import com.asakusafw.utils.java.model.syntax.ClassDeclaration;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.InfixOperator;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.SimpleName;
import com.asakusafw.utils.java.model.syntax.Statement;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
/**
* Generates {@link AbstractTextStreamFormat}.
* @since 0.9.1
*/
public abstract class AbstractTextStreamFormatGenerator {
private static final Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> ADAPTER_TYPES;
static {
Map<BasicTypeKind, Class<? extends ValueOptionFieldAdapter<?>>> map = new EnumMap<>(BasicTypeKind.class);
map.put(BasicTypeKind.BYTE, ByteOptionFieldAdapter.class);
map.put(BasicTypeKind.SHORT, ShortOptionFieldAdapter.class);
map.put(BasicTypeKind.INT, IntOptionFieldAdapter.class);
map.put(BasicTypeKind.LONG, LongOptionFieldAdapter.class);
map.put(BasicTypeKind.FLOAT, FloatOptionFieldAdapter.class);
map.put(BasicTypeKind.DOUBLE, DoubleOptionFieldAdapter.class);
map.put(BasicTypeKind.DECIMAL, DecimalOptionFieldAdapter.class);
map.put(BasicTypeKind.TEXT, StringOptionFieldAdapter.class);
map.put(BasicTypeKind.BOOLEAN, BooleanOptionFieldAdapter.class);
map.put(BasicTypeKind.DATE, DateOptionFieldAdapter.class);
map.put(BasicTypeKind.DATETIME, DateTimeOptionFieldAdapter.class);
ADAPTER_TYPES = map;
}
/**
* The current context.
*/
protected final EmitContext context;
/**
* The target model.
*/
protected final ModelDeclaration model;
private final ModelFactory f;
private final TextFormatSettings formatSettings;
private final TextFieldSettings fieldDefaultSettings;
/**
* Creates a new instance.
* @param context the current context
* @param model the target model
* @param formatSettings the text format settings
* @param fieldDefaultSettings the field default settings
*/
public AbstractTextStreamFormatGenerator(
EmitContext context, ModelDeclaration model,
TextFormatSettings formatSettings, TextFieldSettings fieldDefaultSettings) {
this.context = context;
this.model = model;
this.formatSettings = formatSettings;
this.fieldDefaultSettings = fieldDefaultSettings;
this.f = context.getModelFactory();
}
/**
* Emits an implementation of {@link AbstractTextStreamFormat} class as a Java compilation unit.
* @param description the format description
* @throws IOException if I/O error was occurred while emitting the compilation unit
*/
protected void emit(String description) throws IOException {
ClassDeclaration decl = f.newClassDeclaration(
new JavadocBuilder(f)
.inline(Messages.getString("AbstractTextStreamFormatGenerator.javadocClassOverview"), //$NON-NLS-1$
d -> d.text(description),
d -> d.linkType(context.resolve(model.getSymbol())))
.toJavadoc(),
new AttributeBuilder(f)
.Public()
.toAttributes(),
context.getTypeName(),
f.newParameterizedType(
context.resolve(AbstractTextStreamFormat.class),
context.resolve(model.getSymbol())),
Collections.emptyList(),
createMembers());
context.emit(decl);
}
private List<? extends TypeBodyDeclaration> createMembers() {
List<TypeBodyDeclaration> results = new ArrayList<>();
results.add(createGetSupportedType());
results.add(createCreateTextFormat());
results.addAll(createCreateRecordDefinition());
createGetInputSplitter().ifPresent(results::add);
createGetCompressionCodecClass().ifPresent(results::add);
createAfterInput().ifPresent(results::add);
createBeforeOutput().ifPresent(results::add);
return results;
}
private MethodDeclaration createGetSupportedType() {
return f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Public()
.toAttributes(),
f.newParameterizedType(
context.resolve(Class.class),
context.resolve(model.getSymbol())),
f.newSimpleName("getSupportedType"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, context.resolve(model.getSymbol()))
.dotClass()
.toReturnStatement()));
}
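    // Illustrative shape of the generated method (a sketch; "MyModel" stands
    // in for a hypothetical data model class):
    //
    //   @Override
    //   public Class<MyModel> getSupportedType() {
    //       return MyModel.class;
    //   }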
private MethodDeclaration createCreateTextFormat() {
return f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Public()
.toAttributes(),
context.resolve(TextFormat.class),
f.newSimpleName("createTextFormat"), //$NON-NLS-1$
Collections.emptyList(),
createGetTextFormatInternal());
}
/**
* Returns a body of {@link AbstractTextStreamFormat#getTextFormat()}.
* @return the body statements
*/
protected abstract List<Statement> createGetTextFormatInternal();
private List<MethodDeclaration> createCreateRecordDefinition() {
SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
statements.add(new TypeBuilder(f, context.resolve(RecordDefinition.class))
.method("builder", f.newClassLiteral(context.resolve(model.getSymbol()))) //$NON-NLS-1$
.toLocalVariableDeclaration(
f.newParameterizedType(
context.resolve(RecordDefinition.Builder.class),
context.resolve(model.getSymbol())),
builder));
List<MethodDeclaration> fields = buildRecordDefinition(statements, builder);
statements.add(new ExpressionBuilder(f, builder)
.method("build") //$NON-NLS-1$
.toReturnStatement());
List<MethodDeclaration> results = new ArrayList<>();
results.add(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
f.newParameterizedType(
context.resolve(RecordDefinition.class),
context.resolve(model.getSymbol())),
f.newSimpleName("createRecordDefinition"), //$NON-NLS-1$
Collections.emptyList(),
statements));
results.addAll(fields);
return results;
}
private List<MethodDeclaration> buildRecordDefinition(List<Statement> statements, SimpleName builder) {
formatSettings.getHeaderType().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withHeaderType", resolve(v)) //$NON-NLS-1$
.toStatement()));
formatSettings.getLessInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnLessInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
formatSettings.getMoreInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMoreInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withTrimInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
fieldDefaultSettings.getUnmappableOutputAction().ifPresent(v -> statements.add(
new ExpressionBuilder(f, builder)
.method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
.toStatement()));
List<MethodDeclaration> fields = new ArrayList<>();
for (PropertyDeclaration property : model.getDeclaredProperties()) {
if (TextFieldTrait.getKind(property) != TextFieldTrait.Kind.VALUE) {
continue;
}
MethodDeclaration method = createGetFieldDefinition(property);
fields.add(method);
statements.add(new ExpressionBuilder(f, builder)
.method("withField", //$NON-NLS-1$
new TypeBuilder(f, context.resolve(model.getSymbol()))
.methodReference(context.getOptionGetterName(property))
.toExpression(),
new ExpressionBuilder(f, f.newThis())
.method(method.getName())
.toExpression())
.toStatement());
}
return fields;
}
private MethodDeclaration createGetFieldDefinition(PropertyDeclaration property) {
SimpleName builder = f.newSimpleName("builder"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
statements.add(new TypeBuilder(f, context.resolve(FieldDefinition.class))
.method("builder", //$NON-NLS-1$
resolve(TextFieldTrait.getName(property)),
buildFieldAdapter(property))
.toLocalVariableDeclaration(
f.newParameterizedType(
context.resolve(FieldDefinition.Builder.class),
context.getFieldType(property)),
builder));
TextFieldSettings settings = TextFieldTrait.getSettings(property);
settings.getTrimInputWhitespaces().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withTrimInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getSkipEmptyInput().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withSkipEmptyInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getMalformedInputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnMalformedInput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getUnmappableOutputAction().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOnUnmappableOutput", resolve(v)) //$NON-NLS-1$
.toStatement()));
settings.getQuoteStyle().ifPresent(v -> statements.add(new ExpressionBuilder(f, builder)
.method("withOutputOption", resolve(v)) //$NON-NLS-1$
.toStatement()));
statements.add(new ExpressionBuilder(f, builder)
.method("build") //$NON-NLS-1$
.toReturnStatement());
JavaName name = JavaName.of(property.getName());
name.addFirst("get"); //$NON-NLS-1$
name.addLast("field"); //$NON-NLS-1$
name.addLast("definition"); //$NON-NLS-1$
return f.newMethodDeclaration(
new JavadocBuilder(f)
.inline(Messages.getString("AbstractTextStreamFormatGenerator.javafocGetFieldDefinitionOverview"), //$NON-NLS-1$
d -> d.linkMethod(
context.resolve(model.getSymbol()),
context.getOptionGetterName(property)))
.returns()
.text(Messages.getString("AbstractTextStreamFormatGenerator.javadocGetFieldDefinitionReturn")) //$NON-NLS-1$
.toJavadoc(),
new AttributeBuilder(f)
.Protected()
.toAttributes(),
f.newParameterizedType(
context.resolve(FieldDefinition.class),
context.getFieldType(property)),
f.newSimpleName(name.toMemberName()),
Collections.emptyList(),
statements);
}
private Expression buildFieldAdapter(PropertyDeclaration property) {
TextFieldSettings settings = TextFieldTrait.getSettings(property);
Value<ClassName> adapterClass = setting(settings, TextFieldSettings::getAdapterClass);
if (adapterClass.isPresent()) {
return new TypeBuilder(f, resolve(adapterClass.getEntity()))
.constructorReference()
.toExpression();
}
BasicTypeKind kind = ((BasicType) property.getType()).getKind();
Class<? extends ValueOptionFieldAdapter<?>> basicAdapterClass = ADAPTER_TYPES.get(kind);
assert basicAdapterClass != null;
ExpressionBuilder builder = new TypeBuilder(f, context.resolve(basicAdapterClass)).method("builder"); //$NON-NLS-1$
setting(settings, TextFieldSettings::getNullFormat).ifPresent(v -> builder
.method("withNullFormat", resolve(v))); //$NON-NLS-1$
switch (kind) {
case BOOLEAN:
setting(settings, TextFieldSettings::getTrueFormat).ifPresent(v -> builder
.method("withTrueFormat", resolve(v))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getFalseFormat).ifPresent(v -> builder
.method("withFalseFormat", resolve(v))); //$NON-NLS-1$
break;
case DATE:
setting(settings, TextFieldSettings::getDateFormat).ifPresent(v -> builder
.method("withDateFormat", resolve(v.toString()))); //$NON-NLS-1$
break;
case DATETIME:
setting(settings, TextFieldSettings::getDateTimeFormat).ifPresent(v -> builder
.method("withDateTimeFormat", resolve(v.toString()))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getTimeZone).ifPresent(v -> builder
.method("withTimeZone", resolve(v.getId()))); //$NON-NLS-1$
break;
case DECIMAL:
setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
.method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
setting(settings, TextFieldSettings::getDecimalOutputStyle).ifPresent(v -> builder
.method("withOutputStyle", resolve(v))); //$NON-NLS-1$
break;
case BYTE:
case INT:
case SHORT:
case LONG:
case FLOAT:
case DOUBLE:
setting(settings, TextFieldSettings::getNumberFormat).ifPresent(v -> builder
.method("withNumberFormat", resolve(v.toString()))); //$NON-NLS-1$
break;
case TEXT:
// no special members
break;
default:
throw new AssertionError(kind);
}
return builder.method("lazy").toExpression(); //$NON-NLS-1$
}
private <T> Value<T> setting(TextFieldSettings settings, Function<TextFieldSettings, Value<T>> getter) {
return getter.apply(settings).orDefault(getter.apply(fieldDefaultSettings));
}
private Optional<MethodDeclaration> createGetInputSplitter() {
if (isSplittable()) {
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
context.resolve(InputSplitter.class),
f.newSimpleName("getInputSplitter"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, context.resolve(InputSplitters.class))
.method("byLineFeed") //$NON-NLS-1$
.toReturnStatement())));
} else {
return Optional.empty();
}
}
private boolean isSplittable() {
if (formatSettings.getCharset().isPresent()) {
if (!CharsetUtil.isAsciiCompatible(formatSettings.getCharset().getEntity())) {
return false;
}
}
if (formatSettings.getCompressionType().isPresent()) {
return false;
}
if (model.getDeclaredProperties().stream()
.map(TextFieldTrait::getKind)
.anyMatch(Predicate.isEqual(TextFieldTrait.Kind.LINE_NUMBER)
.or(Predicate.isEqual(TextFieldTrait.Kind.RECORD_NUMBER)))) {
return false;
}
return isSplittableInternal();
}
/**
* Returns whether or not the input is splittable.
* @return {@code true} if it is splittable, otherwise {@code false}
*/
protected abstract boolean isSplittableInternal();
private Optional<MethodDeclaration> createGetCompressionCodecClass() {
if (formatSettings.getCompressionType().isPresent()) {
ClassName codec = formatSettings.getCompressionType().getEntity();
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
new TypeBuilder(f, context.resolve(Class.class))
.parameterize(f.newWildcardExtends(context.resolve(CompressionCodec.class)))
.toType(),
f.newSimpleName("getCompressionCodecClass"), //$NON-NLS-1$
Collections.emptyList(),
Arrays.asList(new TypeBuilder(f, resolve(codec))
.dotClass()
.toReturnStatement())));
} else {
return Optional.empty();
}
}
private Optional<MethodDeclaration> createAfterInput() {
SimpleName object = f.newSimpleName("object"); //$NON-NLS-1$
SimpleName path = f.newSimpleName("path"); //$NON-NLS-1$
SimpleName input = f.newSimpleName("input"); //$NON-NLS-1$
List<Statement> statements = new ArrayList<>();
for (PropertyDeclaration property : model.getDeclaredProperties()) {
switch (TextFieldTrait.getKind(property)) {
case VALUE:
break; // does nothing
case IGNORE:
statements.add(new ExpressionBuilder(f, object)
.method(context.getOptionSetterName(property), Models.toNullLiteral(f))
.toStatement());
break;
case FILE_NAME:
statements.add(new ExpressionBuilder(f, object)
.method(context.getOptionSetterName(property), path)
.toStatement());
break;
case LINE_NUMBER:
statements.add(new ExpressionBuilder(f, object)
.method(context.getValueSetterName(property),
adjustLong(property, new ExpressionBuilder(f, input)
.method("getLineNumber") //$NON-NLS-1$
.apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
.toStatement());
break;
case RECORD_NUMBER:
statements.add(new ExpressionBuilder(f, object)
.method(context.getValueSetterName(property),
adjustLong(property, new ExpressionBuilder(f, input)
.method("getRecordIndex") //$NON-NLS-1$
.apply(InfixOperator.PLUS, Models.toLiteral(f, 1L))))
.toStatement());
break;
default:
throw new AssertionError(TextFieldTrait.getKind(property));
}
}
if (statements.isEmpty()) {
return Optional.empty();
} else {
return Optional.of(f.newMethodDeclaration(
null,
new AttributeBuilder(f)
.annotation(context.resolve(Override.class))
.Protected()
.toAttributes(),
context.resolve(void.class),
f.newSimpleName("afterInput"), //$NON-NLS-1$
Arrays.asList(
f.newFormalParameterDeclaration(context.resolve(model.getSymbol()), object),
f.newFormalParameterDeclaration(context.resolve(StringOption.class), path),
f.newFormalParameterDeclaration(
f.newParameterizedType(
context.resolve(TextInput.class),
context.resolve(model.getSymbol())),
input)),
statements));
}
}
private Expression adjustLong(PropertyDeclaration property, ExpressionBuilder builder) {
if (AttributeUtil.hasFieldType(property, BasicTypeKind.LONG)) {
return builder.toExpression();
} else if (AttributeUtil.hasFieldType(property, BasicTypeKind.INT)) {
return builder.castTo(context.resolve(int.class)).toExpression();
} else {
throw new AssertionError(property);
}
}
private Optional<MethodDeclaration> createBeforeOutput() {
return Optional.empty();
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(boolean value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(char value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(String value) {
return Models.toLiteral(f, value);
}
/**
* Resolves a value.
* @param value the value
* @return the resolved expression
*/
protected Expression resolve(Enum<?> value) {
return new TypeBuilder(f, context.resolve(value.getDeclaringClass()))
.field(value.name())
.toExpression();
}
/**
* Resolves a value.
* @param type the value
* @return the resolved expression
*/
protected Type resolve(ClassName type) {
return context.resolve(Models.toName(f, type.toString()));
}
}<|fim▁end|> | import com.asakusafw.runtime.io.text.value.ByteOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DateTimeOptionFieldAdapter;
import com.asakusafw.runtime.io.text.value.DecimalOptionFieldAdapter; |
<|file_name|>ajaxify.js<|end_file_name|><|fim▁begin|>/*
* ajaxify.js
* Ajaxify - The Ajax Plugin
* https://4nf.org/
*
* Copyright Arvind Gupta; MIT Licensed
*/
/* INTERFACE: See also https://4nf.org/interface/
Simplest plugin call:
let ajaxify = new Ajaxify({options});
Ajaxifies the whole site, dynamically replacing the elements specified in "elements" across pages
*/
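// Illustrative call with options (a sketch - "#content" is a placeholder selector;
// the option names are the defaults defined in "$.s" below):
// let ajaxify = new Ajaxify({ elements: "#content", verbosity: 10, scrolltop: true });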
// The main plugin - Ajaxify
// Is passed the global options
// Checks for necessary pre-conditions - otherwise gracefully degrades
// Initialises sub-plugins
// Calls Pronto
class Ajaxify { constructor(options) {
String.prototype.iO = function(s) { return this.toString().indexOf(s) + 1; }; //Intuitively better understandable shorthand for String.indexOf() - String.iO()
let $ = this;
//Options default values
$.s = {
// basic config parameters
elements: "body", //selector for element IDs that are going to be swapped (e.g. "#el1, #el2, #el3")
selector : "a:not(.no-ajaxy)", //selector for links to trigger swapping - not elements to be swapped - i.e. a selection of links
forms : "form:not(.no-ajaxy)", // selector for ajaxifying forms - set to "false" to disable
canonical : false, // Fetch current URL from "canonical" link if given, updating the History API. In case of a re-direct...
refresh : false, // Refresh the page even if link clicked is current page
// visual effects settings
requestDelay : 0, //in msec - Delay of Pronto request
scrolltop : "s", // Smart scroll, true = always scroll to top of page, false = no scroll
bodyClasses : false, // Copy body classes from target page, set to "true" to enable
// script and style handling settings, prefetch
deltas : true, // true = deltas loaded, false = all scripts loaded
asyncdef : true, // default async value for dynamically inserted external scripts, false = synchronous / true = asynchronous
alwayshints : false, // strings, - separated by ", " - if matched in any external script URL - these are always loaded on every page load
inline : true, // true = all inline scripts loaded, false = only specific inline scripts are loaded
inlinesync : true, // synchronise inline scripts loading by adding a central tiny delay to all of them
inlinehints : false, // strings - separated by ", " - if matched in any inline scripts - only these are executed - set "inline" to false beforehand
inlineskip : "adsbygoogle", // strings - separated by ", " - if matched in any inline scripts - these are NOT executed - set "inline" to true beforehand
inlineappend : true, // append scripts to the main content element, instead of "eval"-ing them
style : true, // true = all style tags in the head loaded, false = style tags on target page ignored
prefetchoff : false, // Plugin pre-fetches pages on hover intent - true = switch off completely // strings - separated by ", " - hints to exclude from pre-fetching
// debugging & advanced settings
verbosity : 0, //Debugging level to console: default off. Can be set to 10 or higher (when logging is enabled)
memoryoff : false, // strings - separated by ", " - if matched in any URLs - only these are NOT executed - set to "true" to disable memory completely
cb : 0, // callback handler on completion of each Ajax request - default 0
pluginon : true, // Plugin set "on" or "off" (==false) manually
passCount: false // Show number of pass for debugging
};
$.pass = 0; $.currentURL = "";
$.parse = (s, pl) => (pl = document.createElement('div'), pl.insertAdjacentHTML('afterbegin', s), pl.firstElementChild); // HTML parser
$.trigger = (t, e) => { let ev = document.createEvent('HTMLEvents'); ev.initEvent("pronto." + t, true, false); ev.data = e ? e : $.Rq("e"); window.dispatchEvent(ev); }
$.internal = (url) => { if (!url) return false; if (typeof(url) === "object") url = url.href; if (url==="") return true; return url.substring(0,rootUrl.length) === rootUrl || !url.iO(":"); }
//Module global variables
let rootUrl = location.origin, api = window.history && window.history.pushState && window.history.replaceState,
//Regexes for escaping fetched HTML of a whole page - best of Baluptons Ajaxify
//Makes it possible to pre-fetch an entire page
docType = /<\!DOCTYPE[^>]*>/i,
tagso = /<(html|head|link)([\s\>])/gi,
tagsod = /<(body)([\s\>])/gi,
tagsc = /<\/(html|head|body|link)\>/gi,
//Helper strings
div12 = '<div class="ajy-$1"$2',
divid12 = '<div id="ajy-$1"$2',
linki = '<link rel="stylesheet" type="text/css" href="*" />',
linkr = 'link[href*="!"]',
scrr = 'script[src*="!"]',
inlineclass = "ajy-inline";
//Global helpers
let doc=document, bdy,
qa=(s,o=doc)=>o.querySelectorAll(s),
qs=(s,o=doc)=>o.querySelector(s);
function _copyAttributes(el, $S, flush) { //copy all attributes of element generically
if (flush) [...el.attributes].forEach(e => el.removeAttribute(e.name)); //delete all old attributes
[...$S.attributes].forEach(e => el.setAttribute(e.nodeName, e.nodeValue)); //low-level insertion
}
function _on(eventName, elementSelector, handler, el = document) { //e.currentTarget is document when the handler is called
el.addEventListener(eventName, function(e) {
// loop parent nodes from the target to the delegation node
for (var target = e.target; target && target != this; target = target.parentNode) {
if (target.matches(elementSelector)) {
handler(target, e);
break;
}
}
}, !!eventName.iO('mo'));
}
function Hints(hints) {
if (!(this instanceof Hints)) return new Hints(hints); //automatically create an instance
this.myHints = (typeof hints === 'string' && hints.length > 0) ? hints.split(", ") : false; //hints are passed as a comma separated string
}
Hints.prototype.find = function (t) {return (!t || !this.myHints) ? false : this.myHints.some(h => t.iO(h))}; //iterate through hints within passed text (t)
function lg(m){ $.s.verbosity && console && console.log(m); }
// The stateful Cache class
// Usage - parameter "o" values:
// none - returns currently cached page
// <URL> - returns page with specified URL
// <object> - saves the page in cache
// f - flushes the cache
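// e.g. $.cache(href) -> cached page or false; $.cache(pageObject) -> stores it; $.cache("f") -> flushes; $.cache() -> current page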
class Cache { constructor() {
let d = false;
this.a = function (o) {
if (!o) return d;
if (typeof o === "string") { //URL or "f" passed
if(o === "f") { //"f" passed -> flush
$.pages("f"); //delegate flush to $.pages
lg("Cache flushed");
} else d = $.pages($.memory(o)); //URL passed -> look up page in memory
return d; //return cached page
}
if (typeof o === "object") {
d = o;
return d;
}
};
}}
// The stateful Memory class
// Usage: $.memory(<URL>) - returns the same URL if not turned off internally
class Memory { constructor(options) {
this.a = function (h) {
if (!h || $.s.memoryoff === true) return false;
if ($.s.memoryoff === false) return h;
return Hints($.s.memoryoff).find(h) ? false : h;
};
}}
// The stateful Pages class
// Usage - parameter "h" values:
// <URL> - returns page with specified URL from internal array
// <object> - saves the passed page in internal array
// false - returns false
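// e.g. $.pages([href, page]) stores/overwrites a page; $.pages(href) returns it; $.pages("f") empties the store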
class Pages { constructor() {
let d = [], i = -1;
this.a = function (h) {
if (typeof h === "string") {
if(h === "f") d = [];
else if((i=_iPage(h)) !== -1) return d[i][1];
}
if (typeof h === "object") {
if((i=_iPage(h)) === -1) d.push(h);
else d[i] = h;
}
if (typeof h === "boolean") return false;
};
let _iPage = h => d.findIndex(e => e[0] == h)
}}
// The GetPage class
// First parameter (o) is a switch:
// empty - returns cache
// <URL> - loads HTML via Ajax, second parameter "p" must be callback
// + - pre-fetches page, second parameter "p" must be URL, third parameter "p2" must be callback
// - - loads page into DOM and handles scripts, second parameter "p" must hold selection to load
// a - aborts the active fetch, if any
// s - returns the fetch status: "0"/"1" (idle/active) plus "c"/"p" (click/pre-fetch)
// x - returns response
// otherwise - returns selection of current page to client
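// e.g. $.fn("+", href, cb) pre-fetches a page; $.fn(href, cb) loads it; $.fn("title") returns the cached title element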
class GetPage { constructor() {
let rsp = 0, cb = 0, plus = 0, rt = "", ct = 0, rc = 0, ac = 0;
this.a = function (o, p, p2) {
if (!o) return $.cache();
if (o.iO("/")) {
cb = p;
if(plus == o) return;
return _lPage(o);
}
if (o === "+") {
plus = p;
cb = p2;
return _lPage(p, true);
}
if (o === "a") { if (rc > 0) {_cl(); ac.abort();} return; }
if (o === "s") return ((rc) ? 1 : 0) + rt;
if (o === "-") return _lSel(p);
if (o === "x") return rsp;
if (!$.cache()) return;
if (o === "body") return qs("#ajy-" + o, $.cache());
if (o === "script") return qa(o, $.cache());
return qs((o === "title") ? o : ".ajy-" + o, $.cache());
};
let _lSel = $t => (
$.pass++,
_lEls($t),
qa("body > script").forEach(e => (e.classList.contains(inlineclass)) ? e.parentNode.removeChild(e) : false),
$.scripts(true),
$.scripts("s"),
$.scripts("c")
),
_lPage = (h, pre) => {
if (h.iO("#")) h = h.split("#")[0];
if ($.Rq("is") || !$.cache(h)) return _lAjax(h, pre);
plus = 0;
if (cb) return cb();
},
_ld = ($t, $h) => {
if(!$h) {
lg("Inserting placeholder for ID: " + $t.getAttribute("id"));
var tagN = $t.tagName.toLowerCase();
$t.parentNode.replaceChild($.parse("<" + tagN + " id='" + $t.getAttribute("id") + "'></" + tagN + ">"), $t);
return;
}
var $c = $h.cloneNode(true); // clone element node (true = deep clone)
qa("script", $c).forEach(e => e.parentNode.removeChild(e));
_copyAttributes($t, $c, true);
$t.innerHTML = $c.innerHTML;
},
_lEls = $t =>
$.cache() && !_isBody($t) && $t.forEach(function($el) {
_ld($el, qs("#" + $el.getAttribute("id"), $.cache()));
}),
_isBody = $t => $t[0].tagName.toLowerCase() == "body" && (_ld(bdy, qs("#ajy-body", $.cache())), 1),
_lAjax = (hin, pre) => {
var ispost = $.Rq("is");
if (pre) rt="p"; else rt="c";
ac = new AbortController(); // set abort controller
rc++; // set active request counter
fetch(hin, {
method: ((ispost) ? "POST" : "GET"),
cache: "default",
mode: "same-origin",
headers: {"X-Requested-With": "XMLHttpRequest"},
body: (ispost) ? $.Rq("d") : null,
signal: ac.signal
}).then(r => {
if (!r.ok || !_isHtml(r)) {
if (!pre) {location.href = hin; _cl(); $.pronto(0, $.currentURL);}
return;
}
rsp = r; // store response
return r.text();
}).then(r => {
_cl(1); // clear only plus variable
if (!r) return; // ensure data
rsp.responseText = r; // store response text
return _cache(hin, r);
}).catch(err => {
if(err.name === "AbortError") return;
try {
$.trigger("error", err);
lg("Response text : " + err.message);
return _cache(hin, err.message, err);
} catch (e) {}
}).finally(() => rc--); // reset active request counter
},
_cl = c => (plus = 0, (!c) ? cb = 0 : 0), // clear plus AND/OR callback
_cache = (href, h, err) => $.cache($.parse(_parseHTML(h))) && ($.pages([href, $.cache()]), 1) && cb && cb(err),
_isHtml = x => (ct = x.headers.get("content-type")) && (ct.iO("html") || ct.iO("form-")),
_parseHTML = h => document.createElement("html").innerHTML = _replD(h).trim(),
_replD = h => String(h).replace(docType, "").replace(tagso, div12).replace(tagsod, divid12).replace(tagsc, "</div>")
}}
// The stateful Scripts plugin
// First parameter "o" is a switch:
// i - initialise options
// s - re-apply all style tags from the head
// 1 - detach scripts and add them all (initial full load)
// c - fetch canonical URL
// d - detach scripts from the target page
// <object> - handle one inline script
// otherwise - delta loading
class Scripts { constructor() {
let $s = false, txt = 0;
this.a = function (o) {
if (o === "i") {
if(!$s) $s = {};
return true;
}
if (o === "s") return _allstyle($s.y);
if (o === "1") {
$.detScripts($s);
return _addScripts($s);
}
if (o === "c") return $.s.canonical && $s.can ? $s.can.getAttribute("href") : false;
if (o === "d") return $.detScripts($s);
if (o && typeof o == "object") return _onetxt(o);
if ($.scripts("d")) return;
_addScripts($s);
};
let _allstyle = $s =>
!$.s.style || !$s || (
qa("style", qs("head")).forEach(e => e.parentNode.removeChild(e)),
$s.forEach(el => _addstyle(el.textContent))
),
_onetxt = $s =>
(!(txt = $s.textContent).iO(").ajaxify(") && (!txt.iO("new Ajaxify(")) &&
(($.s.inline && !Hints($.s.inlineskip).find(txt)) || $s.classList.contains("ajaxy") ||
Hints($.s.inlinehints).find(txt))
) && _addtxt($s),
_addtxt = $s => {
if(!txt || !txt.length) return;
if($.s.inlineappend || ($s.getAttribute("type") && !$s.getAttribute("type").iO("text/javascript"))) try { return _apptxt($s); } catch (e) { }
try { eval(txt); } catch (e1) {
lg("Error in inline script : " + txt + "\nError code : " + e1);
}
},
_apptxt = $s => { let sc = document.createElement("script"); _copyAttributes(sc, $s); sc.classList.add(inlineclass);
try {sc.appendChild(document.createTextNode($s.textContent))} catch(e) {sc.text = $s.textContent};
return qs("body").appendChild(sc);
},
_addstyle = t => qs("head").appendChild($.parse('<style>' + t + '</style>')),
_addScripts = $s => ( $.addAll($s.c, "href"), $.s.inlinesync ? setTimeout(() => $.addAll($s.j, "src")) : $.addAll($s.j, "src"))
}}
// The DetScripts plugin - stands for "detach scripts"
// Works on "$s" <object> that is passed in and fills it
// Fetches all stylesheets in the head
// Fetches the canonical URL
// Fetches all external scripts on the page
// Fetches all inline scripts on the page
class DetScripts { constructor() {
let head = 0, lk = 0, j = 0;
this.a = function ($s) {
head = $.pass ? $.fn("head") : qs("head"); //If "pass" is 0 -> fetch head from DOM, otherwise from target page
if (!head) return true;
lk = qa($.pass ? ".ajy-link" : "link", head); //If "pass" is 0 -> fetch links from DOM, otherwise from target page
j = $.pass ? $.fn("script") : qa("script"); //If "pass" is 0 -> fetch JSs from DOM, otherwise from target page
$s.c = _rel(lk, "stylesheet"); //Extract stylesheets
$s.y = qa("style", head); //Extract style tags
$s.can = _rel(lk, "canonical")[0]; //Extract canonical tag (first match, if any)
$s.j = j; //Assign JSs to internal selection
};
let _rel = (lk, v) => Array.prototype.filter.call(lk, e => (e.getAttribute("rel") || "").iO(v)); //links without "rel" are skipped
}}
// The AddAll plugin
// Works on a new selection of scripts to apply delta-loading to it
// pk parameter:
// href - operate on stylesheets in the new selection
// src - operate on JS scripts
class AddAll { constructor() {
let $scriptsO = [], $sCssO = [], $sO = [], PK = 0, url = 0;
this.a = function ($this, pk) {
if(!$this.length) return; //ensure input
if($.s.deltas === "n") return true; //Delta-loading completely disabled
PK = pk; //Copy "primary key" into internal variable
if(!$.s.deltas) return _allScripts($this); //process all scripts
//deltas presumed to be "true" -> proceed with normal delta-loading
$scriptsO = PK == "href" ? $sCssO : $sO; //Copy old. Stylesheets or JS
if(!$.pass) _newArray($this); //Fill new array on initial load, nothing more
else $this.forEach(function(s) { //Iterate through selection
var $t = s;
url = $t.getAttribute(PK);
if(_classAlways($t)) { //Class always handling
_removeScript(); //remove from DOM
_iScript($t); //insert back single external script in the head
return;
}
if(url) { //URL?
if(!$scriptsO.some(e => e == url)) { // Test, whether new
$scriptsO.push(url); //If yes: Push to old array
_iScript($t);
}
//Otherwise nothing to do
return;
}
if(PK != "href" && !$t.classList.contains("no-ajaxy")) $.scripts($t); //Inline JS script? -> inject into DOM
});
};
let _allScripts = $t => $t.forEach(e => _iScript(e)),
_newArray = $t => $t.forEach(e => (url = e.getAttribute(PK)) ? $scriptsO.push(url) : 0),
_classAlways = $t => $t.getAttribute("data-class") == "always" || Hints($.s.alwayshints).find(url),
_iScript = $S => {
url = $S.getAttribute(PK);
if(PK == "href") return qs("head").appendChild($.parse(linki.replace("*", url)));
if(!url) return $.scripts($S);
var sc = document.createElement("script");
sc.async = $.s.asyncdef;
_copyAttributes(sc, $S);
qs("head").appendChild(sc);
},
_removeScript = () => qa((PK == "href" ? linkr : scrr).replace("!", url)).forEach(e => e.parentNode.removeChild(e))
}}
// The Rq plugin - stands for request
// Stores and manages all kinds of data concerning the pending request
// Simplifies the Pronto plugin by managing request data separately, instead of passing it around...
// Second parameter (p) : data
// First parameter (o) values:
// = - check whether internally stored "href" ("h") variable is the same as the global currentURL
// ! - update last request ("l") variable with passed href
// ? - Edin's intelligent plausibility check - can spawn an external fetch abort
// v - validate value passed in "p", which is expected to be a click event value - also performs "i" afterwards
// i - initialise request defaults and return "c" (currentTarget)
// h - access internal href hard
// e - set / get internal "e" (event)
// p - set / get internal "p" (push flag)
// is - set / get internal "ispost" (flag whether request is a POST)
// d - set / get internal "d" (data for central $.ajax())
// C - set / get internal "can" ("href" of canonical URL)
// c - check whether simple canonical URL is given and return, otherwise return value passed in "p"
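// e.g. $.Rq("v", e) validates a click event and initialises request defaults; $.Rq("is", true) flags the request as a POST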
class RQ { constructor() {
let ispost = 0, data = 0, push = 0, can = 0, e = 0, c = 0, h = 0, l = false;
this.a = function (o, p, t) {
if(o === "=") {
if(p) return h === $.currentURL //check whether internally stored "href" ("h") variable is the same as the global currentURL
|| h === l; //or href of last request ("l")
return h === $.currentURL; //for click requests
}
if(o === "!") return l = h; //store href in "l" (last request)
if(o === "?") { //Edin previously called this "isOK" - powerful intelligent plausibility check
let xs=$.fn("s");
if (!xs.iO("0") && !p) $.fn("a"); //if fetch is not idle and new request is standard one, do ac.abort() to set it free
if (xs==="1c" && p) return false; //if fetch is processing standard request and new request is prefetch, cancel prefetch until fetch is finished
if (xs==="1p" && p) $.s.memoryoff ? $.fn("a") : 1; //if fetch is processing prefetch request and new request is prefetch do nothing (see [options] comment below)
//([semaphore options for requests] $.fn("a") -> abort previous, proceed with new | return false -> leave previous, stop new | return true -> proceed)
return true;
}
if(o === "v") { //validate value passed in "p", which is expected to be a click event value - also performs "i" afterwards
if(!p) return false; //ensure data
_setE(p, t); //Set event and href in one go
if(!$.internal(h)) return false; //if not internal -> report failure
o = "i"; //continue with "i"
}
if(o === "i") { //initialise request defaults and return "c" (currentTarget)
ispost = false; //GET assumed
data = null; //reset data
push = true; //assume we want to push URL to the History API
can = false; //reset can (canonical URL)
return h; //return "h" (href)
}
if(o === "h") { // Access href hard
if(p) {
if (typeof p === "string") e = 0; // Reset e -> default handler
h = (p.href) ? p.href : p; // Poke in href hard
}
return h; //href
}
if(o === "e") { //set / get internal "e" (event)
if(p) _setE(p, t); //Set event and href in one go
return e ? e : h; // Return "e" or if not given "h"
}
if(o === "p") { //set / get internal "p" (push flag)
if(p !== undefined) push = p;
return push;
}
if(o === "is") { //set / get internal "ispost" (flag whether request is a POST)
if(p !== undefined) ispost = p;
return ispost;
}
if(o === "d") { //set / get internal "d" (data for central $.ajax())
if(p) data = p;
return data;
}
if(o === "C") { //set internal "can" ("href" of canonical URL)
if(p !== undefined) can = p;
return can;
}
if(o === "c") return can && can !== p && !p.iO("#") && !p.iO("?") ? can : p; //get internal "can" ("href" of canonical URL)
};
let _setE = (p, t) => h = typeof (e = p) !== "string" ? (e.currentTarget && e.currentTarget.href) || (t && t.href) || e.currentTarget.action || e.originalEvent.state.url : e
}}
// The Frms plugin - stands for forms
// Ajaxify all forms in the specified divs
// Switch (o) values:
// d - set divs variable
// a - Ajaxify all forms in divs
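// e.g. $.frms("d", qa("body")); $.frms("a"); - as performed in _init_p below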
class Frms { constructor() {
let fm = 0, divs = 0;
this.a = function (o, p) {
if (!$.s.forms || !o) return; //ensure data
if(o === "d") divs = p; //set divs variable
if(o === "a") divs.forEach(div => { //iterate through divs
Array.prototype.filter.call(qa($.s.forms, div), function(e) { //filter forms
let c = e.getAttribute("action");
return($.internal(c && c.length > 0 ? c : $.currentURL)); //ensure "action"
}).forEach(frm => { //iterate through forms
frm.addEventListener("submit", q => { //create event listener
fm = q.target; // fetch target
p = _k(); //Serialise data
var g = "get", //assume GET
m = fm.getAttribute("method"); //fetch method attribute
if (m && m.length > 0 && m.toLowerCase() == "post") g = "post"; //Override with "post" (guard against missing method attribute)
var h, a = fm.getAttribute("action"); //fetch action attribute
if (a && a.length > 0) h = a; //found -> store
else h = $.currentURL; //not found -> select current URL
$.Rq("v", q); //validate request
if (g == "get") h = _b(h, p); //GET -> copy URL parameters
else {
$.Rq("is", true); //set is POST in request data
$.Rq("d", p); //save data in request data
}
$.trigger("submit", h); //raise pronto.submit event
$.pronto(0, { href: h }); //programmatically change page
q.preventDefault(); //prevent default form action
return(false); //success -> disable default behaviour
})
});
});
};
let _k = () => {
let o = new FormData(fm), n = qs("input[name][type=submit]", fm);
if (n) o.append(n.getAttribute("name"), n.value);
return o;
},
_b = (m, n) => {
let s = "";
if (m.iO("?")) m = m.substring(0, m.iO("?"));
for (var [k, v] of n.entries()) s += `${k}=${encodeURIComponent(v)}&`;
return `${m}?${s.slice(0,-1)}`;
}
}}
// The stateful Offsets plugin
// Usage:
// 1) $.offsets(<URL>) - returns offset of specified URL from internal array
// 2) $.offsets() - saves the current URL + offset in internal array
class Offsets { constructor() {
let d = [], i = -1;
this.a = function (h) {
if (typeof h === "string") { //Lookup page offset
h = h.iO("?") ? h.split("?")[0] : h; //Handle root URL only from dynamic pages
i = _iOffset(h); //Fetch offset
if(i === -1) return 0; // scrollTop if not found
return d[i][1]; //Return offset that was found
}
//Add page offset
var u = $.currentURL, us1 = u.iO("?") ? u.split("?")[0] : u, us = us1.iO("#") ? us1.split("#")[0] : us1, os = [us, (document.documentElement && document.documentElement.scrollTop) || document.body.scrollTop];
i = _iOffset(us); //get page index
if(i === -1) d.push(os); //doesn't exist -> push to array
else d[i] = os; //exists -> overwrite
};
let _iOffset = h => d.findIndex(e => e[0] == h)
}}
// The Scrolly plugin - manages scroll effects centrally
// scrolltop values: "s" - "smart" (default), true - always scroll to top, false - no scroll
// Switch (o) values:
// + - add current page to offsets
// ! - scroll to current page offset
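// e.g. _click calls $.scrolly("+") to store the current offset, _doRender calls $.scrolly("!") to restore it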
class Scrolly { constructor() {
this.a = function (o) {
if(!o) return; //ensure operator
var op = o; //cache operator
if(o === "+" || o === "!") o = $.currentURL; //fetch currentURL for "+" and "-" operators
if(op !== "+" && o.iO("#") && (o.iO("#") < o.length - 1)) { //if hash in URL and not standalone hash
let $el = qs("#" + o.split("#")[1]); //fetch the element
if (!$el) return; //nothing found -> return quickly
let box = $el.getBoundingClientRect();
_scrll(box.top + window.pageYOffset - document.documentElement.clientTop); // ...animate to ID
return;
}
if($.s.scrolltop === "s") { //smart scroll enabled
if(op === "+") $.offsets(); //add page offset
if(op === "!") _scrll($.offsets(o)); //scroll to stored position of page
return;
}
if(op !== "+" && $.s.scrolltop) _scrll(0); //otherwise scroll to top of page
//default -> do nothing
};
let _scrll = o => window.scrollTo(0, o)
}}
// The hApi plugin - manages operations on the History API centrally
// Second parameter (p) - set global currentURL
// Switch (o) values:
// = - perform a replaceState, using currentURL
// otherwise - perform a pushState, using currentURL
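// e.g. $.hApi("=", window.location.href) seeds the History API on initialisation (see _init_p below)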
class HApi { constructor() {
this.a = function (o, p) {
if(!o) return; //ensure operator
if(p) $.currentURL = p; //if p given -> update current URL
if(o === "=") history.replaceState({ url: $.currentURL }, "state-" + $.currentURL, $.currentURL); //perform replaceState
else if ($.currentURL !== window.location.href) history.pushState({ url: $.currentURL }, "state-" + $.currentURL, $.currentURL); //perform pushState
};
}}
// The Pronto plugin - a variant of Ben Plum's Pronto plugin - low-level event handling in general
// Works on a selection, passed in as a selector string, which specifies which elements to Ajaxify
// Switch (h) values:
// i - initialise Pronto
// <object> - fetch href part and continue with _request()
// <URL> - set "h" variable of Rq hard and continue with _request()
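// e.g. $.pronto(0, { href: h }) changes the page programmatically (used by the forms sub-plugin above)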
class Pronto { constructor() {
let $gthis = 0, requestTimer = 0, pd = 150, ptim = 0;
this.a = function ($this, h) {
if(!h) return; //ensure data
if(h === "i") { //request to initialise
bdy = document.body;
if(!$this.length) $this = "body";
$gthis = qa($this); //copy selection to global selector
$.frms = new Frms().a; //initialise forms sub-plugin
if($.s.idleTime) $.slides = new classSlides($).a; //initialise optional slideshow sub-plugin
$.scrolly = new Scrolly().a; //initialise scroll effects sub-plugin
$.offsets = new Offsets().a;
$.hApi = new HApi().a;
_init_p(); //initialise Pronto sub-plugin
return $this; //return query selector for chaining<|fim▁hole|> $.Rq("h", h);
_request();
return;
}
if(h.iO("/")) { //jump to internal page programmatically -> default handler
$.Rq("h", h);
_request(true);
}
};
let _init_p = () => {
$.hApi("=", window.location.href);
window.addEventListener("popstate", _onPop);
if ($.s.prefetchoff !== true) {
_on("mouseenter", $.s.selector, _preftime); // start prefetch timeout
_on("mouseleave", $.s.selector, _prefstop); // stop prefetch timeout
_on("touchstart", $.s.selector, _prefetch);
}
_on("click", $.s.selector, _click, bdy);
$.frms("d", qa("body"));
$.frms("a");
$.frms("d", $gthis);
if($.s.idleTime) $.slides("i");
},
_preftime = (t, e) => ptim = setTimeout(()=> _prefetch(t, e), pd), // call prefetch if timeout expires without being cleared by _prefstop
_prefstop = () => clearTimeout(ptim),
_prefetch = (t, e) => {
if($.s.prefetchoff === true) return;
if (!$.Rq("?", true)) return;
var href = $.Rq("v", e, t);
if ($.Rq("=", true) || !href || Hints($.s.prefetchoff).find(href)) return;
$.fn("+", href, () => false);
},
_stopBubbling = e => (
e.preventDefault(),
e.stopPropagation(),
e.stopImmediatePropagation()
),
_click = (t, e, notPush) => {
if(!$.Rq("?")) return;
var href = $.Rq("v", e, t);
if(!href || _exoticKey(t)) return;
if(href.substr(-1) ==="#") return true;
if(_hashChange()) {
$.hApi("=", href);
return true;
}
$.scrolly("+");
_stopBubbling(e);
if($.Rq("=")) $.hApi("=");
if($.s.refresh || !$.Rq("=")) _request(notPush);
},
_request = notPush => {
$.Rq("!");
if(notPush) $.Rq("p", false);
$.trigger("request");
$.fn($.Rq("h"), err => {
if (err) {
lg("Error in _request : " + err);
$.trigger("error", err);
}
_render();
});
},
_render = () => {
$.trigger("beforeload");
if($.s.requestDelay) {
if(requestTimer) clearTimeout(requestTimer);
requestTimer = setTimeout(_doRender, $.s.requestDelay);
} else _doRender();
},
_onPop = e => {
var url = window.location.href;
$.Rq("i");
$.Rq("h", url);
$.Rq("p", false);
$.scrolly("+");
if (!url || url === $.currentURL) return;
$.trigger("request");
$.fn(url, _render);
},
_doRender = () => {
$.trigger("load");
if($.s.bodyClasses) { var classes = $.fn("body").getAttribute("class"); bdy.setAttribute("class", classes ? classes : ""); }
var href = $.Rq("h"), title;
href = $.Rq("c", href);
$.hApi($.Rq("p") ? "+" : "=", href);
if(title = $.fn("title")) qs("title").innerHTML = title.innerHTML;
$.Rq("C", $.fn("-", $gthis));
$.frms("a");
$.scrolly("!");
_gaCaptureView(href);
$.trigger("render");
if($.s.passCount) qs("#" + $.s.passCount).innerHTML = "Pass: " + $.pass;
if($.s.cb) $.s.cb();
},
_gaCaptureView = href => {
href = "/" + href.replace(rootUrl,"");
if (typeof window.ga !== "undefined") window.ga("send", "pageview", href);
else if (typeof window._gaq !== "undefined") window._gaq.push(["_trackPageview", href]);
},
_exoticKey = (t) => {
var href = $.Rq("h"), e = $.Rq("e"), tgt = e.currentTarget.target || t.target;
return (e.which > 1 || e.metaKey || e.ctrlKey || e.shiftKey || e.altKey || tgt === "_blank"
|| href.iO("wp-login") || href.iO("wp-admin"));
},
_hashChange = () => {
var e = $.Rq("e");
return (e.hash && e.href.replace(e.hash, "") === window.location.href.replace(location.hash, "") || e.href === window.location.href + "#");
}
}}
$.init = () => {
let o = options;
if (!o || typeof(o) !== "string") {
if (document.readyState === "complete" || (document.readyState !== "loading" && !document.documentElement.doScroll))
run();
else document.addEventListener('DOMContentLoaded', run);
return $;
}
else return $.pronto(0, o);
};
let run = () => {
$.s = Object.assign($.s, options);
$.pages = new Pages().a;
$.pronto = new Pronto().a;
if (load()) {
$.pronto($.s.elements, "i");
if ($.s.deltas) $.scripts("1");
}
},
load = () => {
if (!api || !$.s.pluginon) {
lg("Gracefully exiting...");
return false;
}
lg("Ajaxify loaded..."); //verbosity option steers, whether this initialisation message is output
$.scripts = new Scripts().a;
$.scripts("i");
$.cache = new Cache().a;
$.memory = new Memory().a;
$.fn = $.getPage = new GetPage().a;
$.detScripts = new DetScripts().a;
$.addAll = new AddAll().a;
$.Rq = new RQ().a;
return true;
}
$.init(); // initialize Ajaxify on definition
}}<|fim▁end|> | }
if(typeof(h) === "object") { //jump to internal page programmatically -> handler for forms sub-plugin |
<|file_name|>fund-table.component.ts<|end_file_name|><|fim▁begin|>import {Component, OnInit} from '@angular/core';
import {FundDataService} from '../funddata/funddata.service';
@Component({
selector: 'app-fund-table',
templateUrl: './fund-table.component.html',
styleUrls: ['./fund-table.component.css']
})
export class FundTableComponent implements OnInit {
private colShow: string[] = ['link_linkDisplay', 'fundResult_morningstarRating', 'oneQuarterAgoChange'];
private columns: string[];
private funds: Object[];
private fundsShow: Object[];
private error: any;
constructor(private fundDataService: FundDataService) {
}
ngOnInit() {
this.fundDataService.loadFunds().subscribe((datas) => {
this.funds = datas;
this.fundsShow = datas;
this.columns = Object.keys(datas[0]);
// console.log('columns: ' + this.columns);
// console.log(JSON.stringify(datas));
}, (err) => {// error
console.log(err);
this.error = err;
}, () => {// complete
});
}
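  // Shows or hides the given column: adds it to colShow when hidden, removes it when shown.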
toggleColumn(col: string) {
if (this.colShow.indexOf(col) === -1) {
this.colShow.push(col);
} else {<|fim▁hole|> }
}<|fim▁end|> | this.colShow.splice(this.colShow.indexOf(col), 1);
} |
<|file_name|>test_json_protocol.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from thriftpy.protocol import TJSONProtocol
from thriftpy.thrift import TPayload, TType
from thriftpy.transport import TMemoryBuffer
from thriftpy._compat import u
import thriftpy.protocol.json as proto
class TItem(TPayload):
thrift_spec = {
1: (TType.I32, "id"),
2: (TType.LIST, "phones", (TType.STRING)),
}
default_spec = [("id", None), ("phones", None)]
def test_map_to_obj():
val = [{"key": "ratio", "value": "0.618"}]
spec = [TType.STRING, TType.DOUBLE]
obj = proto.map_to_obj(val, spec)
assert {"ratio": 0.618} == obj
def test_map_to_json():
obj = {"ratio": 0.618}
spec = [TType.STRING, TType.DOUBLE]
json = proto.map_to_json(obj, spec)
assert [{"key": "ratio", "value": 0.618}] == json
def test_list_to_obj():
val = [4, 8, 4, 12, 67]
spec = TType.I32
obj = proto.list_to_obj(val, spec)
assert [4, 8, 4, 12, 67] == obj
def test_list_to_json():
val = [4, 8, 4, 12, 67]
spec = TType.I32
json = proto.list_to_json(val, spec)
assert [4, 8, 4, 12, 67] == json
def test_struct_to_json():
obj = TItem(id=13, phones=["5234", "12346456"])
json = proto.struct_to_json(obj)
assert {"id": 13, "phones": ["5234", "12346456"]} == json
def test_struct_to_obj():
json = {"id": 13, "phones": ["5234", "12346456"]}
obj = TItem()
obj = proto.struct_to_obj(json, obj)
assert obj.id == 13 and obj.phones == ["5234", "12346456"]
def test_json_proto_api_write():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
data = trans.getvalue().decode("utf-8")
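    # the first four bytes are a big-endian frame-length prefix ("\x00\x00\x00S" == 83 bytes of JSON payload)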
length = data[0:4]<|fim▁hole|>
import json
data = json.loads(data[4:])
assert length == "\x00\x00\x00S" and data == {
"metadata": {"version": 1},
"payload": {"phones": ["5234", "12346456"], "id": 13}}
def test_json_proto_api_read():
obj = TItem(id=13, phones=["5234", "12346456"])
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
p.write_struct(obj)
obj2 = TItem()
obj2 = p.read_struct(obj2)
    assert obj2.id == 13 and obj2.phones == ["5234", "12346456"]
def test_unicode_string():
class Foo(TPayload):
thrift_spec = {
1: (TType.STRING, "name")
}
default_spec = [("name", None)]
trans = TMemoryBuffer()
p = TJSONProtocol(trans)
foo = Foo(name=u('pão de açúcar'))
foo.write(p)
foo2 = Foo()
foo2.read(p)
assert foo == foo2<|fim▁end|> | |
<|file_name|>int_tests.py<|end_file_name|><|fim▁begin|># Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import proboscis
from trove.tests.api import backups
from trove.tests.api import configurations
from trove.tests.api import databases
from trove.tests.api import datastores
from trove.tests.api import flavors<|fim▁hole|>from trove.tests.api import instances
from trove.tests.api import instances_actions
from trove.tests.api.mgmt import accounts
from trove.tests.api.mgmt import admin_required
from trove.tests.api.mgmt import hosts
from trove.tests.api.mgmt import instances as mgmt_instances
from trove.tests.api.mgmt import storage
from trove.tests.api import replication
from trove.tests.api import root
from trove.tests.api import user_access
from trove.tests.api import users
from trove.tests.api import versions
GROUP_SERVICES_INITIALIZE = "services.initialize"
black_box_groups = [
flavors.GROUP,
users.GROUP,
user_access.GROUP,
databases.GROUP,
root.GROUP,
GROUP_SERVICES_INITIALIZE,
instances.GROUP_START,
instances.GROUP_QUOTAS,
instances.GROUP_SECURITY_GROUPS,
backups.GROUP,
replication.GROUP,
configurations.GROUP,
datastores.GROUP,
instances_actions.GROUP_RESIZE,
# TODO(SlickNik): The restart tests fail intermittently so pulling
# them out of the blackbox group temporarily. Refer to Trove bug:
# https://bugs.launchpad.net/trove/+bug/1204233
# instances_actions.GROUP_RESTART,
instances_actions.GROUP_STOP_MYSQL,
instances.GROUP_STOP,
versions.GROUP,
instances.GROUP_GUEST,
]
proboscis.register(groups=["blackbox", "mysql"],
depends_on_groups=black_box_groups)
simple_black_box_groups = [
GROUP_SERVICES_INITIALIZE,
flavors.GROUP,
versions.GROUP,
instances.GROUP_START_SIMPLE,
admin_required.GROUP,
]
proboscis.register(groups=["simple_blackbox"],
depends_on_groups=simple_black_box_groups)
black_box_mgmt_groups = [
accounts.GROUP,
hosts.GROUP,
storage.GROUP,
instances_actions.GROUP_REBOOT,
admin_required.GROUP,
mgmt_instances.GROUP,
]
proboscis.register(groups=["blackbox_mgmt"],
depends_on_groups=black_box_mgmt_groups)
# Datastores groups for int-tests
datastore_group = [
GROUP_SERVICES_INITIALIZE,
flavors.GROUP,
versions.GROUP,
instances.GROUP_START_SIMPLE,
]
proboscis.register(groups=["cassandra", "couchbase", "mongodb", "postgresql"],
depends_on_groups=datastore_group)<|fim▁end|> | |
<|file_name|>SqBounds.cpp<|end_file_name|><|fim▁begin|>// This code contains NVIDIA Confidential Information and is disclosed to you
// under a form of NVIDIA software license agreement provided separately to you.
//
// Notice
// NVIDIA Corporation and its licensors retain all intellectual property and
// proprietary rights in and to this software and related documentation and
// any modifications thereto. Any use, reproduction, disclosure, or
// distribution of this software and related documentation without an express
// license agreement from NVIDIA Corporation is strictly prohibited.
//
// ALL NVIDIA DESIGN SPECIFICATIONS, CODE ARE PROVIDED "AS IS.". NVIDIA MAKES
// NO WARRANTIES, EXPRESSED, IMPLIED, STATUTORY, OR OTHERWISE WITH RESPECT TO
// THE MATERIALS, AND EXPRESSLY DISCLAIMS ALL IMPLIED WARRANTIES OF NONINFRINGEMENT,
// MERCHANTABILITY, AND FITNESS FOR A PARTICULAR PURPOSE.
//
// Information and code furnished is believed to be accurate and reliable.
// However, NVIDIA Corporation assumes no responsibility for the consequences of use of such
// information or for any infringement of patents or other rights of third parties that may
// result from its use. No license is granted by implication or otherwise under any patent
// or patent rights of NVIDIA Corporation. Details are subject to change without notice.
// This code supersedes and replaces all information previously supplied.
// NVIDIA Corporation products are not authorized for use as critical
// components in life support devices or systems without express written approval of
// NVIDIA Corporation.
//
// Copyright (c) 2008-2018 NVIDIA Corporation. All rights reserved.
// Copyright (c) 2004-2008 AGEIA Technologies, Inc. All rights reserved.
// Copyright (c) 2001-2004 NovodeX AG. All rights reserved.
#include "foundation/PxTransform.h"
#include "SqBounds.h"
#include "CmTransformUtils.h"
#include "SqPruner.h"
#include "ScbShape.h"
#include "ScbActor.h"
#include "ScbRigidStatic.h"
#include "ScbBody.h"<|fim▁hole|>using namespace Sq;
void Sq::computeStaticWorldAABB(PxBounds3& bounds, const Scb::Shape& scbShape, const Scb::Actor& scbActor)
{
const PxTransform& shape2Actor = scbShape.getShape2Actor();
PX_ALIGN(16, PxTransform) globalPose;
Cm::getStaticGlobalPoseAligned(static_cast<const Scb::RigidStatic&>(scbActor).getActor2World(), shape2Actor, globalPose);
Gu::computeBounds(bounds, scbShape.getGeometry(), globalPose, 0.0f, NULL, SQ_PRUNER_INFLATION, false);
}
void Sq::computeDynamicWorldAABB(PxBounds3& bounds, const Scb::Shape& scbShape, const Scb::Actor& scbActor)
{
const PxTransform& shape2Actor = scbShape.getShape2Actor();
PX_ALIGN(16, PxTransform) globalPose;
{
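		// A kinematic body flagged with eUSE_KINEMATIC_TARGET_FOR_SCENE_QUERIES is bounded
		// at its kinematic target pose instead of its current body pose.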
const Scb::Body& body = static_cast<const Scb::Body&>(scbActor);
PX_ALIGN(16, PxTransform) kinematicTarget;
const PxU16 sqktFlags = PxRigidBodyFlag::eKINEMATIC | PxRigidBodyFlag::eUSE_KINEMATIC_TARGET_FOR_SCENE_QUERIES;
const bool useTarget = (PxU16(body.getFlags()) & sqktFlags) == sqktFlags;
const PxTransform& body2World = (useTarget && body.getKinematicTarget(kinematicTarget)) ? kinematicTarget : body.getBody2World();
Cm::getDynamicGlobalPoseAligned(body2World, shape2Actor, body.getBody2Actor(), globalPose);
}
Gu::computeBounds(bounds, scbShape.getGeometry(), globalPose, 0.0f, NULL, SQ_PRUNER_INFLATION, false);
}
const ComputeBoundsFunc Sq::gComputeBoundsTable[2] =
{
computeStaticWorldAABB,
computeDynamicWorldAABB
};<|fim▁end|> | #include "PsAllocator.h"
#include "GuBounds.h"
using namespace physx; |
<|file_name|>adaption.hpp<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\
*
* bitpit
*
* Copyright (C) 2015-2021 OPTIMAD engineering Srl
*
* -------------------------------------------------------------------------
* License
* This file is part of bitpit.
*
* bitpit is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License v3 (LGPL)
* as published by the Free Software Foundation.
*
* bitpit is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with bitpit. If not, see <http://www.gnu.org/licenses/>.
*
\*---------------------------------------------------------------------------*/
#ifndef __BITPIT_ADAPTION_HPP__
#define __BITPIT_ADAPTION_HPP__
#include <tuple>
#include <vector>
#include <unordered_map>
#include <unordered_set>
namespace bitpit {
namespace adaption
{
enum Marker {
MARKER_REFINE,
MARKER_NONE,
MARKER_COARSEN,
MARKER_UNDEFINED
};
enum Type {
TYPE_UNKNOWN = -2,
TYPE_NONE = -1,
TYPE_CREATION = 0,
TYPE_DELETION,
TYPE_REFINEMENT,
TYPE_COARSENING,
TYPE_RENUMBERING,
TYPE_PARTITION_SEND,
TYPE_PARTITION_RECV,
TYPE_PARTITION_NOTICE
};
enum Entity {
ENTITY_UNKNOWN = -1,
ENTITY_CELL,
ENTITY_INTERFACE
};
struct Info<|fim▁hole|> : type(TYPE_UNKNOWN), entity(ENTITY_UNKNOWN), rank(-1)
{
}
Info(Type user_type, Entity user_entity, int user_rank = -1)
: type(user_type), entity(user_entity), rank(user_rank)
{
}
Type type;
Entity entity;
int rank;
std::vector<long> previous;
std::vector<long> current;
};
class InfoCollection
{
public:
InfoCollection();
std::size_t create();
std::size_t create(Type type, Entity entity, int rank = -1);
Info & at(std::size_t id);
const Info & at(std::size_t id) const;
const std::vector<Info> & data() const noexcept;
std::vector<Info> & data() noexcept;
Info & operator[](std::size_t id);
const Info & operator[](std::size_t id) const;
std::vector<Info> dump();
private:
typedef std::tuple<int, int, int> infoData_t;
std::unordered_map<infoData_t, std::size_t, utils::hashing::hash<infoData_t>> m_cache;
std::unordered_set<int> m_cachedTypes;
std::vector<Info> m_collection;
};
}
class PatchKernel;
class FlatMapping
{
public:
FlatMapping();
FlatMapping(PatchKernel *patch);
virtual ~FlatMapping() = default;
virtual void update(const std::vector<adaption::Info> &adaptionData) = 0;
const std::vector<long> & getNumbering() const;
const std::vector<long> & getMapping() const;
protected:
PatchKernel *m_patch;
std::vector<long> m_numbering;
std::vector<long> m_mapping;
};
class CellFlatMapping : public FlatMapping
{
public:
CellFlatMapping();
CellFlatMapping(PatchKernel *patch);
void update(const std::vector<adaption::Info> &adaptionData) override;
};
}
#endif<|fim▁end|> | {
Info() |
<|file_name|>ShellServerMBean.java<|end_file_name|><|fim▁begin|>/*
* TeleStax, Open Source Cloud Communications Copyright 2012.
* and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA<|fim▁hole|>/**
* @author sergey.povarnin
*/
public interface ShellServerMBean {
String getAddress();
void setAddress(String address);
int getPort();
void setPort(int port);
String getSecurityDomain();
void setSecurityDomain(String securityDomain);
int getQueueNumber();
}<|fim▁end|> | * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.restcomm.ss7.management.console;
|
<|file_name|>filter.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Jan Christian Grünhage
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gomatrixserverlib
import (
"errors"
"math"
)
// Filter is used by clients to specify how the server should filter responses to e.g. sync requests
// Specified by: https://matrix.org/docs/spec/client_server/r0.5.0.html#filtering
type Filter struct {
EventFields []string `json:"event_fields,omitempty"`
EventFormat string `json:"event_format,omitempty"`
Presence EventFilter `json:"presence,omitempty"`
AccountData EventFilter `json:"account_data,omitempty"`
Room RoomFilter `json:"room,omitempty"`
}
// EventFilter is used to define filtering rules for events
type EventFilter struct {
Limit int `json:"limit,omitempty"`
NotSenders []string `json:"not_senders,omitempty"`
NotTypes []string `json:"not_types,omitempty"`
Senders []string `json:"senders,omitempty"`
Types []string `json:"types,omitempty"`
}
// RoomFilter is used to define filtering rules for room-related events
type RoomFilter struct {
NotRooms []string `json:"not_rooms,omitempty"`
Rooms []string `json:"rooms,omitempty"`
Ephemeral RoomEventFilter `json:"ephemeral,omitempty"`
IncludeLeave bool `json:"include_leave,omitempty"`
State StateFilter `json:"state,omitempty"`
Timeline RoomEventFilter `json:"timeline,omitempty"`
AccountData RoomEventFilter `json:"account_data,omitempty"`
}
// StateFilter is used to define filtering rules for state events
type StateFilter struct {
Limit int `json:"limit,omitempty"`
NotSenders []string `json:"not_senders,omitempty"`
NotTypes []string `json:"not_types,omitempty"`
Senders []string `json:"senders,omitempty"`
Types []string `json:"types,omitempty"`
LazyLoadMembers bool `json:"lazy_load_members,omitempty"`
IncludeRedundantMembers bool `json:"include_redundant_members,omitempty"`
NotRooms []string `json:"not_rooms,omitempty"`
Rooms []string `json:"rooms,omitempty"`
ContainsURL *bool `json:"contains_url,omitempty"`
}
// RoomEventFilter is used to define filtering rules for events in rooms
type RoomEventFilter struct {
Limit int `json:"limit,omitempty"`
NotSenders []string `json:"not_senders,omitempty"`
NotTypes []string `json:"not_types,omitempty"`
Senders []string `json:"senders,omitempty"`
Types []string `json:"types,omitempty"`
LazyLoadMembers bool `json:"lazy_load_members,omitempty"`
IncludeRedundantMembers bool `json:"include_redundant_members,omitempty"`
NotRooms []string `json:"not_rooms,omitempty"`
Rooms []string `json:"rooms,omitempty"`
ContainsURL *bool `json:"contains_url,omitempty"`
}
// Validate checks if the filter contains valid property values
func (filter *Filter) Validate() error {
if filter.EventFormat != "" && filter.EventFormat != "client" && filter.EventFormat != "federation" {
return errors.New("Bad event_format value. Must be one of [\"client\", \"federation\"]")
}
return nil
}
// DefaultFilter returns the default filter used by the Matrix server if no filter is provided in
// the request
func DefaultFilter() Filter {
return Filter{
AccountData: DefaultEventFilter(),
EventFields: nil,
EventFormat: "client",
Presence: DefaultEventFilter(),
Room: RoomFilter{
AccountData: DefaultRoomEventFilter(),
Ephemeral: DefaultRoomEventFilter(),
IncludeLeave: false,
NotRooms: nil,
Rooms: nil,
State: DefaultStateFilter(),
Timeline: DefaultRoomEventFilter(),
},
}
}
// DefaultEventFilter returns the default event filter used by the Matrix server if no filter is
// provided in the request
func DefaultEventFilter() EventFilter {
return EventFilter{
Limit: 20,
NotSenders: nil,
NotTypes: nil,
Senders: nil,
Types: nil,
}
}
// DefaultStateFilter returns the default state event filter used by the Matrix server if no filter
// is provided in the request
func DefaultStateFilter() StateFilter {
return StateFilter{
Limit: math.MaxInt32,
NotSenders: nil,
NotTypes: nil,
Senders: nil,
Types: nil,
LazyLoadMembers: false,
IncludeRedundantMembers: false,
NotRooms: nil,
Rooms: nil,<|fim▁hole|> }
}
// DefaultRoomEventFilter returns the default room event filter used by the Matrix server if no
// filter is provided in the request
func DefaultRoomEventFilter() RoomEventFilter {
return RoomEventFilter{
Limit: 20,
NotSenders: nil,
NotTypes: nil,
Senders: nil,
Types: nil,
NotRooms: nil,
Rooms: nil,
ContainsURL: nil,
}
}<|fim▁end|> | ContainsURL: nil, |
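# (Added note) The Go structs above are a direct encoding of the JSON filter
# shape from the Matrix client-server spec. A rough Python sketch of an
# equivalent payload -- the concrete values are illustrative assumptions; only
# the field names come from the struct tags above:
import json

filter_payload = {
    "event_format": "client",  # Validate() accepts only "client" or "federation"
    "room": {
        "timeline": {"limit": 20, "types": ["m.room.message"]},
        "state": {"lazy_load_members": True},
    },
}
print(json.dumps(filter_payload))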
<|file_name|>timelineanimationrow.component.spec.ts<|end_file_name|><|fim▁begin|>import { ComponentFixture, TestBed, async, inject } from '@angular/core/testing';<|fim▁hole|>import { ActionModeService, LayerTimelineService } from 'app/services';
import { Store } from 'app/store';
import { MockStore } from 'test/MockStore';
import { TimelineAnimationRowComponent } from './timelineanimationrow.component';
describe('TimelineAnimationRowComponent', () => {
let component: TimelineAnimationRowComponent;
let fixture: ComponentFixture<TimelineAnimationRowComponent>;
beforeEach(
async(() => {
TestBed.configureTestingModule({
declarations: [TimelineAnimationRowComponent],
imports: [FlexLayoutModule],
providers: [
{ provide: Store, useValue: new MockStore() },
ActionModeService,
LayerTimelineService,
],
}).compileComponents();
}),
);
beforeEach(
inject([Store], (store: MockStore) => {
fixture = TestBed.createComponent(TimelineAnimationRowComponent);
component = fixture.componentInstance;
component.layer = store.getState().present.layers.vectorLayer;
component.ngOnInit();
fixture.detectChanges();
}),
);
it('should be created', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | import { FlexLayoutModule } from '@angular/flex-layout'; |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from stdimage import StdImageField
from django.core.validators import RegexValidator
import datetime
YEAR_CHOICES = []
for r in range(1980, (datetime.datetime.now().year+1)):
YEAR_CHOICES.append((r,r))
S_CHOICE = [('1stYear','1stYear'),('2ndYear','2ndYear'),('3rdYear','3rdYear'),('4thYear','4thYear')]
# Create your models here.
class Hostel(models.Model):
HostelName = models.CharField(max_length=100, primary_key=True)
HostelType = models.CharField(max_length=10)
HostelSeat = models.IntegerField()
HostelImage = StdImageField(upload_to='Hostels/logo/',variations={'large': (675, 300,True)})
HostelAddress = models.CharField(max_length=200)
HostelDescription = models.TextField()
HostelEmail = models.EmailField()
phone_regex = RegexValidator(regex=r'^\+?1?\d{10,13}$', message="Phone number must be entered in the format: '+999999999'. Up to 13 digits allowed.")
HostelPhoneNo = models.CharField(max_length=13,validators=[phone_regex], blank=True)
def __str__(self):
return self.HostelName
<|fim▁hole|> HostelName = models.ForeignKey(Hostel)
HostelEventsName = models.CharField(max_length=100)
HostelEventDescription = models.TextField()
def __str__(self):
return self.HostelEventsName
class HostelPictureGalary(models.Model):
HostelName = models.ForeignKey(Hostel)
PictureName = models.CharField(max_length=100)
PictureLocation = StdImageField(upload_to='Hostels/galary/',variations={'large': (675, 300,True)})
def __str__(self):
return self.PictureName
class HostelBody(models.Model):
HostelName = models.ForeignKey(Hostel)
HostelbodyRole = models.CharField(max_length=100)
HostelbodyRoleYear = models.IntegerField(choices=YEAR_CHOICES, default=datetime.datetime.now().year)
    PersonName = models.CharField(max_length=10)
    PersonYear = models.CharField(max_length=7, choices=S_CHOICE, default='NA')
PersonImage = StdImageField(upload_to='Hostels/gb/',variations={'thumbnail': (300, 200,True)})
def __str__(self):
return self.HostelbodyRole<|fim▁end|> |
class HostelEvents(models.Model): |
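# (Added note) A hypothetical usage sketch for the models above; the app import
# path and every field value here are assumptions, not part of the source:
from hostels.models import Hostel  # assumed app name

north = Hostel.objects.create(
    HostelName='North Hall', HostelType='Boys', HostelSeat=120,
    HostelImage='Hostels/logo/north.jpg', HostelAddress='Campus Road',
    HostelDescription='Example hostel', HostelEmail='north@example.edu',
    HostelPhoneNo='+911234567890',  # satisfies phone_regex: optional '+', 10-13 digits
)
boys_hostels = Hostel.objects.filter(HostelType='Boys')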
<|file_name|>wrapping_shr.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
// macro_rules! uint_impl {
// ($ActualT:ty, $BITS:expr,
// $ctpop:path,
// $ctlz:path,
// $cttz:path,
// $bswap:path,
// $add_with_overflow:path,
// $sub_with_overflow:path,
// $mul_with_overflow:path) => {
// /// Returns the smallest value that can be represented by this integer type.
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn min_value() -> Self { 0 }
//
// /// Returns the largest value that can be represented by this integer type.
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn max_value() -> Self { !0 }
//
// /// Converts a string slice in a given base to an integer.
// ///
// /// Leading and trailing whitespace represent an error.
// ///
// /// # Arguments
// ///
// /// * src - A string slice
// /// * radix - The base to use. Must lie in the range [2 .. 36]
// ///
// /// # Return value
// ///
// /// `Err(ParseIntError)` if the string did not represent a valid number.
// /// Otherwise, `Ok(n)` where `n` is the integer represented by `src`.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[allow(deprecated)]
// pub fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
// from_str_radix(src, radix)
// }
//
// /// Returns the number of ones in the binary representation of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b01001100u8;
// ///
// /// assert_eq!(n.count_ones(), 3);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn count_ones(self) -> u32 {
// unsafe { $ctpop(self as $ActualT) as u32 }
// }
//
// /// Returns the number of zeros in the binary representation of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b01001100u8;
// ///
// /// assert_eq!(n.count_zeros(), 5);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn count_zeros(self) -> u32 {
// (!self).count_ones()
// }
//
// /// Returns the number of leading zeros in the binary representation
// /// of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b0101000u16;
// ///
// /// assert_eq!(n.leading_zeros(), 10);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn leading_zeros(self) -> u32 {
// unsafe { $ctlz(self as $ActualT) as u32 }
// }
//
// /// Returns the number of trailing zeros in the binary representation
// /// of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b0101000u16;
// ///
// /// assert_eq!(n.trailing_zeros(), 3);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn trailing_zeros(self) -> u32 {
// // As of LLVM 3.6 the codegen for the zero-safe cttz8 intrinsic
// // emits two conditional moves on x86_64. By promoting the value to
// // u16 and setting bit 8, we get better code without any conditional
// // operations.
// // FIXME: There's a LLVM patch (http://reviews.llvm.org/D9284)
// // pending, remove this workaround once LLVM generates better code
// // for cttz8.
// unsafe {
// if $BITS == 8 {
// intrinsics::cttz16(self as u16 | 0x100) as u32
// } else {
// $cttz(self as $ActualT) as u32
// }
// }
// }
//
// /// Shifts the bits to the left by a specified amount, `n`,
// /// wrapping the truncated bits to the end of the resulting integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0x3456789ABCDEF012u64;
// ///
// /// assert_eq!(n.rotate_left(12), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn rotate_left(self, n: u32) -> Self {
// // Protect against undefined behaviour for over-long bit shifts
// let n = n % $BITS;
// (self << n) | (self >> (($BITS - n) % $BITS))
// }
//
// /// Shifts the bits to the right by a specified amount, `n`,
// /// wrapping the truncated bits to the beginning of the resulting
// /// integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0xDEF0123456789ABCu64;
// ///
// /// assert_eq!(n.rotate_right(12), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn rotate_right(self, n: u32) -> Self {
// // Protect against undefined behaviour for over-long bit shifts
// let n = n % $BITS;
// (self >> n) | (self << (($BITS - n) % $BITS))
// }
//
// /// Reverses the byte order of the integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0xEFCDAB8967452301u64;
// ///
// /// assert_eq!(n.swap_bytes(), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn swap_bytes(self) -> Self {
// unsafe { $bswap(self as $ActualT) as Self }
// }
//
// /// Converts an integer from big endian to the target's endianness.
// ///
// /// On big endian this is a no-op. On little endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "big") {
// /// assert_eq!(u64::from_be(n), n)
// /// } else {
// /// assert_eq!(u64::from_be(n), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn from_be(x: Self) -> Self {
// if cfg!(target_endian = "big") { x } else { x.swap_bytes() }
// }
//
// /// Converts an integer from little endian to the target's endianness.
// ///
// /// On little endian this is a no-op. On big endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "little") {
// /// assert_eq!(u64::from_le(n), n)
// /// } else {
// /// assert_eq!(u64::from_le(n), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn from_le(x: Self) -> Self {
// if cfg!(target_endian = "little") { x } else { x.swap_bytes() }
// }
//
// /// Converts `self` to big endian from the target's endianness.
// ///
// /// On big endian this is a no-op. On little endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "big") {
// /// assert_eq!(n.to_be(), n)
// /// } else {
// /// assert_eq!(n.to_be(), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn to_be(self) -> Self { // or not to be?
// if cfg!(target_endian = "big") { self } else { self.swap_bytes() }
// }
//
// /// Converts `self` to little endian from the target's endianness.
// ///
// /// On little endian this is a no-op. On big endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "little") {
// /// assert_eq!(n.to_le(), n)
// /// } else {
// /// assert_eq!(n.to_le(), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn to_le(self) -> Self {
// if cfg!(target_endian = "little") { self } else { self.swap_bytes() }
// }
//
// /// Checked integer addition. Computes `self + other`, returning `None`
// /// if overflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!(5u16.checked_add(65530), Some(65535));
// /// assert_eq!(6u16.checked_add(65530), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_add(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $add_with_overflow, self, other)
// }
//
// /// Checked integer subtraction. Computes `self - other`, returning
// /// `None` if underflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!((-127i8).checked_sub(1), Some(-128));
// /// assert_eq!((-128i8).checked_sub(1), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_sub(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $sub_with_overflow, self, other)
// }
//
// /// Checked integer multiplication. Computes `self * other`, returning
// /// `None` if underflow or overflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!(5u8.checked_mul(51), Some(255));
// /// assert_eq!(5u8.checked_mul(52), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_mul(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $mul_with_overflow, self, other)
// }
//
// /// Checked integer division. Computes `self / other`, returning `None`
// /// if `other == 0` or the operation results in underflow or overflow.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!((-127i8).checked_div(-1), Some(127));
// /// assert_eq!((-128i8).checked_div(-1), None);
// /// assert_eq!((1i8).checked_div(0), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_div(self, v: Self) -> Option<Self> {
// match v {
// 0 => None,
// v => Some(self / v),
// }
// }
//
// /// Saturating integer addition. Computes `self + other`, saturating at
// /// the numeric bounds instead of overflowing.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn saturating_add(self, other: Self) -> Self {
// match self.checked_add(other) {
// Some(x) => x,
// None if other >= Self::zero() => Self::max_value(),
// None => Self::min_value(),
// }
// }
//
// /// Saturating integer subtraction. Computes `self - other`, saturating
// /// at the numeric bounds instead of overflowing.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn saturating_sub(self, other: Self) -> Self {
// match self.checked_sub(other) {
// Some(x) => x,
// None if other >= Self::zero() => Self::min_value(),
// None => Self::max_value(),
// }
// }
//
// /// Wrapping (modular) addition. Computes `self + other`,
// /// wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_add(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_add(self, rhs)
// }
// }
//
// /// Wrapping (modular) subtraction. Computes `self - other`,
// /// wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_sub(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_sub(self, rhs)
// }
// }
//
// /// Wrapping (modular) multiplication. Computes `self *
// /// other`, wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_mul(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_mul(self, rhs)
// }
// }
//
// /// Wrapping (modular) division. Computes `floor(self / other)`,
// /// wrapping around at the boundary of the type.
// ///
// /// The only case where such wrapping can occur is when one
// /// divides `MIN / -1` on a signed type (where `MIN` is the
// /// negative minimal value for the type); this is equivalent
// /// to `-MIN`, a positive value that is too large to represent
// /// in the type. In such a case, this function returns `MIN`
    //     /// itself.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_div(self, rhs: Self) -> Self {
// self.overflowing_div(rhs).0
// }
//
// /// Wrapping (modular) remainder. Computes `self % other`,
// /// wrapping around at the boundary of the type.
// ///
// /// Such wrap-around never actually occurs mathematically;
    //     /// implementation artifacts make `x % y` invalid for `MIN /
    //     /// -1` on a signed type (where `MIN` is the negative
// /// minimal value). In such a case, this function returns `0`.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_rem(self, rhs: Self) -> Self {
// self.overflowing_rem(rhs).0
// }
//
// /// Wrapping (modular) negation. Computes `-self`,
// /// wrapping around at the boundary of the type.
// ///
// /// The only case where such wrapping can occur is when one
// /// negates `MIN` on a signed type (where `MIN` is the
// /// negative minimal value for the type); this is a positive
// /// value that is too large to represent in the type. In such
// /// a case, this function returns `MIN` itself.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_neg(self) -> Self {
// self.overflowing_neg().0
// }
//
// /// Panic-free bitwise shift-left; yields `self << mask(rhs)`,
// /// where `mask` removes any high-order bits of `rhs` that
// /// would cause the shift to exceed the bitwidth of the type.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_shl(self, rhs: u32) -> Self {
// self.overflowing_shl(rhs).0
// }
//
    //     /// Panic-free bitwise shift-right; yields `self >> mask(rhs)`,
// /// where `mask` removes any high-order bits of `rhs` that
// /// would cause the shift to exceed the bitwidth of the type.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_shr(self, rhs: u32) -> Self {
// self.overflowing_shr(rhs).0
// }
//
// /// Raises self to the power of `exp`, using exponentiation by squaring.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!(2i32.pow(4), 16);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn pow(self, mut exp: u32) -> Self {
// let mut base = self;
// let mut acc = Self::one();
//
// let mut prev_base = self;
// let mut base_oflo = false;
// while exp > 0 {
// if (exp & 1) == 1 {
// if base_oflo {
// // ensure overflow occurs in the same manner it
// // would have otherwise (i.e. signal any exception
// // it would have otherwise).
// acc = acc * (prev_base * prev_base);
// } else {
// acc = acc * base;
// }
// }
// prev_base = base;
// let (new_base, new_base_oflo) = base.overflowing_mul(base);
// base = new_base;
// base_oflo = new_base_oflo;
// exp /= 2;
// }
// acc
// }
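    //     // Note (added): the loop above maintains the invariant
    //     // acc * base^exp == self^exp_initial (modulo wrapping), halving
    //     // `exp` each pass -- the essence of exponentiation by squaring.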
//
// /// Returns `true` iff `self == 2^k` for some `k`.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn is_power_of_two(self) -> bool {
// (self.wrapping_sub(Self::one())) & self == Self::zero() &&
// !(self == Self::zero())
// }
//
// /// Returns the smallest power of two greater than or equal to `self`.
// /// Unspecified behavior on overflow.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn next_power_of_two(self) -> Self {
// let bits = size_of::<Self>() * 8;
// let one: Self = Self::one();
// one << ((bits - self.wrapping_sub(one).leading_zeros() as usize) % bits)
// }
//
// /// Returns the smallest power of two greater than or equal to `n`. If
// /// the next power of two is greater than the type's maximum value,
// /// `None` is returned, otherwise the power of two is wrapped in `Some`.
// #[stable(feature = "rust1", since = "1.0.0")]
// pub fn checked_next_power_of_two(self) -> Option<Self> {
// let npot = self.next_power_of_two();
// if npot >= self {
// Some(npot)
// } else {
// None
// }
// }
// }
// }
// impl u64 {
// uint_impl! { u64, 64,
// intrinsics::ctpop64,
// intrinsics::ctlz64,
// intrinsics::cttz64,
// intrinsics::bswap64,
// intrinsics::u64_add_with_overflow,
// intrinsics::u64_sub_with_overflow,
// intrinsics::u64_mul_with_overflow }
// }
macro_rules! wrapping_shr_test {
($value:expr, $rhs:expr, $result:expr) => ({
let x: u64 = $value;
let rhs: u32 = $rhs;
let result: u64 = x.wrapping_shr(rhs);
assert_eq!(result, $result);
})
}
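    // `wrapping_shr` masks the shift amount modulo the bit width (`rhs % 64`
    // for u64), so a shift by 64 wraps around to a shift of 0 and returns the
    // value unchanged -- see the final assertion in the test below.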
#[test]
fn wrapping_shr_test1() {
wrapping_shr_test!( 0x8000000000000000, 0, 0x8000000000000000 );
wrapping_shr_test!( 0x8000000000000000, 1, 0x4000000000000000 );
wrapping_shr_test!( 0x8000000000000000, 2, 0x2000000000000000 );
wrapping_shr_test!( 0x8000000000000000, 3, 0x1000000000000000 );
wrapping_shr_test!( 0x8000000000000000, 4, 0x0800000000000000 );
wrapping_shr_test!( 0x8000000000000000, 5, 0x0400000000000000 );
wrapping_shr_test!( 0x8000000000000000, 6, 0x0200000000000000 );
wrapping_shr_test!( 0x8000000000000000, 7, 0x0100000000000000 );
wrapping_shr_test!( 0x8000000000000000, 8, 0x0080000000000000 );
wrapping_shr_test!( 0x8000000000000000, 9, 0x0040000000000000 );
wrapping_shr_test!( 0x8000000000000000, 10, 0x0020000000000000 );
wrapping_shr_test!( 0x8000000000000000, 11, 0x0010000000000000 );
wrapping_shr_test!( 0x8000000000000000, 12, 0x0008000000000000 );
wrapping_shr_test!( 0x8000000000000000, 13, 0x0004000000000000 );
wrapping_shr_test!( 0x8000000000000000, 14, 0x0002000000000000 );
wrapping_shr_test!( 0x8000000000000000, 15, 0x0001000000000000 );
wrapping_shr_test!( 0x8000000000000000, 16, 0x0000800000000000 );
wrapping_shr_test!( 0x8000000000000000, 17, 0x0000400000000000 );
wrapping_shr_test!( 0x8000000000000000, 18, 0x0000200000000000 );
wrapping_shr_test!( 0x8000000000000000, 19, 0x0000100000000000 );
wrapping_shr_test!( 0x8000000000000000, 20, 0x0000080000000000 );
wrapping_shr_test!( 0x8000000000000000, 21, 0x0000040000000000 );
wrapping_shr_test!( 0x8000000000000000, 22, 0x0000020000000000 );
wrapping_shr_test!( 0x8000000000000000, 23, 0x0000010000000000 );
wrapping_shr_test!( 0x8000000000000000, 24, 0x0000008000000000 );
wrapping_shr_test!( 0x8000000000000000, 25, 0x0000004000000000 );
wrapping_shr_test!( 0x8000000000000000, 26, 0x0000002000000000 );
wrapping_shr_test!( 0x8000000000000000, 27, 0x0000001000000000 );
wrapping_shr_test!( 0x8000000000000000, 28, 0x0000000800000000 );
wrapping_shr_test!( 0x8000000000000000, 29, 0x0000000400000000 );
wrapping_shr_test!( 0x8000000000000000, 30, 0x0000000200000000 );
wrapping_shr_test!( 0x8000000000000000, 31, 0x0000000100000000 );
wrapping_shr_test!( 0x8000000000000000, 32, 0x0000000080000000 );
wrapping_shr_test!( 0x8000000000000000, 33, 0x0000000040000000 );
wrapping_shr_test!( 0x8000000000000000, 34, 0x0000000020000000 );
wrapping_shr_test!( 0x8000000000000000, 35, 0x0000000010000000 );
wrapping_shr_test!( 0x8000000000000000, 36, 0x0000000008000000 );
wrapping_shr_test!( 0x8000000000000000, 37, 0x0000000004000000 );
wrapping_shr_test!( 0x8000000000000000, 38, 0x0000000002000000 );
wrapping_shr_test!( 0x8000000000000000, 39, 0x0000000001000000 );
wrapping_shr_test!( 0x8000000000000000, 40, 0x0000000000800000 );
wrapping_shr_test!( 0x8000000000000000, 41, 0x0000000000400000 );
wrapping_shr_test!( 0x8000000000000000, 42, 0x0000000000200000 );
wrapping_shr_test!( 0x8000000000000000, 43, 0x0000000000100000 );
wrapping_shr_test!( 0x8000000000000000, 44, 0x0000000000080000 );
wrapping_shr_test!( 0x8000000000000000, 45, 0x0000000000040000 );
wrapping_shr_test!( 0x8000000000000000, 46, 0x0000000000020000 );
wrapping_shr_test!( 0x8000000000000000, 47, 0x0000000000010000 );
wrapping_shr_test!( 0x8000000000000000, 48, 0x0000000000008000 );
wrapping_shr_test!( 0x8000000000000000, 49, 0x0000000000004000 );<|fim▁hole|> wrapping_shr_test!( 0x8000000000000000, 50, 0x0000000000002000 );
wrapping_shr_test!( 0x8000000000000000, 51, 0x0000000000001000 );
wrapping_shr_test!( 0x8000000000000000, 52, 0x0000000000000800 );
wrapping_shr_test!( 0x8000000000000000, 53, 0x0000000000000400 );
wrapping_shr_test!( 0x8000000000000000, 54, 0x0000000000000200 );
wrapping_shr_test!( 0x8000000000000000, 55, 0x0000000000000100 );
wrapping_shr_test!( 0x8000000000000000, 56, 0x0000000000000080 );
wrapping_shr_test!( 0x8000000000000000, 57, 0x0000000000000040 );
wrapping_shr_test!( 0x8000000000000000, 58, 0x0000000000000020 );
wrapping_shr_test!( 0x8000000000000000, 59, 0x0000000000000010 );
wrapping_shr_test!( 0x8000000000000000, 60, 0x0000000000000008 );
wrapping_shr_test!( 0x8000000000000000, 61, 0x0000000000000004 );
wrapping_shr_test!( 0x8000000000000000, 62, 0x0000000000000002 );
wrapping_shr_test!( 0x8000000000000000, 63, 0x0000000000000001 );
wrapping_shr_test!( 0x8000000000000000, 64, 0x8000000000000000 );
}
}<|fim▁end|> | |
<|file_name|>test_play_file.py<|end_file_name|><|fim▁begin|># coding: UTF-8
import unittest
import play_file
class TestAssemblyReader(unittest.TestCase):
def test_version_reader(self):
assembly_reader = play_file.AssemblyReader()
version = assembly_reader.get_assembly_version('AssemblyInfo.cs')
self.assertEqual(version, '7.3.1.0210')
def test_version_writer(self):
new_version = '7.3.1.0228'<|fim▁hole|><|fim▁end|> | assembly_writer = play_file.AssemblyWriter()
version = assembly_writer.update_assembly_version('AssemblyInfo.cs', new_version)
self.assertEqual(version, new_version) |
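# (Added note) play_file itself is not shown; a minimal regex-based sketch of
# what get_assembly_version plausibly does, assuming the standard
# AssemblyInfo.cs attribute format -- an assumption, not the actual module:
import re

def get_assembly_version(path):
    with open(path, encoding='utf-8') as f:
        match = re.search(r'AssemblyVersion\("([^"]+)"\)', f.read())
    return match.group(1) if match else None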
<|file_name|>me.process.resolver.js<|end_file_name|><|fim▁begin|>const userService = require('../../../services/user.service');<|fim▁hole|><|fim▁end|> |
module.exports = (_, args, ctx) => userService.getById(ctx.user.id); |
<|file_name|>WeatherData.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | //# sourceMappingURL=WeatherData.js.map |
<|file_name|>index.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#coding:utf-8
import cyclone.auth
import cyclone.escape
import cyclone.web<|fim▁hole|>import datetime
import time
import os
from beaker.cache import cache_managers
from toughradius.manage.base import BaseHandler
from toughlib.permit import permit
from toughradius.manage import models
from toughradius.manage.settings import *
from toughradius.common import tools
import psutil
@permit.route(r"/admin")
class HomeHandler(BaseHandler):
@cyclone.web.authenticated
def get(self):
# cpuuse = psutil.cpu_percent(interval=None, percpu=True)
# memuse = psutil.virtual_memory()
# online_count = self.db.query(models.TrOnline.id).count()
# user_total = self.db.query(models.TrAccount.account_number).filter_by(status=1).count()
# self.render("index.html",config=self.settings.config,
# cpuuse=cpuuse,memuse=memuse,online_count=online_count,user_total=user_total)
self.redirect("/admin/dashboard")
@permit.route(r"/")
class RootHandler(BaseHandler):
@cyclone.web.authenticated
def get(self):
self.redirect("/admin/dashboard")
@permit.route(r"/about")
class AboutHandler(BaseHandler):
@cyclone.web.authenticated
def get(self):
self.render("about.html")
@permit.route(r"/toughcloud/service/register")
class ToughcloudRegisterHandler(BaseHandler):
def get_toughcloud_url(self):
if os.environ.get("TR_DEV"):
return 'http://127.0.0.1:9079/customer/license/request?sid=%s'%tools.get_sys_uuid()
else:
return 'https://www.toughcloud.net/customer/license/request?sid=%s'%tools.get_sys_uuid()
@cyclone.web.authenticated
def get(self):
self.redirect(self.get_toughcloud_url())<|fim▁end|> | |
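# (Added note) toughlib's `permit` is not shown above; conceptually
# `permit.route` acts like a URL-pattern registry decorator, roughly along
# these (assumed) lines:
class Permit(object):
    def __init__(self):
        self.routes = []

    def route(self, pattern):
        def decorator(handler_cls):
            # record (pattern, handler) so the app can build its URL spec later
            self.routes.append((pattern, handler_cls))
            return handler_cls
        return decorator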
<|file_name|>test_boids.py<|end_file_name|><|fim▁begin|>import yaml
import os
from ..boids import Boids
from nose.tools import assert_equal
import random
import numpy as np
from unittest.mock import patch
import unittest.mock as mock
def test_Boids():
flock = Boids(boid_number=10,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
# make sure the class is initialised correctly:
assert_equal(flock.boid_number,10)
assert_equal(flock.move_to_middle_strength,0.1)
assert_equal(flock.all_the_boids,range(10))
def test_fly_to_middle():
flock = Boids(boid_number=2,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
# make sure self.all_the_boids corresponds to the right thing, i.e. range(self.boid_number)
np.testing.assert_array_equal(range(2),flock.all_the_boids)
assert_equal(flock.move_to_middle_strength,0.1)
    # make sure arrays are updated to the values we expect - #1
flock.x_velocities = [1, 2]
flock.x_positions = [2, 1]
flock.fly_to_middle()<|fim▁hole|> flock.x_velocities = [5, 2]
flock.x_positions = [2, 46]
flock.fly_to_middle()
np.testing.assert_array_almost_equal(flock.x_velocities,[7.2, -0.2])
def test_fly_away():
flock = Boids(boid_number=2,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
# make sure self.all_the_boids corresponds to the right thing, i.e. range(self.boid_number)
np.testing.assert_array_equal(range(2),flock.all_the_boids)
assert_equal(flock.alert_distance,100)
    # make sure arrays are updated to the values we expect - #1
flock.x_velocities = [1, 2]
flock.x_positions = [2, 1]
flock.fly_away()
np.testing.assert_array_almost_equal(flock.x_velocities,[2, 1])
    # make sure arrays are updated to the values we expect - #2
flock.x_velocities = [5, 2]
flock.x_positions = [2, 46]
flock.fly_away()
np.testing.assert_array_almost_equal(flock.x_velocities,[5, 2])
def test_match_speed():
flock = Boids(boid_number=2,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
# make sure self.all_the_boids corresponds to the right thing, i.e. range(self.boid_number)
np.testing.assert_array_equal(range(2),flock.all_the_boids)
assert_equal(flock.formation_flying_distance,900)
assert_equal(flock.formation_flying_strength,0.5)
    # make sure arrays are updated to the values we expect - #1
flock.y_velocities = [1, 2]
flock.match_speed()
np.testing.assert_array_almost_equal(flock.y_velocities,[1., 2.] )
    # make sure arrays are updated to the values we expect - #2
flock.y_velocities = [14, 15]
flock.match_speed()
np.testing.assert_array_almost_equal(flock.y_velocities,[14., 15.])
def test_update_boids():
flock = Boids(boid_number=2,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
    # test that update_boids() is called correctly
with mock.patch.object(flock,'update_boids') as mock_update:
updated = flock.update_boids('')
mock_update.assert_called_with('')
# test that fly_to_middle() works
with mock.patch.object(flock,'fly_to_middle') as mock_middle:
flown_to_middle = flock.fly_to_middle('')
mock_middle.assert_called_with('')
# test that fly_away() works
with mock.patch.object(flock,'fly_away') as mock_away:
flown_away = flock.fly_away('')
mock_away.assert_called_with('')
# test that match_speed() works
with mock.patch.object(flock,'match_speed') as mock_match:
matched = flock.match_speed('')
mock_match.assert_called_with('')
# test that move() works
with mock.patch.object(flock,'move') as mock_move:
moved = flock.move('')
mock_move.assert_called_with('')
def test_animate():
flock = Boids(boid_number=2,move_to_middle_strength=0.1,alert_distance=100,formation_flying_distance=900,formation_flying_strength=0.5,
x_position_min=0,x_position_max=200,y_position_min=-5,y_position_max=5,
x_velocity_min=-10,x_velocity_max=30,y_velocity_min=-20,y_velocity_max=20)
# test that animate() is called correctly
with mock.patch.object(flock,'animate') as mock_animate:
animated = flock.animate('frame')
mock_animate.assert_called_with('frame')<|fim▁end|> | np.testing.assert_array_almost_equal(flock.x_velocities,[0.95, 2.05])
# make sure arrays are updated to the values we expect - #2 |
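# (Added note) A minimal sketch of the patching pattern the tests above rely
# on: patching an object's method swaps in a Mock, so the subsequent call is
# recorded by the mock instead of running the real implementation. The Engine
# class here is a made-up stand-in:
from unittest import mock

class Engine:
    def start(self, key):
        raise RuntimeError("never reached while patched")

engine = Engine()
with mock.patch.object(engine, 'start') as mock_start:
    engine.start('spare-key')  # dispatches to the Mock
mock_start.assert_called_once_with('spare-key')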
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*-
"""
The ``config`` module
=====================
Load the application configuration from a YAML file (one per prefix), creating it from the matching example file when it does not exist.
"""
__author__ = 'Salas'
__copyright__ = 'Copyright 2014 LTL'
__credits__ = ['Salas']
__license__ = 'MIT'
__version__ = '0.2.0'
__maintainer__ = 'Salas'
__email__ = '[email protected]'
__status__ = 'Pre-Alpha'
import os
import os.path as path
import errno
import sys
import yaml<|fim▁hole|>
import utils.log
logger = utils.log.get_logger('config')
def get_config(config_file_path=None, config_prefix='bot'):
"""
    Return the config from a YAML file as a dictionary. Create the file if it does not exist, using the example file as a template.
    :param config_file_path: path to the YAML config file; defaults to config/<config_prefix>.config.yaml
    :return: the configuration as a dictionary
"""
if config_file_path is None:
config_dir_path = path.abspath(path.join(os.sep, path.dirname(__file__), path.pardir, 'config'))
config_file_path = path.join(config_dir_path, '{}.config.yaml'.format(config_prefix))
config_example_path = path.join(config_dir_path, '{}.example.yaml'.format(config_prefix))
try:
with open(config_file_path, 'rb') as config_stream:
config_dict = yaml.load(config_stream, Loader=Loader)
except IOError:
        logger.info('Config file not found; creating it from the example file.')
try:
os.makedirs(config_dir_path)
except OSError as exc:
if exc.errno == errno.EEXIST and path.isdir(config_dir_path):
pass
else:
raise
with open(config_file_path, 'a'):
os.utime(config_file_path, None)
try:
with open(config_example_path, 'rb') as config_example_stream:
config_dict_example = yaml.load(config_example_stream, Loader=Loader)
# TODO : console based example file modification
with open(config_file_path, 'wb') as config_stream:
yaml.dump(config_dict_example, config_stream, Dumper=Dumper, encoding='utf-8')
except IOError:
logger.critical("No example file. Exiting.")
sys.exit(0)
try:
with open(config_file_path, 'rb') as config_stream:
config_dict = yaml.load(config_stream, Loader=Loader)
except IOError:
sys.exit(0)
else:
with open(config_file_path, 'rb') as config_stream:
config_dict = yaml.load(config_stream, Loader=Loader)
return config_dict<|fim▁end|> | try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper |
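# (Added note) A short usage sketch for get_config above; the 'irc' section is
# an assumed key -- the real schema lives in config/bot.example.yaml:
config = get_config()                          # reads config/bot.config.yaml
server = config.get('irc', {}).get('server')   # defensive nested lookup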
<|file_name|>exe.rs<|end_file_name|><|fim▁begin|>use collections::Vec;
use context;
use syscall::error::{Error, ESRCH, Result};
pub fn resource() -> Result<Vec<u8>> {
let mut name = {
let contexts = context::contexts();
let context_lock = contexts.current().ok_or(Error::new(ESRCH))?;
let context = context_lock.read();
let name = context.name.lock();
name.clone()<|fim▁hole|> Ok(name)
}<|fim▁end|> | };
name.push(b'\n'); |
<|file_name|>normal.py<|end_file_name|><|fim▁begin|>"""This example samples from a simple bivariate normal distribution."""
import jass.mcmc as mcmc
import jass.samplers as samplers
import numpy as np
import scipy.stats as stats<|fim▁hole|>normal_rv = stats.multivariate_normal(cov=np.identity(2))
# Initialise the chain at the mean
initial = [0.0, 0.0]
sampler = samplers.ComponentWiseSlice()
samples = mcmc.run(sampler, normal_rv.logpdf, initial, 5000)
# Plot the samples
triangle.corner(samples)
pl.show()<|fim▁end|> | import triangle
import matplotlib.pyplot as pl
# Define the log-likelihood function to be a bivariate normal |
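# (Added note) jass's internals are not shown; below is only a generic sketch
# of component-wise slice sampling (stepping-out + shrinkage, Neal 2003),
# which is presumably what samplers.ComponentWiseSlice implements. Everything
# except the `logpdf` callback convention is an assumption:
import numpy as np

def slice_sample_component(x, i, logpdf, w=1.0, rng=np.random):
    """Resample component i of x, leaving the target distribution invariant."""
    x = np.asarray(x, dtype=float).copy()
    log_y = logpdf(x) + np.log(rng.uniform())  # log-height of the slice
    # Step out an interval [left, right] around the current point.
    x_left, x_right = x.copy(), x.copy()
    x_left[i] = x[i] - w * rng.uniform()
    x_right[i] = x_left[i] + w
    while logpdf(x_left) > log_y:
        x_left[i] -= w
    while logpdf(x_right) > log_y:
        x_right[i] += w
    # Shrink the interval until a draw lands inside the slice.
    while True:
        x_new = x.copy()
        x_new[i] = rng.uniform(x_left[i], x_right[i])
        if logpdf(x_new) > log_y:
            return x_new
        if x_new[i] < x[i]:
            x_left[i] = x_new[i]
        else:
            x_right[i] = x_new[i]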
<|file_name|>DistributedCogdoElevatorInt.py<|fim▁begin|><|fim▁hole|># Decompiled module: toontown.cogdominium.DistributedCogdoElevatorInt
from toontown.building.DistributedElevatorInt import DistributedElevatorInt
class DistributedCogdoElevatorInt(DistributedElevatorInt):
def _getDoorsClosedInfo(self):
return ('cogdoInterior', 'cogdoInterior')<|fim▁end|> | |
<|file_name|>TestSanity.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010-2017 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.testing.sanity;
import static com.evolveum.midpoint.prism.util.PrismAsserts.assertEqualsPolyString;
import static com.evolveum.midpoint.prism.util.PrismAsserts.assertParentConsistency;
import static com.evolveum.midpoint.test.IntegrationTestTools.assertAttributeNotNull;
import static com.evolveum.midpoint.test.IntegrationTestTools.assertNoRepoCache;
import static com.evolveum.midpoint.test.IntegrationTestTools.assertNotEmpty;
import static com.evolveum.midpoint.test.IntegrationTestTools.displayJaxb;
import static com.evolveum.midpoint.test.IntegrationTestTools.getAttributeValues;
import static com.evolveum.midpoint.test.IntegrationTestTools.waitFor;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import javax.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import javax.xml.ws.Holder;
import com.evolveum.midpoint.common.refinery.RefinedResourceSchemaImpl;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.delta.*;
import com.evolveum.midpoint.prism.path.ItemName;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.xnode.MapXNode;
import com.evolveum.midpoint.prism.xnode.XNode;
import com.evolveum.midpoint.util.exception.*;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.Validate;
import org.opends.server.core.ModifyOperation;
import org.opends.server.protocols.internal.InternalSearchOperation;
import org.opends.server.types.*;
import org.opends.server.types.ModificationType;
import org.opends.server.util.ChangeRecordEntry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.testng.AssertJUnit;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.evolveum.midpoint.common.refinery.RefinedAttributeDefinition;
import com.evolveum.midpoint.common.refinery.RefinedObjectClassDefinition;
import com.evolveum.midpoint.common.refinery.RefinedResourceSchema;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.crypto.EncryptionException;
import com.evolveum.midpoint.prism.match.MatchingRule;
import com.evolveum.midpoint.prism.match.MatchingRuleRegistry;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.schema.SchemaRegistry;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.CapabilityUtil;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.ResultHandler;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition;
import com.evolveum.midpoint.schema.processor.ResourceAttributeDefinition;
import com.evolveum.midpoint.schema.processor.ResourceSchema;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.result.OperationResultStatus;
import com.evolveum.midpoint.schema.util.ObjectQueryUtil;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.schema.util.ResourceTypeUtil;
import com.evolveum.midpoint.schema.util.SchemaDebugUtil;
import com.evolveum.midpoint.schema.util.SchemaTestConstants;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskExecutionStatus;
import com.evolveum.midpoint.test.Checker;
import com.evolveum.midpoint.test.IntegrationTestTools;
import com.evolveum.midpoint.test.ObjectChecker;
import com.evolveum.midpoint.test.ldap.OpenDJController;
import com.evolveum.midpoint.test.util.DerbyController;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.DebugUtil;
import com.evolveum.midpoint.util.JAXBUtil;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectDeltaListType;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectDeltaOperationListType;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectListType;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.PropertyReferenceListType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentPolicyEnforcementType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.CapabilityCollectionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.GenericObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ModelExecuteOptionsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectDeltaOperationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ProjectionPolicyType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceObjectShadowChangeDescriptionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceObjectTypeDefinitionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SchemaHandlingType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SelectorQualifiedGetOptionsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.TaskType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import com.evolveum.midpoint.xml.ns._public.common.fault_3.FaultMessage;
import com.evolveum.midpoint.xml.ns._public.common.fault_3.FaultType;
import com.evolveum.midpoint.xml.ns._public.common.fault_3.ObjectAlreadyExistsFaultType;
import com.evolveum.midpoint.xml.ns._public.resource.capabilities_3.ActivationCapabilityType;
import com.evolveum.midpoint.xml.ns._public.resource.capabilities_3.CredentialsCapabilityType;
import com.evolveum.prism.xml.ns._public.query_3.QueryType;
import com.evolveum.prism.xml.ns._public.types_3.ChangeTypeType;
import com.evolveum.prism.xml.ns._public.types_3.EncryptedDataType;
import com.evolveum.prism.xml.ns._public.types_3.ItemDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.ItemPathType;
import com.evolveum.prism.xml.ns._public.types_3.ModificationTypeType;
import com.evolveum.prism.xml.ns._public.types_3.ObjectDeltaType;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
/**
* Sanity test suite.
* <p/>
* It tests the very basic representative test cases. It does not try to be
* complete. It rather should be quick to execute and pass through the most
* representative cases. It should test all the system components except for
* GUI. Therefore the test cases are selected to pass through most of the
* components.
* <p/>
* It is using embedded H2 repository and embedded OpenDJ instance as a testing
* resource. The repository is instantiated from the Spring context in the
* same way as all other components. OpenDJ instance is started explicitly using
* BeforeClass method. Appropriate resource definition to reach the OpenDJ
* instance is provided in the test data and is inserted in the repository as
* part of test initialization.
*
* @author Radovan Semancik
*/
@ContextConfiguration(locations = {"classpath:ctx-sanity-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestSanity extends AbstractModelIntegrationTest {
private static final String REPO_DIR_NAME = "src/test/resources/repo/";
private static final String REQUEST_DIR_NAME = "src/test/resources/request/";
private static final File REQUEST_DIR = new File(REQUEST_DIR_NAME);
private static final String SYSTEM_CONFIGURATION_FILENAME = REPO_DIR_NAME + "system-configuration.xml";
private static final String SYSTEM_CONFIGURATION_OID = "00000000-0000-0000-0000-000000000001";
private static final String ROLE_SUPERUSER_FILENAME = REPO_DIR_NAME + "role-superuser.xml";
private static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";
private static final String RESOURCE_OPENDJ_FILENAME = REPO_DIR_NAME + "resource-opendj.xml";
private static final String RESOURCE_OPENDJ_OID = "ef2bc95b-76e0-59e2-86d6-3d4f02d3ffff";
private static final String RESOURCE_OPENDJ_NS = "http://midpoint.evolveum.com/xml/ns/public/resource/instance/ef2bc95b-76e0-59e2-86d6-3d4f02d3ffff";
protected static final QName RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS = new QName(RESOURCE_OPENDJ_NS,"inetOrgPerson");
private static final String RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME = "entryUUID";
private static final String RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME = "dn";
private static final String RESOURCE_DERBY_FILENAME = REPO_DIR_NAME + "resource-derby.xml";
private static final String RESOURCE_DERBY_OID = "ef2bc95b-76e0-59e2-86d6-999902d3abab";
private static final String RESOURCE_BROKEN_FILENAME = REPO_DIR_NAME + "resource-broken.xml";
private static final String RESOURCE_BROKEN_OID = "ef2bc95b-76e0-59e2-ffff-ffffffffffff";
private static final String RESOURCE_DUMMY_FILENAME = REPO_DIR_NAME + "resource-dummy.xml";
private static final String RESOURCE_DUMMY_OID = "10000000-0000-0000-0000-000000000004";
private static final String CONNECTOR_LDAP_NAMESPACE = "http://midpoint.evolveum.com/xml/ns/public/connector/icf-1/bundle/com.evolveum.polygon.connector-ldap/com.evolveum.polygon.connector.ldap.LdapConnector";
private static final String CONNECTOR_DBTABLE_NAMESPACE = "http://midpoint.evolveum.com/xml/ns/public/connector/icf-1/bundle/com.evolveum.polygon.connector-databasetable/org.identityconnectors.databasetable.DatabaseTableConnector";
private static final String CONNECTOR_BROKEN_FILENAME = REPO_DIR_NAME + "connector-broken.xml";
private static final String CONNECTOR_BROKEN_OID = "cccccccc-76e0-59e2-ffff-ffffffffffff";
private static final String TASK_OPENDJ_SYNC_FILENAME = REPO_DIR_NAME + "task-opendj-sync.xml";
private static final String TASK_OPENDJ_SYNC_OID = "91919191-76e0-59e2-86d6-3d4f02d3ffff";
private static final String TASK_USER_RECOMPUTE_FILENAME = REPO_DIR_NAME + "task-user-recompute.xml";
private static final String TASK_USER_RECOMPUTE_OID = "91919191-76e0-59e2-86d6-3d4f02d3aaaa";
private static final String TASK_OPENDJ_RECON_FILENAME = REPO_DIR_NAME + "task-opendj-reconciliation.xml";
private static final String TASK_OPENDJ_RECON_OID = "91919191-76e0-59e2-86d6-3d4f02d30000";
private static final String SAMPLE_CONFIGURATION_OBJECT_FILENAME = REPO_DIR_NAME + "sample-configuration-object.xml";
private static final String SAMPLE_CONFIGURATION_OBJECT_OID = "c0c010c0-d34d-b33f-f00d-999111111111";
private static final String USER_TEMPLATE_FILENAME = REPO_DIR_NAME + "user-template.xml";
private static final String USER_TEMPLATE_OID = "c0c010c0-d34d-b33f-f00d-777111111111";
private static final String USER_ADMINISTRATOR_FILENAME = REPO_DIR_NAME + "user-administrator.xml";
private static final String USER_ADMINISTRATOR_NAME = "administrator";
private static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
private static final String USER_JACK_FILENAME = REPO_DIR_NAME + "user-jack.xml";
private static final File USER_JACK_FILE = new File(USER_JACK_FILENAME);
private static final String USER_JACK_OID = "c0c010c0-d34d-b33f-f00d-111111111111";
private static final String USER_JACK_LDAP_UID = "jack";
private static final String USER_JACK_LDAP_DN = "uid=" + USER_JACK_LDAP_UID
+ "," + OPENDJ_PEOPLE_SUFFIX;
private static final String USER_GUYBRUSH_FILENAME = REPO_DIR_NAME + "user-guybrush.xml";
private static final File USER_GUYBRUSH_FILE = new File(USER_GUYBRUSH_FILENAME);
private static final String USER_GUYBRUSH_OID = "c0c010c0-d34d-b33f-f00d-111111111222";
private static final String USER_GUYBRUSH_USERNAME = "guybrush";
private static final String USER_GUYBRUSH_LDAP_UID = "guybrush";
private static final String USER_GUYBRUSH_LDAP_DN = "uid=" + USER_GUYBRUSH_LDAP_UID
+ "," + OPENDJ_PEOPLE_SUFFIX;
private static final String USER_E_LINK_ACTION_FILENAME = REPO_DIR_NAME + "user-e.xml";
private static final File USER_E_LINK_ACTION_FILE = new File(USER_E_LINK_ACTION_FILENAME);
private static final String LDIF_E_FILENAME_LINK = "src/test/resources/request/e-create.ldif";
private static final String ROLE_PIRATE_FILENAME = REPO_DIR_NAME + "role-pirate.xml";
private static final String ROLE_PIRATE_OID = "12345678-d34d-b33f-f00d-987987987988";
private static final String ROLE_SAILOR_FILENAME = REPO_DIR_NAME + "role-sailor.xml";
private static final String ROLE_SAILOR_OID = "12345678-d34d-b33f-f00d-987955553535";
private static final String ROLE_CAPTAIN_FILENAME = REPO_DIR_NAME + "role-captain.xml";
private static final String ROLE_CAPTAIN_OID = "12345678-d34d-b33f-f00d-987987cccccc";
private static final String ROLE_JUDGE_FILENAME = REPO_DIR_NAME + "role-judge.xml";
private static final String ROLE_JUDGE_OID = "12345111-1111-2222-1111-121212111111";
private static final String REQUEST_USER_MODIFY_ADD_ACCOUNT_OPENDJ_FILENAME = REQUEST_DIR_NAME + "user-modify-add-account.xml";
private static final String REQUEST_USER_MODIFY_ADD_ACCOUNT_DERBY_FILENAME = REQUEST_DIR_NAME + "user-modify-add-account-derby.xml";
private static final String USER_JACK_DERBY_LOGIN = "jsparrow";
private static final String REQUEST_USER_MODIFY_FULLNAME_LOCALITY_FILENAME = REQUEST_DIR_NAME + "user-modify-fullname-locality.xml";
private static final String REQUEST_USER_MODIFY_GIVENNAME_FILENAME = REQUEST_DIR_NAME + "user-modify-givenname.xml";
private static final String REQUEST_USER_MODIFY_PASSWORD_FILENAME = REQUEST_DIR_NAME + "user-modify-password.xml";
private static final String REQUEST_USER_MODIFY_ACTIVATION_DISABLE_FILENAME = REQUEST_DIR_NAME + "user-modify-activation-disable.xml";
private static final String REQUEST_USER_MODIFY_ACTIVATION_ENABLE_FILENAME = REQUEST_DIR_NAME + "user-modify-activation-enable.xml";
private static final String REQUEST_USER_MODIFY_NAME_FILENAME = REQUEST_DIR_NAME + "user-modify-name.xml";
private static final String REQUEST_USER_MODIFY_ADD_ROLE_PIRATE_FILENAME = REQUEST_DIR_NAME + "user-modify-add-role-pirate.xml";
private static final String REQUEST_USER_MODIFY_ADD_ROLE_CAPTAIN_1_FILENAME = REQUEST_DIR_NAME + "user-modify-add-role-captain-1.xml";
private static final String REQUEST_USER_MODIFY_ADD_ROLE_CAPTAIN_2_FILENAME = REQUEST_DIR_NAME + "user-modify-add-role-captain-2.xml";
private static final String REQUEST_USER_MODIFY_ADD_ROLE_JUDGE_FILENAME = REQUEST_DIR_NAME + "user-modify-add-role-judge.xml";
private static final String REQUEST_USER_MODIFY_DELETE_ROLE_PIRATE_FILENAME = REQUEST_DIR_NAME + "user-modify-delete-role-pirate.xml";
private static final String REQUEST_USER_MODIFY_DELETE_ROLE_CAPTAIN_1_FILENAME = REQUEST_DIR_NAME + "user-modify-delete-role-captain-1.xml";
private static final String REQUEST_USER_MODIFY_DELETE_ROLE_CAPTAIN_2_FILENAME = REQUEST_DIR_NAME + "user-modify-delete-role-captain-2.xml";
private static final File REQUEST_ACCOUNT_MODIFY_ATTRS_FILE = new File(REQUEST_DIR, "account-modify-attrs.xml");
private static final File REQUEST_ACCOUNT_MODIFY_ROOM_NUMBER_FILE = new File(REQUEST_DIR, "account-modify-roomnumber.xml");
private static final File REQUEST_ACCOUNT_MODIFY_ROOM_NUMBER_EXPLICIT_TYPE_FILE = new File(REQUEST_DIR, "account-modify-roomnumber-explicit-type.xml");
private static final File REQUEST_ACCOUNT_MODIFY_BAD_PATH_FILE = new File(REQUEST_DIR, "account-modify-bad-path.xml");
private static final String LDIF_WILL_FILENAME = REQUEST_DIR_NAME + "will.ldif";
private static final File LDIF_WILL_MODIFY_FILE = new File (REQUEST_DIR_NAME, "will-modify.ldif");
private static final String LDIF_WILL_WITHOUT_LOCATION_FILENAME = REQUEST_DIR_NAME + "will-without-location.ldif";
private static final String WILL_NAME = "wturner";
private static final String LDIF_ANGELIKA_FILENAME = REQUEST_DIR_NAME + "angelika.ldif";
private static final String ANGELIKA_NAME = "angelika";
private static final String ACCOUNT_ANGELIKA_FILENAME = REQUEST_DIR_NAME + "account-angelika.xml";
private static final String LDIF_ELAINE_FILENAME = REQUEST_DIR_NAME + "elaine.ldif";
private static final String ELAINE_NAME = "elaine";
private static final File LDIF_GIBBS_MODIFY_FILE = new File(REQUEST_DIR_NAME, "gibbs-modify.ldif");
private static final String LDIF_HERMAN_FILENAME = REQUEST_DIR_NAME + "herman.ldif";
private static final Trace LOGGER = TraceManager.getTrace(TestSanity.class);
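// Item names defined in the extra (extension) schema. test000Integrity below verifies that this schema was loaded.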
private static final String NS_MY = "http://whatever.com/my";
private static final ItemName MY_SHIP_STATE = new ItemName(NS_MY, "shipState");
private static final ItemName MY_DEAD = new ItemName(NS_MY, "dead");
private static final long WAIT_FOR_LOOP_SLEEP_MILIS = 1000;
/**
* Unmarshalled resource definition to reach the embedded OpenDJ instance.
* Used for convenience - the test methods may find it handy.
*/
private static ResourceType resourceTypeOpenDjrepo;
private static ResourceType resourceDerby;
private static String accountShadowOidOpendj;
private static String accountShadowOidDerby;
private static String accountShadowOidGuybrushOpendj;
private static String accountGuybrushOpendjEntryUuuid = null;
private static String originalJacksLdapPassword;
private static String lastJacksLdapPassword = null;
private int lastSyncToken;
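// Matching rule registry is used to compare shadow names case-insensitively in test016ProvisioningSearchAccountsIterative.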
@Autowired(required = true)
private MatchingRuleRegistry matchingRuleRegistry;
// This will get called from the superclass to init the repository
// It will be called only once
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
LOGGER.trace("initSystem");
try {
super.initSystem(initTask, initResult);
repoAddObjectFromFile(ROLE_SUPERUSER_FILENAME, initResult);
repoAddObjectFromFile(USER_ADMINISTRATOR_FILENAME, initResult);
// This should discover the connectors and initialize the model
modelService.postInit(initResult);
login(USER_ADMINISTRATOR_NAME);
// We need to add the system configuration after calling postInit() so it will not be applied.
// We want the original logging configuration from the test logback config file, not
// the one from the system config.
repoAddObjectFromFile(SYSTEM_CONFIGURATION_FILENAME, initResult);
// Add broken connector before importing resources
repoAddObjectFromFile(CONNECTOR_BROKEN_FILENAME, initResult);
// Need to import instead of add, so the (dynamic) connector reference
// will be resolved
// correctly
importObjectFromFile(RESOURCE_OPENDJ_FILENAME, initResult);
importObjectFromFile(RESOURCE_BROKEN_FILENAME, initResult);
repoAddObjectFromFile(SAMPLE_CONFIGURATION_OBJECT_FILENAME, initResult);
repoAddObjectFromFile(USER_TEMPLATE_FILENAME, initResult);
repoAddObjectFromFile(ROLE_SAILOR_FILENAME, initResult);
repoAddObjectFromFile(ROLE_PIRATE_FILENAME, initResult);
repoAddObjectFromFile(ROLE_CAPTAIN_FILENAME, initResult);
repoAddObjectFromFile(ROLE_JUDGE_FILENAME, initResult);
} catch (Exception ex) {
LOGGER.error("error: {}", ex);
throw ex;
}
}
/**
* Initialize embedded OpenDJ instance. Note: this is not in the abstract
* superclass so individual tests may avoid starting OpenDJ.
*/
@Override
public void startResources() throws Exception {
openDJController.startCleanServer();
derbyController.startCleanServer();
}
/**
* Shut down embedded OpenDJ instance. Note: this is not in the abstract
* superclass so individual tests may avoid stopping OpenDJ.
*/
@AfterClass
public static void stopResources() throws Exception {
openDJController.stop();
derbyController.stop();
}
/**
* Test integrity of the test setup.
*
* @throws SchemaException
* @throws ObjectNotFoundException
* @throws CommunicationException
*/
@Test
public void test000Integrity() throws Exception {
final String TEST_NAME = "test000Integrity";
TestUtil.displayTestTitle(this, TEST_NAME);
assertNotNull(modelWeb);
assertNotNull(modelService);
assertNotNull(repositoryService);
assertTrue(isSystemInitialized());
assertNotNull(taskManager);
assertNotNull(prismContext);
SchemaRegistry schemaRegistry = prismContext.getSchemaRegistry();
assertNotNull(schemaRegistry);
// This is defined in extra schema. So this effectively checks whether the extra schema was loaded
PrismPropertyDefinition shipStateDefinition = schemaRegistry.findPropertyDefinitionByElementName(MY_SHIP_STATE);
assertNotNull("No my:shipState definition", shipStateDefinition);
assertEquals("Wrong maxOccurs in my:shipState definition", 1, shipStateDefinition.getMaxOccurs());
assertNoRepoCache();
Task task = taskManager.createTaskInstance(TestSanity.class.getName() + ".test000Integrity");
OperationResult result = task.getResult();
// Check if OpenDJ resource was imported correctly
PrismObject<ResourceType> openDjResource = repositoryService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, result);
display("Imported OpenDJ resource (repository)", openDjResource);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, openDjResource.getOid());
assertNoRepoCache();
String ldapConnectorOid = openDjResource.asObjectable().getConnectorRef().getOid();
PrismObject<ConnectorType> ldapConnector = repositoryService.getObject(ConnectorType.class, ldapConnectorOid, null, result);
display("LDAP Connector: ", ldapConnector);
// TODO: test if OpenDJ and Derby are running
repositoryService.getObject(GenericObjectType.class, SAMPLE_CONFIGURATION_OBJECT_OID, null, result);
}
/**
* Repeat self-test when we have all the dependencies on the classpath.
*/
@Test
public void test001SelfTests() throws Exception {
final String TEST_NAME = "test001SelfTests";
displayTestTitle(TEST_NAME);
// GIVEN
Task task = taskManager.createTaskInstance(TestSanity.class.getName()+"."+TEST_NAME);
// WHEN
OperationResult repositorySelfTestResult = modelDiagnosticService.repositorySelfTest(task);
// THEN
assertSuccess("Repository self test", repositorySelfTestResult);
// WHEN
OperationResult provisioningSelfTestResult = modelDiagnosticService.provisioningSelfTest(task);
// THEN
display("Repository self test result", provisioningSelfTestResult);
// There may be a warning about illegal key size on some platforms. As long as it is a warning
// and not an error we are OK; the system will fall back to an interoperable key size.
if (provisioningSelfTestResult.getStatus() != OperationResultStatus.SUCCESS && provisioningSelfTestResult.getStatus() != OperationResultStatus.WARNING) {
AssertJUnit.fail("Provisioning self-test failed: "+provisioningSelfTestResult);
}
}
/**
* Test the testResource method. Expect a complete success for now.
*/
@Test
public void test001TestConnectionOpenDJ() throws Exception {
final String TEST_NAME = "test001TestConnectionOpenDJ";
displayTestTitle(TEST_NAME);
// GIVEN
try {
assertNoRepoCache();
// WHEN
OperationResultType result = modelWeb.testResource(RESOURCE_OPENDJ_OID);
// THEN
assertNoRepoCache();
displayJaxb("testResource result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("testResource has failed", result);
OperationResult opResult = new OperationResult(TestSanity.class.getName() + ".test001TestConnectionOpenDJ");
PrismObject<ResourceType> resourceOpenDjRepo = repositoryService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, opResult);
resourceTypeOpenDjrepo = resourceOpenDjRepo.asObjectable();
assertNoRepoCache();
assertEquals(RESOURCE_OPENDJ_OID, resourceTypeOpenDjrepo.getOid());
display("Initialized OpenDJ resource (respository)", resourceTypeOpenDjrepo);
assertNotNull("Resource schema was not generated", resourceTypeOpenDjrepo.getSchema());
Element resourceOpenDjXsdSchemaElement = ResourceTypeUtil.getResourceXsdSchema(resourceTypeOpenDjrepo);
assertNotNull("Resource schema was not generated", resourceOpenDjXsdSchemaElement);
PrismObject<ResourceType> openDjResourceProvisioning = provisioningService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID,
null, null, opResult);
display("Initialized OpenDJ resource (provisioning)", openDjResourceProvisioning);
PrismObject<ResourceType> openDjResourceModel = provisioningService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, null, opResult);
display("Initialized OpenDJ resource OpenDJ resource (model)", openDjResourceModel);
checkOpenDjResource(resourceTypeOpenDjrepo, "repository");
System.out.println("------------------------------------------------------------------");
display("OpenDJ resource schema (repo XML)", DOMUtil.serializeDOMToString(ResourceTypeUtil.getResourceXsdSchema(resourceOpenDjRepo)));
System.out.println("------------------------------------------------------------------");
checkOpenDjResource(openDjResourceProvisioning.asObjectable(), "provisioning");
checkOpenDjResource(openDjResourceModel.asObjectable(), "model");
// TODO: model web
} catch (Exception ex) {
LOGGER.info("exception: {}", ex);
throw ex;
}
}
private void checkRepoOpenDjResource() throws ObjectNotFoundException, SchemaException {
OperationResult result = new OperationResult(TestSanity.class.getName()+".checkRepoOpenDjResource");
PrismObject<ResourceType> resource = repositoryService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, result);
checkOpenDjResource(resource.asObjectable(), "repository");
}
/**
* Checks if the resource is internally consistent, if it has everything it should have.
*
* @throws SchemaException
*/
private void checkOpenDjResource(ResourceType resource, String source) throws SchemaException {
assertNotNull("Resource from " + source + " is null", resource);
ObjectReferenceType connectorRefType = resource.getConnectorRef();
assertNotNull("Resource from " + source + " has null connectorRef", connectorRefType);
assertFalse("Resource from " + source + " has no OID in connectorRef", StringUtils.isBlank(connectorRefType.getOid()));
assertNotNull("Resource from " + source + " has null description in connectorRef", connectorRefType.getDescription());
assertNotNull("Resource from " + source + " has null filter in connectorRef", connectorRefType.getFilter());
assertNotNull("Resource from " + source + " has null filter element in connectorRef", connectorRefType.getFilter().getFilterClauseXNode());
assertNotNull("Resource from " + source + " has null configuration", resource.getConnectorConfiguration());
assertNotNull("Resource from " + source + " has null schema", resource.getSchema());
checkOpenDjSchema(resource, source);
assertNotNull("Resource from " + source + " has null schemahandling", resource.getSchemaHandling());
checkOpenDjSchemaHandling(resource, source);
if (!source.equals("repository")) {
// This is generated on the fly in provisioning
assertNotNull("Resource from " + source + " has null nativeCapabilities", resource.getCapabilities().getNative());
assertFalse("Resource from " + source + " has empty nativeCapabilities",
resource.getCapabilities().getNative().getAny().isEmpty());
}
assertNotNull("Resource from " + source + " has null configured capabilities", resource.getCapabilities().getConfigured());
assertFalse("Resource from " + source + " has empty capabilities", resource.getCapabilities().getConfigured().getAny().isEmpty());
assertNotNull("Resource from " + source + " has null synchronization", resource.getSynchronization());
checkOpenDjConfiguration(resource.asPrismObject(), source);
}
private void checkOpenDjSchema(ResourceType resource, String source) throws SchemaException {
ResourceSchema schema = RefinedResourceSchemaImpl.getResourceSchema(resource, prismContext);
ObjectClassComplexTypeDefinition accountDefinition = schema.findObjectClassDefinition(RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS);
assertNotNull("Schema does not define any account (resource from " + source + ")", accountDefinition);
Collection<? extends ResourceAttributeDefinition> identifiers = accountDefinition.getPrimaryIdentifiers();
assertFalse("No account identifiers (resource from " + source + ")", identifiers == null || identifiers.isEmpty());
// TODO: check for naming attributes and display names, etc
ActivationCapabilityType capActivation = ResourceTypeUtil.getEffectiveCapability(resource, ActivationCapabilityType.class);
if (capActivation != null && capActivation.getStatus() != null && capActivation.getStatus().getAttribute() != null) {
// There is simulated activation capability, check if the attribute is in schema.
QName enableAttrName = capActivation.getStatus().getAttribute();
ResourceAttributeDefinition enableAttrDef = accountDefinition.findAttributeDefinition(enableAttrName);
display("Simulated activation attribute definition", enableAttrDef);
assertNotNull("No definition for enable attribute " + enableAttrName + " in account (resource from " + source + ")", enableAttrDef);
assertTrue("Enable attribute " + enableAttrName + " is not ignored (resource from " + source + ")", enableAttrDef.isIgnored());
}
}
private void checkOpenDjSchemaHandling(ResourceType resource, String source) {
SchemaHandlingType schemaHandling = resource.getSchemaHandling();
for (ResourceObjectTypeDefinitionType resObjectTypeDef: schemaHandling.getObjectType()) {
if (resObjectTypeDef.getKind() == ShadowKindType.ACCOUNT) {
String name = resObjectTypeDef.getIntent();
assertNotNull("Resource "+resource+" from "+source+" has an schemaHandlig account definition without intent", name);
assertNotNull("Account type "+name+" in "+resource+" from "+source+" does not have object class", resObjectTypeDef.getObjectClass());
}
if (resObjectTypeDef.getKind() == ShadowKindType.ENTITLEMENT) {
String name = resObjectTypeDef.getIntent();
assertNotNull("Resource "+resource+" from "+source+" has an schemaHandlig entitlement definition without intent", name);
assertNotNull("Entitlement type "+name+" in "+resource+" from "+source+" does not have object class", resObjectTypeDef.getObjectClass());
}
}
}
private void checkOpenDjConfiguration(PrismObject<ResourceType> resource, String source) {
checkOpenResourceConfiguration(resource, CONNECTOR_LDAP_NAMESPACE, "bindPassword", 8, source);
}
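/**
 * Checks the connector configuration of a resource: the expected number of configuration
 * properties must be present and the credentials property must hold exactly one value
 * that carries encrypted data (never a clear-text password).
 */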
private void checkOpenResourceConfiguration(PrismObject<ResourceType> resource, String connectorNamespace, String credentialsPropertyName,
int numConfigProps, String source) {
PrismContainer<Containerable> configurationContainer = resource.findContainer(ResourceType.F_CONNECTOR_CONFIGURATION);
assertNotNull("No configuration container in "+resource+" from "+source, configurationContainer);
PrismContainer<Containerable> configPropsContainer = configurationContainer.findContainer(SchemaTestConstants.ICFC_CONFIGURATION_PROPERTIES);
assertNotNull("No configuration properties container in "+resource+" from "+source, configPropsContainer);
Collection<? extends Item<?,?>> configProps = configPropsContainer.getValue().getItems();
assertEquals("Wrong number of config properties in "+resource+" from "+source, numConfigProps, configProps.size());
PrismProperty<Object> credentialsProp = configPropsContainer.findProperty(new ItemName(connectorNamespace,credentialsPropertyName));
if (credentialsProp == null) {
// This is the heisenbug we are looking for. Just dump the entire damn thing.
display("Configuration with the heisenbug", configurationContainer.debugDump());
}
assertNotNull("No "+credentialsPropertyName+" property in "+resource+" from "+source, credentialsProp);
assertEquals("Wrong number of "+credentialsPropertyName+" property value in "+resource+" from "+source, 1, credentialsProp.getValues().size());
PrismPropertyValue<Object> credentialsPropertyValue = credentialsProp.getValues().iterator().next();
assertNotNull("No "+credentialsPropertyName+" property value in "+resource+" from "+source, credentialsPropertyValue);
if (credentialsPropertyValue.isRaw()) {
Object rawElement = credentialsPropertyValue.getRawElement();
assertTrue("Wrong element class "+rawElement.getClass()+" in "+resource+" from "+source, rawElement instanceof MapXNode);
// Element rawDomElement = (Element)rawElement;
MapXNode xmap = (MapXNode) rawElement;
try {
ProtectedStringType protectedType = new ProtectedStringType();
prismContext.hacks().parseProtectedType(protectedType, xmap, prismContext, prismContext.getDefaultParsingContext());
// display("LDAP credentials raw element", DOMUtil.serializeDOMToString(rawDomElement));
// assertEquals("Wrong credentials element namespace in "+resource+" from "+source, connectorNamespace, rawDomElement.getNamespaceURI());
// assertEquals("Wrong credentials element local name in "+resource+" from "+source, credentialsPropertyName, rawDomElement.getLocalName());
// Element encryptedDataElement = DOMUtil.getChildElement(rawDomElement, new QName(DOMUtil.NS_XML_ENC, "EncryptedData"));
EncryptedDataType encryptedDataType = protectedType.getEncryptedDataType();
assertNotNull("No EncryptedData element", encryptedDataType);
} catch (SchemaException ex) {
throw new IllegalArgumentException(ex);
}
// assertEquals("Wrong EncryptedData element namespace in "+resource+" from "+source, DOMUtil.NS_XML_ENC, encryptedDataType.getNamespaceURI());
// assertEquals("Wrong EncryptedData element local name in "+resource+" from "+source, "EncryptedData", encryptedDataType.getLocalName());
} else {
Object credentials = credentialsPropertyValue.getValue();
assertTrue("Wrong type of credentials configuration property in "+resource+" from "+source+": "+credentials.getClass(), credentials instanceof ProtectedStringType);
ProtectedStringType credentialsPs = (ProtectedStringType)credentials;
EncryptedDataType encryptedData = credentialsPs.getEncryptedDataType();
assertNotNull("No EncryptedData element", encryptedData);
}
}
@Test
public void test002AddDerbyResource() throws Exception {
final String TEST_NAME = "test002AddDerbyResource";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResult result = new OperationResult(TestSanity.class.getName() + "." + TEST_NAME);
checkRepoOpenDjResource();
assertNoRepoCache();
PrismObject<ResourceType> resource = PrismTestUtil.parseObject(new File(RESOURCE_DERBY_FILENAME));
assertParentConsistency(resource);
fillInConnectorRef(resource, IntegrationTestTools.DBTABLE_CONNECTOR_TYPE, result);
OperationResultType resultType = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(resultType);
Holder<String> oidHolder = new Holder<>();
display("Adding Derby Resource", resource);
// WHEN
addObjectViaModelWS(resource.asObjectable(), null, oidHolder, resultHolder);
// THEN
// Check if Derby resource was imported correctly
PrismObject<ResourceType> derbyResource = repositoryService.getObject(ResourceType.class, RESOURCE_DERBY_OID, null, result);
AssertJUnit.assertEquals(RESOURCE_DERBY_OID, derbyResource.getOid());
assertNoRepoCache();
String dbConnectorOid = derbyResource.asObjectable().getConnectorRef().getOid();
PrismObject<ConnectorType> dbConnector = repositoryService.getObject(ConnectorType.class, dbConnectorOid, null, result);
display("DB Connector: ", dbConnector);
// Check if password was encrypted during import
// via JAXB
Object configurationPropertiesElement = JAXBUtil.findElement(derbyResource.asObjectable().getConnectorConfiguration().getAny(),
new QName(dbConnector.asObjectable().getNamespace(), "configurationProperties"));
Object passwordElement = JAXBUtil.findElement(JAXBUtil.listChildElements(configurationPropertiesElement),
new QName(dbConnector.asObjectable().getNamespace(), "password"));
System.out.println("Password element: " + passwordElement);
// via prisms
PrismContainerValue configurationProperties = derbyResource.findContainer(
ItemPath.create(
ResourceType.F_CONNECTOR_CONFIGURATION,
new QName("configurationProperties")))
.getValue();
PrismProperty password = configurationProperties.findProperty(new ItemName(dbConnector.asObjectable().getNamespace(), "password"));
System.out.println("Password property: " + password);
}
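/**
 * Wraps the object in an ADD delta and executes it via the model web service, filling in the
 * holders with the OID of the created object and the execution result.
 */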
private void addObjectViaModelWS(ObjectType objectType, ModelExecuteOptionsType options, Holder<String> oidHolder, Holder<OperationResultType> resultHolder) throws FaultMessage {
ObjectDeltaListType deltaList = new ObjectDeltaListType();
ObjectDeltaType objectDelta = new ObjectDeltaType();
objectDelta.setObjectToAdd(objectType);
QName type = objectType.asPrismObject().getDefinition().getTypeName();
objectDelta.setObjectType(type);
objectDelta.setChangeType(ChangeTypeType.ADD);
deltaList.getDelta().add(objectDelta);
ObjectDeltaOperationListType objectDeltaOperationListType = modelWeb.executeChanges(deltaList, options);
ObjectDeltaOperationType objectDeltaOperationType = getOdoFromDeltaOperationList(objectDeltaOperationListType, objectDelta);
resultHolder.value = objectDeltaOperationType.getExecutionResult();
oidHolder.value = ((ObjectType) objectDeltaOperationType.getObjectDelta().getObjectToAdd()).getOid();
}
// ugly hack...
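// For ADD deltas the matching delta operation is found by the class of the object being added;
// for other change types it is matched by change type and OID.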
private static ObjectDeltaOperationType getOdoFromDeltaOperationList(ObjectDeltaOperationListType operationListType, ObjectDeltaType originalDelta) {
Validate.notNull(operationListType);
Validate.notNull(originalDelta);
for (ObjectDeltaOperationType operationType : operationListType.getDeltaOperation()) {
ObjectDeltaType objectDeltaType = operationType.getObjectDelta();
if (originalDelta.getChangeType() == ChangeTypeType.ADD) {
if (objectDeltaType.getChangeType() == originalDelta.getChangeType() &&
objectDeltaType.getObjectToAdd() != null) {
ObjectType objectAdded = (ObjectType) objectDeltaType.getObjectToAdd();
if (objectAdded.getClass().equals(originalDelta.getObjectToAdd().getClass())) {
return operationType;
}
}
} else {
if (objectDeltaType.getChangeType() == originalDelta.getChangeType() &&
originalDelta.getOid().equals(objectDeltaType.getOid())) {
return operationType;
}
}
}
throw new IllegalStateException("No suitable ObjectDeltaOperationType found");
}
private void checkRepoDerbyResource() throws ObjectNotFoundException, SchemaException {
OperationResult result = new OperationResult(TestSanity.class.getName()+".checkRepoDerbyResource");
PrismObject<ResourceType> resource = repositoryService.getObject(ResourceType.class, RESOURCE_DERBY_OID, null, result);
checkDerbyResource(resource, "repository");
}
private void checkDerbyResource(PrismObject<ResourceType> resource, String source) {
checkDerbyConfiguration(resource, source);
}
private void checkDerbyConfiguration(PrismObject<ResourceType> resource, String source) {
checkOpenResourceConfiguration(resource, CONNECTOR_DBTABLE_NAMESPACE, "password", 10, source);
}
/**
* Test the testResource method. Expect a complete success for now.
*/
@Test
public void test003TestConnectionDerby() throws Exception {
TestUtil.displayTestTitle("test003TestConnectionDerby");
// GIVEN
checkRepoDerbyResource();
assertNoRepoCache();
// WHEN
OperationResultType result = modelWeb.testResource(RESOURCE_DERBY_OID);
// THEN
assertNoRepoCache();
displayJaxb("testResource result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("testResource has failed", result.getPartialResults().get(0));
OperationResult opResult = new OperationResult(TestSanity.class.getName() + ".test002TestConnectionDerby");
PrismObject<ResourceType> rObject = repositoryService.getObject(ResourceType.class, RESOURCE_DERBY_OID, null, opResult);
resourceDerby = rObject.asObjectable();
checkDerbyResource(rObject, "repository(after test)");
assertNoRepoCache();
assertEquals(RESOURCE_DERBY_OID, resourceDerby.getOid());
display("Initialized Derby resource (respository)", resourceDerby);
assertNotNull("Resource schema was not generated", resourceDerby.getSchema());
Element resourceDerbyXsdSchemaElement = ResourceTypeUtil.getResourceXsdSchema(resourceDerby);
assertNotNull("Resource schema was not generated", resourceDerbyXsdSchemaElement);
PrismObject<ResourceType> derbyResourceProvisioning = provisioningService.getObject(ResourceType.class, RESOURCE_DERBY_OID,
null, null, opResult);
display("Initialized Derby resource (provisioning)", derbyResourceProvisioning);
PrismObject<ResourceType> derbyResourceModel = provisioningService.getObject(ResourceType.class, RESOURCE_DERBY_OID,
null, null, opResult);
display("Initialized Derby resource (model)", derbyResourceModel);
// TODO: check
// checkOpenDjResource(resourceOpenDj,"repository");
// checkOpenDjResource(openDjResourceProvisioninig,"provisioning");
// checkOpenDjResource(openDjResourceModel,"model");
// TODO: model web
}
@Test
public void test004Capabilities() throws ObjectNotFoundException, CommunicationException, SchemaException,
FaultMessage {
TestUtil.displayTestTitle("test004Capabilities");
// GIVEN
checkRepoOpenDjResource();
assertNoRepoCache();
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
// WHEN
modelWeb.getObject(ObjectTypes.RESOURCE.getTypeQName(), RESOURCE_OPENDJ_OID,
options, objectHolder, resultHolder);
ResourceType resource = (ResourceType) objectHolder.value;
// THEN
display("Resource", resource);
assertNoRepoCache();
CapabilityCollectionType nativeCapabilities = resource.getCapabilities().getNative();
List<Object> capabilities = nativeCapabilities.getAny();
assertFalse("Empty capabilities returned", capabilities.isEmpty());
for (Object capability : nativeCapabilities.getAny()) {
System.out.println("Native Capability: " + CapabilityUtil.getCapabilityDisplayName(capability) + " : " + capability);
}
if (resource.getCapabilities() != null) {
for (Object capability : resource.getCapabilities().getConfigured().getAny()) {
System.out.println("Configured Capability: " + CapabilityUtil.getCapabilityDisplayName(capability) + " : " + capability);
}
}
List<Object> effectiveCapabilities = ResourceTypeUtil.getEffectiveCapabilities(resource);
for (Object capability : effectiveCapabilities) {
System.out.println("Efective Capability: " + CapabilityUtil.getCapabilityDisplayName(capability) + " : " + capability);
}
CredentialsCapabilityType capCred = CapabilityUtil.getCapability(capabilities, CredentialsCapabilityType.class);
assertNotNull("password capability not present", capCred.getPassword());
// Connector cannot do activation, this should be null
ActivationCapabilityType capAct = CapabilityUtil.getCapability(capabilities, ActivationCapabilityType.class);
assertNull("Found activation capability while not expecting it", capAct);
capCred = ResourceTypeUtil.getEffectiveCapability(resource, CredentialsCapabilityType.class);
assertNotNull("password capability not found", capCred.getPassword());
// Although connector does not support activation, the resource specifies a way how to simulate it.
// Therefore the following should succeed
capAct = ResourceTypeUtil.getEffectiveCapability(resource, ActivationCapabilityType.class);
assertNotNull("activation capability not found", capAct);
}
@Test
public void test005resolveConnectorRef() throws Exception {
TestUtil.displayTestTitle("test005resolveConnectorRef");
PrismObject<ResourceType> resource = PrismTestUtil.parseObject(new File(RESOURCE_DUMMY_FILENAME));
ModelExecuteOptionsType options = new ModelExecuteOptionsType();
options.setIsImport(Boolean.TRUE);
addObjectViaModelWS(resource.asObjectable(), options, new Holder<>(), new Holder<>());
OperationResult repoResult = new OperationResult("getObject");
PrismObject<ResourceType> uObject = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, repoResult);
assertNotNull(uObject);
ResourceType resourceType = uObject.asObjectable();
assertNotNull("Reference on the connector must not be null in resource.",resourceType.getConnectorRef());
assertNotNull("Missing oid reference on the connector",resourceType.getConnectorRef().getOid());
}
@Test
public void test006reimportResourceDummy() throws Exception {
TestUtil.displayTestTitle("test006reimportResourceDummy");
// Get object from repo (with version set) and try to add it - it should be re-added without error
OperationResult repoResult = new OperationResult("getObject");
PrismObject<ResourceType> resource = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, repoResult);
assertNotNull(resource);
ModelExecuteOptionsType options = new ModelExecuteOptionsType();
options.setOverwrite(Boolean.TRUE);
options.setIsImport(Boolean.TRUE);
addObjectViaModelWS(resource.asObjectable(), options, new Holder<>(), new Holder<>());
//TODO: add some asserts
// Parse object from file again and try to add it - this should fail because the same object already exists
resource = PrismTestUtil.parseObject(new File(RESOURCE_DUMMY_FILENAME));
try {
Holder<OperationResultType> resultHolder = new Holder<>();
options = new ModelExecuteOptionsType();
options.setIsImport(Boolean.TRUE);
addObjectViaModelWS(resource.asObjectable(), options, new Holder<>(),
resultHolder);
OperationResultType result = resultHolder.value;
TestUtil.assertFailure(result);
fail("Expected object already exists exception, but haven't got one.");
} catch (FaultMessage ex) {
LOGGER.info("fault {}", ex.getFaultInfo());
LOGGER.info("fault {}", ex.getCause());
if (ex.getFaultInfo() instanceof ObjectAlreadyExistsFaultType) {
// this is OK, we expect this
} else {
fail("Expected object already exists exception, but got: " + ex.getFaultInfo());
}
}
// ResourceType resourceType = uObject.asObjectable();
// assertNotNull("Reference on the connector must not be null in resource.",resourceType.getConnectorRef());
// assertNotNull("Missing oid reference on the connector",resourceType.getConnectorRef().getOid());
}
/**
* Attempt to add a new user. It is only added to the repository, so check if
* it is in the repository after the operation.
*/
@Test
public void test010AddUser() throws Exception {
final String TEST_NAME = "test010AddUser";
displayTestTitle(TEST_NAME);
// GIVEN
checkRepoOpenDjResource();
assertNoRepoCache();
PrismObject<UserType> user = PrismTestUtil.parseObject(USER_JACK_FILE);
UserType userType = user.asObjectable();
assertParentConsistency(user);
// Encrypt Jack's password
protector.encrypt(userType.getCredentials().getPassword().getValue());
assertParentConsistency(user);
OperationResultType result = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(result);
Holder<String> oidHolder = new Holder<>();
display("Adding user object", userType);
// WHEN
addObjectViaModelWS(userType, null, oidHolder, resultHolder);
// THEN
assertNoRepoCache();
displayJaxb("addObject result:", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("addObject has failed", resultHolder.value);
// AssertJUnit.assertEquals(USER_JACK_OID, oid);
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("repository.getObject result", repoResult);
TestUtil.assertSuccess("getObject has failed", repoResult);
AssertJUnit.assertEquals(USER_JACK_OID, repoUser.getOid());
assertEqualsPolyString("fullName", userType.getFullName(), repoUser.getFullName());
// TODO: better checks
}
/**
* Add account to user. This should result in account provisioning. Check if
* that happens in repo and in LDAP.
*/
@Test
public void test013AddOpenDjAccountToUser() throws Exception {
final String TEST_NAME = "test013AddOpenDjAccountToUser";
displayTestTitle(TEST_NAME);
try {
// GIVEN
checkRepoOpenDjResource();
assertNoRepoCache();
// IMPORTANT! SWITCHING OFF ASSIGNMENT ENFORCEMENT HERE!
setAssignmentEnforcement(AssignmentPolicyEnforcementType.NONE);
// This is not redundant. It checks that the previous command set the policy correctly
assertSyncSettingsAssignmentPolicyEnforcement(AssignmentPolicyEnforcementType.NONE);
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ACCOUNT_OPENDJ_FILENAME, ObjectDeltaType.class);
// WHEN
displayWhen(TEST_NAME);
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
displayThen(TEST_NAME);
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals("No accountRefs", 1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
accountShadowOidOpendj = accountRef.getOid();
assertFalse(accountShadowOidOpendj.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadow);
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
assertNotNull("Shadow stored in repository has no name", repoShadowType.getName());
// Check the "name" property, it should be set to DN, not entryUUID
assertEquals("Wrong name property", USER_JACK_LDAP_DN.toLowerCase(), repoShadowType.getName().getOrig().toLowerCase());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// check if account was created in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(uid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "jack");
OpenDJController.assertAttribute(entry, "givenName", "Jack");
OpenDJController.assertAttribute(entry, "sn", "Sparrow");
OpenDJController.assertAttribute(entry, "cn", "Jack Sparrow");
OpenDJController.assertAttribute(entry, "displayName", "Jack Sparrow");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Black Pearl");
assertTrue("LDAP account is not enabled", openDJController.isAccountEnabled(entry));
originalJacksLdapPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword was not set on create", originalJacksLdapPassword);
System.out.println("password after create: " + originalJacksLdapPassword);
// Use getObject to test fetch of complete shadow
assertNoRepoCache();
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
// WHEN
modelWeb.getObject(ObjectTypes.SHADOW.getTypeQName(), accountShadowOidOpendj,
options, objectHolder, resultHolder);
// THEN
assertNoRepoCache();
displayJaxb("getObject result", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
ShadowType modelShadow = (ShadowType) objectHolder.value;
display("Shadow (model)", modelShadow);
AssertJUnit.assertNotNull(modelShadow);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, modelShadow.getResourceRef().getOid());
assertAttributeNotNull(modelShadow, getOpenDjPrimaryIdentifierQName());
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "uid", "jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "givenName", "Jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "sn", "Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "cn", "Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "displayName", "Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "l", "Black Pearl");
assertNull("carLicense attribute sneaked to LDAP", OpenDJController.getAttributeValue(entry, "carLicense"));
assertNull("postalAddress attribute sneaked to LDAP", OpenDJController.getAttributeValue(entry, "postalAddress"));
assertNotNull("Activation is null (model)", modelShadow.getActivation());
assertEquals("Wrong administrativeStatus in the shadow (model)", ActivationStatusType.ENABLED, modelShadow.getActivation().getAdministrativeStatus());
} catch (Exception ex) {
LOGGER.info("ERROR: {}", ex);
throw ex;
}
}
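/**
 * Executes a single modification delta via the model web service and returns the execution
 * result of the matching delta operation (see getOdoFromDeltaOperationList above).
 */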
private OperationResultType modifyObjectViaModelWS(ObjectDeltaType objectChange) throws FaultMessage {
ObjectDeltaListType deltaList = new ObjectDeltaListType();
deltaList.getDelta().add(objectChange);
ObjectDeltaOperationListType list = modelWeb.executeChanges(deltaList, null);
return getOdoFromDeltaOperationList(list, objectChange).getExecutionResult();
}
/**
* Add Derby account to user. This should result in account provisioning. Check if
* that happens in repo and in Derby.
*/
@Test
public void test014AddDerbyAccountToUser() throws IOException, JAXBException, FaultMessage,
ObjectNotFoundException, SchemaException, DirectoryException, SQLException {
TestUtil.displayTestTitle("test014AddDerbyAccountToUser");
// GIVEN
checkRepoDerbyResource();
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ACCOUNT_DERBY_FILENAME, ObjectDeltaType.class);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
// OpenDJ account was added in previous test, hence 2 accounts
assertEquals(2, accountRefs.size());
ObjectReferenceType accountRef = null;
for (ObjectReferenceType ref : accountRefs) {
if (!ref.getOid().equals(accountShadowOidOpendj)) {
accountRef = ref;
}
}
accountShadowOidDerby = accountRef.getOid();
assertFalse(accountShadowOidDerby.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidDerby,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("addObject has failed", repoResult);
display("Shadow (repository)", repoShadowType);
assertNotNull(repoShadowType);
assertEquals(RESOURCE_DERBY_OID, repoShadowType.getResourceRef().getOid());
// Check the "name" property, it should be set to DN, not entryUUID
assertEquals("Wrong name property", PrismTestUtil.createPolyStringType(USER_JACK_DERBY_LOGIN),
repoShadowType.getName());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// check if account was created in DB Table
Statement stmt = derbyController.getExecutedStatementWhereLoginName(uid);
ResultSet rs = stmt.getResultSet();
System.out.println("RS: " + rs);
assertTrue("No records found for login name " + uid, rs.next());
assertEquals(USER_JACK_DERBY_LOGIN, rs.getString(DerbyController.COLUMN_LOGIN));
assertEquals("Cpt. Jack Sparrow", rs.getString(DerbyController.COLUMN_FULL_NAME));
// TODO: check password
//assertEquals("3lizab3th",rs.getString(DerbyController.COLUMN_PASSWORD));
System.out.println("Password: " + rs.getString(DerbyController.COLUMN_PASSWORD));
assertFalse("Too many records found for login name " + uid, rs.next());
rs.close();
stmt.close();
// Use getObject to test fetch of complete shadow
assertNoRepoCache();
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
// WHEN
modelWeb.getObject(ObjectTypes.SHADOW.getTypeQName(), accountShadowOidDerby,
options, objectHolder, resultHolder);
// THEN
assertNoRepoCache();
displayJaxb("getObject result", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
ShadowType modelShadow = (ShadowType) objectHolder.value;
display("Shadow (model)", modelShadow);
AssertJUnit.assertNotNull(modelShadow);
AssertJUnit.assertEquals(RESOURCE_DERBY_OID, modelShadow.getResourceRef().getOid());
assertAttribute(modelShadow, SchemaConstants.ICFS_UID, USER_JACK_DERBY_LOGIN);
assertAttribute(modelShadow, SchemaConstants.ICFS_NAME, USER_JACK_DERBY_LOGIN);
assertAttribute(resourceDerby, modelShadow, "FULL_NAME", "Cpt. Jack Sparrow");
}
@Test
public void test015AccountOwner() throws FaultMessage, ObjectNotFoundException, SchemaException, JAXBException {
TestUtil.displayTestTitle("test015AccountOwner");
// GIVEN
checkRepoOpenDjResource();
assertNoRepoCache();
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<UserType> userHolder = new Holder<>();
// WHEN
modelWeb.findShadowOwner(accountShadowOidOpendj, userHolder, resultHolder);
// THEN
display("listAccountShadowOwner result", resultHolder.value);
TestUtil.assertSuccess("listAccountShadowOwner has failed (result)", resultHolder.value);
UserType user = userHolder.value;
assertNotNull("No owner", user);
assertEquals(USER_JACK_OID, user.getOid());
System.out.println("Account " + accountShadowOidOpendj + " has owner " + ObjectTypeUtil.toShortString(user));
}
@Test
public void test016ProvisioningSearchAccountsIterative() throws Exception {
TestUtil.displayTestTitle("test016ProvisioningSearchAccountsIterative");
// GIVEN
OperationResult result = new OperationResult(TestSanity.class.getName() + ".test016ProvisioningSearchAccountsIterative");
RefinedResourceSchema refinedSchema = RefinedResourceSchemaImpl.getRefinedSchema(resourceTypeOpenDjrepo, prismContext);
final RefinedObjectClassDefinition refinedAccountDefinition = refinedSchema.getDefaultRefinedDefinition(ShadowKindType.ACCOUNT);
QName objectClass = refinedAccountDefinition.getObjectClassDefinition().getTypeName();
ObjectQuery q = ObjectQueryUtil.createResourceAndObjectClassQuery(resourceTypeOpenDjrepo.getOid(), objectClass, prismContext);
final Collection<ObjectType> objects = new HashSet<>();
final MatchingRule caseIgnoreMatchingRule = matchingRuleRegistry.getMatchingRule(PrismConstants.STRING_IGNORE_CASE_MATCHING_RULE_NAME, DOMUtil.XSD_STRING);
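// The handler checks the shape of each account shadow returned by the iterative search:
// OID, name, object class, identifiers, basic LDAP attributes and activation must be present.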
ResultHandler handler = new ResultHandler<ObjectType>() {
@Override
public boolean handle(PrismObject<ObjectType> prismObject, OperationResult parentResult) {
ObjectType objectType = prismObject.asObjectable();
objects.add(objectType);
display("Found object", objectType);
assertTrue(objectType instanceof ShadowType);
ShadowType shadow = (ShadowType) objectType;
assertNotNull(shadow.getOid());
assertNotNull(shadow.getName());
assertEquals(RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS, shadow.getObjectClass());
assertEquals(RESOURCE_OPENDJ_OID, shadow.getResourceRef().getOid());
String icfUid = getAttributeValue(shadow, getOpenDjPrimaryIdentifierQName());
assertNotNull("No ICF UID", icfUid);
String icfName = getNormalizedAttributeValue(shadow, refinedAccountDefinition, getOpenDjSecondaryIdentifierQName());
assertNotNull("No ICF NAME", icfName);
try {
PrismAsserts.assertEquals("Wrong shadow name", caseIgnoreMatchingRule, shadow.getName().getOrig(), icfName);
} catch (SchemaException e) {
throw new IllegalArgumentException(e.getMessage(),e);
}
assertNotNull("Missing LDAP uid", getAttributeValue(shadow, new QName(ResourceTypeUtil.getResourceNamespace(resourceTypeOpenDjrepo), "uid")));
assertNotNull("Missing LDAP cn", getAttributeValue(shadow, new QName(ResourceTypeUtil.getResourceNamespace(resourceTypeOpenDjrepo), "cn")));
assertNotNull("Missing LDAP sn", getAttributeValue(shadow, new QName(ResourceTypeUtil.getResourceNamespace(resourceTypeOpenDjrepo), "sn")));
assertNotNull("Missing activation", shadow.getActivation());
assertNotNull("Missing activation status", shadow.getActivation().getAdministrativeStatus());
return true;
}
};
// WHEN
provisioningService.searchObjectsIterative(ShadowType.class, q, null, handler, null, result);
// THEN
display("Count", objects.size());
}
/**
* We are going to modify the user. As the user has an account, the user
* changes should also be applied to the account (by schemaHandling).
*/
@Test
public void test020ModifyUser() throws Exception {
final String TEST_NAME = "test020ModifyUser";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_FULLNAME_LOCALITY_FILENAME, ObjectDeltaType.class);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
display("repository user", repoUser);
PrismAsserts.assertEqualsPolyString("wrong value for fullName", "Cpt. Jack Sparrow", repoUserType.getFullName());
PrismAsserts.assertEqualsPolyString("wrong value for locality", "somewhere", repoUserType.getLocality());
assertEquals("wrong value for employeeNumber", "1", repoUserType.getEmployeeNumber());
// Check if appropriate accountRef is still there
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(2, accountRefs.size());
for (ObjectReferenceType accountRef : accountRefs) {
assertTrue("No OID in "+accountRef+" in "+repoUserType,
accountRef.getOid().equals(accountShadowOidOpendj) ||
accountRef.getOid().equals(accountShadowOidDerby));
}
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repository shadow", repoShadow);
AssertJUnit.assertNotNull(repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
assertOpenDJAccountJack(uid, "jack");
}
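// Overloaded assertions for Jack's LDAP entry: look it up by entryUUID and verify the
// attributes that schemaHandling maps from the user.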
private Entry assertOpenDJAccountJack(String entryUuid, String uid) throws DirectoryException {
Entry entry = openDJController.searchAndAssertByEntryUuid(entryUuid);
return assertOpenDJAccountJack(entry, uid);
}
private Entry assertOpenDJAccountJack(Entry entry, String uid) throws DirectoryException {
return assertOpenDJAccountJack(entry, uid, "Jack");
}
private Entry assertOpenDJAccountJack(Entry entry, String uid, String givenName) throws DirectoryException {
display(entry);
OpenDJController.assertDn(entry, "uid="+uid+",ou=people,dc=example,dc=com");
OpenDJController.assertAttribute(entry, "uid", uid);
if (givenName == null) {
OpenDJController.assertAttribute(entry, "givenName");
} else {
OpenDJController.assertAttribute(entry, "givenName", givenName);
}
OpenDJController.assertAttribute(entry, "sn", "Sparrow");
// These two should be assigned from the User modification by
// schemaHandling
OpenDJController.assertAttribute(entry, "cn", "Cpt. Jack Sparrow");
OpenDJController.assertAttribute(entry, "displayName", "Cpt. Jack Sparrow");
// This will get translated from "somewhere" to this (outbound
// expression in schemeHandling) -> this is not more supported...we
// don't support complex run-time properties. the value will be
// evaluated from outbound expression
OpenDJController.assertAttribute(entry, "l", "somewhere");
OpenDJController.assertAttribute(entry, "postalAddress", "Number 1");
return entry;
}
/**
* We are going to change user's password. As the user has an account, the password change
* should also be applied to the account (by schemaHandling).
*/
@Test
public void test022ChangeUserPassword() throws Exception {
final String TEST_NAME = "test022ChangeUserPassword";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_PASSWORD_FILENAME, ObjectDeltaType.class);
System.out.println("In modification: " + objectChange.getItemDelta().get(0).getValue().get(0));
assertNoRepoCache();
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertUserPasswordChange("butUnd3dM4yT4lkAL0t", result);
}
/**
* Similar to the previous test, just the request is constructed a bit differently.
*/
@Test
public void test023ChangeUserPasswordJAXB() throws Exception {
final String TEST_NAME = "test023ChangeUserPasswordJAXB";
displayTestTitle(TEST_NAME);
// GIVEN
final String NEW_PASSWORD = "abandonSHIP";
Document doc = ModelClientUtil.getDocumnent();
ObjectDeltaType userDelta = new ObjectDeltaType();
userDelta.setOid(USER_JACK_OID);
userDelta.setChangeType(ChangeTypeType.MODIFY);
userDelta.setObjectType(UserType.COMPLEX_TYPE);
ItemDeltaType passwordDelta = new ItemDeltaType();
passwordDelta.setModificationType(ModificationTypeType.REPLACE);
passwordDelta.setPath(ModelClientUtil.createItemPathType("credentials/password/value", prismContext));
ProtectedStringType pass = new ProtectedStringType();
pass.setClearValue(NEW_PASSWORD);
XNode passValue = prismContext.xnodeSerializer().root(new QName("dummy")).serializeRealValue(pass).getSubnode();
System.out.println("PASSWORD VALUE: " + passValue.debugDump());
RawType passwordValue = new RawType(passValue, prismContext);
passwordDelta.getValue().add(passwordValue);
userDelta.getItemDelta().add(passwordDelta);
// WHEN
OperationResultType result = modifyObjectViaModelWS(userDelta);
// THEN
assertUserPasswordChange(NEW_PASSWORD, result);
}
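/**
 * Common assertions for the password change tests: the user in the repository keeps its other
 * properties and links, carries the expected password, and the LDAP password differs from all
 * previously observed values.
 */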
private void assertUserPasswordChange(String expectedUserPassword, OperationResultType result) throws JAXBException, ObjectNotFoundException, SchemaException, DirectoryException, EncryptionException {
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
display("repository user", repoUser);
// Check if nothing else was modified
PrismAsserts.assertEqualsPolyString("wrong repo fullName", "Cpt. Jack Sparrow", repoUserType.getFullName());
PrismAsserts.assertEqualsPolyString("wrong repo locality", "somewhere", repoUserType.getLocality());
// Check if appropriate accountRef is still there
assertLinks(repoUser, 2);
assertLinked(repoUser, accountShadowOidOpendj);
assertLinked(repoUser, accountShadowOidDerby);
assertPassword(repoUser, expectedUserPassword);
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
display("repository shadow", repoShadow);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertNotNull(repoShadowType);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
Entry entry = assertOpenDJAccountJack(uid, "jack");
String ldapPasswordAfter = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull(ldapPasswordAfter);
display("LDAP password after change", ldapPasswordAfter);
assertFalse("No change in password (original)", ldapPasswordAfter.equals(originalJacksLdapPassword));
if (lastJacksLdapPassword != null) {
assertFalse("No change in password (last)", ldapPasswordAfter.equals(lastJacksLdapPassword));
}
lastJacksLdapPassword = ldapPasswordAfter;
}
@Test
public void test027ModifyAccountDj() throws Exception {
final String TEST_NAME = "test027ModifyAccountDj";
testModifyAccountDjRoomNumber(TEST_NAME, REQUEST_ACCOUNT_MODIFY_ROOM_NUMBER_FILE, "quarterdeck");
}
@Test
public void test028ModifyAccountDjExplicitType() throws Exception {
final String TEST_NAME = "test028ModifyAccountDjExplicitType";
testModifyAccountDjRoomNumber(TEST_NAME, REQUEST_ACCOUNT_MODIFY_ROOM_NUMBER_EXPLICIT_TYPE_FILE, "upperdeck");
}
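/**
 * Common body for test027 and test028: applies the given account modification request and
 * checks that the roomNumber attribute ends up with the expected value in LDAP.
 */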
public void testModifyAccountDjRoomNumber(final String TEST_NAME, File reqFile, String expectedVal) throws Exception {
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(reqFile, ObjectDeltaType.class);
objectChange.setOid(accountShadowOidOpendj);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
OperationResult repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repository shadow", repoShadow);
AssertJUnit.assertNotNull(repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
Entry jackLdapEntry = assertOpenDJAccountJack(uid, "jack");
OpenDJController.assertAttribute(jackLdapEntry, "roomNumber", expectedVal);
}
@Test
public void test029ModifyAccountDjBadPath() throws Exception {
final String TEST_NAME = "test029ModifyAccountDjBadPath";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_ACCOUNT_MODIFY_BAD_PATH_FILE, ObjectDeltaType.class);
objectChange.setOid(accountShadowOidOpendj);
OperationResultType result;
try {
// WHEN
result = modifyObjectViaModelWS(objectChange);
AssertJUnit.fail("Unexpected success");
} catch (FaultMessage f) {
// this is expected
FaultType faultInfo = f.getFaultInfo();
result = faultInfo.getOperationResult();
}
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertFailure(result);
OperationResult repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repository shadow", repoShadow);
AssertJUnit.assertNotNull(repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
Entry jackLdapEntry = assertOpenDJAccountJack(uid, "jack");
OpenDJController.assertAttribute(jackLdapEntry, "roomNumber", "upperdeck");
}
/**
* Try to disable user. As the user has an account, the account should be disabled as well.
*/
@Test
public void test030DisableUser() throws Exception {
final String TEST_NAME = "test030DisableUser";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ACTIVATION_DISABLE_FILENAME, ObjectDeltaType.class);
Entry entry = openDJController.searchByUid("jack");
assertOpenDJAccountJack(entry, "jack");
String pwpAccountDisabled = OpenDJController.getAttributeValue(entry, "ds-pwp-account-disabled");
display("ds-pwp-account-disabled before change", pwpAccountDisabled);
assertTrue("LDAP account is not enabled (precondition)", openDJController.isAccountEnabled(entry));
assertNoRepoCache();
// WHEN
displayWhen(TEST_NAME);
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
displayThen(TEST_NAME);
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
display("repository user", repoUser);
UserType repoUserType = repoUser.asObjectable();
// Check if nothing else was modified
assertEqualsPolyString("wrong repo fullName", "Cpt. Jack Sparrow", repoUserType.getFullName());
assertEqualsPolyString("wrong repo locality", "somewhere", repoUserType.getLocality());
// Check if appropriate accountRef is still there
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(2, accountRefs.size());
for (ObjectReferenceType accountRef : accountRefs) {
assertTrue("No OID in "+accountRef+" in "+repoUserType,
accountRef.getOid().equals(accountShadowOidOpendj) ||
accountRef.getOid().equals(accountShadowOidDerby));
}
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
display("repo shadow", repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
AssertJUnit.assertNotNull(repoShadowType);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
entry = openDJController.searchAndAssertByEntryUuid(uid);
assertOpenDJAccountJack(entry, "jack");
pwpAccountDisabled = OpenDJController.getAttributeValue(entry, "ds-pwp-account-disabled");
display("ds-pwp-account-disabled after change", pwpAccountDisabled);
assertFalse("LDAP account was not disabled", openDJController.isAccountEnabled(entry));
// Use getObject to test fetch of complete shadow
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
assertNoRepoCache();
// WHEN
displayWhen(TEST_NAME);
modelWeb.getObject(ObjectTypes.SHADOW.getTypeQName(), accountShadowOidOpendj,
options, objectHolder, resultHolder);
// THEN
displayThen(TEST_NAME);
assertNoRepoCache();
displayJaxb("getObject result", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
ShadowType modelShadow = (ShadowType) objectHolder.value;
display("Shadow (model)", modelShadow);
AssertJUnit.assertNotNull(modelShadow);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, modelShadow.getResourceRef().getOid());
assertAttributeNotNull(modelShadow, getOpenDjPrimaryIdentifierQName());
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "uid", "jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "givenName", "Jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "sn", "Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "cn", "Cpt. Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "displayName", "Cpt. Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "l", "somewhere");
assertNotNull("The account activation is null in the shadow", modelShadow.getActivation());
assertNotNull("The account activation status was not present in shadow", modelShadow.getActivation().getAdministrativeStatus());
assertEquals("The account was not disabled in the shadow", ActivationStatusType.DISABLED, modelShadow.getActivation().getAdministrativeStatus());
}
/**
* Try to enable user after it has been disabled. As the user has an account, the account should be enabled as well.
*/
@Test
public void test031EnableUser() throws Exception {
final String TEST_NAME = "test031EnableUser";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ACTIVATION_ENABLE_FILENAME, ObjectDeltaType.class);
assertNoRepoCache();
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
display("repo user", repoUser);
// Check if nothing else was modified
PrismAsserts.assertEqualsPolyString("wrong repo fullName", "Cpt. Jack Sparrow", repoUser.getFullName());
PrismAsserts.assertEqualsPolyString("wrong repo locality", "somewhere", repoUser.getLocality());
// Check if appropriate accountRef is still there
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals(2, accountRefs.size());
for (ObjectReferenceType accountRef : accountRefs) {
assertTrue("No OID in "+accountRef+" in "+repoUser,
accountRef.getOid().equals(accountShadowOidOpendj) ||
accountRef.getOid().equals(accountShadowOidDerby));
}
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repo shadow", repoShadowType);
AssertJUnit.assertNotNull(repoShadowType);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Use getObject to test fetch of complete shadow
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
assertNoRepoCache();
// WHEN
modelWeb.getObject(ObjectTypes.SHADOW.getTypeQName(), accountShadowOidOpendj,
options, objectHolder, resultHolder);
// THEN
assertNoRepoCache();
displayJaxb("getObject result", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
ShadowType modelShadow = (ShadowType) objectHolder.value;
display("Shadow (model)", modelShadow);
AssertJUnit.assertNotNull(modelShadow);
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, modelShadow.getResourceRef().getOid());
assertAttributeNotNull(modelShadow, getOpenDjPrimaryIdentifierQName());
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "uid", "jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "givenName", "Jack");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "sn", "Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "cn", "Cpt. Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "displayName", "Cpt. Jack Sparrow");
assertAttribute(resourceTypeOpenDjrepo, modelShadow, "l", "somewhere");
assertNotNull("The account activation is null in the shadow", modelShadow.getActivation());
assertNotNull("The account activation status was not present in shadow", modelShadow.getActivation().getAdministrativeStatus());
assertEquals("The account was not enabled in the shadow", ActivationStatusType.ENABLED, modelShadow.getActivation().getAdministrativeStatus());
// Check if LDAP account was updated
Entry entry = openDJController.searchAndAssertByEntryUuid(uid);
assertOpenDJAccountJack(entry, "jack");
// The value of ds-pwp-account-disabled should have been removed
String pwpAccountDisabled = OpenDJController.getAttributeValue(entry, "ds-pwp-account-disabled");
System.out.println("ds-pwp-account-disabled after change: " + pwpAccountDisabled);
assertTrue("LDAP account was not enabled", openDJController.isAccountEnabled(entry));
}
/**
* Unlink account by removing the accountRef from the user.
* The account will not be deleted, just the association to user will be broken.
*/
@Test
public void test040UnlinkDerbyAccountFromUser() throws FileNotFoundException, JAXBException, FaultMessage,
ObjectNotFoundException, SchemaException, DirectoryException, SQLException {
TestUtil.displayTestTitle("test040UnlinkDerbyAccountFromUser");
// GIVEN
ObjectDeltaType objectChange = new ObjectDeltaType();
objectChange.setOid(USER_JACK_OID);
ItemDeltaType modificationDeleteAccountRef = new ItemDeltaType();
modificationDeleteAccountRef.setModificationType(ModificationTypeType.DELETE);
ObjectReferenceType accountRefToDelete = new ObjectReferenceType();
accountRefToDelete.setOid(accountShadowOidDerby);
RawType modificationValue = new RawType(prismContext.xnodeSerializer().root(new QName("dummy")).serializeRealValue(accountRefToDelete).getSubnode(), prismContext);
modificationDeleteAccountRef.getValue().add(modificationValue);
modificationDeleteAccountRef.setPath(new ItemPathType(UserType.F_LINK_REF));
objectChange.getItemDelta().add(modificationDeleteAccountRef);
objectChange.setChangeType(ChangeTypeType.MODIFY);
objectChange.setObjectType(UserType.COMPLEX_TYPE);
displayJaxb("modifyObject input", objectChange, new QName(SchemaConstants.NS_C, "change"));
assertNoRepoCache();
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
// only OpenDJ account should be left now
assertEquals(1, accountRefs.size());
ObjectReferenceType ref = accountRefs.get(0);
assertEquals("Wrong OID in accountRef in "+repoUser, accountShadowOidOpendj, ref.getOid());
}
/**
* Delete the shadow which will cause deletion of associated account.
* The account was unlinked in the previous test, therefore no operation with user is needed.
*/
@Test
public void test041DeleteDerbyAccount() throws FileNotFoundException, JAXBException, FaultMessage,
ObjectNotFoundException, SchemaException, DirectoryException, SQLException {
TestUtil.displayTestTitle("test041DeleteDerbyAccount");
// GIVEN
assertNoRepoCache();
// WHEN
OperationResultType result = deleteObjectViaModelWS(ObjectTypes.SHADOW.getTypeQName(), accountShadowOidDerby);
// THEN
assertNoRepoCache();
displayJaxb("deleteObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("deleteObject has failed", result);
// Check if shadow was deleted
OperationResult repoResult = new OperationResult("getObject");
try {
repositoryService.getObject(ShadowType.class, accountShadowOidDerby,
null, repoResult);
AssertJUnit.fail("Shadow was not deleted");
} catch (ObjectNotFoundException ex) {
display("Caught expected exception from getObject(shadow): " + ex);
}
// check if account was deleted in DB Table
Statement stmt = derbyController.getExecutedStatementWhereLoginName(USER_JACK_DERBY_LOGIN);
ResultSet rs = stmt.getResultSet();
System.out.println("RS: " + rs);
assertFalse("Account was not deleted in database", rs.next());
}
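/**
* Builds a DELETE object delta for the given type and OID and executes it
* through the model web service, returning the result of the delete operation.
*/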
private OperationResultType deleteObjectViaModelWS(QName typeQName, String oid) throws FaultMessage {
ObjectDeltaListType deltaList = new ObjectDeltaListType();
ObjectDeltaType objectDelta = new ObjectDeltaType();
objectDelta.setOid(oid);
objectDelta.setObjectType(typeQName);
objectDelta.setChangeType(ChangeTypeType.DELETE);
deltaList.getDelta().add(objectDelta);
ObjectDeltaOperationListType list = modelWeb.executeChanges(deltaList, null);
return getOdoFromDeltaOperationList(list, objectDelta).getExecutionResult();
}
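/**
* Rename user jack to jsparrow. As the user has an account, the rename
* should be propagated to the LDAP account as well.
*/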
@Test
public void test047RenameUser() throws Exception {
final String TEST_NAME = "test047RenameUser";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_NAME_FILENAME, ObjectDeltaType.class);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
display("repository user", repoUser);
PrismAsserts.assertEqualsPolyString("wrong value for User name", "jsparrow", repoUserType.getName());
PrismAsserts.assertEqualsPolyString("wrong value for User fullName", "Cpt. Jack Sparrow", repoUserType.getFullName());
PrismAsserts.assertEqualsPolyString("wrong value for User locality", "somewhere", repoUserType.getLocality());
assertEquals("wrong value for employeeNumber", "1", repoUserType.getEmployeeNumber());
// Check if appropriate accountRef is still there
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.iterator().next();
assertEquals("Wrong OID in "+accountRef+" in "+repoUserType,
accountShadowOidOpendj, accountRef.getOid());
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repository shadow", repoShadow);
AssertJUnit.assertNotNull(repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
assertOpenDJAccountJack(uid, "jsparrow");
}
/**
* We are going to modify the user. As the user has an account, the user
* changes should be also applied to the account (by schemaHandling).
*/
@Test
public void test048ModifyUserRemoveGivenName() throws Exception {
final String TEST_NAME = "test048ModifyUserRemoveGivenName";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_GIVENNAME_FILENAME, ObjectDeltaType.class);
displayJaxb("objectChange:", objectChange, SchemaConstants.T_OBJECT_DELTA);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result:", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
display("repository user", repoUser);
PrismAsserts.assertEqualsPolyString("wrong value for fullName", "Cpt. Jack Sparrow", repoUserType.getFullName());
assertNull("Value for givenName still present", repoUserType.getGivenName());
// Check if appropriate accountRef is still there
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.iterator().next();
assertEquals("Wrong OID in "+accountRef+" in "+repoUserType, accountShadowOidOpendj, accountRef.getOid());
// Check if shadow is still in the repo and that it is untouched
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
repoResult.computeStatus();
TestUtil.assertSuccess("getObject(repo) has failed", repoResult);
display("repository shadow", repoShadow);
AssertJUnit.assertNotNull(repoShadow);
ShadowType repoShadowType = repoShadow.asObjectable();
AssertJUnit.assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check attributes in the shadow: should be only identifiers (ICF UID)
String uid = checkRepoShadow(repoShadow);
// Check if LDAP account was updated
Entry entry = openDJController.searchAndAssertByEntryUuid(uid);
assertOpenDJAccountJack(entry, "jsparrow", null);
}
/**
* The user should have an account now. Let's try to delete the user. The
* account should be gone as well.
*
* @throws JAXBException
*/
@Test
public void test049DeleteUser() throws SchemaException, FaultMessage, DirectoryException, JAXBException {
TestUtil.displayTestTitle("test049DeleteUser");
// GIVEN
assertNoRepoCache();
// WHEN
OperationResultType result = deleteObjectViaModelWS(ObjectTypes.USER.getTypeQName(), USER_JACK_OID);
// THEN
assertNoRepoCache();
displayJaxb("deleteObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("deleteObject has failed", result);
// User should be gone from the repository
OperationResult repoResult = new OperationResult("getObject");
try {
repositoryService.getObject(UserType.class, USER_JACK_OID, null, repoResult);
AssertJUnit.fail("User still exists in repo after delete");
} catch (ObjectNotFoundException e) {
// This is expected
}
// Account shadow should be gone from the repository
repoResult = new OperationResult("getObject");
try {
repositoryService.getObject(ShadowType.class, accountShadowOidOpendj, null, repoResult);
AssertJUnit.fail("Shadow still exists in repo after delete");
} catch (ObjectNotFoundException e) {
// This is expected, but check also the result
AssertJUnit.assertFalse("getObject failed as expected, but the result indicates success",
repoResult.isSuccess());
}
// Account should be deleted from LDAP
InternalSearchOperation op = openDJController.getInternalConnection().processSearch(
"dc=example,dc=com", SearchScope.WHOLE_SUBTREE, DereferencePolicy.NEVER_DEREF_ALIASES, 100,
100, false, "(uid=" + USER_JACK_LDAP_UID + ")", null);
AssertJUnit.assertEquals(0, op.getEntriesSent());
}
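/**
* Add user guybrush and assign the "pirate" role to him. The role implies an
* account, so an OpenDJ account should be provisioned with the attribute
* values defined by the role.
*/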
@Test
public void test100AssignRolePirate() throws Exception {
final String TEST_NAME = "test100AssignRolePirate";
displayTestTitle(TEST_NAME);
// GIVEN
// IMPORTANT! Assignment enforcement is FULL now
setAssignmentEnforcement(AssignmentPolicyEnforcementType.FULL);
// This is not redundant. It checks that the previous command set the policy correctly
assertSyncSettingsAssignmentPolicyEnforcement(AssignmentPolicyEnforcementType.FULL);
PrismObject<UserType> user = PrismTestUtil.parseObject(USER_GUYBRUSH_FILE);
UserType userType = user.asObjectable();
// Encrypt the password
protector.encrypt(userType.getCredentials().getPassword().getValue());
OperationResultType result = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(result);
Holder<String> oidHolder = new Holder<>();
assertNoRepoCache();
addObjectViaModelWS(userType, null, oidHolder, resultHolder);
assertNoRepoCache();
TestUtil.assertSuccess("addObject has failed", resultHolder.value);
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ROLE_PIRATE_FILENAME, ObjectDeltaType.class);
// WHEN
result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
accountShadowOidGuybrushOpendj = accountRef.getOid();
assertFalse(accountShadowOidGuybrushOpendj.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadowType);
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
accountGuybrushOpendjEntryUuuid = checkRepoShadow(repoShadow);
// check if account was created in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Bloody Pirate");
OpenDJController.assertAttribute(entry, "businessCategory", "loot", "murder");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword was not set on create", guybrushPassword);
// TODO: Derby
}
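/**
* Check that the account created by the role assignment is correctly linked:
* findShadowOwner should return user guybrush as the owner of the shadow.
*/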
@Test
public void test101AccountOwnerAfterRole() throws Exception {
final String TEST_NAME = "test101AccountOwnerAfterRole";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<UserType> userHolder = new Holder<>();
// WHEN
modelWeb.findShadowOwner(accountShadowOidGuybrushOpendj, userHolder, resultHolder);
// THEN
TestUtil.assertSuccess("listAccountShadowOwner has failed (result)", resultHolder.value);
UserType user = userHolder.value;
assertNotNull("No owner", user);
assertEquals(USER_GUYBRUSH_OID, user.getOid());
System.out.println("Account " + accountShadowOidGuybrushOpendj + " has owner " + ObjectTypeUtil.toShortString(user));
}
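/**
* Assign the "captain" role in addition to the pirate role. The existing
* account should be updated with the additional attribute values defined
* by the captain role.
*/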
@Test
public void test102AssignRoleCaptain() throws Exception {
final String TEST_NAME = "test102AssignRoleCaptain";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ROLE_CAPTAIN_1_FILENAME, ObjectDeltaType.class);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
assertEquals(accountShadowOidGuybrushOpendj, accountRef.getOid());
// Check if shadow is still in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> aObject = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadow = aObject.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadow);
assertNotNull(repoShadow);
assertEquals(RESOURCE_OPENDJ_OID, repoShadow.getResourceRef().getOid());
// check if account is still in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Bloody Pirate", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "loot", "murder", "cruise");
// Expression in the role taking that from the user
OpenDJController.assertAttribute(entry, "destinationIndicator", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "departmentNumber", "Department of Guybrush");
// Expression in the role taking that from the assignment
OpenDJController.assertAttribute(entry, "physicalDeliveryOfficeName", "The Sea Monkey");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword disappeared", guybrushPassword);
// TODO: Derby
}
/**
* Assign the same "captain" role again, this time with a slightly different assignment parameters.
*/
@Test
public void test103AssignRoleCaptainAgain() throws Exception {
final String TEST_NAME = "test103AssignRoleCaptainAgain";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ROLE_CAPTAIN_2_FILENAME, ObjectDeltaType.class);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
assertEquals(accountShadowOidGuybrushOpendj, accountRef.getOid());
// Check if shadow is still in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> aObject = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadow = aObject.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadow);
assertNotNull(repoShadow);
assertEquals(RESOURCE_OPENDJ_OID, repoShadow.getResourceRef().getOid());
// check if account is still in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Bloody Pirate", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "loot", "murder", "cruise");
// Expression in the role taking that from the user
OpenDJController.assertAttribute(entry, "destinationIndicator", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "departmentNumber", "Department of Guybrush");
// Expression in the role taking that from the assignments (both of them)
OpenDJController.assertAttribute(entry, "physicalDeliveryOfficeName", "The Sea Monkey", "The Dainty Lady");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword disappeared", guybrushPassword);
// TODO: Derby
}
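/**
* Modify account attributes directly via a shadow delta, then delete the
* extra "A" value of the employeeType attribute using the model service.
* Role-provided attribute values should remain intact in LDAP.
*/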
@Test
public void test105ModifyAccount() throws Exception {
final String TEST_NAME = "test105ModifyAccount";
displayTestTitle(TEST_NAME);
// GIVEN
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_ACCOUNT_MODIFY_ATTRS_FILE, ObjectDeltaType.class);
objectChange.setOid(accountShadowOidGuybrushOpendj);
// WHEN
OperationResultType result = modifyObjectViaModelWS(objectChange);
Task task = taskManager.createTaskInstance();
OperationResult parentResult = new OperationResult(TEST_NAME + "-get after first modify");
PrismObject<ShadowType> shadow = modelService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj, null, task, parentResult);
assertNotNull("shadow must not be null", shadow);
ShadowType shadowType = shadow.asObjectable();
QName employeeTypeQName = new QName(resourceTypeOpenDjrepo.getNamespace(), "employeeType");
ItemPath employeeTypePath = ItemPath.create(ShadowType.F_ATTRIBUTES, employeeTypeQName);
PrismProperty item = shadow.findProperty(employeeTypePath);
PropertyDelta deleteDelta = prismContext.deltaFactory().property().create(ShadowType.F_ATTRIBUTES, item.getDefinition().getItemName(), item.getDefinition());
Collection<PrismPropertyValue> values = item.getValues();
for (PrismPropertyValue val : values) {
if ("A".equals(val.getValue())) {
deleteDelta.addValueToDelete(val.clone());
}
}
ObjectDelta delta = prismContext.deltaFactory().object().create(ShadowType.class, ChangeType.MODIFY);
delta.addModification(deleteDelta);
delta.setOid(accountShadowOidGuybrushOpendj);
Collection<ObjectDelta<? extends ObjectType>> deltas = new ArrayList<>();
deltas.add(delta);
LOGGER.info("-------->>EXECUTE DELETE MODIFICATION<<------------");
modelService.executeChanges(deltas, null, task, parentResult);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// check if LDAP account was modified
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
OpenDJController.assertAttribute(entry, "roomNumber", "captain's cabin");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Bloody Pirate", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "loot", "murder", "cruise", "fighting", "capsize");
// Expression in the role taking that from the user
OpenDJController.assertAttribute(entry, "destinationIndicator", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "departmentNumber", "Department of Guybrush");
// Expression in the role taking that from the assignments (both of them)
OpenDJController.assertAttribute(entry, "physicalDeliveryOfficeName", "The Sea Monkey", "The Dainty Lady");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword disappeared", guybrushPassword);
}
/**
* Judge role excludes pirate role. This assignment should fail.
*/
@Test
public void test104AssignRoleJudge() throws Exception {
final String TEST_NAME = "test104AssignRoleJudge";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResultType result = new OperationResultType();
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ROLE_JUDGE_FILENAME, ObjectDeltaType.class);
try {
// WHEN
result = modifyObjectViaModelWS(objectChange);
// THEN
AssertJUnit.fail("Expected a failure when assigning conflicting roles, but the operation succeeded");
} catch (FaultMessage f) {
// This is expected; the fault carries the result of the failed operation
// TODO: check if the fault is the right one
result = f.getFaultInfo().getOperationResult();
}
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertFailure(result);
// Check if user object remain unmodified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals("Unexpected number or accountRefs", 1, accountRefs.size());
}
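/**
* Unassign the "pirate" role. The account should stay (the captain role is
* still assigned), but the attribute values that came only from the pirate
* role should be removed from LDAP.
*/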
@Test
public void test107UnassignRolePirate() throws Exception {
final String TEST_NAME = "test107UnassignRolePirate";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResultType result = new OperationResultType();
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_DELETE_ROLE_PIRATE_FILENAME, ObjectDeltaType.class);
// WHEN
result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
assertEquals(accountShadowOidGuybrushOpendj, accountRef.getOid());
// Check if shadow is still in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> aObject = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadow = aObject.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadow);
assertNotNull(repoShadow);
assertEquals(RESOURCE_OPENDJ_OID, repoShadow.getResourceRef().getOid());
// check if account is still in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "cruise", "fighting", "capsize");
// Expression in the role taking that from the user
OpenDJController.assertAttribute(entry, "destinationIndicator", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "departmentNumber", "Department of Guybrush");
// Expression in the role taking that from the assignments (both of them)
OpenDJController.assertAttribute(entry, "physicalDeliveryOfficeName", "The Sea Monkey", "The Dainty Lady");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword disappeared", guybrushPassword);
// TODO: Derby
}
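/**
* Unassign the first "captain" assignment. The role is still assigned once
* more (test103), so the account should remain, losing only the values that
* came from the removed assignment.
*/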
@Test
public void test108UnassignRoleCaptain() throws Exception {
final String TEST_NAME = "test108UnassignRoleCaptain";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResultType result = new OperationResultType();
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_DELETE_ROLE_CAPTAIN_1_FILENAME, ObjectDeltaType.class);
// WHEN
result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUser);
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
assertEquals(accountShadowOidGuybrushOpendj, accountRef.getOid());
// Check if shadow is still in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
display("Shadow (repository)", repoShadow);
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
// check if account is still in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "cruise", "fighting", "capsize");
// Expression in the role taking that from the user
OpenDJController.assertAttribute(entry, "destinationIndicator", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "departmentNumber", "Department of Guybrush");
// Expression in the role taking that from the assignments (both of them)
OpenDJController.assertAttribute(entry, "physicalDeliveryOfficeName", "The Dainty Lady");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword disappeared", guybrushPassword);
// TODO: Derby
}
/**
* Captain role was assigned twice. It has to also be unassigned twice.
*/
@Test
public void test109UnassignRoleCaptainAgain() throws Exception {
final String TEST_NAME = "test109UnassignRoleCaptainAgain";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResultType result = new OperationResultType();
assertNoRepoCache();
ObjectDeltaType objectChange = unmarshallValueFromFile(
REQUEST_USER_MODIFY_DELETE_ROLE_CAPTAIN_2_FILENAME, ObjectDeltaType.class);
// WHEN
result = modifyObjectViaModelWS(objectChange);
// THEN
assertNoRepoCache();
displayJaxb("modifyObject result", result, SchemaConstants.C_RESULT);
// TODO: operation result from repository.getObject is unknown... find out why
// assertSuccess("modifyObject has failed", result);
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> repoUser = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUserType = repoUser.asObjectable();
repoResult.computeStatus();
display("User (repository)", repoUserType);
List<ObjectReferenceType> accountRefs = repoUserType.getLinkRef();
assertEquals(0, accountRefs.size());
// Check if shadow was deleted from the repo
repoResult = new OperationResult("getObject");
try {
repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
AssertJUnit.fail("Account shadow was not deleted from repo");
} catch (ObjectNotFoundException ex) {
// This is expected
}
// check if account was deleted from LDAP
Entry entry = openDJController.searchByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
assertNull("LDAP account was not deleted", entry);
// TODO: Derby
}
// Synchronization tests
/**
* Test initialization of synchronization. It will create a cycle task and
* check if the cycle executes No changes are synchronized yet.
*/
@Test
public void test300LiveSyncInit() throws Exception {
final String TEST_NAME = "test300LiveSyncInit";
displayTestTitle(TEST_NAME);
// Now it is the right time to add task definition to the repository
// We don't want it there any sooner, as it may interfere with the
// previous tests
checkAllShadows();
// IMPORTANT! Assignment enforcement is POSITIVE now
setAssignmentEnforcement(AssignmentPolicyEnforcementType.POSITIVE);
// This is not redundant. It checks that the previous command set the policy correctly
assertSyncSettingsAssignmentPolicyEnforcement(AssignmentPolicyEnforcementType.POSITIVE);
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
repoAddObjectFromFile(TASK_OPENDJ_SYNC_FILENAME, result);
// We need to wait for a sync interval, so the task scanner has a chance
// to pick up this
// task
waitFor("Waiting for task manager to pick up the task", new Checker() {
@Override
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(TASK_OPENDJ_SYNC_OID, result);
display("Task while waiting for task manager to pick up the task", task);
// wait until the task is picked up
return task.getLastRunFinishTimestamp() != null;
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, 20000);
// Check task status
Task task = taskManager.getTask(TASK_OPENDJ_SYNC_OID, retrieveTaskResult(), result);
result.computeStatus();
display("getTask result", result);
TestUtil.assertSuccess("getTask has failed", result);
AssertJUnit.assertNotNull(task);
display("Task after pickup", task);
PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, TASK_OPENDJ_SYNC_OID, null, result);
display("Task after pickup in the repository", o.asObjectable());
// .. it should be running
AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
// .. and last run should not be zero
assertNotNull("No lastRunStartTimestamp", task.getLastRunStartTimestamp());
assertFalse("Zero lastRunStartTimestamp", task.getLastRunStartTimestamp().longValue() == 0);
assertNotNull("No lastRunFinishedTimestamp", task.getLastRunFinishTimestamp());
assertFalse("Zero lastRunFinishedTimestamp", task.getLastRunFinishTimestamp().longValue() == 0);
// Test for extension. This will also roughly test extension processor
// and schema processor
PrismContainer<?> taskExtension = task.getExtensionOrClone();
AssertJUnit.assertNotNull(taskExtension);
display("Task extension", taskExtension);
PrismProperty<String> shipStateProp = taskExtension.findProperty(MY_SHIP_STATE);
AssertJUnit.assertEquals("Wrong 'shipState' property value", "capsized", shipStateProp.getValue().getValue());
PrismProperty<Integer> deadProp = taskExtension.findProperty(MY_DEAD);
PrismPropertyValue<Integer> deadPVal = deadProp.getValues().iterator().next();
AssertJUnit.assertEquals("Wrong 'dead' property class", Integer.class, deadPVal.getValue().getClass());
AssertJUnit.assertEquals("Wrong 'dead' property value", Integer.valueOf(42), deadPVal.getValue());
// The progress should be 0, as there were no changes yet
AssertJUnit.assertEquals(0, task.getProgress());
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task.getResult();
AssertJUnit.assertNotNull(taskResult);
assertTrue("Task result is not a success, it is "+taskResult, taskResult.isSuccess());
final Object tokenAfter = findSyncToken(task);
display("Sync token after", tokenAfter.toString());
lastSyncToken = (Integer)tokenAfter;
checkAllShadows();
// Try without options. The results should NOT be there
// MID-4670
task = taskManager.getTask(TASK_OPENDJ_SYNC_OID, null, result);
taskResult = task.getResult();
AssertJUnit.assertNull("Unexpected task result", taskResult);
}
/**
* Create LDAP object. That should be picked up by liveSync and a user
* should be created in repo.
*/
@Test
public void test301LiveSyncCreate() throws Exception {
final String TEST_NAME = "test301LiveSyncCreate";
displayTestTitle(TEST_NAME);
// Sync task should be running (tested in previous test), so just create
// new LDAP object.
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
final Task syncCycle = taskManager.getTask(TASK_OPENDJ_SYNC_OID, result);
AssertJUnit.assertNotNull(syncCycle);
final Object tokenBefore = findSyncToken(syncCycle);
display("Sync token before", tokenBefore.toString());
// WHEN
displayWhen(TEST_NAME);
Entry entry = openDJController.addEntryFromLdifFile(LDIF_WILL_FILENAME);
display("Entry from LDIF", entry);
// Wait a bit to give the sync cycle time to detect the change
basicWaitForSyncChangeDetection(syncCycle, tokenBefore, 2, result);
// THEN
displayThen(TEST_NAME);
// Search for the user that should be created now
UserType user = searchUserByName(WILL_NAME);
PrismAsserts.assertEqualsPolyString("Wrong name.", WILL_NAME, user.getName());
assertNotNull(user.getLinkRef());
assertFalse(user.getLinkRef().isEmpty());
// TODO: more checks
assertAndStoreSyncTokenIncrement(syncCycle, 2);
checkAllShadows();
}
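/**
* Modify the LDAP entry directly. The change should be picked up by liveSync
* and propagated to the user in the repository (inbound change of givenName).
*/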
@Test
public void test302LiveSyncModify() throws Exception {
final String TEST_NAME = "test302LiveSyncModify";
displayTestTitle(TEST_NAME);
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
final Task syncCycle = taskManager.getTask(TASK_OPENDJ_SYNC_OID, result);
AssertJUnit.assertNotNull(syncCycle);
int tokenBefore = findSyncToken(syncCycle);
display("Sync token before", tokenBefore);
// WHEN
display("Modifying LDAP entry");
ChangeRecordEntry entry = openDJController.executeLdifChange(LDIF_WILL_MODIFY_FILE);
// THEN
display("Entry from LDIF", entry);
// Wait a bit to give the sync cycle time to detect the change
basicWaitForSyncChangeDetection(syncCycle, tokenBefore, 1, result);
// Search for the user that should be created now
UserType user = searchUserByName(WILL_NAME);
PrismAsserts.assertEqualsPolyString("Wrong name.", WILL_NAME, user.getName());
PrismAsserts.assertEqualsPolyString("wrong givenName", "asdf", user.getGivenName());
assertAndStoreSyncTokenIncrement(syncCycle, 1);
checkAllShadows();
}
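/**
* Add user e to the repository first, then create a matching LDAP account.
* LiveSync should correlate the account to the existing user and link it
* (accountRef added) instead of creating a new user.
*/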
@Test
public void test303LiveSyncLink() throws Exception {
final String TEST_NAME = "test303LiveSyncLink";
displayTestTitle(TEST_NAME);
// GIVEN
assertNoRepoCache();
PrismObject<UserType> user = PrismTestUtil.parseObject(USER_E_LINK_ACTION_FILE);
UserType userType = user.asObjectable();
final String userOid = userType.getOid();
// Encrypt e's password
protector.encrypt(userType.getCredentials().getPassword().getValue());
// create user in repository
OperationResultType resultType = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(resultType);
Holder<String> oidHolder = new Holder<>();
display("Adding user object", userType);
addObjectViaModelWS(userType, null, oidHolder, resultHolder);
//check results
assertNoRepoCache();
displayJaxb("addObject result:", resultHolder.value, SchemaConstants.C_RESULT);
TestUtil.assertSuccess("addObject has failed", resultHolder.value);
//WHEN
displayWhen(TEST_NAME);
//create account for e which should be correlated
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
final Task syncCycle = taskManager.getTask(TASK_OPENDJ_SYNC_OID, result);
AssertJUnit.assertNotNull(syncCycle);
int tokenBefore = findSyncToken(syncCycle);
display("Sync token before", tokenBefore);
Entry entry = openDJController.addEntryFromLdifFile(LDIF_E_FILENAME_LINK);
display("Entry from LDIF", entry);
// Wait a bit to give the sync cycle time to detect the change
basicWaitForSyncChangeDetection(syncCycle, tokenBefore, 1, result);
// THEN
displayThen(TEST_NAME);
//check user and account ref
userType = searchUserByName("e");
List<ObjectReferenceType> accountRefs = userType.getLinkRef();
assertEquals("Account ref not found, or found too many", 1, accountRefs.size());
//check account defined by account ref
String accountOid = accountRefs.get(0).getOid();
ShadowType account = searchAccountByOid(accountOid);
assertEqualsPolyString("Name doesn't match", "uid=e,ou=People,dc=example,dc=com", account.getName());
assertAndStoreSyncTokenIncrement(syncCycle, 1);
checkAllShadows();
}
/**
* Create LDAP object. That should be picked up by liveSync and a user
* should be created in repo.
* Also location (ldap l) should be updated through outbound
*/
@Test
public void test304LiveSyncCreateNoLocation() throws Exception {
final String TEST_NAME = "test304LiveSyncCreateNoLocation";
displayTestTitle(TEST_NAME);
// Sync task should be running (tested in previous test), so just create
// new LDAP object.
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
final Task syncCycle = taskManager.getTask(TASK_OPENDJ_SYNC_OID, result);
AssertJUnit.assertNotNull(syncCycle);
int tokenBefore = findSyncToken(syncCycle);
display("Sync token before", tokenBefore);
// WHEN
Entry entry = openDJController.addEntryFromLdifFile(LDIF_WILL_WITHOUT_LOCATION_FILENAME);
display("Entry from LDIF", entry);
// THEN
// Wait a bit to give the sync cycle time to detect the change
basicWaitForSyncChangeDetection(syncCycle, tokenBefore, 3, result, 60000);
// Search for the user that should be created now
final String userName = "wturner1";
UserType user = searchUserByName(userName);
List<ObjectReferenceType> accountRefs = user.getLinkRef();
assertEquals("Account ref not found, or found too many", 1, accountRefs.size());
//check account defined by account ref
String accountOid = accountRefs.get(0).getOid();
ShadowType account = searchAccountByOid(accountOid);
assertEqualsPolyString("Name doesn't match", "uid=" + userName + ",ou=People,dc=example,dc=com", account.getName());
// assertEquals("Name doesn't match", "uid=" + userName + ",ou=People,dc=example,dc=com", account.getName());
Collection<String> localities = getAttributeValues(account, new QName(RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS.getNamespaceURI(), "l"));
assertNotNull("null value list for attribute 'l'", localities);
assertEquals("unexpected number of values of attribute 'l'", 1, localities.size());
assertEquals("Locality doesn't match", "middle of nowhere", localities.iterator().next());
assertAndStoreSyncTokenIncrement(syncCycle, 3);
checkAllShadows();
}
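/**
* Asserts that the sync token advanced by exactly the given increment and
* remembers the new value for subsequent checks.
*/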
private void assertAndStoreSyncTokenIncrement(Task syncCycle, int increment) {
final Object tokenAfter = findSyncToken(syncCycle);
display("Sync token after", tokenAfter.toString());
int tokenAfterInt = (Integer)tokenAfter;
int expectedToken = lastSyncToken + increment;
lastSyncToken = tokenAfterInt;
assertEquals("Unexpected sync toke value", expectedToken, tokenAfterInt);
}
private int findSyncToken(Task syncCycle) {
return (Integer)findSyncTokenObject(syncCycle);
}
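/**
* Extracts the current sync token from the task extension, or returns null
* if no token is present.
*/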
private Object findSyncTokenObject(Task syncCycle) {
Object token = null;
PrismProperty<?> tokenProperty = syncCycle.getExtensionOrClone().findProperty(SchemaConstants.SYNC_TOKEN);
if (tokenProperty != null) {
Collection<?> values = tokenProperty.getRealValues();
if (values.size() > 1) {
throw new IllegalStateException("Too must values in token "+tokenProperty);
}
token = values.iterator().next();
}
return token;
}
/**
* Not really a test. Just cleans up after live sync.
*/
@Test
public void test399LiveSyncCleanup() throws Exception {
final String TEST_NAME = "test399LiveSyncCleanup";
displayTestTitle(TEST_NAME);
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
taskManager.deleteTask(TASK_OPENDJ_SYNC_OID, result);
// TODO: check if the task is really stopped
}
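/**
* Launch import from the OpenDJ resource and wait for the asynchronous import
* task to finish. All existing LDAP accounts should end up as shadows (and
* linked users) in the repository.
*/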
@Test
public void test400ImportFromResource() throws Exception {
final String TEST_NAME = "test400ImportFromResource";
displayTestTitle(TEST_NAME);
// GIVEN
checkAllShadows();
assertNoRepoCache();
OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
// Make sure Mr. Gibbs has "l" attribute set to the same value as an outbound expression is setting
ChangeRecordEntry entry = openDJController.executeLdifChange(LDIF_GIBBS_MODIFY_FILE);
display("Entry from LDIF", entry);
// Let's add an entry with multiple uids.
Entry addEntry = openDJController.addEntryFromLdifFile(LDIF_HERMAN_FILENAME);
display("Entry from LDIF", addEntry);
// WHEN
displayWhen(TEST_NAME);
TaskType taskType = modelWeb.importFromResource(RESOURCE_OPENDJ_OID, RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS);
// THEN
displayThen(TEST_NAME);
assertNoRepoCache();
displayJaxb("importFromResource result", taskType.getResult(), SchemaConstants.C_RESULT);
AssertJUnit.assertEquals("importFromResource has failed", OperationResultStatusType.IN_PROGRESS, taskType.getResult().getStatus());
// Convert the returned TaskType to a more usable Task
Task task = taskManager.createTaskInstance(taskType.asPrismObject(), result);
AssertJUnit.assertNotNull(task);
assertNotNull(task.getOid());
AssertJUnit.assertTrue(task.isAsynchronous());
AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
display("Import task after launch", task);
PrismObject<TaskType> tObject = repositoryService.getObject(TaskType.class, task.getOid(), null, result);
TaskType taskAfter = tObject.asObjectable();
display("Import task in repo after launch", taskAfter);
result.computeStatus();
TestUtil.assertSuccess("getObject has failed", result);
final String taskOid = task.getOid();
waitFor("Waiting for import to complete", new Checker() {
@Override
public boolean check() throws CommonException {
Holder<OperationResultType> resultHolder = new Holder<>();
Holder<ObjectType> objectHolder = new Holder<>();
OperationResult opResult = new OperationResult("import check");
assertNoRepoCache();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
try {
modelWeb.getObject(ObjectTypes.TASK.getTypeQName(), taskOid,
options, objectHolder, resultHolder);
} catch (FaultMessage faultMessage) {
throw new SystemException(faultMessage);
}
assertNoRepoCache();
// display("getObject result (wait loop)",resultHolder.value);
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
Task task = taskManager.createTaskInstance((PrismObject<TaskType>) objectHolder.value.asPrismObject(), opResult);
System.out.println(new Date() + ": Import task status: " + task.getExecutionStatus() + ", progress: " + task.getProgress());
if (task.getExecutionStatus() == TaskExecutionStatus.CLOSED) {
// Task closed, wait finished
return true;
}
// IntegrationTestTools.display("Task result while waiting: ", task.getResult());
return false;
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, 180000);
        // wait a second so that the task is definitely saved
Thread.sleep(1000);
//### Check task state after the task is finished ###
Holder<ObjectType> objectHolder = new Holder<>();
Holder<OperationResultType> resultHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
assertNoRepoCache();
modelWeb.getObject(ObjectTypes.TASK.getTypeQName(), task.getOid(),
options, objectHolder, resultHolder);
assertNoRepoCache();
TestUtil.assertSuccess("getObject has failed", resultHolder.value);
task = taskManager.createTaskInstance((PrismObject<TaskType>) objectHolder.value.asPrismObject(), result);
display("Import task after finish (fetched from model)", task);
AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task.getExecutionStatus());
assertNotNull("Null lastRunStartTimestamp in "+task, task.getLastRunStartTimestamp());
assertNotNull("Null lastRunFinishTimestamp in "+task, task.getLastRunFinishTimestamp());
long importDuration = task.getLastRunFinishTimestamp() - task.getLastRunStartTimestamp();
        double usersPerSec = (task.getProgress() * 1000.0) / importDuration;
display("Imported " + task.getProgress() + " users in " + importDuration + " milliseconds (" + usersPerSec + " users/sec)");
OperationResultStatusType taskResultStatus = task.getResultStatus();
AssertJUnit.assertNotNull("Task has no result status", taskResultStatus);
assertEquals("Import task result is not success", OperationResultStatusType.SUCCESS, taskResultStatus);
AssertJUnit.assertTrue("No progress", task.getProgress() > 0);
//### Check if the import created users and shadows ###
        // Listing of shadows is not supported by the provisioning module,
        // so we need to look directly into the repository
List<PrismObject<ShadowType>> sobjects = repositoryService.searchObjects(ShadowType.class, null, null, result);
result.computeStatus();
TestUtil.assertSuccess("listObjects has failed", result);
AssertJUnit.assertFalse("No shadows created", sobjects.isEmpty());
for (PrismObject<ShadowType> aObject : sobjects) {
ShadowType shadow = aObject.asObjectable();
display("Shadow object after import (repo)", shadow);
assertNotEmpty("No OID in shadow", shadow.getOid()); // This would be really strange ;-)
assertNotEmpty("No name in shadow", shadow.getName());
AssertJUnit.assertNotNull("No objectclass in shadow", shadow.getObjectClass());
AssertJUnit.assertNotNull("Null attributes in shadow", shadow.getAttributes());
String resourceOid = shadow.getResourceRef().getOid();
if (resourceOid.equals(RESOURCE_OPENDJ_OID)) {
assertAttributeNotNull("No identifier in shadow", shadow, getOpenDjPrimaryIdentifierQName());
} else {
assertAttributeNotNull("No UID in shadow", shadow, SchemaConstants.ICFS_UID);
}
}
Holder<ObjectListType> listHolder = new Holder<>();
assertNoRepoCache();
modelWeb.searchObjects(ObjectTypes.USER.getTypeQName(), null, null,
listHolder, resultHolder);
assertNoRepoCache();
ObjectListType uobjects = listHolder.value;
TestUtil.assertSuccess("listObjects has failed", resultHolder.value);
AssertJUnit.assertFalse("No users created", uobjects.getObject().isEmpty());
// TODO: use another account, not guybrush
display("Users after import "+uobjects.getObject().size());
for (ObjectType oo : uobjects.getObject()) {
UserType user = (UserType) oo;
if (SystemObjectsType.USER_ADMINISTRATOR.value().equals(user.getOid())) {
//skip administrator check
continue;
}
display("User after import (repo)", user);
assertNotEmpty("No OID in user", user.getOid()); // This would be
// really
// strange ;-)
assertNotEmpty("No name in user", user.getName());
assertNotNull("No fullName in user", user.getFullName());
assertNotEmpty("No fullName in user", user.getFullName().getOrig());
assertNotEmpty("No familyName in user", user.getFamilyName().getOrig());
// givenName is not mandatory in LDAP, therefore givenName may not
// be present on user
if (user.getName().getOrig().equals(USER_GUYBRUSH_USERNAME)) {
// skip the rest of checks for guybrush, he does not have LDAP account now
continue;
}
assertTrue("User "+user.getName()+" is disabled ("+user.getActivation().getAdministrativeStatus()+")", user.getActivation() == null ||
user.getActivation().getAdministrativeStatus() == ActivationStatusType.ENABLED);
List<ObjectReferenceType> accountRefs = user.getLinkRef();
AssertJUnit.assertEquals("Wrong accountRef for user " + user.getName(), 1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
boolean found = false;
for (PrismObject<ShadowType> aObject : sobjects) {
ShadowType acc = aObject.asObjectable();
if (accountRef.getOid().equals(acc.getOid())) {
found = true;
break;
}
}
if (!found) {
AssertJUnit.fail("accountRef does not point to existing account " + accountRef.getOid());
}
PrismObject<ShadowType> aObject = modelService.getObject(ShadowType.class, accountRef.getOid(), null, task, result);
ShadowType account = aObject.asObjectable();
display("Account after import ", account);
String attributeValueL = ShadowUtil.getMultiStringAttributeValueAsSingle(account,
new QName(ResourceTypeUtil.getResourceNamespace(resourceTypeOpenDjrepo), "l"));
// assertEquals("Unexcpected value of l", "middle of nowhere", attributeValueL);
assertEquals("Unexcpected value of l", getUserLocality(user), attributeValueL);
}
// This also includes "idm" user imported from LDAP. Later we need to ignore that one.
assertEquals("Wrong number of users after import", 10, uobjects.getObject().size());
checkAllShadows();
}
private String getUserLocality(UserType user){
        return user.getLocality() != null ? user.getLocality().getOrig() : "middle of nowhere";
}
@Test
public void test420RecomputeUsers() throws Exception {
final String TEST_NAME = "test420RecomputeUsers";
displayTestTitle(TEST_NAME);
// GIVEN
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
        // Assign a role to the user, but do it directly in the repository instead of through the model.
        // The role assignment will not be executed, which creates an inconsistent state.
ObjectDeltaType changeAddRoleCaptain = unmarshallValueFromFile(
REQUEST_USER_MODIFY_ADD_ROLE_CAPTAIN_1_FILENAME, ObjectDeltaType.class);
Collection<? extends ItemDelta> modifications = DeltaConvertor.toModifications(changeAddRoleCaptain.getItemDelta(),
getUserDefinition());
repositoryService.modifyObject(UserType.class, changeAddRoleCaptain.getOid(), modifications, result);
// TODO: setup more "inconsistent" state
// Add reconciliation task. This will trigger reconciliation
importObjectFromFile(TASK_USER_RECOMPUTE_FILENAME, result);
        // We need to wait for a sync interval, so the task scanner has a chance
        // to pick up this task
waitFor("Waiting for task to finish", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(TASK_USER_RECOMPUTE_OID, result);
//display("Task while waiting for task manager to pick up the task", task);
// wait until the task is finished
                return TaskExecutionStatus.CLOSED == task.getExecutionStatus();
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, 40000);
        // wait a second so that the task is definitely saved
Thread.sleep(1000);
// Check task status
Task task = taskManager.getTask(TASK_USER_RECOMPUTE_OID, retrieveTaskResult(), result);
result.computeStatus();
display("getTask result", result);
TestUtil.assertSuccess("getTask has failed", result);
AssertJUnit.assertNotNull(task);
display("Task after finish", task);
AssertJUnit.assertNotNull(task.getTaskIdentifier());
assertFalse(task.getTaskIdentifier().isEmpty());
PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, TASK_USER_RECOMPUTE_OID, null, result);
display("Task after pickup in the repository", o.asObjectable());
AssertJUnit.assertEquals(TaskExecutionStatus.CLOSED, task.getExecutionStatus());
// .. and last run should not be zero
assertNotNull(task.getLastRunStartTimestamp());
AssertJUnit.assertFalse(task.getLastRunStartTimestamp().longValue() == 0);
assertNotNull(task.getLastRunFinishTimestamp());
AssertJUnit.assertFalse(task.getLastRunFinishTimestamp().longValue() == 0);
AssertJUnit.assertEquals(10, task.getProgress());
// Test for presence of a result. It should be there and it should
// indicate success
OperationResult taskResult = task.getResult();
display("Recompute task result", taskResult);
AssertJUnit.assertNotNull(taskResult);
TestUtil.assertSuccess("Recompute task result", taskResult);
// STOP the task. We don't need it any more and we don't want to give it a chance to run more than once
taskManager.deleteTask(TASK_USER_RECOMPUTE_OID, result);
// CHECK RESULT: account created for user guybrush
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> object = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = object.asObjectable();
repoResult.computeStatus();
displayJaxb("User (repository)", repoUser, new QName("user"));
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals("Wrong number of accountRefs after recompute for user "+repoUser.getName(), 1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
accountShadowOidGuybrushOpendj = accountRef.getOid();
assertFalse(accountShadowOidGuybrushOpendj.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
displayJaxb("Shadow (repository)", repoShadowType, new QName("shadow"));
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
accountGuybrushOpendjEntryUuuid = checkRepoShadow(repoShadow);
// check if account was created in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
OpenDJController.assertAttribute(entry, "title", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "cruise");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword was not set on create", guybrushPassword);
checkAllShadows();
}
@Test
public void test440ReconcileResourceOpenDj() throws Exception {
final String TEST_NAME = "test440ReconcileResourceOpenDj";
displayTestTitle(TEST_NAME);
// GIVEN
final OperationResult result = new OperationResult(TestSanity.class.getName()
+ "." + TEST_NAME);
// Create LDAP account without an owner. The liveSync is off, so it will not be picked up
Entry ldifEntry = openDJController.addEntryFromLdifFile(LDIF_ELAINE_FILENAME);
display("Entry from LDIF", ldifEntry);
        // Guybrush's attributes were set up by a role in the previous test. Let's mess them up a bit. Recon should sort it out.
List<RawModification> modifications = new ArrayList<>();
// Expect that a correct title will be added to this one
RawModification titleMod = RawModification.create(ModificationType.REPLACE, "title", "Scurvy earthworm");
modifications.add(titleMod);
// Expect that the correct location will replace this one
RawModification lMod = RawModification.create(ModificationType.REPLACE, "l", "Davie Jones' locker");
modifications.add(lMod);
// Expect that this will be untouched
RawModification poMod = RawModification.create(ModificationType.REPLACE, "postOfficeBox", "X marks the spot");
modifications.add(poMod);
ModifyOperation modifyOperation = openDJController.getInternalConnection().processModify(USER_GUYBRUSH_LDAP_DN, modifications);
if (ResultCode.SUCCESS != modifyOperation.getResultCode()) {
AssertJUnit.fail("LDAP operation failed: " + modifyOperation.getErrorMessage());
}
// TODO: setup more "inconsistent" state
// Add reconciliation task. This will trigger reconciliation
repoAddObjectFromFile(TASK_OPENDJ_RECON_FILENAME, result);
        // We need to wait for a sync interval, so the task scanner has a chance
        // to pick up this task
waitFor("Waiting for task to finish first run", new Checker() {
public boolean check() throws ObjectNotFoundException, SchemaException {
Task task = taskManager.getTask(TASK_OPENDJ_RECON_OID, result);
display("Task while waiting for task manager to pick up the task", task);
// wait until the task is finished
return task.getLastRunFinishTimestamp() != null;
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, 180000);
// Check task status
Task task = taskManager.getTask(TASK_OPENDJ_RECON_OID, result);
result.computeStatus();
display("getTask result", result);
TestUtil.assertSuccess("getTask has failed", result);
AssertJUnit.assertNotNull(task);
display("Task after pickup", task);
PrismObject<TaskType> o = repositoryService.getObject(TaskType.class, TASK_OPENDJ_RECON_OID, null, result);
display("Task after pickup in the repository", o.asObjectable());
// .. it should be running
AssertJUnit.assertEquals(TaskExecutionStatus.RUNNABLE, task.getExecutionStatus());
// .. and claimed
// AssertJUnit.assertEquals(TaskExclusivityStatus.CLAIMED, task.getExclusivityStatus());
// .. and last run should not be zero
assertNotNull("Null last run start in recon task", task.getLastRunStartTimestamp());
AssertJUnit.assertFalse("Zero last run start in recon task", task.getLastRunStartTimestamp().longValue() == 0);
assertNotNull("Null last run finish in recon task", task.getLastRunFinishTimestamp());
AssertJUnit.assertFalse("Zero last run finish in recon task", task.getLastRunFinishTimestamp().longValue() == 0);
// The progress should be 0, as there were no changes yet
// [pm] commented out, as progress in recon task is now determined not only using # of changes
//AssertJUnit.assertEquals(0, task.getProgress());
// Test for presence of a result. It was not fetched - so it should NOT be there
OperationResult taskResult = task.getResult();
AssertJUnit.assertNull(taskResult);
// However, the task should indicate success
AssertJUnit.assertEquals(OperationResultStatusType.SUCCESS, task.getResultStatus());
// STOP the task. We don't need it any more and we don't want to give it a chance to run more than once
taskManager.deleteTask(TASK_OPENDJ_RECON_OID, result);
// CHECK RESULT: account for user guybrush should be still there and unchanged
// Check if user object was modified in the repo
OperationResult repoResult = new OperationResult("getObject");
PrismObject<UserType> uObject = repositoryService.getObject(UserType.class, USER_GUYBRUSH_OID, null, repoResult);
UserType repoUser = uObject.asObjectable();
repoResult.computeStatus();
displayJaxb("User (repository)", repoUser, new QName("user"));
List<ObjectReferenceType> accountRefs = repoUser.getLinkRef();
assertEquals("Guybrush has wrong number of accounts", 1, accountRefs.size());
ObjectReferenceType accountRef = accountRefs.get(0);
accountShadowOidGuybrushOpendj = accountRef.getOid();
assertFalse(accountShadowOidGuybrushOpendj.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
PrismObject<ShadowType> repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidGuybrushOpendj,
null, repoResult);
ShadowType repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
displayJaxb("Shadow (repository)", repoShadowType, new QName("shadow"));
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
accountGuybrushOpendjEntryUuuid = checkRepoShadow(repoShadow);
// check if account was created in LDAP
Entry entry = openDJController.searchAndAssertByEntryUuid(accountGuybrushOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", "guybrush");
OpenDJController.assertAttribute(entry, "givenName", "Guybrush");
OpenDJController.assertAttribute(entry, "sn", "Threepwood");
OpenDJController.assertAttribute(entry, "cn", "Guybrush Threepwood");
OpenDJController.assertAttribute(entry, "displayName", "Guybrush Threepwood");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object. It is not tolerant, therefore the other value should be gone now
OpenDJController.assertAttribute(entry, "l", "Deep in the Caribbean");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "sailor");
// "title" is tolerant, so it will retain the original value as well as the one provided by the role
OpenDJController.assertAttribute(entry, "title", "Scurvy earthworm", "Honorable Captain");
OpenDJController.assertAttribute(entry, "carLicense", "C4PT41N");
OpenDJController.assertAttribute(entry, "businessCategory", "cruise");
// No setting for "postOfficeBox", so the value should be unchanged
OpenDJController.assertAttribute(entry, "postOfficeBox", "X marks the spot");
String guybrushPassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Pasword was not set on create", guybrushPassword);
// QueryType query = QueryUtil.createNameQuery(ELAINE_NAME);
// ObjectQuery query = ObjectQuery.createObjectQuery(EqualsFilter.createEqual(UserType.class, prismContext, UserType.F_NAME, ELAINE_NAME));
ObjectQuery query = ObjectQueryUtil.createNameQuery(ELAINE_NAME, prismContext);
List<PrismObject<UserType>> users = repositoryService.searchObjects(UserType.class, query, null, repoResult);
assertEquals("Wrong number of Elaines", 1, users.size());
repoUser = users.get(0).asObjectable();
repoResult.computeStatus();
displayJaxb("User Elaine (repository)", repoUser, new QName("user"));
assertNotNull(repoUser.getOid());
assertEquals(PrismTestUtil.createPolyStringType(ELAINE_NAME), repoUser.getName());
PrismAsserts.assertEqualsPolyString("wrong repo givenName", "Elaine", repoUser.getGivenName());
PrismAsserts.assertEqualsPolyString("wrong repo familyName", "Marley", repoUser.getFamilyName());
PrismAsserts.assertEqualsPolyString("wrong repo fullName", "Elaine Marley", repoUser.getFullName());
accountRefs = repoUser.getLinkRef();
assertEquals("Elaine has wrong number of accounts", 1, accountRefs.size());
accountRef = accountRefs.get(0);
String accountShadowOidElaineOpendj = accountRef.getOid();
assertFalse(accountShadowOidElaineOpendj.isEmpty());
// Check if shadow was created in the repo
repoResult = new OperationResult("getObject");
repoShadow = repositoryService.getObject(ShadowType.class, accountShadowOidElaineOpendj,
null, repoResult);
repoShadowType = repoShadow.asObjectable();
repoResult.computeStatus();
TestUtil.assertSuccess("getObject has failed", repoResult);
displayJaxb("Shadow (repository)", repoShadowType, new QName("shadow"));
assertNotNull(repoShadowType);
assertEquals(RESOURCE_OPENDJ_OID, repoShadowType.getResourceRef().getOid());
String accountElainehOpendjEntryUuuid = checkRepoShadow(repoShadow);
// check if account is still in LDAP
entry = openDJController.searchAndAssertByEntryUuid(accountElainehOpendjEntryUuuid);
display("LDAP account", entry);
OpenDJController.assertAttribute(entry, "uid", ELAINE_NAME);
OpenDJController.assertAttribute(entry, "givenName", "Elaine");
OpenDJController.assertAttribute(entry, "sn", "Marley");
OpenDJController.assertAttribute(entry, "cn", "Elaine Marley");
OpenDJController.assertAttribute(entry, "displayName", "Elaine Marley");
// The "l" attribute is assigned indirectly through schemaHandling and
// config object
// FIXME
//OpenDJController.assertAttribute(entry, "l", "middle of nowhere");
// Set by the role
OpenDJController.assertAttribute(entry, "employeeType", "governor");
OpenDJController.assertAttribute(entry, "title", "Governor");
OpenDJController.assertAttribute(entry, "businessCategory", "state");
String elainePassword = OpenDJController.getAttributeValue(entry, "userPassword");
assertNotNull("Password of Elaine has disappeared", elainePassword);
checkAllShadows();
}
@Test
public void test480ListResources() throws Exception {
final String TEST_NAME = "test480ListResources";
displayTestTitle(TEST_NAME);
// GIVEN
OperationResultType result = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(result);
Holder<ObjectListType> objectListHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
// WHEN
modelWeb.searchObjects(ObjectTypes.RESOURCE.getTypeQName(), null, options, objectListHolder, resultHolder);
// THEN
display("Resources", objectListHolder.value);
assertEquals("Unexpected number of resources", 4, objectListHolder.value.getObject().size());
// TODO
for(ObjectType object: objectListHolder.value.getObject()) {
// Marshalling may fail even though the Java object is OK so test for it
String xml = prismContext.serializeObjectToString(object.asPrismObject(), PrismContext.LANG_XML);
}
}
@Test
public void test485ListResourcesWithBrokenResource() throws Exception {
TestUtil.displayTestTitle("test485ListResourcesWithBrokenResource");
// GIVEN
Task task = taskManager.createTaskInstance(TestSanity.class.getName() + ".test410ListResourcesWithBrokenResource");
final OperationResult result = task.getResult();
// WHEN
List<PrismObject<ResourceType>> resources = modelService.searchObjects(ResourceType.class, null, null, task, result);
// THEN
assertNotNull("listObjects returned null list", resources);
for (PrismObject<ResourceType> object : resources) {
ResourceType resource = object.asObjectable();
//display("Resource found",resource);
display("Found " + ObjectTypeUtil.toShortString(resource) + ", result " + (resource.getFetchResult() == null ? "null" : resource.getFetchResult().getStatus()));
assertNotNull(resource.getOid());
assertNotNull(resource.getName());
if (resource.getOid().equals(RESOURCE_BROKEN_OID)) {
assertTrue("No error in fetchResult in " + ObjectTypeUtil.toShortString(resource),
resource.getFetchResult() != null &&
(resource.getFetchResult().getStatus() == OperationResultStatusType.PARTIAL_ERROR ||
resource.getFetchResult().getStatus() == OperationResultStatusType.FATAL_ERROR));
} else {
assertTrue("Unexpected error in fetchResult in " + ObjectTypeUtil.toShortString(resource),
resource.getFetchResult() == null || resource.getFetchResult().getStatus() == OperationResultStatusType.SUCCESS);
}
}
}
@Test
public void test500NotifyChangeCreateAccount() throws Exception{
final String TEST_NAME = "test500NotifyChangeCreateAccount";
displayTestTitle(TEST_NAME);
Entry ldifEntry = openDJController.addEntryFromLdifFile(LDIF_ANGELIKA_FILENAME);
display("Entry from LDIF", ldifEntry);
List<Attribute> attributes = ldifEntry.getAttributes();
List<Attribute> attrs = ldifEntry.getAttribute("entryUUID");
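        // entryUUID is an operational attribute and may not be returned by the
        // direct lookup, so fall back to scanning the full attribute list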
AttributeValue val = null;
if (attrs == null){
for (Attribute a : attributes){
if (a.getName().equals("entryUUID")){
val = a.iterator().next();
}
}
} else{
val = attrs.get(0).iterator().next();
}
String entryUuid = val.toString();
ShadowType anglicaAccount = parseObjectType(new File(ACCOUNT_ANGELIKA_FILENAME), ShadowType.class);
PrismProperty<String> prop = anglicaAccount.asPrismObject().findContainer(ShadowType.F_ATTRIBUTES).getValue().createProperty(
prismContext.definitionFactory().createPropertyDefinition(getOpenDjPrimaryIdentifierQName(), DOMUtil.XSD_STRING));
prop.setRealValue(entryUuid);
anglicaAccount.setResourceRef(ObjectTypeUtil.createObjectRef(RESOURCE_OPENDJ_OID, ObjectTypes.RESOURCE));
display("Angelica shadow: ", anglicaAccount.asPrismObject().debugDump());
ResourceObjectShadowChangeDescriptionType changeDescription = new ResourceObjectShadowChangeDescriptionType();
ObjectDeltaType delta = new ObjectDeltaType();
delta.setChangeType(ChangeTypeType.ADD);
delta.setObjectToAdd(anglicaAccount);
delta.setObjectType(ShadowType.COMPLEX_TYPE);
changeDescription.setObjectDelta(delta);
changeDescription.setChannel(SchemaConstants.CHANNEL_WEB_SERVICE_URI);
// WHEN
TaskType task = modelWeb.notifyChange(changeDescription);
// THEN
OperationResult result = OperationResult.createOperationResult(task.getResult());
display(result);
assertSuccess(result);
PrismObject<UserType> userAngelika = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelika);
UserType user = userAngelika.asObjectable();
assertNotNull("User with the name angelika must have one link ref.", user.getLinkRef());
assertEquals("Expected one account ref in user", 1, user.getLinkRef().size());
String oid = user.getLinkRef().get(0).getOid();
PrismObject<ShadowType> modelShadow = modelService.getObject(ShadowType.class, oid, null, taskManager.createTaskInstance(), result);
assertAttributeNotNull(modelShadow, getOpenDjPrimaryIdentifierQName());
assertAttribute(modelShadow, "uid", "angelika");
assertAttribute(modelShadow, "givenName", "Angelika");
assertAttribute(modelShadow, "sn", "Marley");
assertAttribute(modelShadow, "cn", "Angelika Marley");
}
@Test
public void test501NotifyChangeModifyAccount() throws Exception{
final String TEST_NAME = "test501NotifyChangeModifyAccount";
displayTestTitle(TEST_NAME);
OperationResult parentResult = new OperationResult(TEST_NAME);
PrismObject<UserType> userAngelika = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelika);
UserType user = userAngelika.asObjectable();
assertNotNull("User with the name angelika must have one link ref.", user.getLinkRef());
assertEquals("Expected one account ref in user", 1, user.getLinkRef().size());
String oid = user.getLinkRef().get(0).getOid();
ResourceObjectShadowChangeDescriptionType changeDescription = new ResourceObjectShadowChangeDescriptionType();
ObjectDeltaType delta = new ObjectDeltaType();
delta.setChangeType(ChangeTypeType.MODIFY);
delta.setObjectType(ShadowType.COMPLEX_TYPE);
ItemDeltaType mod1 = new ItemDeltaType();
mod1.setModificationType(ModificationTypeType.REPLACE);
ItemPathType path = new ItemPathType(ItemPath.create(ShadowType.F_ATTRIBUTES, new QName(resourceTypeOpenDjrepo.getNamespace(), "givenName")));
mod1.setPath(path);
RawType value = new RawType(prismContext.xnodeFactory().primitive("newAngelika"), prismContext);
mod1.getValue().add(value);
delta.getItemDelta().add(mod1);
delta.setOid(oid);
LOGGER.info("item delta: {}", SchemaDebugUtil.prettyPrint(mod1));
LOGGER.info("delta: {}", DebugUtil.dump(mod1));
changeDescription.setObjectDelta(delta);
changeDescription.setOldShadowOid(oid);
changeDescription.setChannel(SchemaConstants.CHANNEL_WEB_SERVICE_URI);
// WHEN
TaskType task = modelWeb.notifyChange(changeDescription);
// THEN
OperationResult result = OperationResult.createOperationResult(task.getResult());
display(result);
assertSuccess(result);
PrismObject<UserType> userAngelikaAfterSync = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelikaAfterSync);
UserType userAfterSync = userAngelikaAfterSync.asObjectable();
PrismAsserts.assertEqualsPolyString("wrong given name in user angelika", PrismTestUtil.createPolyStringType("newAngelika"), userAfterSync.getGivenName());
}
@Test
public void test502NotifyChangeModifyAccountPassword() throws Exception{
final String TEST_NAME = "test502NotifyChangeModifyAccountPassword";
displayTestTitle(TEST_NAME);
PrismObject<UserType> userAngelika = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelika);
UserType user = userAngelika.asObjectable();
assertNotNull("User with the name angelika must have one link ref.", user.getLinkRef());
assertEquals("Expected one account ref in user", 1, user.getLinkRef().size());
String oid = user.getLinkRef().get(0).getOid();
String newPassword = "newPassword";
openDJController.modifyReplace("uid="+ANGELIKA_NAME+","+openDJController.getSuffixPeople(), "userPassword", newPassword);
ResourceObjectShadowChangeDescriptionType changeDescription = new ResourceObjectShadowChangeDescriptionType();
ObjectDeltaType delta = new ObjectDeltaType();
delta.setChangeType(ChangeTypeType.MODIFY);
delta.setObjectType(ShadowType.COMPLEX_TYPE);
ItemDeltaType passwordDelta = new ItemDeltaType();
passwordDelta.setModificationType(ModificationTypeType.REPLACE);
passwordDelta.setPath(ModelClientUtil.createItemPathType("credentials/password/value", prismContext));
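        // Wrap the serialized protected string in a RawType; the "dummy" root
        // QName serves only as a placeholder element name for the serializer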
RawType passwordValue = new RawType(prismContext.xnodeSerializer().root(new QName("dummy")).serializeRealValue(ModelClientUtil.createProtectedString(newPassword)).getSubnode(), prismContext);
passwordDelta.getValue().add(passwordValue);
delta.getItemDelta().add(passwordDelta);
delta.setOid(oid);
LOGGER.info("item delta: {}", SchemaDebugUtil.prettyPrint(passwordDelta));
LOGGER.info("delta: {}", DebugUtil.dump(passwordDelta));
changeDescription.setObjectDelta(delta);
changeDescription.setOldShadowOid(oid);
// changeDescription.setCurrentShadow(angelicaShadowType);
changeDescription.setChannel(SchemaConstants.CHANNEL_WEB_SERVICE_URI);
// WHEN
TaskType task = modelWeb.notifyChange(changeDescription);
// THEN
OperationResult result = OperationResult.createOperationResult(task.getResult());
display(result);
assertSuccess(result);
PrismObject<UserType> userAngelikaAfterSync = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelikaAfterSync);
assertUserLdapPassword(userAngelikaAfterSync, newPassword);
}
@Test
public void test503NotifyChangeDeleteAccount() throws Exception{
final String TEST_NAME = "test503NotifyChangeDeleteAccount";
displayTestTitle(TEST_NAME);
PrismObject<UserType> userAngelika = findUserByUsername(ANGELIKA_NAME);
assertNotNull("User with the name angelika must exist.", userAngelika);
UserType user = userAngelika.asObjectable();
assertNotNull("User with the name angelika must have one link ref.", user.getLinkRef());
assertEquals("Expected one account ref in user", 1, user.getLinkRef().size());
String oid = user.getLinkRef().get(0).getOid();
ResourceObjectShadowChangeDescriptionType changeDescription = new ResourceObjectShadowChangeDescriptionType();
ObjectDeltaType delta = new ObjectDeltaType();
delta.setChangeType(ChangeTypeType.DELETE);
delta.setObjectType(ShadowType.COMPLEX_TYPE);
delta.setOid(oid);
changeDescription.setObjectDelta(delta);
changeDescription.setOldShadowOid(oid);
changeDescription.setChannel(SchemaConstants.CHANNEL_WEB_SERVICE_URI);
// WHEN
TaskType task = modelWeb.notifyChange(changeDescription);
// THEN
OperationResult result = OperationResult.createOperationResult(task.getResult());
display(result);
assertTrue(result.isAcceptable());
PrismObject<UserType> userAngelikaAfterSync = findUserByUsername(ANGELIKA_NAME);
display("User after", userAngelikaAfterSync);
assertNotNull("User with the name angelika must exist.", userAngelikaAfterSync);
UserType userType = userAngelikaAfterSync.asObjectable();
assertNotNull("User with the name angelika must have one link ref.", userType.getLinkRef());
assertEquals("Expected no account ref in user", 0, userType.getLinkRef().size());
}
@Test
public void test999Shutdown() throws Exception {
taskManager.shutdown();
waitFor("waiting for task manager shutdown", new Checker() {
@Override
public boolean check() {
return taskManager.getLocallyRunningTasks(new OperationResult("dummy")).isEmpty();
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, 10000);
AssertJUnit.assertEquals("Some tasks left running after shutdown", new HashSet<Task>(),
new HashSet<>(taskManager.getLocallyRunningTasks(new OperationResult("dummy"))));
}
// TODO: test for missing/corrupt system configuration
// TODO: test for missing sample config (bad reference in expression
// arguments)
private String checkRepoShadow(PrismObject<ShadowType> repoShadow) {
ShadowType repoShadowType = repoShadow.asObjectable();
String uid = null;
boolean hasOthers = false;
List<Object> xmlAttributes = repoShadowType.getAttributes().getAny();
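        // Walk the raw attribute elements: expect exactly one primary identifier,
        // tolerate the secondary (name) identifier and fail on anything else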
for (Object element : xmlAttributes) {
if (SchemaConstants.ICFS_UID.equals(JAXBUtil.getElementQName(element)) || getOpenDjPrimaryIdentifierQName().equals(JAXBUtil.getElementQName(element))) {
if (uid != null) {
AssertJUnit.fail("Multiple values for ICF UID in shadow attributes");
} else {
uid = ((Element) element).getTextContent();
}
} else if (SchemaConstants.ICFS_NAME.equals(JAXBUtil.getElementQName(element)) || getOpenDjSecondaryIdentifierQName().equals(JAXBUtil.getElementQName(element))) {
// This is OK
} else {
hasOthers = true;
}
}
assertFalse("Shadow "+repoShadow+" has unexpected elements", hasOthers);
assertNotNull(uid);
return uid;
}
private QName getOpenDjPrimaryIdentifierQName() {
return new QName(RESOURCE_OPENDJ_NS, RESOURCE_OPENDJ_PRIMARY_IDENTIFIER_LOCAL_NAME);
}
private QName getOpenDjSecondaryIdentifierQName() {
return new QName(RESOURCE_OPENDJ_NS, RESOURCE_OPENDJ_SECONDARY_IDENTIFIER_LOCAL_NAME);
}
private ShadowType searchAccountByOid(final String accountOid) throws Exception {
OperationResultType resultType = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(resultType);
Holder<ObjectType> accountHolder = new Holder<>();
SelectorQualifiedGetOptionsType options = new SelectorQualifiedGetOptionsType();
modelWeb.getObject(ObjectTypes.SHADOW.getTypeQName(), accountOid, options, accountHolder, resultHolder);
ObjectType object = accountHolder.value;
TestUtil.assertSuccess("searchObjects has failed", resultHolder.value);
assertNotNull("Account is null", object);
if (!(object instanceof ShadowType)) {
fail("Object is not account.");
}
ShadowType account = (ShadowType) object;
assertEquals(accountOid, account.getOid());
return account;
}
private UserType searchUserByName(String name) throws Exception {
// Document doc = DOMUtil.getDocument();
// Element nameElement = doc.createElementNS(SchemaConstants.C_NAME.getNamespaceURI(),
// SchemaConstants.C_NAME.getLocalPart());
// nameElement.setTextContent(name);
// Element filter = QueryUtil.createEqualFilter(doc, null, nameElement);
//
// QueryType query = new QueryType();
// query.setFilter(filter);
ObjectQuery q = ObjectQueryUtil.createNameQuery(UserType.class, prismContext, name);
QueryType query = prismContext.getQueryConverter().createQueryType(q);
OperationResultType resultType = new OperationResultType();
Holder<OperationResultType> resultHolder = new Holder<>(resultType);
Holder<ObjectListType> listHolder = new Holder<>();
assertNoRepoCache();
modelWeb.searchObjects(ObjectTypes.USER.getTypeQName(), query, null, listHolder, resultHolder);
assertNoRepoCache();
ObjectListType objects = listHolder.value;
TestUtil.assertSuccess("searchObjects has failed", resultHolder.value);
AssertJUnit.assertEquals("User not found (or found too many)", 1, objects.getObject().size());
UserType user = (UserType) objects.getObject().get(0);
        AssertJUnit.assertEquals(PrismTestUtil.createPolyStringType(name), user.getName());
return user;
}
private void basicWaitForSyncChangeDetection(Task syncCycle, Object tokenBefore, int increment,
final OperationResult result) throws Exception {
        basicWaitForSyncChangeDetection(syncCycle, (Integer) tokenBefore, increment, result);
}
private void basicWaitForSyncChangeDetection(Task syncCycle, int tokenBefore, int increment,
final OperationResult result) throws Exception {
basicWaitForSyncChangeDetection(syncCycle, tokenBefore, increment, result, 40000);
}
private void basicWaitForSyncChangeDetection(final Task syncCycle, final int tokenBefore, final int increment,
final OperationResult result, int timeout) throws Exception {
waitFor("Waiting for sync cycle to detect change", new Checker() {
@Override
public boolean check() throws CommonException {
syncCycle.refresh(result);
display("SyncCycle while waiting for sync cycle to detect change", syncCycle);
if (syncCycle.getExecutionStatus() != TaskExecutionStatus.RUNNABLE) {
throw new IllegalStateException("Task not runnable: "+syncCycle.getExecutionStatus()+"; "+syncCycle);
}
int tokenNow = findSyncToken(syncCycle);
display("tokenNow = " + tokenNow);
                return tokenNow >= tokenBefore + increment;
}
@Override
public void timeout() {
// No reaction, the test will fail right after return from this
}
}, timeout, WAIT_FOR_LOOP_SLEEP_MILIS);
}
private void setAssignmentEnforcement(AssignmentPolicyEnforcementType enforcementType) throws ObjectNotFoundException, SchemaException, ObjectAlreadyExistsException {
assumeAssignmentPolicy(enforcementType);
// AccountSynchronizationSettingsType syncSettings = new AccountSynchronizationSettingsType();
// syncSettings.setAssignmentPolicyEnforcement(enforcementType);
// applySyncSettings(SystemConfigurationType.class, syncSettings);
}
private void assertSyncSettingsAssignmentPolicyEnforcement(AssignmentPolicyEnforcementType assignmentPolicy) throws
ObjectNotFoundException, SchemaException {
OperationResult result = new OperationResult("Asserting sync settings");
PrismObject<SystemConfigurationType> systemConfigurationType = repositoryService.getObject(SystemConfigurationType.class,
SystemObjectsType.SYSTEM_CONFIGURATION.value(), null, result);
result.computeStatus();
TestUtil.assertSuccess("Asserting sync settings failed (result)", result);
ProjectionPolicyType globalAccountSynchronizationSettings = systemConfigurationType.asObjectable().getGlobalAccountSynchronizationSettings();
assertNotNull("globalAccountSynchronizationSettings is null", globalAccountSynchronizationSettings);
AssignmentPolicyEnforcementType assignmentPolicyEnforcement = globalAccountSynchronizationSettings.getAssignmentPolicyEnforcement();
assertNotNull("assignmentPolicyEnforcement is null", assignmentPolicyEnforcement);
assertEquals("Assignment policy mismatch", assignmentPolicy, assignmentPolicyEnforcement);
}
private void checkAllShadows() throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException {
LOGGER.trace("Checking all shadows");
System.out.println("Checking all shadows");
ObjectChecker<ShadowType> checker = null;
IntegrationTestTools.checkAllShadows(resourceTypeOpenDjrepo, repositoryService, checker, prismContext);
}
public static String getNormalizedAttributeValue(ShadowType repoShadow, RefinedObjectClassDefinition objClassDef, QName name) {
String value = getAttributeValue(repoShadow, name);
RefinedAttributeDefinition idDef = objClassDef.getPrimaryIdentifiers().iterator().next();
if (idDef.getMatchingRuleQName() != null && idDef.getMatchingRuleQName().equals(PrismConstants.STRING_IGNORE_CASE_MATCHING_RULE_NAME)){
return value.toLowerCase();
}
return value;
}
protected <T> void assertAttribute(ShadowType shadowType, String attrName, T... expectedValues) {
assertAttribute(resourceTypeOpenDjrepo, shadowType, attrName, expectedValues);
}
protected <T> void assertAttribute(PrismObject<ShadowType> shadow, String attrName, T... expectedValues) {
assertAttribute(resourceTypeOpenDjrepo, shadow.asObjectable(), attrName, expectedValues);
}
}<|fim▁end|> | // This should discover the connectors
LOGGER.trace("initSystem: trying modelService.postInit()");
modelService.postInit(initResult);
LOGGER.trace("initSystem: modelService.postInit() done"); |
<|file_name|>PluginFacet.java<|end_file_name|><|fim▁begin|>package tc.oc.commons.core.plugin;
import java.util.Set;
import tc.oc.commons.core.commands.CommandRegistry;
import tc.oc.commons.core.commands.Commands;
import tc.oc.commons.core.commands.NestedCommands;
import tc.oc.minecraft.api.event.Activatable;
import tc.oc.commons.core.inject.Facet;
/**
* Something that needs to be enabled and disabled (along with a plugin).
*
* Each plugin has a private set of facets, configured through a {@link PluginFacetBinder}.<|fim▁hole|> *
 * If a facet implements the {@link tc.oc.minecraft.api.event.Listener} interface,
* it will also be registered to receive events.
*
* If it implements {@link Commands} or {@link NestedCommands}, it will be registered
* through a {@link CommandRegistry}.
*
 * Specific plugins may do other automatic things with their own facets, but we
* don't yet have a framework for extending facets across all plugins.
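 *
 * For illustration only, a minimal facet might look like the sketch below
 * (the binder method name is an assumption, not a confirmed API):
 * <pre>{@code
 * public class MotdFacet implements PluginFacet, Listener {
 *     // Enabled/disabled together with the owning plugin; event handler
 *     // methods are registered automatically because this is a Listener.
 * }
 *
 * // In the plugin's module, registered through the facet binder:
 * new PluginFacetBinder(binder()).add(MotdFacet.class);
 * }</pre>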
*/
public interface PluginFacet extends Facet, Activatable {
}<|fim▁end|> | * To get the instances, @Inject a {@link Set< PluginFacet >}.
*
* Facets are automatically enabled and disabled at the same time as the
* plugin they are bound to. |
<|file_name|>issue-71137.rs<|end_file_name|><|fim▁begin|>// edition:2018
#![feature(must_not_suspend)]
#![allow(must_not_suspend)]
use std::future::Future;<|fim▁hole|>fn fake_spawn<F: Future + Send + 'static>(f: F) { }
async fn wrong_mutex() {
let m = Mutex::new(1);
{
let mut guard = m.lock().unwrap();
(async { "right"; }).await;
*guard += 1;
}
(async { "wrong"; }).await;
}
fn main() {
fake_spawn(wrong_mutex()); //~ Error future cannot be sent between threads safely
}<|fim▁end|> | use std::sync::Mutex;
|
<|file_name|>consts.py<|end_file_name|><|fim▁begin|>from django.utils.translation import ugettext_lazy as _
# Legend Position
def get_legend_class(position):
return 'legend-' + str(position)
class LEGEND_POSITIONS:
BOTTOM = _('bottom')
TOP = _('top')
LEFT = _('left')<|fim▁hole|> get_choices = ((get_legend_class(BOTTOM), BOTTOM),
(get_legend_class(TOP), TOP),
(get_legend_class(LEFT), LEFT),
(get_legend_class(RIGHT), RIGHT),)
def get_chart_position_class(position):
return 'chart-' + str(position)
class CHART_POSITIONS:
CENTER = _('center')
LEFT = _('left')
RIGHT = _('right')
get_choices = ((get_chart_position_class(CENTER), CENTER),
(get_chart_position_class(LEFT), LEFT),
(get_chart_position_class(RIGHT), RIGHT),)<|fim▁end|> | RIGHT = _('right')
|
<|file_name|>volume.go<|end_file_name|><|fim▁begin|>package storage
import (
"fmt"
"os"
"path"
"sync"
"time"
"github.com/chrislusf/seaweedfs/weed/glog"
)
type Volume struct {
Id VolumeId
dir string
Collection string
dataFile *os.File
nm NeedleMapper
needleMapKind NeedleMapType
readOnly bool
SuperBlock
dataFileAccessLock sync.Mutex
lastModifiedTime uint64 //unix time in seconds
lastCompactIndexOffset uint64
lastCompactRevision uint16
}
func NewVolume(dirname string, collection string, id VolumeId, needleMapKind NeedleMapType, replicaPlacement *ReplicaPlacement, ttl *TTL, preallocate int64) (v *Volume, e error) {
v = &Volume{dir: dirname, Collection: collection, Id: id}
v.SuperBlock = SuperBlock{ReplicaPlacement: replicaPlacement, Ttl: ttl}
v.needleMapKind = needleMapKind
e = v.load(true, true, needleMapKind, preallocate)
return
}
func (v *Volume) String() string {
return fmt.Sprintf("Id:%v, dir:%s, Collection:%s, dataFile:%v, nm:%v, readOnly:%v", v.Id, v.dir, v.Collection, v.dataFile, v.nm, v.readOnly)
}
func (v *Volume) FileName() (fileName string) {
if v.Collection == "" {
fileName = path.Join(v.dir, v.Id.String())
} else {
fileName = path.Join(v.dir, v.Collection+"_"+v.Id.String())
}
return
}
func (v *Volume) DataFile() *os.File {
return v.dataFile
}
func (v *Volume) Version() Version {
return v.SuperBlock.Version()
}
func (v *Volume) Size() int64 {
stat, e := v.dataFile.Stat()
if e == nil {
return stat.Size()
}
glog.V(0).Infof("Failed to read file size %s %v", v.dataFile.Name(), e)
return -1
}
// Close cleanly shuts down this volume
func (v *Volume) Close() {
v.dataFileAccessLock.Lock()
defer v.dataFileAccessLock.Unlock()
v.nm.Close()
_ = v.dataFile.Close()
}
func (v *Volume) NeedToReplicate() bool {
return v.ReplicaPlacement.GetCopyCount() > 1
}
func (v *Volume) ContentSize() uint64 {
return v.nm.ContentSize()<|fim▁hole|>// volume is expired if modified time + volume ttl < now
// except when volume is empty
// or when the volume does not have a ttl
// or when volumeSizeLimit is 0 when server just starts
func (v *Volume) expired(volumeSizeLimit uint64) bool {
if volumeSizeLimit == 0 {
//skip if we don't know size limit
return false
}
if v.ContentSize() == 0 {
return false
}
if v.Ttl == nil || v.Ttl.Minutes() == 0 {
return false
}
glog.V(1).Infof("now:%v lastModified:%v", time.Now().Unix(), v.lastModifiedTime)
livedMinutes := (time.Now().Unix() - int64(v.lastModifiedTime)) / 60
glog.V(1).Infof("ttl:%v lived:%v", v.Ttl, livedMinutes)
	return int64(v.Ttl.Minutes()) < livedMinutes
}
// wait either maxDelayMinutes or 10% of ttl minutes
func (v *Volume) exiredLongEnough(maxDelayMinutes uint32) bool {
if v.Ttl == nil || v.Ttl.Minutes() == 0 {
return false
}
removalDelay := v.Ttl.Minutes() / 10
if removalDelay > maxDelayMinutes {
removalDelay = maxDelayMinutes
}
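	// Worked example: a 600-minute TTL yields a 60-minute removal delay (10%),
	// unless maxDelayMinutes caps it at a smaller value.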
	return uint64(v.Ttl.Minutes()+removalDelay)*60+v.lastModifiedTime < uint64(time.Now().Unix())
}<|fim▁end|> | }
|
<|file_name|>bpf.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2014, 2015 Robert Clipsham <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate libc;
use std::collections::VecDeque;
use std::cmp;
use std::ffi::CString;
use std::io;
use std::iter::repeat;
use std::mem;
use std::sync::Arc;
use bindings::bpf;
use packet::Packet;
use packet::ethernet::{EthernetPacket, MutableEthernetPacket};
use datalink::DataLinkChannelType;
use datalink::DataLinkChannelType::{Layer2, Layer3};
use internal;
use util::NetworkInterface;
// NOTE buffer must be word aligned.
pub fn datalink_channel(network_interface: &NetworkInterface,
write_buffer_size: usize,
read_buffer_size: usize,
channel_type: DataLinkChannelType)
-> io::Result<(DataLinkSenderImpl, DataLinkReceiverImpl)> {
#[cfg(target_os = "freebsd")]
fn get_fd() -> libc::c_int {
unsafe {
libc::open(CString::new(&b"/dev/bpf"[..]).unwrap().as_ptr(), libc::O_RDWR, 0)
}
}
#[cfg(target_os = "macos")]
fn get_fd() -> libc::c_int {
// FIXME This is an arbitrary number of attempts
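        // Each /dev/bpfN node accepts only a single open descriptor at a time,
        // so probe successive devices until one opens successfully.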
for i in (0..1_000isize) {
let fd = unsafe {
let file_name = format!("/dev/bpf{}", i);
libc::open(CString::new(file_name.as_bytes()).unwrap().as_ptr(), libc::O_RDWR, 0)
};
if fd != -1 {
return fd;
}
}
return -1;
}
#[cfg(target_os = "freebsd")]
fn set_feedback(fd: libc::c_int) -> io::Result<()> {
let one: libc::c_uint = 1;
if unsafe { bpf::ioctl(fd, bpf::BIOCFEEDBACK, &one) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
Ok(())
}
#[cfg(target_os = "macos")]
fn set_feedback(_fd: libc::c_int) -> io::Result<()> {
Ok(())
}
match channel_type {
Layer2 => (),
Layer3(_) => unimplemented!(),
}
let fd = get_fd();
if fd == -1 {
return Err(io::Error::last_os_error());
}
let mut iface: bpf::ifreq = unsafe { mem::zeroed() };
let mut i = 0;
for c in network_interface.name.bytes() {
iface.ifr_name[i] = c as i8;
i += 1;
}
let buflen = read_buffer_size as libc::c_uint;
// NOTE Buffer length must be set before binding to an interface
// otherwise this will return Invalid Argument
if unsafe { bpf::ioctl(fd, bpf::BIOCSBLEN, &buflen) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
// Set the interface to use
if unsafe { bpf::ioctl(fd, bpf::BIOCSETIF, &iface) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
// Return from read as soon as packets are available - don't wait to fill the buffer
let one: libc::c_uint = 1;
if unsafe { bpf::ioctl(fd, bpf::BIOCIMMEDIATE, &one) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
let mut header_size = 0;
// Get the device type
let mut dlt: libc::c_uint = 0;
if unsafe { bpf::ioctl(fd, bpf::BIOCGDLT, &mut dlt) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
// The loopback device does weird things
// FIXME This should really just be another L2 packet header type
if dlt == bpf::DLT_NULL {
header_size = 4;
// Allow packets to be read back after they are written
match set_feedback(fd) {
Err(e) => return Err(e),
_ => ()
}
} else {
// Don't fill in source MAC
if unsafe { bpf::ioctl(fd, bpf::BIOCSHDRCMPLT, &one) } == -1 {
let err = io::Error::last_os_error();
unsafe { libc::close(fd); }
return Err(err);
}
}
let fd = Arc::new(internal::FileDesc { fd: fd });
let sender = DataLinkSenderImpl {
fd: fd.clone(),
write_buffer: repeat(0u8).take(write_buffer_size).collect(),
header_size: header_size,
};
let receiver = DataLinkReceiverImpl {
fd: fd,
read_buffer: repeat(0u8).take(read_buffer_size).collect(),
header_size: header_size,
};
Ok((sender, receiver))
}
pub struct DataLinkSenderImpl {
fd: Arc<internal::FileDesc>,
write_buffer: Vec<u8>,
header_size: usize,
}
impl DataLinkSenderImpl {
pub fn build_and_send<F>(&mut self, num_packets: usize, packet_size: usize,
func: &mut F) -> Option<io::Result<()>>
where F : FnMut(MutableEthernetPacket)
{
let len = num_packets * (packet_size + self.header_size);
if len >= self.write_buffer.len() {
None
} else {
let min = cmp::min(self.write_buffer.len(), len);
for chunk in self.write_buffer[..min]
.chunks_mut(packet_size + self.header_size) {
// If we're sending on the loopback device, the first 4 bytes must be set to
// AF_INET
if self.header_size == 4 {
unsafe {
*(chunk.as_mut_ptr() as *mut u32) = libc::AF_INET as u32;
}
}
{
let eh = MutableEthernetPacket::new(&mut chunk[self.header_size..]).unwrap();
func(eh);
}
match unsafe { libc::write(self.fd.fd,
chunk.as_ptr() as *const libc::c_void,
chunk.len() as libc::size_t) } {
len if len == -1 => return Some(Err(io::Error::last_os_error())),
_ => ()
}
}
Some(Ok(()))
}
}
pub fn send_to(&mut self, packet: &EthernetPacket, _dst: Option<NetworkInterface>)
-> Option<io::Result<()>> {
match unsafe { libc::write(self.fd.fd,
packet.packet().as_ptr() as *const libc::c_void,
packet.packet().len() as libc::size_t) } {
len if len == -1 => Some(Err(io::Error::last_os_error())),
_ => Some(Ok(()))
}
}
}
pub struct DataLinkReceiverImpl {
fd: Arc<internal::FileDesc>,
read_buffer: Vec<u8>,<|fim▁hole|>
impl DataLinkReceiverImpl {
pub fn iter<'a>(&'a mut self) -> DataLinkChannelIteratorImpl<'a> {
let buflen = self.read_buffer.len();
DataLinkChannelIteratorImpl {
pc: self,
// Enough room for minimally sized packets without reallocating
packets: VecDeque::with_capacity(buflen / 64)
}
}
}
pub struct DataLinkChannelIteratorImpl<'a> {
pc: &'a mut DataLinkReceiverImpl,
packets: VecDeque<(usize, usize)>,
}
impl<'a> DataLinkChannelIteratorImpl<'a> {
pub fn next<'c>(&'c mut self) -> io::Result<EthernetPacket<'c>> {
if self.packets.is_empty() {
let buflen = match unsafe {
libc::read(self.pc.fd.fd,
self.pc.read_buffer.as_ptr() as *mut libc::c_void,
self.pc.read_buffer.len() as libc::size_t)
} {
len if len > 0 => len,
_ => return Err(io::Error::last_os_error())
};
let mut ptr = self.pc.read_buffer.as_mut_ptr();
let end = unsafe { self.pc.read_buffer.as_ptr().offset(buflen as isize) };
while (ptr as *const u8) < end {
unsafe {
let packet: *const bpf::bpf_hdr = mem::transmute(ptr);
let start = ptr as isize +
(*packet).bh_hdrlen as isize -
self.pc.read_buffer.as_ptr() as isize;
self.packets.push_back((start as usize + self.pc.header_size,
(*packet).bh_caplen as usize - self.pc.header_size));
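                    // bh_hdrlen + bh_caplen is the raw record length; BPF pads each
                    // record so that the next header starts on a word boundary.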
let offset = (*packet).bh_hdrlen as isize + (*packet).bh_caplen as isize;
ptr = ptr.offset(bpf::BPF_WORDALIGN(offset));
}
}
}
let (start, len) = self.packets.pop_front().unwrap();
Ok(EthernetPacket::new(&self.pc.read_buffer[start .. start + len]).unwrap())
}
}<|fim▁end|> | header_size: usize,
} |
<|file_name|>downloadState.js<|end_file_name|><|fim▁begin|>export default class State {
constructor($rootScope) {
this.$rootScope = $rootScope;
this.state = [];
}
setData(data) {
console.log('state set data', data)
this.state = data;
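        // The update typically arrives from outside Angular's digest cycle
        // (e.g. a Node callback), so $apply() propagates it to the bindings.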
this.$rootScope.$apply();
}
getData() {<|fim▁hole|>}<|fim▁end|> | //console.log('state get data', state)
return this.state;
} |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>"use strict";
let datafire = require('datafire');<|fim▁hole|>module.exports = datafire.Integration.fromOpenAPI(openapi, "azure_sql_failoverdatabases");<|fim▁end|> | let openapi = require('./openapi.json'); |
<|file_name|>test_init.py<|end_file_name|><|fim▁begin|>"""Tests for the SmartThings component init module."""
from uuid import uuid4
from aiohttp import ClientConnectionError, ClientResponseError
from asynctest import Mock, patch
from pysmartthings import InstalledAppStatus, OAuthToken
import pytest
from homeassistant.components import cloud, smartthings
from homeassistant.components.smartthings.const import (
CONF_CLOUDHOOK_URL, CONF_INSTALLED_APP_ID, CONF_REFRESH_TOKEN,
DATA_BROKERS, DOMAIN, EVENT_BUTTON, SIGNAL_SMARTTHINGS_UPDATE,
SUPPORTED_PLATFORMS)
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from tests.common import MockConfigEntry
async def test_migration_creates_new_flow(
hass, smartthings_mock, config_entry):
"""Test migration deletes app and creates new flow."""
config_entry.version = 1
config_entry.add_to_hass(hass)
await smartthings.async_migrate_entry(hass, config_entry)
await hass.async_block_till_done()
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
assert not hass.config_entries.async_entries(DOMAIN)
flows = hass.config_entries.flow.async_progress()
assert len(flows) == 1
assert flows[0]['handler'] == 'smartthings'
assert flows[0]['context'] == {'source': 'import'}
async def test_unrecoverable_api_errors_create_new_flow(
hass, config_entry, smartthings_mock):
"""
Test a new config flow is initiated when there are API errors.
401 (unauthorized): Occurs when the access token is no longer valid.
403 (forbidden/not found): Occurs when the app or installed app could
not be retrieved/found (likely deleted?)
"""
config_entry.add_to_hass(hass)
smartthings_mock.app.side_effect = \
ClientResponseError(None, None, status=401)
# Assert setup returns false
result = await smartthings.async_setup_entry(hass, config_entry)
assert not result
# Assert entry was removed and new flow created
await hass.async_block_till_done()
assert not hass.config_entries.async_entries(DOMAIN)
flows = hass.config_entries.flow.async_progress()
assert len(flows) == 1
assert flows[0]['handler'] == 'smartthings'
assert flows[0]['context'] == {'source': 'import'}
hass.config_entries.flow.async_abort(flows[0]['flow_id'])
async def test_recoverable_api_errors_raise_not_ready(
hass, config_entry, smartthings_mock):
"""Test config entry not ready raised for recoverable API errors."""
config_entry.add_to_hass(hass)
smartthings_mock.app.side_effect = \
ClientResponseError(None, None, status=500)
with pytest.raises(ConfigEntryNotReady):
await smartthings.async_setup_entry(hass, config_entry)
async def test_scenes_api_errors_raise_not_ready(
hass, config_entry, app, installed_app, smartthings_mock):
"""Test if scenes are unauthorized we continue to load platforms."""
config_entry.add_to_hass(hass)
smartthings_mock.app.return_value = app
smartthings_mock.installed_app.return_value = installed_app
smartthings_mock.scenes.side_effect = \
ClientResponseError(None, None, status=500)
with pytest.raises(ConfigEntryNotReady):
await smartthings.async_setup_entry(hass, config_entry)
async def test_connection_errors_raise_not_ready(
hass, config_entry, smartthings_mock):
"""Test config entry not ready raised for connection errors."""
config_entry.add_to_hass(hass)
smartthings_mock.app.side_effect = ClientConnectionError()
with pytest.raises(ConfigEntryNotReady):
await smartthings.async_setup_entry(hass, config_entry)
<|fim▁hole|> hass, config_entry, app, smartthings_mock):
"""Test base_url no longer valid creates a new flow."""
hass.config.api.base_url = 'http://0.0.0.0'
config_entry.add_to_hass(hass)
smartthings_mock.app.return_value = app
# Assert setup returns false
result = await smartthings.async_setup_entry(hass, config_entry)
assert not result
async def test_unauthorized_installed_app_raises_not_ready(
hass, config_entry, app, installed_app,
smartthings_mock):
"""Test config entry not ready raised when the app isn't authorized."""
config_entry.add_to_hass(hass)
installed_app.installed_app_status = InstalledAppStatus.PENDING
smartthings_mock.app.return_value = app
smartthings_mock.installed_app.return_value = installed_app
with pytest.raises(ConfigEntryNotReady):
await smartthings.async_setup_entry(hass, config_entry)
async def test_scenes_unauthorized_loads_platforms(
hass, config_entry, app, installed_app,
device, smartthings_mock, subscription_factory):
"""Test if scenes are unauthorized we continue to load platforms."""
config_entry.add_to_hass(hass)
smartthings_mock.app.return_value = app
smartthings_mock.installed_app.return_value = installed_app
smartthings_mock.devices.return_value = [device]
smartthings_mock.scenes.side_effect = \
ClientResponseError(None, None, status=403)
mock_token = Mock()
mock_token.access_token.return_value = str(uuid4())
mock_token.refresh_token.return_value = str(uuid4())
smartthings_mock.generate_tokens.return_value = mock_token
subscriptions = [subscription_factory(capability)
for capability in device.capabilities]
smartthings_mock.subscriptions.return_value = subscriptions
with patch.object(hass.config_entries,
'async_forward_entry_setup') as forward_mock:
assert await smartthings.async_setup_entry(hass, config_entry)
# Assert platforms loaded
await hass.async_block_till_done()
assert forward_mock.call_count == len(SUPPORTED_PLATFORMS)
async def test_config_entry_loads_platforms(
hass, config_entry, app, installed_app,
device, smartthings_mock, subscription_factory, scene):
"""Test config entry loads properly and proxies to platforms."""
config_entry.add_to_hass(hass)
smartthings_mock.app.return_value = app
smartthings_mock.installed_app.return_value = installed_app
smartthings_mock.devices.return_value = [device]
smartthings_mock.scenes.return_value = [scene]
mock_token = Mock()
mock_token.access_token.return_value = str(uuid4())
mock_token.refresh_token.return_value = str(uuid4())
smartthings_mock.generate_tokens.return_value = mock_token
subscriptions = [subscription_factory(capability)
for capability in device.capabilities]
smartthings_mock.subscriptions.return_value = subscriptions
with patch.object(hass.config_entries,
'async_forward_entry_setup') as forward_mock:
assert await smartthings.async_setup_entry(hass, config_entry)
# Assert platforms loaded
await hass.async_block_till_done()
assert forward_mock.call_count == len(SUPPORTED_PLATFORMS)
async def test_config_entry_loads_unconnected_cloud(
hass, config_entry, app, installed_app,
device, smartthings_mock, subscription_factory, scene):
"""Test entry loads during startup when cloud isn't connected."""
config_entry.add_to_hass(hass)
hass.data[DOMAIN][CONF_CLOUDHOOK_URL] = "https://test.cloud"
hass.config.api.base_url = 'http://0.0.0.0'
smartthings_mock.app.return_value = app
smartthings_mock.installed_app.return_value = installed_app
smartthings_mock.devices.return_value = [device]
smartthings_mock.scenes.return_value = [scene]
mock_token = Mock()
mock_token.access_token.return_value = str(uuid4())
mock_token.refresh_token.return_value = str(uuid4())
smartthings_mock.generate_tokens.return_value = mock_token
subscriptions = [subscription_factory(capability)
for capability in device.capabilities]
smartthings_mock.subscriptions.return_value = subscriptions
with patch.object(
hass.config_entries, 'async_forward_entry_setup') as forward_mock:
assert await smartthings.async_setup_entry(hass, config_entry)
await hass.async_block_till_done()
assert forward_mock.call_count == len(SUPPORTED_PLATFORMS)
async def test_unload_entry(hass, config_entry):
"""Test entries are unloaded correctly."""
connect_disconnect = Mock()
smart_app = Mock()
smart_app.connect_event.return_value = connect_disconnect
broker = smartthings.DeviceBroker(
hass, config_entry, Mock(), smart_app, [], [])
broker.connect()
hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id] = broker
with patch.object(hass.config_entries, 'async_forward_entry_unload',
return_value=True) as forward_mock:
assert await smartthings.async_unload_entry(hass, config_entry)
assert connect_disconnect.call_count == 1
assert config_entry.entry_id not in hass.data[DOMAIN][DATA_BROKERS]
# Assert platforms unloaded
await hass.async_block_till_done()
assert forward_mock.call_count == len(SUPPORTED_PLATFORMS)
async def test_remove_entry(hass, config_entry, smartthings_mock):
"""Test that the installed app and app are removed up."""
# Act
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
async def test_remove_entry_cloudhook(hass, config_entry, smartthings_mock):
"""Test that the installed app, app, and cloudhook are removed up."""
# Arrange
config_entry.add_to_hass(hass)
hass.data[DOMAIN][CONF_CLOUDHOOK_URL] = "https://test.cloud"
# Act
with patch.object(cloud, 'async_is_logged_in',
return_value=True) as mock_async_is_logged_in, \
patch.object(cloud, 'async_delete_cloudhook') \
as mock_async_delete_cloudhook:
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
assert mock_async_is_logged_in.call_count == 1
assert mock_async_delete_cloudhook.call_count == 1
async def test_remove_entry_app_in_use(hass, config_entry, smartthings_mock):
"""Test app is not removed if in use by another config entry."""
# Arrange
config_entry.add_to_hass(hass)
data = config_entry.data.copy()
data[CONF_INSTALLED_APP_ID] = str(uuid4())
entry2 = MockConfigEntry(version=2, domain=DOMAIN, data=data)
entry2.add_to_hass(hass)
# Act
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 0
async def test_remove_entry_already_deleted(
hass, config_entry, smartthings_mock):
"""Test handles when the apps have already been removed."""
# Arrange
smartthings_mock.delete_installed_app.side_effect = ClientResponseError(
None, None, status=403)
smartthings_mock.delete_app.side_effect = ClientResponseError(
None, None, status=403)
# Act
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
async def test_remove_entry_installedapp_api_error(
hass, config_entry, smartthings_mock):
"""Test raises exceptions removing the installed app."""
# Arrange
smartthings_mock.delete_installed_app.side_effect = \
ClientResponseError(None, None, status=500)
# Act
with pytest.raises(ClientResponseError):
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 0
async def test_remove_entry_installedapp_unknown_error(
hass, config_entry, smartthings_mock):
"""Test raises exceptions removing the installed app."""
# Arrange
smartthings_mock.delete_installed_app.side_effect = Exception
# Act
with pytest.raises(Exception):
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 0
async def test_remove_entry_app_api_error(
hass, config_entry, smartthings_mock):
"""Test raises exceptions removing the app."""
# Arrange
smartthings_mock.delete_app.side_effect = \
ClientResponseError(None, None, status=500)
# Act
with pytest.raises(ClientResponseError):
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
async def test_remove_entry_app_unknown_error(
hass, config_entry, smartthings_mock):
"""Test raises exceptions removing the app."""
# Arrange
smartthings_mock.delete_app.side_effect = Exception
# Act
with pytest.raises(Exception):
await smartthings.async_remove_entry(hass, config_entry)
# Assert
assert smartthings_mock.delete_installed_app.call_count == 1
assert smartthings_mock.delete_app.call_count == 1
async def test_broker_regenerates_token(
hass, config_entry):
"""Test the device broker regenerates the refresh token."""
token = Mock(OAuthToken)
token.refresh_token = str(uuid4())
stored_action = None
def async_track_time_interval(hass, action, interval):
nonlocal stored_action
stored_action = action
with patch('homeassistant.components.smartthings'
'.async_track_time_interval',
new=async_track_time_interval):
broker = smartthings.DeviceBroker(
hass, config_entry, token, Mock(), [], [])
broker.connect()
assert stored_action
await stored_action(None) # pylint:disable=not-callable
assert token.refresh.call_count == 1
assert config_entry.data[CONF_REFRESH_TOKEN] == token.refresh_token
async def test_event_handler_dispatches_updated_devices(
hass, config_entry, device_factory, event_request_factory,
event_factory):
"""Test the event handler dispatches updated devices."""
devices = [
device_factory('Bedroom 1 Switch', ['switch']),
device_factory('Bathroom 1', ['switch']),
device_factory('Sensor', ['motionSensor']),
device_factory('Lock', ['lock'])
]
device_ids = [devices[0].device_id, devices[1].device_id,
devices[2].device_id, devices[3].device_id]
event = event_factory(devices[3].device_id, capability='lock',
attribute='lock', value='locked',
data={'codeId': '1'})
request = event_request_factory(device_ids=device_ids, events=[event])
config_entry.data[CONF_INSTALLED_APP_ID] = request.installed_app_id
called = False
def signal(ids):
nonlocal called
called = True
assert device_ids == ids
async_dispatcher_connect(hass, SIGNAL_SMARTTHINGS_UPDATE, signal)
broker = smartthings.DeviceBroker(
hass, config_entry, Mock(), Mock(), devices, [])
broker.connect()
# pylint:disable=protected-access
await broker._event_handler(request, None, None)
await hass.async_block_till_done()
assert called
for device in devices:
assert device.status.values['Updated'] == 'Value'
assert devices[3].status.attributes['lock'].value == 'locked'
assert devices[3].status.attributes['lock'].data == {'codeId': '1'}
async def test_event_handler_ignores_other_installed_app(
hass, config_entry, device_factory, event_request_factory):
"""Test the event handler dispatches updated devices."""
device = device_factory('Bedroom 1 Switch', ['switch'])
request = event_request_factory([device.device_id])
called = False
def signal(ids):
nonlocal called
called = True
async_dispatcher_connect(hass, SIGNAL_SMARTTHINGS_UPDATE, signal)
broker = smartthings.DeviceBroker(
hass, config_entry, Mock(), Mock(), [device], [])
broker.connect()
# pylint:disable=protected-access
await broker._event_handler(request, None, None)
await hass.async_block_till_done()
assert not called
async def test_event_handler_fires_button_events(
hass, config_entry, device_factory, event_factory,
event_request_factory):
"""Test the event handler fires button events."""
device = device_factory('Button 1', ['button'])
event = event_factory(device.device_id, capability='button',
attribute='button', value='pushed')
request = event_request_factory(events=[event])
config_entry.data[CONF_INSTALLED_APP_ID] = request.installed_app_id
called = False
def handler(evt):
nonlocal called
called = True
assert evt.data == {
'component_id': 'main',
'device_id': device.device_id,
'location_id': event.location_id,
'value': 'pushed',
'name': device.label,
'data': None
}
hass.bus.async_listen(EVENT_BUTTON, handler)
broker = smartthings.DeviceBroker(
hass, config_entry, Mock(), Mock(), [device], [])
broker.connect()
# pylint:disable=protected-access
await broker._event_handler(request, None, None)
await hass.async_block_till_done()
assert called<|fim▁end|> | async def test_base_url_no_longer_https_does_not_load( |
<|file_name|>reflector.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The `Reflector` struct.
use dom::bindings::conversions::DerivedFrom;
use dom::bindings::root::DomRoot;
use dom::globalscope::GlobalScope;
use js::jsapi::{HandleObject, JSContext, JSObject, Heap};
use std::default::Default;
/// Create the reflector for a new DOM object and yield ownership to the
/// reflector.
pub fn reflect_dom_object<T, U>(
obj: Box<T>,
global: &U,
wrap_fn: unsafe fn(*mut JSContext, &GlobalScope, Box<T>) -> DomRoot<T>)
-> DomRoot<T>
where T: DomObject, U: DerivedFrom<GlobalScope>
{
let global_scope = global.upcast();
unsafe {
wrap_fn(global_scope.get_cx(), global_scope, obj)
}
}
/// A struct to store a reference to the reflector of a DOM object.
#[allow(unrooted_must_root)]
#[derive(MallocSizeOf)]
#[must_root]
// If you're renaming or moving this field, update the path in plugins::reflector as well
pub struct Reflector {
#[ignore_malloc_size_of = "defined and measured in rust-mozjs"]
object: Heap<*mut JSObject>,
}
#[allow(unrooted_must_root)]
impl PartialEq for Reflector {
fn eq(&self, other: &Reflector) -> bool {
self.object.get() == other.object.get()
}
}
impl Reflector {
/// Get the reflector.
#[inline]
pub fn get_jsobject(&self) -> HandleObject {
// We're rooted, so it's safe to hand out a handle to object in Heap
unsafe { self.object.handle() }
}<|fim▁hole|> assert!(self.object.get().is_null());
assert!(!object.is_null());
self.object.set(object);
}
/// Return a pointer to the memory location at which the JS reflector
/// object is stored. Used to root the reflector, as
/// required by the JSAPI rooting APIs.
pub fn rootable(&self) -> &Heap<*mut JSObject> {
&self.object
}
/// Create an uninitialized `Reflector`.
pub fn new() -> Reflector {
Reflector {
object: Heap::default(),
}
}
}
/// A trait to provide access to the `Reflector` for a DOM object.
pub trait DomObject: 'static {
/// Returns the receiver's reflector.
fn reflector(&self) -> &Reflector;
/// Returns the global scope of the realm that the DomObject was created in.
fn global(&self) -> DomRoot<GlobalScope> where Self: Sized {
GlobalScope::from_reflector(self)
}
}
impl DomObject for Reflector {
fn reflector(&self) -> &Self {
self
}
}
/// A trait to initialize the `Reflector` for a DOM object.
pub trait MutDomObject: DomObject {
/// Initializes the Reflector
fn init_reflector(&mut self, obj: *mut JSObject);
}
impl MutDomObject for Reflector {
fn init_reflector(&mut self, obj: *mut JSObject) {
self.set_jsobject(obj)
}
}<|fim▁end|> |
/// Initialize the reflector. (May be called only once.)
pub fn set_jsobject(&mut self, object: *mut JSObject) { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod types;
use bincode::serde::serialize_into;
use bincode::SizeLimit;
use std::net::TcpStream;
/// Send a message to the connected partner over the stream.
pub fn send(stream: &mut TcpStream, msg: types::MessageType) {
serialize_into(
stream,<|fim▁hole|> SizeLimit::Infinite
).unwrap();
}<|fim▁end|> | &msg, |
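// A minimal usage sketch (illustrative only; `MessageType::Ping` is a
// hypothetical variant, not defined in this module):
//
//     let mut stream = TcpStream::connect("127.0.0.1:4000").unwrap();
//     send(&mut stream, types::MessageType::Ping);
//
// Note that `SizeLimit::Infinite` places no bound on the encoded size, so
// callers are trusted not to serialize unbounded messages onto the socket.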
<|file_name|>preprocess_test.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Hate Crime Table 1."""
import os
import sys
import unittest
import tempfile
import json
import pandas as pd
from . import preprocess
_SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(_SCRIPT_PATH, '..')) # for utils
import utils
_YEAR_INDEX = 0
_OUTPUT_COLUMNS = ['Year', 'StatVar', 'Quantity']
class HateCrimeTable1Test(unittest.TestCase):
def test_csv(self):
csv_files = []
test_config = {
'type': 'xls',
'path': 'testdata/2019.xls',
'args': {
'header': 3,
'skipfooter': 3
}
}
with tempfile.TemporaryDirectory() as tmp_dir:
xls_file_path = os.path.join(_SCRIPT_PATH, test_config['path'])
csv_file_path = os.path.join(tmp_dir, '2019.csv')<|fim▁hole|>
read_file = pd.read_excel(xls_file_path, **test_config['args'])
read_file = preprocess._clean_dataframe(read_file)
read_file.insert(_YEAR_INDEX, 'Year', '2019')
read_file.to_csv(csv_file_path, index=None, header=True)
csv_files.append(csv_file_path)
config_path = os.path.join(_SCRIPT_PATH, 'config.json')
with open(config_path, 'r', encoding='utf-8') as f:
config = json.load(f)
cleaned_csv_path = os.path.join(tmp_dir, 'cleaned.csv')
utils.create_csv_mcf(csv_files, cleaned_csv_path, config,
_OUTPUT_COLUMNS, preprocess._write_output_csv)
with open(cleaned_csv_path, 'r', encoding='utf-8') as f_result:
test_result = f_result.read()
expected_csv_path = os.path.join(_SCRIPT_PATH, 'testdata',
'expected.csv')
with open(expected_csv_path, 'r',
encoding='utf-8') as f_expected:
expected_result = f_expected.read()
self.assertEqual(test_result, expected_result)<|fim▁end|> | |
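# To run this test standalone (illustrative; the exact invocation depends on
# how the surrounding package is laid out, since the module uses relative
# imports):
#
#   python -m pytest preprocess_test.py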
<|file_name|>recline.js<|end_file_name|><|fim▁begin|>/**
* Drupal-specific JS helper functions and utils. Not to be confused with the
* Recline library, which should live in your libraries directory.
*/
;(function ($) {
// Constants.
var MAX_LABEL_WIDTH = 77;
var LABEL_MARGIN = 5;
// Undefined variables.
var dataset, views, datasetOptions, fileSize, fileType, router;
var dataExplorerSettings, state, $explorer, dataExplorer, maxSizePreview;
var datastoreStatus;
// Create drupal behavior
Drupal.behaviors.Recline = {
attach: function (context) {
$explorer = $('.data-explorer');
// Local scoped variables.
Drupal.settings.recline = Drupal.settings.recline || {};
fileSize = Drupal.settings.recline.fileSize;
fileType = Drupal.settings.recline.fileType;
maxSizePreview = Drupal.settings.recline.maxSizePreview;
datastoreStatus = Drupal.settings.recline.datastoreStatus;
dataExplorerSettings = {
grid: Drupal.settings.recline.grid,
graph: Drupal.settings.recline.graph,
map: Drupal.settings.recline.map
};
// This is the very basic state collection.
state = recline.View.parseQueryString(decodeURIComponent(window.location.hash));
if ('#map' in state) {
state.currentView = 'map';
} else if ('#graph' in state) {
state.currentView = 'graph';
}
// Init the explorer.
init();
    // Attach toggle event.
$('.recline-embed a.embed-link').on('click', function(){
$(this).parents('.recline-embed').find('.embed-code-wrapper').toggle();
return false;
});
}
}
// make Explorer creation / initialization in a function so we can call it
// again and again
function createExplorer (dataset, state, settings) {
// Remove existing data explorer view.
dataExplorer && dataExplorer.remove();
var $el = $('<div />');
$el.appendTo($explorer);
var views = [];
if (settings.grid) {
views.push({
id: 'grid',
label: 'Grid',
view: new recline.View.SlickGrid({
model: dataset
})
});
}
if (settings.graph) {
state.graphOptions = {
xaxis: {
tickFormatter: tickFormatter(dataset),
},
hooks:{
processOffset: [processOffset(dataset)],
bindEvents: [bindEvents],
}
};
views.push({
id: 'graph',
label: 'Graph',
view: new recline.View.Graph({
model: dataset,
state: state
})
});
}
if (settings.map) {
views.push({
id: 'map',
label: 'Map',
view: new recline.View.Map({
model: dataset,
options: {
mapTilesURL: '//stamen-tiles-{s}.a.ssl.fastly.net/terrain/{z}/{x}/{y}.png',
}
})
});
}
// Multiview settings
var multiviewOptions = {
model: dataset,
el: $el,
state: state,
views: views
};
// Getting base embed url.
var urlBaseEmbed = $('.embed-code').text();
var iframeOptions = {src: urlBaseEmbed, width:850, height:400};
// Attaching router to dataexplorer state.
dataExplorer = new recline.View.MultiView(multiviewOptions);
router = new recline.DeepLink.Router(dataExplorer);
// Adding router listeners.
var changeEmbedCode = getEmbedCode(iframeOptions);
router.on('init', changeEmbedCode);
router.on('stateChange', changeEmbedCode);
// Add map dependency just for map views.
_.each(dataExplorer.pageViews, function(item, index){
if(item.id && item.id === 'map'){
var map = dataExplorer.pageViews[index].view.map;
router.addDependency(new recline.DeepLink.Deps.Map(map, router));
}
});
    // Start to track state changes.
router.start();
$.event.trigger('createDataExplorer');
return views;
}
// Returns the dataset configuration.
function getDatasetOptions () {
var datasetOptions = {};
var delimiter = Drupal.settings.recline.delimiter;
var file = Drupal.settings.recline.file;
var uuid = Drupal.settings.recline.uuid;
// Get correct file location, make sure not local
file = (getOrigin(window.location) !== getOrigin(file)) ? '/node/' + Drupal.settings.recline.uuid + '/data' : file;
// Select the backend to use
switch(getBackend(datastoreStatus, fileType)) {
case 'csv':
datasetOptions = {
backend: 'csv',
url: file,
delimiter: delimiter
};
break;
case 'tsv':
datasetOptions = {
backend: 'csv',
url: file,
delimiter: delimiter
};
break;
case 'txt':
datasetOptions = {
backend: 'csv',
url: file,
delimiter: delimiter<|fim▁hole|> datasetOptions = {
endpoint: 'api',
id: uuid,
backend: 'ckan'
};
break;
case 'xls':
datasetOptions = {
backend: 'xls',
url: file
};
break;
case 'dataproxy':
datasetOptions = {
url: file,
backend: 'dataproxy'
};
break;
default:
showError('File type ' + fileType + ' not supported for preview.');
break;
}
return datasetOptions;
}
// Correct for fact that IE does not provide .origin
function getOrigin(u) {
var url = parseURL(u);
return url.protocol + '//' + url.hostname + (url.port ? (':' + url.port) : '');
}
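  // Illustrative examples (hypothetical URLs):
  //   getOrigin('https://example.com:8080/a/b?q=1') -> 'https://example.com:8080'
  //   getOrigin('http://example.com/a/b') -> 'http://example.com'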
// Parse a simple URL string to get its properties
function parseURL(url) {
var parser = document.createElement('a');
parser.href = url;
return {
protocol: parser.protocol,
hostname: parser.hostname,
port: parser.port,
pathname: parser.pathname,
search: parser.search,
hash: parser.hash,
host: parser.host
}
}
  // Retrieve a backend given a file type and a datastore status.
function getBackend (datastoreStatus, fileType) {
// If it's inside the datastore then we use the dkan API
if (datastoreStatus) return 'ckan';
var formats = {
'csv': ['text/csv', 'csv'],
'xls': ['application/vnd.ms-excel', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'],
'tsv': ['text/tab-separated-values', 'text/tsv', 'tsv', 'tab'],
'txt': ['text/plain', 'txt'],
};
var backend = _.findKey(formats, function(format) { return _.include(format, fileType) });
// If the backend is a txt but the delimiter is not a tab, we don't need
// to show it using the backend.
if (Drupal.settings.recline.delimiter !== "\t" && backend === 'txt') {return '';}
// If the backend is an xls but the browser version is prior 9 then
// we need to fallback to dataproxy
if (backend === 'xls' && document.documentMode < 9) return 'dataproxy';
return backend;
}
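  // Illustrative examples of the selection above (hypothetical inputs):
  //   getBackend(true, 'text/csv') -> 'ckan' (the datastore always wins)
  //   getBackend(false, 'text/csv') -> 'csv'
  //   getBackend(false, 'application/vnd.ms-excel') -> 'xls' ('dataproxy' on IE < 9)
  //   getBackend(false, 'txt') -> '' unless the configured delimiter is a tab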
// Displays an error retrieved from the response object.
function showRequestError (response) {
    // Actually dkan doesn't provide standardization over
    // error handling responses. For example: if you request
    // nonexistent resources it will retrieve an array with a
// message inside.
// Recline backends will return an object with an error.
try {
var ro = (typeof response === 'string') ? JSON.parse(response) : response;
if(ro.error) {
showError(ro.error.message)
} else if(ro instanceof Array) {
showError(ro[0]);
}
} catch (error) {
showError(response);
}
}
// Displays an error.
function showError (message) {
$explorer.html('<div class="messages error">' + message + '</div>');
}
// Creates the embed code.
function getEmbedCode (options){
return function(state){
var iframeOptions = _.clone(options);
var iframeTmpl = _.template('<iframe width="<%= width %>" height="<%= height %>" src="<%= src %>" frameborder="0"></iframe>');
var previewTmpl = _.template('<%= src %>');
_.extend(iframeOptions, {src: iframeOptions.src + '#' + (state.serializedState || '')});
var html = iframeTmpl(iframeOptions);
$('.embed-code').text(html);
var preview = previewTmpl(iframeOptions);
$('.preview-code').text(preview);
};
}
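  // For example (illustrative values): with src 'http://example.com/embed',
  // width 850, height 400 and a serialized state of 'view=graph', the embed
  // code becomes:
  //   <iframe width="850" height="400" src="http://example.com/embed#view=graph" frameborder="0"></iframe>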
// Creates the preview url code.
function getPreviewCode (options){
return function(state){
var previewOptions = _.clone(options);
var previewTmpl = _.template('<%= src %>');
_.extend(previewOptions, {src: previewOptions.src + '#' + (state.serializedState || '')});
var html = previewTmpl(previewOptions);
$('.preview-url').text(html);
};
}
  // Check if a chart has its axes inverted.
function isInverted (){
return dataExplorer.pageViews[1].view.state.attributes.graphType === 'bars';
}
// Computes the width of a chart.
function computeWidth (plot, labels) {
var biggerLabel = '';
for( var i = 0; i < labels.length; i++){
      if(!_.isUndefined(labels[i]) && labels[i].length > biggerLabel.length){
biggerLabel = labels[i];
}
}
var canvas = plot.getCanvas();
var ctx = canvas.getContext('2d');
    ctx.font = 'smaller sans-serif';
return ctx.measureText(biggerLabel).width;
}
// Resize a chart.
function resize (plot) {
var itemWidth = computeWidth(plot, _.pluck(plot.getXAxes()[0].ticks, 'label'));
if(!isInverted() && $('#prevent-label-overlapping').is(':checked')){
var canvasWidth = Math.min(itemWidth + LABEL_MARGIN, MAX_LABEL_WIDTH) * plot.getXAxes()[0].ticks.length;
      var canvasContainerWidth = $('.panel.graph').parent().width();
      if(canvasWidth < canvasContainerWidth){
canvasWidth = canvasContainerWith;
}
$('.panel.graph').width(canvasWidth);
$('.recline-flot').css({overflow:'auto'});
}else{
$('.recline-flot').css({overflow:'hidden'});
$('.panel.graph').css({width: '100%'});
}
plot.resize();
plot.setupGrid();
plot.draw();
}
// Bind events after chart resizes.
function bindEvents (plot, eventHolder) {
var p = plot || dataExplorer.pageViews[1].view.plot;
resize(p);
setTimeout(addCheckbox, 0);
}
// Compute the chart offset to display ticks properly.
function processOffset (dataset) {
return function(plot, offset) {
if(dataExplorer.pageViews[1].view.xvaluesAreIndex){
var series = plot.getData();
for (var i = 0; i < series.length; i++) {
var numTicks = Math.min(dataset.records.length, 200);
var ticks = [];
for (var j = 0; j < dataset.records.length; j++) {
ticks.push(parseInt(j, 10));
}
if(isInverted()){
series[i].yaxis.options.ticks = ticks;
}else{
series[i].xaxis.options.ticks = ticks;
}
}
}
};
}
  // Format ticks based on previous computations.
function tickFormatter (dataset){
return function (x) {
x = parseInt(x, 10);
try {
if(isInverted()) return x;
var field = dataExplorer.pageViews[1].view.state.get('group');
var label = dataset.records.models[x].get(field) || '';
if(!moment(String(label)).isValid() && !isNaN(parseInt(label, 10))){
label = parseInt(label, 10) - 1;
}
return label;
} catch(e) {
return x;
}
};
}
// Add checkbox to control resize behavior.
function addCheckbox () {
    var $control = $('.form-stacked:visible').find('#prevent-label-overlapping');
    if(!$control.length){
      var $form = $('.form-stacked');
      var $checkboxDiv = $('<div class="checkbox"></div>').appendTo($form);
      var $label = $('<label />', { 'for': 'prevent-label-overlapping', text: 'Resize graph to prevent label overlapping' }).appendTo($checkboxDiv);
$label.prepend($('<input />', { type: 'checkbox', id: 'prevent-label-overlapping', value: '' }));
$control = $('#prevent-label-overlapping');
$control.on('change', function(){
resize(dataExplorer.pageViews[1].view.plot);
});
}
}
// Init the multiview.
function init () {
if(fileSize < maxSizePreview || datastoreStatus) {
dataset = new recline.Model.Dataset(getDatasetOptions());
dataset.fetch().fail(showRequestError);
views = createExplorer(dataset, state, dataExplorerSettings);
views.forEach(function(view) { view.id === 'map' && view.view.redraw('refresh') });
} else {
showError('File was too large or unavailable for preview.');
}
}
})(jQuery);<|fim▁end|> | };
break;
case 'ckan': |
<|file_name|>PitchContour.java<|end_file_name|><|fim▁begin|>package org.vitrivr.cineast.core.util.audio.pitch.tracking;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.vitrivr.cineast.core.util.audio.pitch.Pitch;
/**
* This is a helper class for pitch tracking. It represents a pitch contour, that is, a candidate for a melody fragment. The contour has a fixed length and each slot in the sequence represents a specific timeframe (e.g. belonging to a FFT bin in the underlying STFT).
* <p>
* The intention behind this class is the simplification of comparison between different pitch contours, either on a frame-by-frame basis but also as an entity. In addition to the actual pitch information, the pitch contour class also provides access to pitch contour statistics related to salience and pitch frequency.
*
* @see PitchTracker
*/
public class PitchContour {
/**
   * The minimum frequency in Hz on the (artificial) cent-scale.
*/
private static final float CENT_SCALE_MINIMUM = 55.0f;
/**
* Entity that keeps track of salience related contour statistics.
*/
private SummaryStatistics salienceStatistics = new SummaryStatistics();
/**
* Entity that keeps track of frequency related contour statistics.
*/
private SummaryStatistics frequencyStatistics = new SummaryStatistics();
/**
* Sequence of pitches that form the PitchContour.
*/
private final List<Pitch> contour = new LinkedList<>();
/**
* Indicates that the PitchContour statistics require recalculation.
*/
private boolean dirty = true;
/**
* The start frame-index of the pitch-contour. Marks beginning in time.
*/
private int start;
/**
* The end frame-index of the pitch-contour. Marks ending in time.
*/
private int end;
/**
* Constructor for PitchContour.
*
* @param start Start-index of the contour.
* @param pitch Pitch that belongs to the start-index.
*/
public PitchContour(int start, Pitch pitch) {
this.start = start;
this.end = start;
this.contour.add(pitch);
}<|fim▁hole|>
/**
   * Appends a pitch to the end of the PitchContour.
*
* @param p Pitch to append.
*/
public void append(Pitch p) {
this.contour.add(p);
this.end += 1;
this.dirty = true;
}
/**
   * Prepends a pitch to the beginning of the PitchContour.
   *
   * @param p Pitch to prepend.
*/
public void prepend(Pitch p) {
this.contour.add(0, p);
this.start -= 1;
this.dirty = true;
}
/**
* Returns the pitch at the given index or null, if the index is out of bounds. Note that even if the index is within bounds, the Pitch can still be null.
*
* @param i Index for which to return a pitch.
*/
public Pitch getPitch(int i) {
if (i >= this.start && i <= this.end) {
return this.contour.get(i - this.start);
} else {
return null;
}
}
/**
* Getter for start.
*
* @return Start frame-index.
*/
public final int getStart() {
return start;
}
/**
* Getter for end.
*
* @return End frame-index.
*/
public final int getEnd() {
return end;
}
/**
* Size of the pitch-contour. This number also includes empty slots.
*
* @return Size of the contour.
*/
public final int size() {
return this.contour.size();
}
/**
* Returns the mean of all pitches in the melody.
*
* @return Pitch mean
*/
public final double pitchMean() {
if (this.dirty) {
this.calculate();
}
return this.frequencyStatistics.getMean();
}
/**
* Returns the standard-deviation of all pitches in the melody.
*
* @return Pitch standard deviation
*/
public final double pitchDeviation() {
if (this.dirty) {
this.calculate();
}
return this.frequencyStatistics.getStandardDeviation();
}
/**
* Returns the mean-salience of all pitches in the contour.
*
* @return Salience mean
*/
public final double salienceMean() {
if (this.dirty) {
this.calculate();
}
return this.salienceStatistics.getMean();
}
/**
* Returns the salience standard deviation of all pitches in the contour.
*
* @return Salience standard deviation.
*/
public final double salienceDeviation() {
if (this.dirty) {
this.calculate();
}
return this.salienceStatistics.getStandardDeviation();
}
/**
* Returns the sum of all salience values in the pitch contour.
*/
public final double salienceSum() {
if (this.dirty) {
this.calculate();
}
return this.salienceStatistics.getSum();
}
/**
* Calculates the overlap between the given pitch-contours.
*
* @return Size of the overlap between two pitch-contours.
*/
public final int overlap(PitchContour contour) {
return Math.max(0, Math.min(this.end, contour.end) - Math.max(this.start, contour.start));
}
/**
   * Determines if two PitchContours overlap and returns true or false.
   *
   * @return true if the two PitchContours overlap, false otherwise.
*/
public final boolean overlaps(PitchContour contour) {
return this.overlap(contour) > 0;
}
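  /*
   * Illustrative example (hypothetical frame indices): a contour spanning
   * frames [10, 20] and one spanning [15, 30] give overlap() == 5 and thus
   * overlaps() == true, whereas contours spanning [10, 20] and [20, 30]
   * meet in a single frame only and give overlap() == 0, i.e.
   * overlaps() == false.
   */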
/**
* Re-calculates the PitchContour statistics.
*/
private void calculate() {
this.salienceStatistics.clear();
this.frequencyStatistics.clear();
for (Pitch pitch : this.contour) {
if (pitch != null) {
this.salienceStatistics.addValue(pitch.getSalience());
this.frequencyStatistics.addValue(pitch.distanceCents(CENT_SCALE_MINIMUM));
}
}
this.dirty = false;
}
}<|fim▁end|> | |
<|file_name|>should.meta.js<|end_file_name|><|fim▁begin|>exports.name = 'should';<|fim▁hole|><|fim▁end|> | exports.category = 'bdd/tdd';
exports.homepage = 'https://npmjs.org/package/should'; |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>//// For registering Service Worker
// if ('serviceWorker' in navigator) {
// navigator.serviceWorker
// .register('./service-worker.js', { scope: './' })
// .then(function(registration) {
// console.log("Service Worker Registered");
// })
// .catch(function(err) {
// console.log("Service Worker Failed to Register", err);<|fim▁hole|>
// Image Slider
var leftarrow = $('.slider .left');
var rightarrow = $('.slider .right');
leftarrow.click(function(){
var left = $(this).siblings('.container').css('margin-left').replace('px', '');
    left = parseInt(left, 10) + 250;
if(left <= 50)
$('.container').animate({'margin-left': left},500);
});
rightarrow.click(function(){
var total = $(this).siblings('.container').children('.item').length;
var left = $(this).siblings('.container').css('margin-left').replace('px', '') - 250;
if(left >= -(total-5)*250)
$('.container').animate({'margin-left': left},500);
});
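  // Example (illustrative): each item is 250px wide, so with 8 items and 5
  // visible, the margin-left animates between 50px and -(8 - 5) * 250 = -750px.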
// Feedback Form
var arrow = $('.chat-head img');
var textarea = $('.chat-text textarea');
arrow.on('click', function(){
var src = arrow.attr('src');
$('.chat-body').slideToggle('fast');
if(src == 'asset/img/down.png'){
arrow.attr('src', 'asset/img/up.png');
}
else{
arrow.attr('src', 'asset/img/down.png');
}
});
textarea.keypress(function(event) {
var $this = $(this);
if(event.keyCode == 13){
var msg = $this.val();
if(msg != ''){
$this.val('');
$('.msg-insert').prepend("<div class='msg-send'>"+msg+"</div>");
}
    else{alert('Please enter a message before sending.');}
}
});
});<|fim▁end|> | // })
// }
$(function(){ |
<|file_name|>swipe_1.js<|end_file_name|><|fim▁begin|>/*!
* Angular Material Design
* https://github.com/angular/material
* @license MIT
* v1.1.0-rc4-master-c81f9f1
*/
goog.provide('ng.material.components.swipe');
goog.require('ng.material.core');
/**
* @ngdoc module
* @name material.components.swipe
* @description Swipe module!
*/
/**
* @ngdoc directive
* @module material.components.swipe
* @name mdSwipeLeft
*
* @restrict A
*
* @description
* The md-swipe-left directive allows you to specify custom behavior when an element is swiped
* left.
*
* @usage
* <hljs lang="html">
* <div md-swipe-left="onSwipeLeft()">Swipe me left!</div>
* </hljs>
*/
/**
* @ngdoc directive
* @module material.components.swipe
* @name mdSwipeRight
*
* @restrict A
*
* @description
* The md-swipe-right directive allows you to specify custom behavior when an element is swiped
* right.
*
* @usage
* <hljs lang="html">
* <div md-swipe-right="onSwipeRight()">Swipe me right!</div>
* </hljs>
*/
/**
* @ngdoc directive
* @module material.components.swipe
* @name mdSwipeUp
*
* @restrict A
*
* @description
* The md-swipe-up directive allows you to specify custom behavior when an element is swiped
* up.
*
* @usage
* <hljs lang="html">
* <div md-swipe-up="onSwipeUp()">Swipe me up!</div>
* </hljs>
*/
/**
* @ngdoc directive
* @module material.components.swipe
* @name mdSwipeDown
*
* @restrict A<|fim▁hole|> * @description
* The md-swipe-down directive allows you to specify custom behavior when an element is swiped
* down.
*
* @usage
* <hljs lang="html">
* <div md-swipe-down="onSwipDown()">Swipe me down!</div>
* </hljs>
*/
angular.module('material.components.swipe', ['material.core'])
.directive('mdSwipeLeft', getDirective('SwipeLeft'))
.directive('mdSwipeRight', getDirective('SwipeRight'))
.directive('mdSwipeUp', getDirective('SwipeUp'))
.directive('mdSwipeDown', getDirective('SwipeDown'));
function getDirective(name) {
var directiveName = 'md' + name;
var eventName = '$md.' + name.toLowerCase();
DirectiveFactory.$inject = ["$parse"];
return DirectiveFactory;
/* ngInject */
function DirectiveFactory($parse) {
return { restrict: 'A', link: postLink };
function postLink(scope, element, attr) {
var fn = $parse(attr[directiveName]);
element.on(eventName, function(ev) {
scope.$apply(function() { fn(scope, { $event: ev }); });
});
}
}
}
ng.material.components.swipe = angular.module("material.components.swipe");<|fim▁end|> | * |
<|file_name|>regions-fn-subtyping.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Issue #2263.
// Should pass region checking.
fn ok(f: @fn(x: &uint)) {
// Here, g is a function that can accept a uint pointer with
// lifetime r, and f is a function that can accept a uint pointer
// with any lifetime. The assignment g = f should be OK (i.e.,
// f's type should be a subtype of g's type), because f can be
// used in any context that expects g's type. But this currently
// fails.
let mut g: @fn<'r>(y: &'r uint) = |x| { };
g = f;
}
// This version is the same as above, except that here, g's type is
// inferred.
fn ok_inferred(f: @fn(x: &uint)) {
let mut g: @fn<'r>(x: &'r uint) = |_| {};
g = f;
}
pub fn main() {
}<|fim▁end|> | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT |
<|file_name|>Hadoop1Compat.java<|end_file_name|><|fim▁begin|>package com.thinkaurelius.titan.hadoop.compat.h1;
import com.thinkaurelius.titan.graphdb.configuration.TitanConstants;
import com.thinkaurelius.titan.hadoop.config.job.JobClasspathConfigurer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import com.thinkaurelius.titan.hadoop.HadoopGraph;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompat;
import com.thinkaurelius.titan.hadoop.compat.HadoopCompiler;
public class Hadoop1Compat implements HadoopCompat {
static final String CFG_SPECULATIVE_MAPS = "mapred.map.tasks.speculative.execution";
static final String CFG_SPECULATIVE_REDUCES = "mapred.reduce.tasks.speculative.execution";
static final String CFG_JOB_JAR = "mapred.jar";
@Override
public HadoopCompiler newCompiler(HadoopGraph g) {
return new Hadoop1Compiler(g);
}
@Override
public TaskAttemptContext newTask(Configuration c, TaskAttemptID t) {
return new TaskAttemptContext(c, t);
}
@Override
public String getSpeculativeMapConfigKey() {
return CFG_SPECULATIVE_MAPS;
}
@Override
public String getSpeculativeReduceConfigKey() {
return CFG_SPECULATIVE_REDUCES;
}
@Override
public String getMapredJarConfigKey() {
return CFG_JOB_JAR;
}<|fim▁hole|> }
@Override
public Configuration getContextConfiguration(TaskAttemptContext context) {
return context.getConfiguration();
}
@Override
public long getCounter(MapReduceDriver counters, Enum<?> e) {
return counters.getCounters().findCounter(e).getValue();
}
@Override
public JobClasspathConfigurer newMapredJarConfigurer(String mapredJarPath) {
return new MapredJarConfigurer(mapredJarPath);
}
@Override
public JobClasspathConfigurer newDistCacheConfigurer() {
return new DistCacheConfigurer("titan-hadoop-core-" + TitanConstants.VERSION + ".jar");
}
@Override
public Configuration getJobContextConfiguration(JobContext context) {
return context.getConfiguration();
}
@Override
public Configuration newImmutableConfiguration(Configuration base) {
return new ImmutableConfiguration(base);
}
}<|fim▁end|> |
@Override
public void incrementContextCounter(TaskInputOutputContext context, Enum<?> counter, long incr) {
context.getCounter(counter).increment(incr); |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for berth project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "berth.settings")
from django.core.wsgi import get_wsgi_application<|fim▁hole|><|fim▁end|> | application = get_wsgi_application() |
<|file_name|>comment_eater.py<|end_file_name|><|fim▁begin|>from __future__ import division, absolute_import, print_function
import sys
if sys.version_info[0] >= 3:
    from io import StringIO
else:
    from StringIO import StringIO
import compiler
import inspect
import textwrap
import tokenize
from .compiler_unparse import unparse
class Comment(object):
""" A comment block.
"""
is_comment = True
def __init__(self, start_lineno, end_lineno, text):
# int : The first line number in the block. 1-indexed.
self.start_lineno = start_lineno
# int : The last line number. Inclusive!
self.end_lineno = end_lineno
# str : The text block including '#' character but not any leading spaces.
self.text = text
def add(self, string, start, end, line):
""" Add a new comment line.
"""
self.start_lineno = min(self.start_lineno, start[0])
self.end_lineno = max(self.end_lineno, end[0])
self.text += string
def __repr__(self):
return '%s(%r, %r, %r)' % (self.__class__.__name__, self.start_lineno,
self.end_lineno, self.text)
class NonComment(object):
""" A non-comment block of code.
"""
is_comment = False
def __init__(self, start_lineno, end_lineno):
self.start_lineno = start_lineno
self.end_lineno = end_lineno
def add(self, string, start, end, line):
""" Add lines to the block.
"""
if string.strip():
# Only add if not entirely whitespace.
self.start_lineno = min(self.start_lineno, start[0])
self.end_lineno = max(self.end_lineno, end[0])
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.start_lineno,
self.end_lineno)
class CommentBlocker(object):
""" Pull out contiguous comment blocks.
"""
def __init__(self):
# Start with a dummy.
self.current_block = NonComment(0, 0)
# All of the blocks seen so far.
self.blocks = []
# The index mapping lines of code to their associated comment blocks.
self.index = {}
def process_file(self, file):
""" Process a file object.
"""
if sys.version_info[0] >= 3:
nxt = file.__next__
else:<|fim▁hole|>
def process_token(self, kind, string, start, end, line):
""" Process a single token.
"""
if self.current_block.is_comment:
if kind == tokenize.COMMENT:
self.current_block.add(string, start, end, line)
else:
self.new_noncomment(start[0], end[0])
else:
if kind == tokenize.COMMENT:
self.new_comment(string, start, end, line)
else:
self.current_block.add(string, start, end, line)
def new_noncomment(self, start_lineno, end_lineno):
""" We are transitioning from a noncomment to a comment.
"""
block = NonComment(start_lineno, end_lineno)
self.blocks.append(block)
self.current_block = block
def new_comment(self, string, start, end, line):
""" Possibly add a new comment.
Only adds a new comment if this comment is the only thing on the line.
Otherwise, it extends the noncomment block.
"""
prefix = line[:start[1]]
if prefix.strip():
# Oops! Trailing comment, not a comment block.
self.current_block.add(string, start, end, line)
else:
# A comment block.
block = Comment(start[0], end[0], string)
self.blocks.append(block)
self.current_block = block
def make_index(self):
""" Make the index mapping lines of actual code to their associated
prefix comments.
"""
for prev, block in zip(self.blocks[:-1], self.blocks[1:]):
if not block.is_comment:
self.index[block.start_lineno] = prev
def search_for_comment(self, lineno, default=None):
""" Find the comment block just before the given line number.
Returns None (or the specified default) if there is no such block.
"""
if not self.index:
self.make_index()
block = self.index.get(lineno, None)
text = getattr(block, 'text', default)
return text
def strip_comment_marker(text):
""" Strip # markers at the front of a block of comment text.
"""
lines = []
for line in text.splitlines():
lines.append(line.lstrip('#'))
text = textwrap.dedent('\n'.join(lines))
return text
def get_class_traits(klass):
""" Yield all of the documentation for trait definitions on a class object.
"""
# FIXME: gracefully handle errors here or in the caller?
source = inspect.getsource(klass)
cb = CommentBlocker()
cb.process_file(StringIO(source))
mod_ast = compiler.parse(source)
class_ast = mod_ast.node.nodes[0]
for node in class_ast.code.nodes:
# FIXME: handle other kinds of assignments?
if isinstance(node, compiler.ast.Assign):
name = node.nodes[0].name
rhs = unparse(node.expr).strip()
doc = strip_comment_marker(cb.search_for_comment(node.lineno, default=''))
yield name, rhs, doc<|fim▁end|> | nxt = file.next
for token in tokenize.generate_tokens(nxt):
self.process_token(*token)
self.make_index() |
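# A minimal sketch of the comment-stripping helper (illustrative):
#
#   >>> strip_comment_marker('#  Width of the widget.\n#  In pixels.')
#   'Width of the widget.\nIn pixels.'
#
# CommentBlocker.process_file() associates each comment block with the code
# that follows it, which can then be queried via search_for_comment().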
<|file_name|>MSRadioBox.C<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 1997-2008 Morgan Stanley All rights reserved.
// See .../src/LICENSE for terms of distribution<|fim▁hole|>
#include <MSGUI/MSRadioBox.H>
MSRadioBox::MSRadioBox(MSWidget *owner_,const char *title_) :
MSActionBox(owner_,title_)
{ _activeButton=0; }
MSRadioBox::MSRadioBox(MSWidget *owner_,const MSStringVector& title_) :
MSActionBox(owner_,title_)
{ _activeButton=0; }
MSRadioBox::~MSRadioBox(void) {}
const MSSymbol& MSRadioBox::symbol(void)
{
static MSSymbol sym ("MSRadioBox");
return sym;
}
const MSSymbol& MSRadioBox::widgetType(void) const
{ return symbol(); }
void MSRadioBox::arm(MSRadioButton *radioButton_)
{
disarm();
_activeButton=radioButton_;
if (activeButton()!=0) activeButton()->state(MSTrue);
}
void MSRadioBox::disarm(void)
{
if (activeButton()!=0) activeButton()->state(MSFalse);
_activeButton=0;
}
void MSRadioBox::firstMapNotify(void)
{
MSNodeItem *hp=childListHead();
MSNodeItem *np=hp;
MSLayoutEntry *entry;
MSRadioButton *radioButton;
unsigned count=0;
while ((np=np->next())!=hp)
{
entry=(MSLayoutEntry *)np->data();
radioButton=(MSRadioButton *)entry->widget();
if (radioButton->state()==MSTrue)
{
if (count==0) _activeButton=radioButton;
count++;
}
if (count>1) radioButton->state(MSFalse);
}
if (count==0&&(np=np->next())!=hp)
{
entry=(MSLayoutEntry *)np->data();
radioButton=(MSRadioButton *)entry->widget();
radioButton->state(MSTrue);
_activeButton=radioButton;
}
MSActionBox::firstMapNotify();
}
void MSRadioBox::activeButton(MSRadioButton *radioButton_, MSBoolean callback_)
{
radioButton_->arm(callback_);
}<|fim▁end|> | //
//
/////////////////////////////////////////////////////////////////////////////// |
<|file_name|>schedule_topic_messages_and_cancellation.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
"""
Example to show scheduling messages to and cancelling messages from a Service Bus topic.
"""
import os
import datetime
from azure.servicebus import ServiceBusClient, ServiceBusMessage
CONNECTION_STR = os.environ["SERVICE_BUS_CONNECTION_STR"]
TOPIC_NAME = os.environ["SERVICE_BUS_TOPIC_NAME"]
def schedule_single_message(sender):
message = ServiceBusMessage("Message to be scheduled")
scheduled_time_utc = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
sequence_number = sender.schedule_messages(message, scheduled_time_utc)<|fim▁hole|>
def schedule_multiple_messages(sender):
messages_to_schedule = []
for _ in range(10):
messages_to_schedule.append(ServiceBusMessage("Message to be scheduled"))
scheduled_time_utc = datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
sequence_numbers = sender.schedule_messages(
messages_to_schedule, scheduled_time_utc
)
return sequence_numbers
def main():
servicebus_client = ServiceBusClient.from_connection_string(
conn_str=CONNECTION_STR, logging_enable=True
)
with servicebus_client:
sender = servicebus_client.get_topic_sender(topic_name=TOPIC_NAME)
with sender:
sequence_number = schedule_single_message(sender)
print(
"Single message is scheduled and sequence number is {}".format(
sequence_number
)
)
sequence_numbers = schedule_multiple_messages(sender)
print(
"Multiple messages are scheduled and sequence numbers are {}".format(
sequence_numbers
)
)
sender.cancel_scheduled_messages(sequence_number)
sender.cancel_scheduled_messages(sequence_numbers)
print("All scheduled messages are cancelled.")
if __name__ == "__main__":
main()<|fim▁end|> | return sequence_number
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
setup(name='netdisco',<|fim▁hole|> description='Discover devices on your local network',
url='https://github.com/home-assistant/netdisco',
author='Paulus Schoutsen',
author_email='[email protected]',
license='Apache License 2.0',
install_requires=['netifaces>=0.10.0', 'requests>=2.0',
'zeroconf==0.17.6'],
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False)<|fim▁end|> | version='0.9.2', |
<|file_name|>taz_files.py<|end_file_name|><|fim▁begin|>import csv
import osgeo.ogr
from osgeo import ogr, osr
EPSG_LAT_LON = 4326
def read_tazs_from_csv(csv_zone_locs_fname):
taz_tuples = []
tfile = open(csv_zone_locs_fname, 'rb')
treader = csv.reader(tfile, delimiter=',', quotechar="'")
for ii, row in enumerate(treader):
if ii == 0: continue
else:
taz_tuple = (row[0], row[1], row[2])
taz_tuples.append(taz_tuple)
return taz_tuples
def read_tazs_from_shp(shp_zone_locs_fname):
taz_tuples = []
tazs_shp = osgeo.ogr.Open(shp_zone_locs_fname)
tazs_layer = tazs_shp.GetLayer(0)
src_srs = tazs_layer.GetSpatialRef()
target_srs = osr.SpatialReference()
target_srs.ImportFromEPSG(EPSG_LAT_LON)
transform_to_lat_lon = osr.CoordinateTransformation(src_srs,
target_srs)
for taz_feat in tazs_layer:
taz_id = taz_feat.GetField("N")
taz_geom = taz_feat.GetGeometryRef()
taz_geom.Transform(transform_to_lat_lon)
taz_lat = taz_geom.GetX()
taz_lon = taz_geom.GetY()
taz_tuples.append((taz_id, taz_lat, taz_lon))
taz_feat.Destroy()
tazs_shp.Destroy()<|fim▁hole|><|fim▁end|> | return taz_tuples |
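# A minimal usage sketch (illustrative; 'zones.csv' is a hypothetical file
# whose rows hold a zone id followed by two coordinate columns, after a
# single header line that is skipped):
#
#   if __name__ == '__main__':
#       for taz in read_tazs_from_csv('zones.csv'):
#           print taz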
<|file_name|>set_serviceaccount.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package set
import (
"errors"
"fmt"
"github.com/spf13/cobra"
v1 "k8s.io/api/core/v1"
"k8s.io/klog/v2"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/cli-runtime/pkg/printers"
"k8s.io/cli-runtime/pkg/resource"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"k8s.io/kubectl/pkg/polymorphichelpers"
"k8s.io/kubectl/pkg/scheme"
"k8s.io/kubectl/pkg/util/i18n"
"k8s.io/kubectl/pkg/util/templates"<|fim▁hole|> serviceaccountResources = i18n.T(`replicationcontroller (rc), deployment (deploy), daemonset (ds), job, replicaset (rs), statefulset`)
serviceaccountLong = templates.LongDesc(i18n.T(`
Update the service account of pod template resources.
Possible resources (case insensitive) can be:
`) + serviceaccountResources)
serviceaccountExample = templates.Examples(i18n.T(`
# Set deployment nginx-deployment's service account to serviceaccount1
kubectl set serviceaccount deployment nginx-deployment serviceaccount1
# Print the result (in YAML format) of updated nginx deployment with the service account from local file, without hitting the API server
kubectl set sa -f nginx-deployment.yaml serviceaccount1 --local --dry-run=client -o yaml
`))
)
// SetServiceAccountOptions encapsulates the data required to perform the operation.
type SetServiceAccountOptions struct {
PrintFlags *genericclioptions.PrintFlags
RecordFlags *genericclioptions.RecordFlags
fileNameOptions resource.FilenameOptions
dryRunStrategy cmdutil.DryRunStrategy
dryRunVerifier *resource.DryRunVerifier
shortOutput bool
all bool
output string
local bool
updatePodSpecForObject polymorphichelpers.UpdatePodSpecForObjectFunc
infos []*resource.Info
serviceAccountName string
fieldManager string
PrintObj printers.ResourcePrinterFunc
Recorder genericclioptions.Recorder
genericclioptions.IOStreams
}
// NewSetServiceAccountOptions returns an initialized SetServiceAccountOptions instance
func NewSetServiceAccountOptions(streams genericclioptions.IOStreams) *SetServiceAccountOptions {
return &SetServiceAccountOptions{
PrintFlags: genericclioptions.NewPrintFlags("serviceaccount updated").WithTypeSetter(scheme.Scheme),
RecordFlags: genericclioptions.NewRecordFlags(),
Recorder: genericclioptions.NoopRecorder{},
IOStreams: streams,
}
}
// NewCmdServiceAccount returns the "set serviceaccount" command.
func NewCmdServiceAccount(f cmdutil.Factory, streams genericclioptions.IOStreams) *cobra.Command {
o := NewSetServiceAccountOptions(streams)
cmd := &cobra.Command{
Use: "serviceaccount (-f FILENAME | TYPE NAME) SERVICE_ACCOUNT",
DisableFlagsInUseLine: true,
Aliases: []string{"sa"},
Short: i18n.T("Update the service account of a resource"),
Long: serviceaccountLong,
Example: serviceaccountExample,
Run: func(cmd *cobra.Command, args []string) {
cmdutil.CheckErr(o.Complete(f, cmd, args))
cmdutil.CheckErr(o.Run())
},
}
o.PrintFlags.AddFlags(cmd)
o.RecordFlags.AddFlags(cmd)
usage := "identifying the resource to get from a server."
cmdutil.AddFilenameOptionFlags(cmd, &o.fileNameOptions, usage)
cmd.Flags().BoolVar(&o.all, "all", o.all, "Select all resources, in the namespace of the specified resource types")
cmd.Flags().BoolVar(&o.local, "local", o.local, "If true, set serviceaccount will NOT contact api-server but run locally.")
cmdutil.AddDryRunFlag(cmd)
cmdutil.AddFieldManagerFlagVar(cmd, &o.fieldManager, "kubectl-set")
return cmd
}
// Complete configures serviceAccountConfig from command line args.
func (o *SetServiceAccountOptions) Complete(f cmdutil.Factory, cmd *cobra.Command, args []string) error {
var err error
o.RecordFlags.Complete(cmd)
o.Recorder, err = o.RecordFlags.ToRecorder()
if err != nil {
return err
}
o.shortOutput = cmdutil.GetFlagString(cmd, "output") == "name"
o.dryRunStrategy, err = cmdutil.GetDryRunStrategy(cmd)
if err != nil {
return err
}
if o.local && o.dryRunStrategy == cmdutil.DryRunServer {
return fmt.Errorf("cannot specify --local and --dry-run=server - did you mean --dry-run=client?")
}
dynamicClient, err := f.DynamicClient()
if err != nil {
return err
}
o.dryRunVerifier = resource.NewDryRunVerifier(dynamicClient, f.OpenAPIGetter())
o.output = cmdutil.GetFlagString(cmd, "output")
o.updatePodSpecForObject = polymorphichelpers.UpdatePodSpecForObjectFn
cmdutil.PrintFlagsWithDryRunStrategy(o.PrintFlags, o.dryRunStrategy)
printer, err := o.PrintFlags.ToPrinter()
if err != nil {
return err
}
o.PrintObj = printer.PrintObj
cmdNamespace, enforceNamespace, err := f.ToRawKubeConfigLoader().Namespace()
if err != nil {
return err
}
if len(args) == 0 {
return errors.New("serviceaccount is required")
}
o.serviceAccountName = args[len(args)-1]
resources := args[:len(args)-1]
builder := f.NewBuilder().
WithScheme(scheme.Scheme, scheme.Scheme.PrioritizedVersionsAllGroups()...).
LocalParam(o.local).
ContinueOnError().
NamespaceParam(cmdNamespace).DefaultNamespace().
FilenameParam(enforceNamespace, &o.fileNameOptions).
Flatten()
if !o.local {
builder.ResourceTypeOrNameArgs(o.all, resources...).
Latest()
}
o.infos, err = builder.Do().Infos()
if err != nil {
return err
}
return nil
}
// Run creates and applies the patch either locally or calling apiserver.
func (o *SetServiceAccountOptions) Run() error {
patchErrs := []error{}
patchFn := func(obj runtime.Object) ([]byte, error) {
_, err := o.updatePodSpecForObject(obj, func(podSpec *v1.PodSpec) error {
podSpec.ServiceAccountName = o.serviceAccountName
return nil
})
if err != nil {
return nil, err
}
// record this change (for rollout history)
if err := o.Recorder.Record(obj); err != nil {
klog.V(4).Infof("error recording current command: %v", err)
}
return runtime.Encode(scheme.DefaultJSONEncoder(), obj)
}
patches := CalculatePatches(o.infos, scheme.DefaultJSONEncoder(), patchFn)
for _, patch := range patches {
info := patch.Info
name := info.ObjectName()
if patch.Err != nil {
patchErrs = append(patchErrs, fmt.Errorf("error: %s %v\n", name, patch.Err))
continue
}
if o.local || o.dryRunStrategy == cmdutil.DryRunClient {
if err := o.PrintObj(info.Object, o.Out); err != nil {
patchErrs = append(patchErrs, err)
}
continue
}
if o.dryRunStrategy == cmdutil.DryRunServer {
if err := o.dryRunVerifier.HasSupport(info.Mapping.GroupVersionKind); err != nil {
patchErrs = append(patchErrs, err)
continue
}
}
actual, err := resource.
NewHelper(info.Client, info.Mapping).
DryRun(o.dryRunStrategy == cmdutil.DryRunServer).
WithFieldManager(o.fieldManager).
Patch(info.Namespace, info.Name, types.StrategicMergePatchType, patch.Patch, nil)
if err != nil {
patchErrs = append(patchErrs, fmt.Errorf("failed to patch ServiceAccountName %v", err))
continue
}
if err := o.PrintObj(actual, o.Out); err != nil {
patchErrs = append(patchErrs, err)
}
}
return utilerrors.NewAggregate(patchErrs)
}<|fim▁end|> | )
var ( |
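The heart of the record above is `patchFn`: mutate the pod template's service account in place, re-encode the object, and let `CalculatePatches` diff it against the original. A minimal standalone sketch of that mutation, assuming only `k8s.io/api`; `setServiceAccount` is a made-up helper name, the real command routes through `polymorphichelpers`:

```go
package main

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
)

// setServiceAccount is a hypothetical helper mirroring what patchFn does:
// set the pod spec's service account in place, then re-encode and diff.
func setServiceAccount(spec *v1.PodSpec, name string) {
	spec.ServiceAccountName = name
}

func main() {
	spec := &v1.PodSpec{}
	setServiceAccount(spec, "serviceaccount1")
	fmt.Println(spec.ServiceAccountName) // prints: serviceaccount1
}
```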
<|file_name|>tasks.server.controller.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller'),
Task = mongoose.model('Task'),
Project = mongoose.model('Project'),
Person = mongoose.model('Person'),
_ = require('lodash');
/**
* Create a Task
*/
exports.createTask = function(req, res) {
var task = new Task(req.body);
task.user = req.user;
Person.findById(req.body.personId).exec(function(err, person) {
if (err || !person) {
return res.status(400).send({
message: err ? errorHandler.getErrorMessage(err) : 'Person not found'
});
}
Project.findById(req.body.projectId).exec(function(err, project) {
if (err || !project) {
return res.status(400).send({
message: err ? errorHandler.getErrorMessage(err) : 'Project not found'
});
}
task.projectName = project.name;
task.personName = person.name;
task.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
person.tasks.push(task);
person.save();
project.tasks.push(task);
project.save();
res.jsonp(task);
}
});
});
});
};
/**
* Show the current Task
*/
exports.readTask = function(req, res) {
res.jsonp(req.task);
};
/**
* Update a Task
*/
exports.updateTask = function(req, res) {
var task = req.task;
task = _.extend(task, req.body);
task.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(task);
}
});
};
/**
* Delete an Task
*/
exports.deleteTask = function(req, res) {
var task = req.task;
Project.findById(req.task.project).exec(function(err, project) {
if (project && project.tasks) {
var i = project.tasks.indexOf(task._id);
project.tasks.splice(i, 1);
project.save();
}
});
Person.findById(req.task.person).exec(function(err, person) {
if (person && person.tasks) {
var i = person.tasks.indexOf(task._id);
person.tasks.splice(i, 1);
person.save();
}
});
task.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)<|fim▁hole|> });
} else {
res.jsonp(task);
}
});
};
/**
* List of Tasks
*/
exports.listTasks = function(req, res) {
Task.find({'user':req.user._id}).sort('-created').populate('person', 'name').populate('project', 'name').exec(function(err, tasks) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(tasks);
}
});
};
/**
* Task middleware
*/
exports.taskByID = function(req, res, next, id) {
Task.findById(id).populate('user', 'username').exec(function(err, task) {
if (err) return next(err);
if (!task) return next(new Error('Failed to load Task ' + id));
req.task = task;
next();
});
};
/**
* Task authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.task.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};<|fim▁end|> | |
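For context, a minimal route wiring sketch for the controller above, assuming an Express app in the usual MEAN.js layout; the paths and the require path are illustrative, not taken from this repository:

```js
// Hypothetical routes file wiring the task controller (paths are illustrative).
module.exports = function(app) {
	var tasks = require('../controllers/tasks.server.controller');

	app.route('/tasks')
		.get(tasks.listTasks)
		.post(tasks.createTask);

	app.route('/tasks/:taskId')
		.get(tasks.readTask)
		.put(tasks.hasAuthorization, tasks.updateTask)
		.delete(tasks.hasAuthorization, tasks.deleteTask);

	// Resolve :taskId into req.task before the handlers above run.
	app.param('taskId', tasks.taskByID);
};
```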
<|file_name|>hpmv.hpp<|end_file_name|><|fim▁begin|>//
// Copyright (c) 2003--2009
// Toon Knapen, Karl Meerbergen, Kresimir Fresl,
// Thomas Klimpel and Rutger ter Borg
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// THIS FILE IS AUTOMATICALLY GENERATED
// PLEASE DO NOT EDIT!
//
#ifndef BOOST_NUMERIC_BINDINGS_BLAS_LEVEL2_HPMV_HPP
#define BOOST_NUMERIC_BINDINGS_BLAS_LEVEL2_HPMV_HPP
#include <boost/mpl/bool.hpp>
#include <boost/numeric/bindings/blas/detail/blas.h>
#include <boost/numeric/bindings/traits/traits.hpp>
#include <boost/numeric/bindings/traits/type_traits.hpp>
#include <boost/static_assert.hpp>
#include <boost/type_traits/is_same.hpp>
namespace boost {
namespace numeric {
namespace bindings {
namespace blas {
namespace level2 {
// overloaded functions to call blas
namespace detail {
inline void hpmv( char const uplo, integer_t const n,
traits::complex_f const alpha, traits::complex_f* ap,
traits::complex_f* x, integer_t const incx,
traits::complex_f const beta, traits::complex_f* y,
integer_t const incy ) {
BLAS_CHPMV( &uplo, &n, traits::complex_ptr(&alpha),
traits::complex_ptr(ap), traits::complex_ptr(x), &incx,
traits::complex_ptr(&beta), traits::complex_ptr(y), &incy );
}
inline void hpmv( char const uplo, integer_t const n,
traits::complex_d const alpha, traits::complex_d* ap,
traits::complex_d* x, integer_t const incx,
traits::complex_d const beta, traits::complex_d* y,
integer_t const incy ) {
BLAS_ZHPMV( &uplo, &n, traits::complex_ptr(&alpha),
traits::complex_ptr(ap), traits::complex_ptr(x), &incx,
traits::complex_ptr(&beta), traits::complex_ptr(y), &incy );
}
}
// value-type based template
template< typename ValueType >
struct hpmv_impl {
typedef ValueType value_type;
typedef typename traits::type_traits<ValueType>::real_type real_type;
typedef void return_type;
// templated specialization
template< typename MatrixAP, typename VectorX, typename VectorY >
static return_type invoke( value_type const alpha, MatrixAP& ap,
VectorX& x, value_type const beta, VectorY& y ) {
BOOST_STATIC_ASSERT( (boost::is_same< typename traits::matrix_traits<
MatrixAP >::value_type, typename traits::vector_traits<
VectorX >::value_type >::value) );
BOOST_STATIC_ASSERT( (boost::is_same< typename traits::matrix_traits<
MatrixAP >::value_type, typename traits::vector_traits<
VectorY >::value_type >::value) );
detail::hpmv( traits::matrix_uplo_tag(ap),
traits::matrix_num_columns(ap), alpha,
traits::matrix_storage(ap), traits::vector_storage(x),
traits::vector_stride(x), beta, traits::vector_storage(y),
traits::vector_stride(y) );
}
};
// low-level template function for direct calls to level2::hpmv
template< typename MatrixAP, typename VectorX, typename VectorY ><|fim▁hole|>hpmv( typename traits::matrix_traits<
MatrixAP >::value_type const alpha, MatrixAP& ap, VectorX& x,
typename traits::matrix_traits< MatrixAP >::value_type const beta,
VectorY& y ) {
typedef typename traits::matrix_traits< MatrixAP >::value_type value_type;
hpmv_impl< value_type >::invoke( alpha, ap, x, beta, y );
}
}}}}} // namespace boost::numeric::bindings::blas::level2
#endif<|fim▁end|> | inline typename hpmv_impl< typename traits::matrix_traits<
MatrixAP >::value_type >::return_type |
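Calling the binding looks roughly like this with uBLAS containers. This is a sketch, not code from the library's tests, and it assumes the bindings' uBLAS traits headers are also available on the include path:

```cpp
// Sketch: y = alpha*A*x + beta*y for a packed Hermitian matrix A.
// Assumes boost/numeric/bindings/traits/ublas_hermitian.hpp and
// boost/numeric/bindings/traits/ublas_vector.hpp are included as well.
#include <complex>
#include <boost/numeric/ublas/hermitian.hpp>
#include <boost/numeric/ublas/vector.hpp>

void hpmv_example() {
    namespace ublas = boost::numeric::ublas;
    namespace blas2 = boost::numeric::bindings::blas::level2;
    typedef std::complex<double> dcomplex;

    // hermitian_matrix uses packed storage, which is what hpmv expects.
    ublas::hermitian_matrix<dcomplex, ublas::upper, ublas::column_major> a(4, 4);
    ublas::vector<dcomplex> x(4), y(4);
    blas2::hpmv(dcomplex(1, 0), a, x, dcomplex(0, 0), y);
}
```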
<|file_name|>test_driver.py<|end_file_name|><|fim▁begin|># Copyright 2016 Huawei Technologies India Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
from ryu.services.protocols.bgp import bgpspeaker
from ryu.services.protocols.bgp.rtconf.neighbors import CONNECT_MODE_ACTIVE
from neutron.services.bgp.agent import config as bgp_config
from neutron.services.bgp.driver import exceptions as bgp_driver_exc
from neutron.services.bgp.driver.ryu import driver as ryu_driver
from neutron.tests import base
# Test variables for BGP Speaker
FAKE_LOCAL_AS1 = 12345
FAKE_LOCAL_AS2 = 23456
FAKE_ROUTER_ID = '1.1.1.1'
# Test variables for BGP Peer
FAKE_PEER_AS = 45678
FAKE_PEER_IP = '2.2.2.5'
FAKE_AUTH_TYPE = 'md5'
FAKE_PEER_PASSWORD = 'awesome'
# Test variables for Route
FAKE_ROUTE = '2.2.2.0/24'
FAKE_NEXTHOP = '5.5.5.5'
class TestRyuBgpDriver(base.BaseTestCase):
def setUp(self):
super(TestRyuBgpDriver, self).setUp()
cfg.CONF.register_opts(bgp_config.BGP_PROTO_CONFIG_OPTS, 'BGP')
cfg.CONF.set_override('bgp_router_id', FAKE_ROUTER_ID, 'BGP')
self.ryu_bgp_driver = ryu_driver.RyuBgpDriver(cfg.CONF.BGP)
mock_ryu_speaker_p = mock.patch.object(bgpspeaker, 'BGPSpeaker')
self.mock_ryu_speaker = mock_ryu_speaker_p.start()
def test_add_new_bgp_speaker(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.mock_ryu_speaker.assert_called_once_with(
as_number=FAKE_LOCAL_AS1, router_id=FAKE_ROUTER_ID,
bgp_server_port=0,
best_path_change_handler=ryu_driver.best_path_change_cb,
peer_down_handler=ryu_driver.bgp_peer_down_cb,
peer_up_handler=ryu_driver.bgp_peer_up_cb)
def test_remove_bgp_speaker(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
self.ryu_bgp_driver.delete_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(0,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.assertEqual(1, speaker.shutdown.call_count)
def test_add_bgp_peer_without_password(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.add_bgp_peer(FAKE_LOCAL_AS1,
FAKE_PEER_IP,
FAKE_PEER_AS)
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
speaker.neighbor_add.assert_called_once_with(
address=FAKE_PEER_IP,
remote_as=FAKE_PEER_AS,
password=None,
connect_mode=CONNECT_MODE_ACTIVE)
def test_add_bgp_peer_with_password(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.add_bgp_peer(FAKE_LOCAL_AS1,
FAKE_PEER_IP,
FAKE_PEER_AS,
FAKE_AUTH_TYPE,
FAKE_PEER_PASSWORD)
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
speaker.neighbor_add.assert_called_once_with(
address=FAKE_PEER_IP,
remote_as=FAKE_PEER_AS,
password=FAKE_PEER_PASSWORD,
connect_mode=CONNECT_MODE_ACTIVE)
def test_remove_bgp_peer(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.delete_bgp_peer(FAKE_LOCAL_AS1, FAKE_PEER_IP)
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
speaker.neighbor_del.assert_called_once_with(address=FAKE_PEER_IP)
def test_advertise_route(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.advertise_route(FAKE_LOCAL_AS1,
FAKE_ROUTE,
FAKE_NEXTHOP)
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
speaker.prefix_add.assert_called_once_with(prefix=FAKE_ROUTE,
next_hop=FAKE_NEXTHOP)
def test_withdraw_route(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.withdraw_route(FAKE_LOCAL_AS1, FAKE_ROUTE)
speaker = self.ryu_bgp_driver.cache.get_bgp_speaker(FAKE_LOCAL_AS1)
speaker.prefix_del.assert_called_once_with(prefix=FAKE_ROUTE)
def test_add_same_bgp_speakers_twice(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.BgpSpeakerAlreadyScheduled,
self.ryu_bgp_driver.add_bgp_speaker, FAKE_LOCAL_AS1)
def test_add_different_bgp_speakers_when_one_already_added(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.BgpSpeakerMaxScheduled,
self.ryu_bgp_driver.add_bgp_speaker,
FAKE_LOCAL_AS2)
def test_add_bgp_speaker_with_invalid_asnum_paramtype(self):
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.add_bgp_speaker, '12345')
def test_add_bgp_speaker_with_invalid_asnum_range(self):
self.assertRaises(bgp_driver_exc.InvalidParamRange,
self.ryu_bgp_driver.add_bgp_speaker, -1)
self.assertRaises(bgp_driver_exc.InvalidParamRange,
self.ryu_bgp_driver.add_bgp_speaker, 65536)
def test_add_bgp_peer_with_invalid_paramtype(self):
# Test with an invalid asnum data-type
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, '12345')
# Test with an invalid auth-type and an invalid password
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
'sha-1', 1234)
# Test with an invalid auth-type and a valid password
self.assertRaises(bgp_driver_exc.InvaildAuthType,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
'hmac-md5', FAKE_PEER_PASSWORD)
# Test with none auth-type and a valid password
self.assertRaises(bgp_driver_exc.InvaildAuthType,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
'none', FAKE_PEER_PASSWORD)
# Test with none auth-type and an invalid password
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,<|fim▁hole|> self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
FAKE_AUTH_TYPE, None)
def test_add_bgp_peer_with_invalid_asnum_range(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.InvalidParamRange,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, -1)
self.assertRaises(bgp_driver_exc.InvalidParamRange,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, 65536)
def test_add_bgp_peer_without_adding_speaker(self):
self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
self.ryu_bgp_driver.add_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS)
def test_remove_bgp_peer_with_invalid_paramtype(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.delete_bgp_peer,
FAKE_LOCAL_AS1, 12345)
def test_remove_bgp_peer_without_adding_speaker(self):
self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
self.ryu_bgp_driver.delete_bgp_peer,
FAKE_LOCAL_AS1, FAKE_PEER_IP)
def test_advertise_route_with_invalid_paramtype(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.advertise_route,
FAKE_LOCAL_AS1, 12345, FAKE_NEXTHOP)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.advertise_route,
FAKE_LOCAL_AS1, FAKE_ROUTE, 12345)
def test_advertise_route_without_adding_speaker(self):
self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
self.ryu_bgp_driver.advertise_route,
FAKE_LOCAL_AS1, FAKE_ROUTE, FAKE_NEXTHOP)
def test_withdraw_route_with_invalid_paramtype(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.withdraw_route,
FAKE_LOCAL_AS1, 12345)
self.assertRaises(bgp_driver_exc.InvalidParamType,
self.ryu_bgp_driver.withdraw_route,
FAKE_LOCAL_AS1, 12345)
def test_withdraw_route_without_adding_speaker(self):
self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
self.ryu_bgp_driver.withdraw_route,
FAKE_LOCAL_AS1, FAKE_ROUTE)
def test_add_multiple_bgp_speakers(self):
self.ryu_bgp_driver.add_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.assertRaises(bgp_driver_exc.BgpSpeakerMaxScheduled,
self.ryu_bgp_driver.add_bgp_speaker,
FAKE_LOCAL_AS2)
self.assertRaises(bgp_driver_exc.BgpSpeakerNotAdded,
self.ryu_bgp_driver.delete_bgp_speaker,
FAKE_LOCAL_AS2)
self.assertEqual(1,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())
self.ryu_bgp_driver.delete_bgp_speaker(FAKE_LOCAL_AS1)
self.assertEqual(0,
self.ryu_bgp_driver.cache.get_hosted_bgp_speakers_count())<|fim▁end|> | 'none', 1234)
# Test with a valid auth-type and no password
self.assertRaises(bgp_driver_exc.PasswordNotSpecified, |
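For orientation, the happy-path call sequence these tests exercise against the driver, condensed with the same fixture values. This is a sketch, not a runnable deployment: the driver expects the agent's oslo.config setup from setUp above.

```python
driver = ryu_driver.RyuBgpDriver(cfg.CONF.BGP)
driver.add_bgp_speaker(FAKE_LOCAL_AS1)
driver.add_bgp_peer(FAKE_LOCAL_AS1, FAKE_PEER_IP, FAKE_PEER_AS,
                    FAKE_AUTH_TYPE, FAKE_PEER_PASSWORD)
driver.advertise_route(FAKE_LOCAL_AS1, FAKE_ROUTE, FAKE_NEXTHOP)
driver.withdraw_route(FAKE_LOCAL_AS1, FAKE_ROUTE)
driver.delete_bgp_peer(FAKE_LOCAL_AS1, FAKE_PEER_IP)
driver.delete_bgp_speaker(FAKE_LOCAL_AS1)
```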
<|file_name|>schema2manifesthandler_test.go<|end_file_name|><|fim▁begin|>package storage
import (
"regexp"
"testing"
"github.com/docker/distribution"
"github.com/docker/distribution/context"
"github.com/docker/distribution/manifest"
"github.com/docker/distribution/manifest/schema2"
"github.com/docker/distribution/registry/storage/driver/inmemory"
)
func TestVerifyManifestForeignLayer(t *testing.T) {
ctx := context.Background()
inmemoryDriver := inmemory.New()
registry := createRegistry(t, inmemoryDriver,
ManifestURLsAllowRegexp(regexp.MustCompile("^https?://foo")),
ManifestURLsDenyRegexp(regexp.MustCompile("^https?://foo/nope")))
repo := makeRepository(t, registry, "test")
manifestService := makeManifestService(t, repo)
config, err := repo.Blobs(ctx).Put(ctx, schema2.MediaTypeConfig, nil)
if err != nil {
t.Fatal(err)
}
layer, err := repo.Blobs(ctx).Put(ctx, schema2.MediaTypeLayer, nil)
if err != nil {
t.Fatal(err)
}
foreignLayer := distribution.Descriptor{
Digest: "sha256:463435349086340864309863409683460843608348608934092322395278926a",
Size: 6323,
MediaType: schema2.MediaTypeForeignLayer,
}
template := schema2.Manifest{
Versioned: manifest.Versioned{
SchemaVersion: 2,
MediaType: schema2.MediaTypeManifest,
},
Config: config,
}
type testcase struct {
BaseLayer distribution.Descriptor
URLs []string
Err error
}
cases := []testcase{
{
foreignLayer,
nil,
errMissingURL,
},
{
// regular layers may have foreign urls
layer,
[]string{"http://foo/bar"},
nil,
},
{
foreignLayer,
[]string{"file:///local/file"},
errInvalidURL,
},
{
foreignLayer,
[]string{"http://foo/bar#baz"},
errInvalidURL,
},
{
foreignLayer,
[]string{""},
errInvalidURL,
},
{
foreignLayer,
[]string{"https://foo/bar", ""},
errInvalidURL,
},
{
foreignLayer,
[]string{"", "https://foo/bar"},
errInvalidURL,
},
{
foreignLayer,
[]string{"http://nope/bar"},
errInvalidURL,
},
{
foreignLayer,
[]string{"http://foo/nope"},
errInvalidURL,
},
{
foreignLayer,
[]string{"http://foo/bar"},
nil,
},
{
foreignLayer,<|fim▁hole|> nil,
},
}
for _, c := range cases {
m := template
l := c.BaseLayer
l.URLs = c.URLs
m.Layers = []distribution.Descriptor{l}
dm, err := schema2.FromStruct(m)
if err != nil {
t.Error(err)
continue
}
_, err = manifestService.Put(ctx, dm)
if verr, ok := err.(distribution.ErrManifestVerification); ok {
// Extract the first error
if len(verr) == 2 {
if _, ok = verr[1].(distribution.ErrManifestBlobUnknown); ok {
err = verr[0]
}
}
}
if err != c.Err {
t.Errorf("%#v: expected %v, got %v", l, c.Err, err)
}
}
}<|fim▁end|> | []string{"https://foo/bar"}, |
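The table above reduces to a simple predicate: a foreign-layer URL must parse, use http or https, carry no fragment, match the allow regexp, and miss the deny regexp. A standalone sketch of that check follows; it is illustrative only, the registry's real validation lives in the schema2 manifest handler:

```go
// Standalone sketch of the URL rules the cases above encode.
package main

import (
	"net/url"
	"regexp"
)

func validForeignLayerURL(raw string, allow, deny *regexp.Regexp) bool {
	u, err := url.Parse(raw)
	if err != nil || u.Fragment != "" {
		return false // unparseable or fragment-bearing URLs are rejected
	}
	if u.Scheme != "http" && u.Scheme != "https" {
		return false // e.g. file:// URLs are rejected
	}
	return allow.MatchString(raw) && !deny.MatchString(raw)
}
```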
<|file_name|>deletenonespacelstrip.py<|end_file_name|><|fim▁begin|># 2014-12-18
# built by qianqians
# deletenonespacelstrip
def deleteNoneSpacelstrip(str):
while(str.lstrip('\n') is not str):str = str.lstrip('\n')
while(str.lstrip('\t') is not str):str = str.lstrip('\t')
while(str.lstrip('\0') is not str):str = str.lstrip('\0')
while(str.lstrip('\n') is not str):str = str.lstrip('\n')
while(str.lstrip('\t') is not str):str = str.lstrip('\t')
while(str.lstrip('\0') is not str):str = str.lstrip('\0')
while(str.lstrip('\n') is not str):str = str.lstrip('\n')
while(str.lstrip('\t') is not str):str = str.lstrip('\t')
while(str.lstrip('\0') is not str):str = str.lstrip('\0')
while(str.lstrip(' ') is not str):str = str.lstrip(' ')<|fim▁hole|><|fim▁end|> | while(str.lstrip(' ') is not str):str = str.lstrip(' ')
while(str.lstrip(' ') is not str):str = str.lstrip(' ')
return str |
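The loops above depend on CPython returning the original object when `lstrip` strips nothing, which is an implementation detail rather than a language guarantee. Since `str.lstrip` already removes every leading occurrence of any character in its argument, the whole routine collapses to one call; an equivalent sketch:

```python
def delete_none_space_lstrip(s):
    # str.lstrip strips all leading occurrences of any listed character,
    # so no loop (and no identity check) is needed.
    return s.lstrip('\n\t\0 ')
```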
<|file_name|>supportedRtpCapabilities.ts<|end_file_name|><|fim▁begin|>import { RtpCapabilities } from './RtpParameters';
const supportedRtpCapabilities: RtpCapabilities =
{
codecs :
[
{
kind : 'audio',
mimeType : 'audio/opus',
clockRate : 48000,
channels : 2,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/multiopus',
clockRate : 48000,
channels : 4,
// Quad channel.
parameters :
{
'channel_mapping' : '0,1,2,3',
'num_streams' : 2,
'coupled_streams' : 2
},
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/multiopus',
clockRate : 48000,
channels : 6,
// 5.1.
parameters :
{
'channel_mapping' : '0,4,1,2,3,5',
'num_streams' : 4,
'coupled_streams' : 2
},
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/multiopus',
clockRate : 48000,
channels : 8,
// 7.1.
parameters :
{
'channel_mapping' : '0,6,1,2,3,4,5,7',
'num_streams' : 5,
'coupled_streams' : 3
},
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/PCMU',
preferredPayloadType : 0,
clockRate : 8000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/PCMA',
preferredPayloadType : 8,
clockRate : 8000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/ISAC',
clockRate : 32000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/ISAC',
clockRate : 16000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/G722',
preferredPayloadType : 9,
clockRate : 8000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/iLBC',
clockRate : 8000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/SILK',
clockRate : 24000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/SILK',
clockRate : 16000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/SILK',
clockRate : 12000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/SILK',
clockRate : 8000,
rtcpFeedback :
[
{ type: 'transport-cc' }
]
},
{
kind : 'audio',
mimeType : 'audio/CN',
preferredPayloadType : 13,
clockRate : 32000
},
{
kind : 'audio',
mimeType : 'audio/CN',
preferredPayloadType : 13,
clockRate : 16000
},
{
kind : 'audio',
mimeType : 'audio/CN',
preferredPayloadType : 13,
clockRate : 8000
},
{
kind : 'audio',
mimeType : 'audio/telephone-event',
clockRate : 48000
},
{
kind : 'audio',
mimeType : 'audio/telephone-event',
clockRate : 32000
},
{
kind : 'audio',
mimeType : 'audio/telephone-event',
clockRate : 16000
},
{
kind : 'audio',
mimeType : 'audio/telephone-event',
clockRate : 8000
},
{
kind : 'video',
mimeType : 'video/VP8',
clockRate : 90000,
rtcpFeedback :
[
{ type: 'nack' },
{ type: 'nack', parameter: 'pli' },
{ type: 'ccm', parameter: 'fir' },
{ type: 'goog-remb' },
{ type: 'transport-cc' }
]
},
{
kind : 'video',
mimeType : 'video/VP9',
clockRate : 90000,
rtcpFeedback :
[
{ type: 'nack' },
{ type: 'nack', parameter: 'pli' },
{ type: 'ccm', parameter: 'fir' },
{ type: 'goog-remb' },
{ type: 'transport-cc' }
]
},
{
kind : 'video',
mimeType : 'video/H264',
clockRate : 90000,
parameters :
{
'level-asymmetry-allowed' : 1
},<|fim▁hole|> { type: 'nack', parameter: 'pli' },
{ type: 'ccm', parameter: 'fir' },
{ type: 'goog-remb' },
{ type: 'transport-cc' }
]
},
{
kind : 'video',
mimeType : 'video/H265',
clockRate : 90000,
parameters :
{
'level-asymmetry-allowed' : 1
},
rtcpFeedback :
[
{ type: 'nack' },
{ type: 'nack', parameter: 'pli' },
{ type: 'ccm', parameter: 'fir' },
{ type: 'goog-remb' },
{ type: 'transport-cc' }
]
}
],
headerExtensions :
[
{
kind : 'audio',
uri : 'urn:ietf:params:rtp-hdrext:sdes:mid',
preferredId : 1,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'urn:ietf:params:rtp-hdrext:sdes:mid',
preferredId : 1,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id',
preferredId : 2,
preferredEncrypt : false,
direction : 'recvonly'
},
{
kind : 'video',
uri : 'urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id',
preferredId : 3,
preferredEncrypt : false,
direction : 'recvonly'
},
{
kind : 'audio',
uri : 'http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time',
preferredId : 4,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time',
preferredId : 4,
preferredEncrypt : false,
direction : 'sendrecv'
},
// NOTE: For audio we just enable transport-wide-cc-01 when receiving media.
{
kind : 'audio',
uri : 'http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01',
preferredId : 5,
preferredEncrypt : false,
direction : 'recvonly'
},
{
kind : 'video',
uri : 'http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01',
preferredId : 5,
preferredEncrypt : false,
direction : 'sendrecv'
},
// NOTE: Remove this once framemarking draft becomes RFC.
{
kind : 'video',
uri : 'http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07',
preferredId : 6,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'urn:ietf:params:rtp-hdrext:framemarking',
preferredId : 7,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'audio',
uri : 'urn:ietf:params:rtp-hdrext:ssrc-audio-level',
preferredId : 10,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'urn:3gpp:video-orientation',
preferredId : 11,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'urn:ietf:params:rtp-hdrext:toffset',
preferredId : 12,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'video',
uri : 'http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time',
preferredId : 13,
preferredEncrypt : false,
direction : 'sendrecv'
},
{
kind : 'audio',
uri : 'http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time',
preferredId : 13,
preferredEncrypt : false,
direction : 'sendrecv'
}
]
};
export { supportedRtpCapabilities };<|fim▁end|> | rtcpFeedback :
[
{ type: 'nack' }, |
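A consumer-side sketch of querying the static table above; `codecs` and `rtcpFeedback` are assumed optional on the capability types, hence the fallbacks:

```ts
import { supportedRtpCapabilities } from './supportedRtpCapabilities';

// Pick the supported codecs for one media kind.
const videoCodecs = (supportedRtpCapabilities.codecs ?? [])
	.filter((codec) => codec.kind === 'video');

// Check whether any of them advertises NACK/PLI feedback.
const supportsNackPli = videoCodecs.some((codec) =>
	(codec.rtcpFeedback ?? []).some(
		(fb) => fb.type === 'nack' && fb.parameter === 'pli'));

console.log(videoCodecs.map((c) => c.mimeType), supportsNackPli);
```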
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate openvr_sys;
#[macro_use]
extern crate lazy_static;
use std::cell::Cell;
use std::ffi::{CStr, CString};
use std::sync::atomic::{AtomicBool, Ordering};
use std::{error, fmt, mem, ptr};
use openvr_sys as sys;
mod tracking;
pub mod chaperone;
pub mod compositor;
pub mod property;
pub mod render_models;
pub mod system;
pub use tracking::*;
pub use sys::VkDevice_T;
pub use sys::VkInstance_T;
pub use sys::VkPhysicalDevice_T;
pub use sys::VkQueue_T;
static INITIALIZED: AtomicBool = AtomicBool::new(false);
/// Initialize OpenVR
///
/// # Panics
///
/// When the library has already been initialized
///
/// # Safety
///
/// The `Context` MUST be dropped or shut down with `Context::shutdown` before shutting down the graphics API. No OpenVR
/// calls may be made on object derived from a `Context` after the `Context` has been dropped or explicitly shut down.
pub unsafe fn init(ty: ApplicationType) -> Result<Context, InitError> {
if INITIALIZED.swap(true, Ordering::Acquire) {
panic!("OpenVR has already been initialized!");
}
let mut error = sys::EVRInitError_VRInitError_None;
sys::VR_InitInternal(&mut error, ty as sys::EVRApplicationType);
if error != sys::EVRInitError_VRInitError_None {
return Err(InitError(error));
}
if !sys::VR_IsInterfaceVersionValid(sys::IVRSystem_Version.as_ptr() as *const i8) {
sys::VR_ShutdownInternal();
return Err(InitError(
sys::EVRInitError_VRInitError_Init_InterfaceNotFound,
));
}
Ok(Context { live: AtomicBool::new(true) })
}
pub struct System(&'static sys::VR_IVRSystem_FnTable);
pub struct Compositor(&'static sys::VR_IVRCompositor_FnTable);
pub struct RenderModels(&'static sys::VR_IVRRenderModels_FnTable);
pub struct Chaperone(&'static sys::VR_IVRChaperone_FnTable);
/// Entry points into OpenVR.
///
/// At most one of this object may exist at a time.
///
/// See safety notes in `init`.
pub struct Context { live: AtomicBool }
fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {
let mut magic = Vec::from(b"FnTable:".as_ref());
magic.extend(suffix);
let mut error = sys::EVRInitError_VRInitError_None;
let result = unsafe { sys::VR_GetGenericInterface(magic.as_ptr() as *const i8, &mut error) };
if error != sys::EVRInitError_VRInitError_None {
return Err(InitError(
sys::EVRInitError_VRInitError_Init_InterfaceNotFound,
));
}
Ok(result as *const T)
}
impl Context {
pub fn system(&self) -> Result<System, InitError> {
load(sys::IVRSystem_Version).map(|x| unsafe { System(&*x) })
}
pub fn compositor(&self) -> Result<Compositor, InitError> {
load(sys::IVRCompositor_Version).map(|x| unsafe { Compositor(&*x) })
}
pub fn render_models(&self) -> Result<RenderModels, InitError> {
load(sys::IVRRenderModels_Version).map(|x| unsafe { RenderModels(&*x) })
}
pub fn chaperone(&self) -> Result<Chaperone, InitError> {
load(sys::IVRChaperone_Version).map(|x| unsafe { Chaperone(&*x) })
}
}
impl Drop for Context {
fn drop(&mut self) {
unsafe { self.shutdown() }
}
}
impl Context {
/// Shut down OpenVR. Repeated calls are safe.
///
/// Called implicitly by `Context::drop`.
///
/// # Safety
///
/// This *must* be called *before* shutting down the graphics API, or OpenVR may invoke undefined behavior by
/// attempting to free graphics resources.
///
/// No calls to other OpenVR methods may be made after this has been called unless a new `Context` is first
/// constructed.
pub unsafe fn shutdown(&self) {
if self.live.swap(false, Ordering::Acquire) {
sys::VR_ShutdownInternal();
INITIALIZED.store(false, Ordering::Release);
}
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum ApplicationType {
/// Some other kind of application that isn't covered by the other entries
Other = sys::EVRApplicationType_VRApplication_Other as isize,
/// Application will submit 3D frames
Scene = sys::EVRApplicationType_VRApplication_Scene as isize,
/// Application only interacts with overlays
Overlay = sys::EVRApplicationType_VRApplication_Overlay as isize,
/// Application should not start SteamVR if it's not already running, and should not keep it running if everything
/// else quits.
Background = sys::EVRApplicationType_VRApplication_Background as isize,
/// Init should not try to load any drivers. The application needs access to utility interfaces (like IVRSettings
/// and IVRApplications) but not hardware.
Utility = sys::EVRApplicationType_VRApplication_Utility as isize,
/// Reserved for vrmonitor
VRMonitor = sys::EVRApplicationType_VRApplication_VRMonitor as isize,
/// Reserved for Steam
SteamWatchdog = sys::EVRApplicationType_VRApplication_SteamWatchdog as isize,
/// Start up SteamVR
Bootstrapper = sys::EVRApplicationType_VRApplication_Bootstrapper as isize,
}
#[derive(Copy, Clone)]
pub struct InitError(sys::EVRInitError);
impl fmt::Debug for InitError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let msg = unsafe { CStr::from_ptr(sys::VR_GetVRInitErrorAsSymbol(self.0)) };
f.pad(
msg.to_str()
.expect("OpenVR init error symbol was not valid UTF-8"),
)
}
}
impl error::Error for InitError {
fn description(&self) -> &str {
let msg = unsafe { CStr::from_ptr(sys::VR_GetVRInitErrorAsEnglishDescription(self.0)) };
msg.to_str()
.expect("OpenVR init error description was not valid UTF-8")
}
}
impl fmt::Display for InitError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad(error::Error::description(self))
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Eye {
Left = sys::EVREye_Eye_Left as isize,
Right = sys::EVREye_Eye_Right as isize,
}
/// Helper to call OpenVR functions that return strings
unsafe fn get_string<F: FnMut(*mut std::os::raw::c_char, u32) -> u32>(mut f: F) -> Option<CString> {
let n = f(ptr::null_mut(), 0);
if n == 0 {<|fim▁hole|> let mut storage = Vec::new();
storage.reserve_exact(n as usize);
storage.resize(n as usize, mem::uninitialized());
let n_ = f(storage.as_mut_ptr() as *mut _, n);
assert!(n == n_);
storage.truncate((n - 1) as usize); // Strip trailing null
Some(CString::from_vec_unchecked(storage))
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ControllerAxis {
pub x: f32,
pub y: f32,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct ControllerState {
pub packet_num: u32,
pub button_pressed: u64,
pub button_touched: u64,
pub axis: [ControllerAxis; 5],
}
pub mod button_id {
use super::sys;
pub const SYSTEM: sys::EVRButtonId = sys::EVRButtonId_k_EButton_System;
pub const APPLICATION_MENU: sys::EVRButtonId = sys::EVRButtonId_k_EButton_ApplicationMenu;
pub const GRIP: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Grip;
pub const DPAD_LEFT: sys::EVRButtonId = sys::EVRButtonId_k_EButton_DPad_Left;
pub const DPAD_UP: sys::EVRButtonId = sys::EVRButtonId_k_EButton_DPad_Up;
pub const DPAD_RIGHT: sys::EVRButtonId = sys::EVRButtonId_k_EButton_DPad_Right;
pub const DPAD_DOWN: sys::EVRButtonId = sys::EVRButtonId_k_EButton_DPad_Down;
pub const A: sys::EVRButtonId = sys::EVRButtonId_k_EButton_A;
pub const PROXIMITY_SENSOR: sys::EVRButtonId = sys::EVRButtonId_k_EButton_ProximitySensor;
pub const AXIS0: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Axis0;
pub const AXIS1: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Axis1;
pub const AXIS2: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Axis2;
pub const AXIS3: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Axis3;
pub const AXIS4: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Axis4;
pub const STEAM_VR_TOUCHPAD: sys::EVRButtonId = sys::EVRButtonId_k_EButton_SteamVR_Touchpad;
pub const STEAM_VR_TRIGGER: sys::EVRButtonId = sys::EVRButtonId_k_EButton_SteamVR_Trigger;
pub const DASHBOARD_BACK: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Dashboard_Back;
pub const MAX: sys::EVRButtonId = sys::EVRButtonId_k_EButton_Max;
}<|fim▁end|> | return None;
} |
<|file_name|>complete_test.go<|end_file_name|><|fim▁begin|>package converter
import (
"go/token"
"reflect"
"strings"
"testing"
)
func TestIdentifierAt(t *testing.T) {
type args struct {
src string
idx int
}
tests := []struct {
name string
args args<|fim▁hole|> wantEnd int
}{
{
name: "basic",
args: args{"abc", 0},
wantStart: 0,
wantEnd: 3,
}, {
name: "basic",
args: args{"_a", 0},
wantStart: 0,
wantEnd: 2,
}, {
args: args{"abc", 1},
wantStart: 0,
wantEnd: 3,
}, {
args: args{"abc", 3},
wantStart: 0,
wantEnd: 3,
}, {
args: args{"abc", 10},
wantStart: -1,
wantEnd: -1,
}, {
args: args{"abc", -1},
wantStart: -1,
wantEnd: -1,
}, {
args: args{"1034", 2},
wantStart: -1,
wantEnd: -1,
}, {
args: args{"a034", 2},
wantStart: 0,
wantEnd: 4,
}, {
args: args{"a+b", 2},
wantStart: 2,
wantEnd: 3,
}, {
args: args{"a+b", 1},
wantStart: 0,
wantEnd: 1,
}, {
name: "multibytes",
args: args{"こんにちは", 6},
wantStart: 0,
wantEnd: 15,
}, {
name: "multibytes_invalidpos",
args: args{"こんにちは", 5},
wantStart: -1,
wantEnd: -1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotStart, gotEnd := identifierAt(tt.args.src, tt.args.idx)
if gotStart != tt.wantStart {
t.Errorf("identifierAt() gotStart = %v, want %v", gotStart, tt.wantStart)
}
if gotEnd != tt.wantEnd {
t.Errorf("identifierAt() gotEnd = %v, want %v", gotEnd, tt.wantEnd)
}
})
}
}
func Test_findLastDot(t *testing.T) {
type args struct {
src string
idx int
}
tests := []struct {
name string
args args
wantDot int
wantIDStart int
wantIDEnd int
}{
{
name: "basic",
args: args{"ab.cd", 3},
wantDot: 2,
wantIDStart: 3,
wantIDEnd: 5,
}, {
name: "eos",
args: args{"ab.cd", 5},
wantDot: 2,
wantIDStart: 3,
wantIDEnd: 5,
}, {
name: "dot",
args: args{"ab.cd", 2},
wantDot: -1,
wantIDStart: -1,
wantIDEnd: -1,
}, {
name: "space",
args: args{"ab. cd", 6},
wantDot: 2,
wantIDStart: 5,
wantIDEnd: 7,
}, {
name: "newline",
args: args{"ab.\ncd", 5},
wantDot: 2,
wantIDStart: 4,
wantIDEnd: 6,
}, {
name: "not_dot",
args: args{"a.b/cd", 4},
wantDot: -1,
wantIDStart: -1,
wantIDEnd: -1,
}, {
name: "empty_src",
args: args{"", 0},
wantDot: -1,
wantIDStart: -1,
wantIDEnd: -1,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotDot, gotIDStart, gotIDEnd := findLastDot(tt.args.src, tt.args.idx)
if gotDot != tt.wantDot {
t.Errorf("findLastDot() gotDot = %v, want %v", gotDot, tt.wantDot)
}
if gotIDStart != tt.wantIDStart {
t.Errorf("findLastDot() gotIDStart = %v, want %v", gotIDStart, tt.wantIDStart)
}
if gotIDEnd != tt.wantIDEnd {
t.Errorf("findLastDot() gotIDEnd = %v, want %v", gotIDEnd, tt.wantIDEnd)
}
})
}
}
func Test_isPosInFuncBody(t *testing.T) {
tests := []struct {
name string
src string
want bool
}{
{"before", `func sum(a, b int) int[cur] { return a + b }`, false},
{"brace_open", `func sum(a, b int) int [cur]{ return a + b }`, false},
{"first", `func sum(a, b int) int {[cur] return a + b }`, true},
{"last", `func sum(a, b int) int { return a + b[cur] }`, true},
{"brace_close", `func sum(a, b int) int { return a + b [cur]}`, true},
{"after", `func sum(a, b int) int { return a + b }[cur]`, false},
{"funclit", `f := func (a, b int) int { [cur]return a + b }`, true},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
src := tt.src
pos := token.Pos(strings.Index(src, "[cur]") + 1)
if pos == token.NoPos {
t.Error("[cur] not found in src")
return
}
src = strings.Replace(src, "[cur]", "", -1)
_, blk, err := parseLesserGoString(src)
if err != nil {
t.Errorf("Failed to parse: %v", err)
return
}
if got := isPosInFuncBody(blk, pos); got != tt.want {
t.Errorf("isPosInFuncBody() = %v, want %v", got, tt.want)
}
})
}
}
func TestComplete(t *testing.T) {
const selectorSpecExample = `
type T0 struct {
x int
}
func (*T0) M0()
type T1 struct {
y int
}
func (T1) M1()
type T2 struct {
z int
T1
*T0
}
func (*T2) M2()
type Q *T2
var t T2 // with t.T0 != nil
var p *T2 // with p != nil and (*p).T0 != nil
var q Q = p
`
tests := []struct {
name string
src string
want []string
ignoreWant bool
wantInclude []string
wantExclude []string
}{
{
name: "go_keyword",
src: `
import (
"bytes"
)
go bytes.sp[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "go_keyword_in_func",
src: `
import (
"bytes"
)
func f() {
go bytes.sp[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "go_with_defer_keyword",
src: `
import (
"bytes"
)
func f(){
}
defer f()
go bytes.sp[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "defer_before_go_keyword",
src: `
func foo(){
}
func bar(){
}
defer fo[cur]
go bar()`,
want: []string{"foo"},
}, {
name: "defer_between_2_go_keywords",
src: `
func foo(){
}
func bar(){
}
go bar()
defer fo[cur]
go bar()`,
want: []string{"foo"},
}, {
name: "non_go_defer_function_call_with_go_keyword",
src: `
func foo(){
}
func bar(){
}
fo[cur]
go bar()`,
want: []string{"foo"},
}, {
name: "package",
src: `
import (
"bytes"
)
var buf bytes.sp[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "package_in_func",
src: `
import (
"bytes"
)
func f() {
var buf bytes.sp[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "package_upper",
src: `
import (
"bytes"
)
var buf bytes.SP[cur]`,
want: []string{"Split", "SplitAfter", "SplitAfterN", "SplitN"},
}, {
name: "value",
src: `
import (
"bytes"
)
var buf bytes.Buffer
buf.un[cur]`,
want: []string{"UnreadByte", "UnreadRune"},
}, {
name: "value_in_func",
src: `
import (
"bytes"
)
func f() {
var buf bytes.Buffer
buf.un[cur]`,
want: []string{"UnreadByte", "UnreadRune"},
}, {
name: "pointer",
src: `
import (
"bytes"
)
var buf *bytes.Buffer
buf.un[cur]`,
want: []string{"UnreadByte", "UnreadRune"},
}, {
name: "selector_example1",
src: `
[selector_example]
t.[cur]`,
want: []string{"M0", "M1", "M2", "T0", "T1", "x", "y", "z"},
}, {
name: "selector_example2",
src: `
[selector_example]
p.[cur]`,
want: []string{"M0", "M1", "M2", "T0", "T1", "x", "y", "z"},
}, {
name: "selector_example3",
src: `
[selector_example]
q.[cur]`,
want: []string{"T0", "T1", "x", "y", "z"},
}, {
// ".(" is parsed as TypeAssertExpr.
name: "dot_paren",
src: `
[selector_example]
q.[cur](`,
want: []string{"T0", "T1", "x", "y", "z"},
}, {
name: "before_type_assert",
src: `
[selector_example]
var x interface{}
x.(T0).[cur]`,
want: []string{"M0", "x"},
}, {
name: "before_type_switch",
src: `
[selector_example]
type I0 interface {
M0()
}
var i I0
switch i.[cur](type) {
default:
}`,
want: []string{"M0"},
}, {
name: "lgo_context",
src: `
_ctx.val[cur]`,
want: []string{"Value"},
}, {
name: "lgo_context_infunc",
src: `
func f() {
_ctx.val[cur]
}`,
want: []string{"Value"},
}, {
name: "id_simple",
src: `
abc := 100
xyz := "hello"
[cur]
zzz := 1.23
`,
ignoreWant: true,
wantInclude: []string{"abc", "xyz"},
wantExclude: []string{"zzz"},
}, {
name: "id_upper",
src: `
abc := 100
xyz := "hello"
XY[cur]
zzz := 1.23
`,
want: []string{"xyz"},
}, {
name: "id_camel_case",
src: `
func testFunc(){}
testf[cur]
`,
want: []string{"testFunc"},
}, {
name: "id_partial",
src: `
abc := 100
xyz := "hello"
xy[cur]
`,
want: []string{"xyz"},
}, {
name: "id_in_func",
src: `
func fn() {
abc := 100
xyz := "hello"
[cur]
zzz := 1.23
}`,
ignoreWant: true,
wantInclude: []string{"abc", "xyz", "int64"},
wantExclude: []string{"zzz"},
}, {
name: "id_partial_in_func",
src: `
func fn() {
abc := 100
xyz := "hello"
xy[cur]
}`,
want: []string{"xyz"},
}, {
name: "sort",
src: `
type data struct {
abc int
DEF int
xyz int
}
var d data
d.[cur]
`,
want: []string{"abc", "DEF", "xyz"},
}, {
// https://github.com/yunabe/lgo/issues/18
name: "bug18",
src: `var [cur]`,
ignoreWant: true,
wantInclude: []string{"int64"},
}, {
name: "bug17",
src: `
import "bytes"
var buf bytes.Buffer
buf.[cur]
y := 10`,
ignoreWant: true,
// TODO: Fix issue #17.
// wantInclude: []string{"Bytes", "Grow", "Len"},
}, {
// Similar to bug17, but Complete works in this case.
name: "bug17ok",
src: `
import "bytes"
var buf bytes.Buffer
buf.un[cur]
y := 10`,
want: []string{"UnreadByte", "UnreadRune"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
src := tt.src
src = strings.Replace(src, "[selector_example]", selectorSpecExample, -1)
pos := token.Pos(strings.Index(src, "[cur]") + 1)
if pos <= 0 {
t.Error("[cur] not found")
return
}
got, _, _ := Complete(strings.Replace(src, "[cur]", "", -1), pos, &Config{})
if !tt.ignoreWant && !reflect.DeepEqual(got, tt.want) {
t.Errorf("Expected %#v but got %#v", tt.want, got)
}
if len(tt.wantInclude) == 0 && len(tt.wantExclude) == 0 {
return
}
m := make(map[string]bool)
for _, c := range got {
m[c] = true
}
for _, c := range tt.wantInclude {
if !m[c] {
t.Errorf("%q is not suggested; Got %#v", c, got)
}
}
for _, c := range tt.wantExclude {
if m[c] {
t.Errorf("%q is suggested unexpectedly", c)
}
}
})
}
}
func TestCompleteKeywords(t *testing.T) {
// Checks autocomplete works even if identifiers have keyword prefixes.
// https://golang.org/ref/spec#Keywords
kwds := []string{
"break", "default", "func", "interface", "select",
"case", "defer", "go", "map", "struct",
"chan", "else", "goto", "package", "switch",
"const", "fallthrough", "if", "range", "type",
"continue", "for", "import", "return", "var",
}
tests := []struct {
name string
code string
want []string
}{
{
name: "id",
code: `
var [kwd]xyz, [kwd]abc int
[kwd][cur]`,
want: []string{"[kwd]abc", "[kwd]xyz"},
}, {
name: "idspace",
code: `
var [kwd]def, [kwd]ghi int
[kwd][cur] + 10`,
want: []string{"[kwd]def", "[kwd]ghi"},
}, {
name: "dot",
code: `
type data struct {
[kwd]123 int
[kwd]456 string
}
var d data
d.[kwd][cur]`,
want: []string{"[kwd]123", "[kwd]456"},
},
}
for _, kwd := range kwds {
for _, src := range tests {
t.Run(kwd+"_"+src.name, func(t *testing.T) {
code := strings.Replace(src.code, "[kwd]", kwd, -1)
pos := token.Pos(strings.Index(code, "[cur]") + 1)
if pos <= 0 {
t.Fatal("[cur] not found")
return
}
got, _, _ := Complete(strings.Replace(code, "[cur]", "", -1), pos, &Config{})
var want []string
for _, w := range src.want {
want = append(want, strings.Replace(w, "[kwd]", kwd, -1))
}
if !reflect.DeepEqual(got, want) {
t.Errorf("got %v; want %v", got, want)
}
})
}
}
}<|fim▁end|> | wantStart int |
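The multibyte cases above only pass when positions are handled per rune, not per byte. An illustrative rune-aware scan in the spirit of `identifierAt` follows; this is a sketch, not the converter package's actual implementation:

```go
package main

import (
	"unicode"
	"unicode/utf8"
)

func identifierSpan(src string, idx int) (start, end int) {
	isIdent := func(r rune) bool {
		return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
	}
	if idx < 0 || idx > len(src) {
		return -1, -1
	}
	// Walk backwards rune by rune; an invalid boundary decodes to RuneError,
	// which is not an identifier rune, so mid-rune indices yield no match.
	start = idx
	for start > 0 {
		r, size := utf8.DecodeLastRuneInString(src[:start])
		if !isIdent(r) {
			break
		}
		start -= size
	}
	end = idx
	for end < len(src) {
		r, size := utf8.DecodeRuneInString(src[end:])
		if !isIdent(r) {
			break
		}
		end += size
	}
	if start == end {
		return -1, -1
	}
	if r, _ := utf8.DecodeRuneInString(src[start:]); unicode.IsDigit(r) {
		return -1, -1 // identifiers cannot start with a digit
	}
	return start, end
}
```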
<|file_name|>de6546b2c9685657e1a6a668adbc9d9f63aaa6bd.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | MessagesStore |
<|file_name|>jsDocPropertyLikeTagTests.ts<|end_file_name|><|fim▁begin|>import { expect } from "chai";
import { JSDocPropertyLikeTag, Node } from "../../../../../compiler";
import { getInfoFromText } from "../../../testHelpers";
describe(nameof(JSDocPropertyLikeTag), () => {
function getInfo(text: string) {
const info = getInfoFromText(text);
return { descendant: info.sourceFile.getFirstDescendantOrThrow(Node.isJSDocPropertyLikeTag), ...info };
}
describe(nameof<JSDocPropertyLikeTag>(d => d.getTypeExpression), () => {
it("should get undefined when there is no type given", () => {
const { descendant } = getInfo("/** @param t - String */\nfunction test() {}");
expect(descendant.getTypeExpression()).to.be.undefined;
});
it("should get when type is given", () => {
const { descendant } = getInfo("/** @param {boolean} t - String */\nfunction test() {}");
expect(descendant.getTypeExpression()!.getTypeNode().getText()).to.equal("boolean");
});
});
describe(nameof<JSDocPropertyLikeTag>(d => d.isBracketed), () => {
function doTest(text: string, expected: boolean) {
const { descendant } = getInfo(text);
expect(descendant.isBracketed()).to.equal(expected);
}
it("should return true when bracketed", () => {
doTest("/** @param {Object} [t] - String */\nfunction test() {}", true);
});
it("should return false when not bracketed", () => {
doTest("/** @param {Object} t - String */\nfunction test() {}", false);
});
});
describe(nameof<JSDocPropertyLikeTag>(d => d.getName), () => {
function doTest(text: string, expected: string) {
const { descendant } = getInfo(text);
expect(descendant.getName()).to.equal(expected);
}
it("should get when identifier", () => {
doTest("/** @param {boolean} t - String */\nfunction test() {}", "t");
});
it("should get when fully qualified name", () => {
doTest("/** @param {boolean} t.t.t - String */\nfunction test() {}", "t.t.t");
});
});
<|fim▁hole|> expect(descendant.getNameNode().getText()).to.equal(expected);
}
it("should get when identifier", () => {
doTest("/** @param {boolean} t - String */\nfunction test() {}", "t");
});
it("should get when fully qualified name", () => {
doTest("/** @param {boolean} t.t.t - String */\nfunction test() {}", "t.t.t");
});
});
});<|fim▁end|> | describe(nameof<JSDocPropertyLikeTag>(d => d.getNameNode), () => {
function doTest(text: string, expected: string) {
const { descendant } = getInfo(text); |
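For reference, the accessor trio exercised above distinguishes these `@param` shapes; the snippet is illustrative only:

```ts
/** @param t - untyped, not bracketed */
/** @param {boolean} t - typed: getTypeExpression() is defined */
/** @param {Object} [t] - bracketed, i.e. isBracketed() is true */
/** @param {boolean} t.t.t - fully qualified name from getName() */
function test(t?: unknown) {}
```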
<|file_name|>ActionScheduler.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2014 - 2022 Frank Appel
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Frank Appel - initial API and implementation
*/
package com.codeaffine.eclipse.swt.util;
import org.eclipse.swt.widgets.Display;
<|fim▁hole|>
public ActionScheduler( Display display, Runnable action ) {
this.display = display;
this.action = action;
}
public void schedule( int delay ) {
display.timerExec( delay, action );
}
}<|fim▁end|> | public class ActionScheduler {
private final Display display;
private final Runnable action; |
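A usage sketch for the class above, assuming a standard SWT event loop; the loop never terminates here and exists only to keep the display dispatching:

```java
import org.eclipse.swt.widgets.Display;

public class ActionSchedulerExample {
  public static void main( String[] args ) {
    Display display = new Display();
    // Runs the action once on the UI thread after 500 ms, via Display#timerExec.
    new ActionScheduler( display, () -> System.out.println( "tick" ) ).schedule( 500 );
    while( !display.isDisposed() ) {
      if( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
  }
}
```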
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Translate python-phonenumbers PhoneNumber to/from protobuf PhoneNumber
Examples of use:
>>> import phonenumbers
>>> from phonenumbers.pb2 import phonenumber_pb2, PBToPy, PyToPB
>>> x_py = phonenumbers.PhoneNumber(country_code=44, national_number=7912345678)
>>> print x_py
Country Code: 44 National Number: 7912345678
>>> y_pb = phonenumber_pb2.PhoneNumber()
>>> y_pb.country_code = 44
>>> y_pb.national_number = 7912345678
>>> print str(y_pb).strip()
country_code: 44
national_number: 7912345678
>>> # Check italian_leading_zero default value when not set
>>> y_pb.italian_leading_zero
False
>>> y_py = PBToPy(y_pb)
>>> print y_py
Country Code: 44 National Number: 7912345678
>>> x_pb = PyToPB(x_py)
>>> print str(x_pb).strip()
country_code: 44
national_number: 7912345678
>>> x_py == y_py
True
>>> x_pb == y_pb
True<|fim▁hole|>>>> y_pb.italian_leading_zero = y_pb.italian_leading_zero
>>> x_pb == y_pb
False
"""
from phonenumber_pb2 import PhoneNumber as PhoneNumberPB
from phonenumbers import PhoneNumber
def PBToPy(numpb):
"""Convert phonenumber_pb2.PhoneNumber to phonenumber.PhoneNumber"""
return PhoneNumber(numpb.country_code if numpb.HasField("country_code") else None,
numpb.national_number if numpb.HasField("national_number") else None,
numpb.extension if numpb.HasField("extension") else None,
numpb.italian_leading_zero if numpb.HasField("italian_leading_zero") else None,
numpb.raw_input if numpb.HasField("raw_input") else None,
numpb.country_code_source if numpb.HasField("country_code_source") else None,
numpb.preferred_domestic_carrier_code if numpb.HasField("preferred_domestic_carrier_code") else None)
def PyToPB(numobj):
"""Convert phonenumber.PhoneNumber to phonenumber_pb2.PhoneNumber"""
numpb = PhoneNumberPB()
if numobj.country_code is not None:
numpb.country_code = numobj.country_code
if numobj.national_number is not None:
numpb.national_number = numobj.national_number
if numobj.extension is not None:
numpb.extension = numobj.extension
if numobj.italian_leading_zero is not None:
numpb.italian_leading_zero = numobj.italian_leading_zero
if numobj.raw_input is not None:
numpb.raw_input = numobj.raw_input
if numobj.country_code_source is not None:
numpb.country_code_source = numobj.country_code_source
if numobj.preferred_domestic_carrier_code is not None:
numpb.preferred_domestic_carrier_code = numobj.preferred_domestic_carrier_code
return numpb
__all__ = ['PBToPy', 'PyToPB']
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()<|fim▁end|> | >>> # Explicitly set the field to its default |
<|file_name|>spell_pet.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2014 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
 * Scripts for pet scaling and pet passive spells used by hunter, warlock,
 * death knight and shaman pets, plus the SPELLFAMILY_GENERIC auras they share.
 * Ordered alphabetically using scriptname.
 * Scriptnames of files in this file should be prefixed with "spell_gen_" or a class prefix such as "spell_warl_".
*/
#include "ScriptMgr.h"
#include "SpellScript.h"
#include "SpellAuraEffects.h"
#include "Unit.h"
#include "Player.h"
#include "Pet.h"
enum HunterPetCalculate
{
SPELL_TAMED_PET_PASSIVE_06 = 19591,
SPELL_TAMED_PET_PASSIVE_07 = 20784,
SPELL_TAMED_PET_PASSIVE_08 = 34666,
SPELL_TAMED_PET_PASSIVE_09 = 34667,
SPELL_TAMED_PET_PASSIVE_10 = 34675,
SPELL_HUNTER_PET_SCALING_01 = 34902,
SPELL_HUNTER_PET_SCALING_02 = 34903,
SPELL_HUNTER_PET_SCALING_03 = 34904,
SPELL_HUNTER_PET_SCALING_04 = 61017,
SPELL_HUNTER_ANIMAL_HANDLER = 34453,
};
enum WarlockPetCalculate
{
SPELL_PET_PASSIVE_CRIT = 35695,
SPELL_PET_PASSIVE_DAMAGE_TAKEN = 35697,
SPELL_WARLOCK_PET_SCALING_01 = 34947,
SPELL_WARLOCK_PET_SCALING_02 = 34956,
SPELL_WARLOCK_PET_SCALING_03 = 34957,
SPELL_WARLOCK_PET_SCALING_04 = 34958,
SPELL_WARLOCK_PET_SCALING_05 = 61013,
ENTRY_FELGUARD = 17252,
ENTRY_VOIDWALKER = 1860,
ENTRY_FELHUNTER = 417,
ENTRY_SUCCUBUS = 1863,
ENTRY_IMP = 416,
SPELL_WARLOCK_GLYPH_OF_VOIDWALKER = 56247,
};
enum DKPetCalculate
{
SPELL_DEATH_KNIGHT_RUNE_WEAPON_02 = 51906,
SPELL_DEATH_KNIGHT_PET_SCALING_01 = 54566,
SPELL_DEATH_KNIGHT_PET_SCALING_02 = 51996,
SPELL_DEATH_KNIGHT_PET_SCALING_03 = 61697,
SPELL_NIGHT_OF_THE_DEAD = 55620,
ENTRY_ARMY_OF_THE_DEAD_GHOUL = 24207,
SPELL_DEATH_KNIGHT_GLYPH_OF_GHOUL = 58686,
};
enum ShamanPetCalculate
{
SPELL_FERAL_SPIRIT_PET_UNK_01 = 35674,
SPELL_FERAL_SPIRIT_PET_UNK_02 = 35675,
SPELL_FERAL_SPIRIT_PET_UNK_03 = 35676,
SPELL_FERAL_SPIRIT_PET_SCALING_04 = 61783,
};
enum MiscPetCalculate
{
SPELL_MAGE_PET_PASSIVE_ELEMENTAL = 44559,
SPELL_PET_HEALTH_SCALING = 61679,
SPELL_PET_UNK_01 = 67561,
SPELL_PET_UNK_02 = 67557,
};
class spell_gen_pet_calculate : public SpellScriptLoader
{
public:
spell_gen_pet_calculate() : SpellScriptLoader("spell_gen_pet_calculate") { }
class spell_gen_pet_calculate_AuraScript : public AuraScript
{
PrepareAuraScript(spell_gen_pet_calculate_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountCritSpell(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritSpell = 0.0f;
// Crit from Intellect
CritSpell += owner->GetSpellCritFromIntellect();
// Increase crit from SPELL_AURA_MOD_SPELL_CRIT_CHANCE
CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit spell from spell crit ratings
CritSpell += owner->GetRatingBonusValue(CR_CRIT_SPELL);
amount += int32(CritSpell);
}
}
void CalculateAmountCritMelee(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritMelee = 0.0f;
// Crit from Agility
CritMelee += owner->GetMeleeCritFromAgility();
// Increase crit from SPELL_AURA_MOD_WEAPON_CRIT_PERCENT
CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit melee from melee crit ratings
CritMelee += owner->GetRatingBonusValue(CR_CRIT_MELEE);
amount += int32(CritMelee);
}
}
void CalculateAmountMeleeHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitMelee = 0.0f;
// Increase hit from SPELL_AURA_MOD_HIT_CHANCE
HitMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_HIT_CHANCE);
// Increase hit melee from meele hit ratings
HitMelee += owner->GetRatingBonusValue(CR_HIT_MELEE);
amount += int32(HitMelee);
}
}
void CalculateAmountSpellHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitSpell = 0.0f;
// Increase hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
HitSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
// Increase hit spell from spell hit ratings
HitSpell += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitSpell);
}
}
void CalculateAmountExpertise(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float Expertise = 0.0f;
                    // Increase expertise from SPELL_AURA_MOD_EXPERTISE
Expertise += owner->GetTotalAuraModifier(SPELL_AURA_MOD_EXPERTISE);
// Increase Expertise from Expertise ratings
Expertise += owner->GetRatingBonusValue(CR_EXPERTISE);
amount += int32(Expertise);
}
}
void Register() OVERRIDE
{
switch (m_scriptSpellId)
{
case SPELL_TAMED_PET_PASSIVE_06:
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountCritMelee, EFFECT_0, SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountCritSpell, EFFECT_1, SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
break;
case SPELL_PET_PASSIVE_CRIT:
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountCritSpell, EFFECT_0, SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountCritMelee, EFFECT_1, SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
break;
case SPELL_WARLOCK_PET_SCALING_05:
case SPELL_HUNTER_PET_SCALING_04:
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountExpertise, EFFECT_2, SPELL_AURA_MOD_EXPERTISE);
break;
case SPELL_DEATH_KNIGHT_PET_SCALING_03:
// case SPELL_SHAMAN_PET_HIT:
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_gen_pet_calculate_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
break;
default:
break;
}
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_gen_pet_calculate_AuraScript();
}
};
class spell_warl_pet_scaling_01 : public SpellScriptLoader
{
public:
spell_warl_pet_scaling_01() : SpellScriptLoader("spell_warl_pet_scaling_01") { }
class spell_warl_pet_scaling_01_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_scaling_01_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
_tempBonus = 0;
return true;
}
void CalculateStaminaAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
                        float ownerBonus = CalculatePct(owner->GetStat(STAT_STAMINA), 75);
                        amount += ownerBonus;
                        _tempBonus = ownerBonus; // remember the bonus so ApplyEffect can rescale the pet's create health
}
}
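            // After the stamina aura is (re)applied, rebase the pet's create health
            // using a per-entry multiplier on the stored stamina bonus.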
void ApplyEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (_tempBonus)
{
PetLevelInfo const* pInfo = sObjectMgr->GetPetLevelInfo(pet->GetEntry(), pet->getLevel());
uint32 healthMod = 0;
uint32 baseHealth = pInfo->health;
switch (pet->GetEntry())
{
case ENTRY_IMP:
healthMod = uint32(_tempBonus * 8.4f);
break;
case ENTRY_FELGUARD:
case ENTRY_VOIDWALKER:
healthMod = _tempBonus * 11;
break;
case ENTRY_SUCCUBUS:
healthMod = uint32(_tempBonus * 9.1f);
break;
case ENTRY_FELHUNTER:
healthMod = uint32(_tempBonus * 9.5f);
break;
default:
healthMod = 0;
break;
}
if (healthMod)
pet->ToPet()->SetCreateHealth(baseHealth + healthMod);
}
}
void RemoveEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
{
PetLevelInfo const* pInfo = sObjectMgr->GetPetLevelInfo(pet->GetEntry(), pet->getLevel());
pet->ToPet()->SetCreateHealth(pInfo->health);
}
}
void CalculateAttackPowerAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
int32 fire = int32(owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_POS + SPELL_SCHOOL_FIRE)) - owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_NEG + SPELL_SCHOOL_FIRE);
int32 shadow = int32(owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_POS + SPELL_SCHOOL_SHADOW)) - owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_NEG + SPELL_SCHOOL_SHADOW);
int32 maximum = (fire > shadow) ? fire : shadow;
if (maximum < 0)
maximum = 0;
float bonusAP = maximum * 0.57f;
amount += bonusAP;
// Glyph of felguard
if (pet->GetEntry() == ENTRY_FELGUARD)
{
                            if (AuraEffect* aurEffect = owner->GetAuraEffect(56246, EFFECT_0))
{
float base_attPower = pet->GetModifierValue(UNIT_MOD_ATTACK_POWER, BASE_VALUE) * pet->GetModifierValue(UNIT_MOD_ATTACK_POWER, BASE_PCT);
                                amount += CalculatePct(amount + base_attPower, aurEffect->GetAmount());
}
}
}
}
void CalculateDamageDoneAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
                        // The damage bonus used for pets is either fire or shadow damage, whichever is higher
int32 fire = int32(owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_POS + SPELL_SCHOOL_FIRE)) - owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_NEG + SPELL_SCHOOL_FIRE);
int32 shadow = int32(owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_POS + SPELL_SCHOOL_SHADOW)) - owner->GetUInt32Value(PLAYER_FIELD_MOD_DAMAGE_DONE_NEG + SPELL_SCHOOL_SHADOW);
int32 maximum = (fire > shadow) ? fire : shadow;
float bonusDamage = 0.0f;
if (maximum > 0)
bonusDamage = maximum * 0.15f;
amount += bonusDamage;
}
}
void Register() OVERRIDE
{
OnEffectRemove += AuraEffectRemoveFn(spell_warl_pet_scaling_01_AuraScript::RemoveEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
AfterEffectApply += AuraEffectApplyFn(spell_warl_pet_scaling_01_AuraScript::ApplyEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_01_AuraScript::CalculateStaminaAmount, EFFECT_0, SPELL_AURA_MOD_STAT);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_01_AuraScript::CalculateAttackPowerAmount, EFFECT_1, SPELL_AURA_MOD_ATTACK_POWER);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_01_AuraScript::CalculateDamageDoneAmount, EFFECT_2, SPELL_AURA_MOD_DAMAGE_DONE);
}
private:
uint32 _tempBonus;
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_scaling_01_AuraScript();
}
};
class spell_warl_pet_scaling_02 : public SpellScriptLoader
{
public:
spell_warl_pet_scaling_02() : SpellScriptLoader("spell_warl_pet_scaling_02") { }
class spell_warl_pet_scaling_02_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_scaling_02_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
_tempBonus = 0;
return true;
}
void CalculateIntellectAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetStat(STAT_INTELLECT), 30);
amount += ownerBonus;
_tempBonus = ownerBonus;
}
}
void ApplyEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (_tempBonus)
{
PetLevelInfo const* pInfo = sObjectMgr->GetPetLevelInfo(pet->GetEntry(), pet->getLevel());
uint32 manaMod = 0;
uint32 baseMana = pInfo->mana;
switch (pet->GetEntry())
{
case ENTRY_IMP:
manaMod = uint32(_tempBonus * 4.9f);
break;
case ENTRY_VOIDWALKER:
case ENTRY_SUCCUBUS:
case ENTRY_FELHUNTER:
case ENTRY_FELGUARD:
manaMod = uint32(_tempBonus * 11.5f);
break;
default:
manaMod = 0;
break;
}
if (manaMod)
pet->ToPet()->SetCreateMana(baseMana + manaMod);
}
}
void RemoveEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
{
PetLevelInfo const* pInfo = sObjectMgr->GetPetLevelInfo(pet->GetEntry(), pet->getLevel());
pet->ToPet()->SetCreateMana(pInfo->mana);
}
}
void CalculateArmorAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetArmor(), 35);
amount += ownerBonus;
}
}
void CalculateFireResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_FIRE), 40);
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
OnEffectRemove += AuraEffectRemoveFn(spell_warl_pet_scaling_02_AuraScript::RemoveEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
AfterEffectApply += AuraEffectApplyFn(spell_warl_pet_scaling_02_AuraScript::ApplyEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_02_AuraScript::CalculateIntellectAmount, EFFECT_0, SPELL_AURA_MOD_STAT);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_02_AuraScript::CalculateArmorAmount, EFFECT_1, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_02_AuraScript::CalculateFireResistanceAmount, EFFECT_2, SPELL_AURA_MOD_RESISTANCE);
}
private:
uint32 _tempBonus;
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_scaling_02_AuraScript();
}
};
class spell_warl_pet_scaling_03 : public SpellScriptLoader
{
public:
spell_warl_pet_scaling_03() : SpellScriptLoader("spell_warl_pet_scaling_03") { }
class spell_warl_pet_scaling_03_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_scaling_03_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateFrostResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_FROST), 40);
amount += ownerBonus;
}
}
void CalculateArcaneResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_ARCANE), 40);
amount += ownerBonus;
}
}
void CalculateNatureResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_NATURE), 40);
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_03_AuraScript::CalculateFrostResistanceAmount, EFFECT_0, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_03_AuraScript::CalculateArcaneResistanceAmount, EFFECT_1, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_03_AuraScript::CalculateNatureResistanceAmount, EFFECT_2, SPELL_AURA_MOD_RESISTANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_scaling_03_AuraScript();
}
};
class spell_warl_pet_scaling_04 : public SpellScriptLoader
{
public:
spell_warl_pet_scaling_04() : SpellScriptLoader("spell_warl_pet_scaling_04") { }
class spell_warl_pet_scaling_04_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_scaling_04_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateShadowResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_SHADOW), 40);
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_04_AuraScript::CalculateShadowResistanceAmount, EFFECT_0, SPELL_AURA_MOD_RESISTANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_scaling_04_AuraScript();
}
};
class spell_warl_pet_scaling_05 : public SpellScriptLoader
{
public:
spell_warl_pet_scaling_05() : SpellScriptLoader("spell_warl_pet_scaling_05") { }
class spell_warl_pet_scaling_05_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_scaling_05_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountMeleeHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
                    // Warlock pet melee hit mirrors the owner's spell hit:
                    float HitMelee = 0.0f;
                    // Spell hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
                    HitMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
                    // Spell hit from spell hit ratings
                    HitMelee += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitMelee);
}
}
void CalculateAmountSpellHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitSpell = 0.0f;
// Increase hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
HitSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
// Increase hit spell from spell hit ratings
HitSpell += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitSpell);
}
}
void CalculateAmountExpertise(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
                    // Warlock pet expertise is derived from the owner's spell hit:
                    float Expertise = 0.0f;
                    // Spell hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
                    Expertise += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
                    // Spell hit from spell hit ratings
                    Expertise += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(Expertise);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_05_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_05_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_scaling_05_AuraScript::CalculateAmountExpertise, EFFECT_2, SPELL_AURA_MOD_EXPERTISE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_scaling_05_AuraScript();<|fim▁hole|>};
class spell_warl_pet_passive : public SpellScriptLoader
{
public:
spell_warl_pet_passive() : SpellScriptLoader("spell_warl_pet_passive") { }
class spell_warl_pet_passive_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_passive_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountCritSpell(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritSpell = 0.0f;
// Crit from Intellect
CritSpell += owner->GetSpellCritFromIntellect();
// Increase crit from SPELL_AURA_MOD_SPELL_CRIT_CHANCE
CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit spell from spell crit ratings
CritSpell += owner->GetRatingBonusValue(CR_CRIT_SPELL);
if (AuraApplication* improvedDemonicTacticsApp = owner->GetAuraApplicationOfRankedSpell(54347))
if (Aura* improvedDemonicTactics = improvedDemonicTacticsApp->GetBase())
if (AuraEffect* improvedDemonicTacticsEffect = improvedDemonicTactics->GetEffect(EFFECT_0))
amount += CalculatePct(CritSpell, improvedDemonicTacticsEffect->GetAmount());
}
}
void CalculateAmountCritMelee(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritMelee = 0.0f;
// Crit from Agility
CritMelee += owner->GetMeleeCritFromAgility();
// Increase crit from SPELL_AURA_MOD_WEAPON_CRIT_PERCENT
CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit melee from melee crit ratings
CritMelee += owner->GetRatingBonusValue(CR_CRIT_MELEE);
if (AuraApplication* improvedDemonicTacticsApp = owner->GetAuraApplicationOfRankedSpell(54347))
if (Aura* improvedDemonicTactics = improvedDemonicTacticsApp->GetBase())
if (AuraEffect* improvedDemonicTacticsEffect = improvedDemonicTactics->GetEffect(EFFECT_0))
amount += CalculatePct(CritMelee, improvedDemonicTacticsEffect->GetAmount());
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_passive_AuraScript::CalculateAmountCritSpell, EFFECT_0, SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_passive_AuraScript::CalculateAmountCritMelee, EFFECT_1, SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_passive_AuraScript();
}
};
// this doesn't actually fit in here
class spell_warl_pet_passive_damage_done : public SpellScriptLoader
{
public:
spell_warl_pet_passive_damage_done() : SpellScriptLoader("spell_warl_pet_passive_damage_done") { }
class spell_warl_pet_passive_damage_done_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_passive_damage_done_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
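        // Flat per-entry tweak to the pet's percent damage done
        // (Voidwalker -16%, Felhunter -20%, Succubus/Felguard +5%).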
void CalculateAmountDamageDone(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (GetCaster()->GetOwner()->ToPlayer())
{
switch (GetCaster()->GetEntry())
{
case ENTRY_VOIDWALKER:
amount += -16;
break;
case ENTRY_FELHUNTER:
amount += -20;
break;
case ENTRY_SUCCUBUS:
case ENTRY_FELGUARD:
amount += 5;
break;
}
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_passive_damage_done_AuraScript::CalculateAmountDamageDone, EFFECT_0, SPELL_AURA_MOD_DAMAGE_PERCENT_DONE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_passive_damage_done_AuraScript::CalculateAmountDamageDone, EFFECT_1, SPELL_AURA_MOD_DAMAGE_PERCENT_DONE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_passive_damage_done_AuraScript();
}
};
class spell_warl_pet_passive_voidwalker : public SpellScriptLoader
{
public:
spell_warl_pet_passive_voidwalker() : SpellScriptLoader("spell_warl_pet_passive_voidwalker") { }
class spell_warl_pet_passive_voidwalker_AuraScript : public AuraScript
{
PrepareAuraScript(spell_warl_pet_passive_voidwalker_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
                        if (AuraEffect* aurEffect = owner->GetAuraEffect(SPELL_WARLOCK_GLYPH_OF_VOIDWALKER, EFFECT_0))
                            amount += aurEffect->GetAmount();
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_warl_pet_passive_voidwalker_AuraScript::CalculateAmount, EFFECT_0, SPELL_AURA_MOD_TOTAL_STAT_PERCENTAGE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_warl_pet_passive_voidwalker_AuraScript();
}
};
class spell_sha_pet_scaling_04 : public SpellScriptLoader
{
public:
spell_sha_pet_scaling_04() : SpellScriptLoader("spell_sha_pet_scaling_04") { }
class spell_sha_pet_scaling_04_AuraScript : public AuraScript
{
PrepareAuraScript(spell_sha_pet_scaling_04_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountMeleeHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitMelee = 0.0f;
// Increase hit from SPELL_AURA_MOD_HIT_CHANCE
HitMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_HIT_CHANCE);
                    // Increase melee hit from melee hit ratings
HitMelee += owner->GetRatingBonusValue(CR_HIT_MELEE);
amount += int32(HitMelee);
}
}
void CalculateAmountSpellHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitSpell = 0.0f;
// Increase hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
HitSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
// Increase hit spell from spell hit ratings
HitSpell += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitSpell);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_sha_pet_scaling_04_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_sha_pet_scaling_04_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_sha_pet_scaling_04_AuraScript();
}
};
class spell_hun_pet_scaling_01 : public SpellScriptLoader
{
public:
spell_hun_pet_scaling_01() : SpellScriptLoader("spell_hun_pet_scaling_01") { }
class spell_hun_pet_scaling_01_AuraScript : public AuraScript
{
        PrepareAuraScript(spell_hun_pet_scaling_01_AuraScript);
        bool Load() OVERRIDE
        {
            if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
                return false;
            _tempHealth = 0; // avoid restoring an uninitialized health snapshot on first apply
            return true;
        }
void CalculateStaminaAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
if (pet->IsPet())
if (Unit* owner = pet->ToPet()->GetOwner())
{
float mod = 0.45f;
float ownerBonus = 0.0f;
PetSpellMap::const_iterator itr = (pet->ToPet()->m_spells.find(62758)); // Wild Hunt rank 1
if (itr == pet->ToPet()->m_spells.end())
itr = pet->ToPet()->m_spells.find(62762); // Wild Hunt rank 2
if (itr != pet->ToPet()->m_spells.end()) // If pet has Wild Hunt
{
SpellInfo const* spellInfo = sSpellMgr->GetSpellInfo(itr->first); // Then get the SpellProto and add the dummy effect value
AddPct(mod, spellInfo->Effects[EFFECT_0].CalcValue());
}
ownerBonus = owner->GetStat(STAT_STAMINA)*mod;
amount += ownerBonus;
}
}
void ApplyEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (_tempHealth)
pet->SetHealth(_tempHealth);
}
void RemoveEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
_tempHealth = pet->GetHealth();
}
void CalculateAttackPowerAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float mod = 1.0f; //Hunter contribution modifier
float bonusAP = 0.0f;
PetSpellMap::const_iterator itr = (pet->ToPet()->m_spells.find(62758)); // Wild Hunt rank 1
if (itr == pet->ToPet()->m_spells.end())
itr = pet->ToPet()->m_spells.find(62762); // Wild Hunt rank 2
if (itr != pet->ToPet()->m_spells.end()) // If pet has Wild Hunt
{
SpellInfo const* spellInfo = sSpellMgr->GetSpellInfo(itr->first); // Then get the SpellProto and add the dummy effect value
mod += CalculatePct(1.0f, spellInfo->Effects[EFFECT_1].CalcValue());
}
bonusAP = owner->GetTotalAttackPowerValue(RANGED_ATTACK) * 0.22f * mod;
amount += bonusAP;
}
}
void CalculateDamageDoneAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float mod = 1.0f; //Hunter contribution modifier
float bonusDamage = 0.0f;
PetSpellMap::const_iterator itr = (pet->ToPet()->m_spells.find(62758)); // Wild Hunt rank 1
if (itr == pet->ToPet()->m_spells.end())
itr = pet->ToPet()->m_spells.find(62762); // Wild Hunt rank 2
if (itr != pet->ToPet()->m_spells.end()) // If pet has Wild Hunt
{
SpellInfo const* spellInfo = sSpellMgr->GetSpellInfo(itr->first); // Then get the SpellProto and add the dummy effect value
mod += CalculatePct(1.0f, spellInfo->Effects[EFFECT_1].CalcValue());
}
bonusDamage = owner->GetTotalAttackPowerValue(RANGED_ATTACK) * 0.1287f * mod;
amount += bonusDamage;
}
}
void Register() OVERRIDE
{
OnEffectRemove += AuraEffectRemoveFn(spell_hun_pet_scaling_01_AuraScript::RemoveEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
AfterEffectApply += AuraEffectApplyFn(spell_hun_pet_scaling_01_AuraScript::ApplyEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_01_AuraScript::CalculateStaminaAmount, EFFECT_0, SPELL_AURA_MOD_STAT);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_01_AuraScript::CalculateAttackPowerAmount, EFFECT_1, SPELL_AURA_MOD_ATTACK_POWER);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_01_AuraScript::CalculateDamageDoneAmount, EFFECT_2, SPELL_AURA_MOD_DAMAGE_DONE);
}
private:
uint32 _tempHealth;
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_scaling_01_AuraScript();
}
};
class spell_hun_pet_scaling_02 : public SpellScriptLoader
{
public:
spell_hun_pet_scaling_02() : SpellScriptLoader("spell_hun_pet_scaling_02") { }
class spell_hun_pet_scaling_02_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_pet_scaling_02_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateFrostResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_FROST), 40);
amount += ownerBonus;
}
}
void CalculateFireResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_FIRE), 40);
amount += ownerBonus;
}
}
void CalculateNatureResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_NATURE), 40);
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_02_AuraScript::CalculateFrostResistanceAmount, EFFECT_1, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_02_AuraScript::CalculateFireResistanceAmount, EFFECT_0, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_02_AuraScript::CalculateNatureResistanceAmount, EFFECT_2, SPELL_AURA_MOD_RESISTANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_scaling_02_AuraScript();
}
};
class spell_hun_pet_scaling_03 : public SpellScriptLoader
{
public:
spell_hun_pet_scaling_03() : SpellScriptLoader("spell_hun_pet_scaling_03") { }
class spell_hun_pet_scaling_03_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_pet_scaling_03_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateShadowResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_SHADOW), 40);
amount += ownerBonus;
}
}
void CalculateArcaneResistanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetResistance(SPELL_SCHOOL_ARCANE), 40);
amount += ownerBonus;
}
}
void CalculateArmorAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsPet())
return;
Unit* owner = pet->ToPet()->GetOwner();
if (!owner)
return;
float ownerBonus = 0.0f;
ownerBonus = CalculatePct(owner->GetArmor(), 35);
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_03_AuraScript::CalculateShadowResistanceAmount, EFFECT_0, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_03_AuraScript::CalculateArcaneResistanceAmount, EFFECT_1, SPELL_AURA_MOD_RESISTANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_03_AuraScript::CalculateArmorAmount, EFFECT_2, SPELL_AURA_MOD_RESISTANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_scaling_03_AuraScript();
}
};
class spell_hun_pet_scaling_04 : public SpellScriptLoader
{
public:
spell_hun_pet_scaling_04() : SpellScriptLoader("spell_hun_pet_scaling_04") { }
class spell_hun_pet_scaling_04_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_pet_scaling_04_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountMeleeHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitMelee = 0.0f;
// Increase hit from SPELL_AURA_MOD_HIT_CHANCE
HitMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_HIT_CHANCE);
                    // Increase melee hit from melee hit ratings
HitMelee += owner->GetRatingBonusValue(CR_HIT_MELEE);
amount += int32(HitMelee);
}
}
void CalculateAmountSpellHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitSpell = 0.0f;
// Increase hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
HitSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
// Increase hit spell from spell hit ratings
HitSpell += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitSpell);
}
}
void CalculateAmountExpertise(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float Expertise = 0.0f;
                    // Increase expertise from SPELL_AURA_MOD_EXPERTISE
Expertise += owner->GetTotalAuraModifier(SPELL_AURA_MOD_EXPERTISE);
// Increase Expertise from Expertise ratings
Expertise += owner->GetRatingBonusValue(CR_EXPERTISE);
amount += int32(Expertise);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_04_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_04_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_scaling_04_AuraScript::CalculateAmountExpertise, EFFECT_2, SPELL_AURA_MOD_EXPERTISE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_scaling_04_AuraScript();
}
};
class spell_hun_pet_passive_crit : public SpellScriptLoader
{
public:
spell_hun_pet_passive_crit() : SpellScriptLoader("spell_hun_pet_passive_crit") { }
class spell_hun_pet_passive_crit_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_pet_passive_crit_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountCritSpell(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritSpell = 0.0f;
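                    // NOTE: the owner-based contributions below are commented out,
                    // so this handler currently contributes 0.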
// Crit from Intellect
// CritSpell += owner->GetSpellCritFromIntellect();
// Increase crit from SPELL_AURA_MOD_SPELL_CRIT_CHANCE
// CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
// CritSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit spell from spell crit ratings
// CritSpell += owner->GetRatingBonusValue(CR_CRIT_SPELL);
amount += (CritSpell*0.8f);
}
}
void CalculateAmountCritMelee(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float CritMelee = 0.0f;
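                    // NOTE: the owner-based contributions below are commented out,
                    // so this handler currently contributes 0.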
// Crit from Agility
// CritMelee += owner->GetMeleeCritFromAgility();
// Increase crit from SPELL_AURA_MOD_WEAPON_CRIT_PERCENT
// CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
// Increase crit from SPELL_AURA_MOD_CRIT_PCT
// CritMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_CRIT_PCT);
// Increase crit melee from melee crit ratings
// CritMelee += owner->GetRatingBonusValue(CR_CRIT_MELEE);
amount += (CritMelee*0.8f);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_passive_crit_AuraScript::CalculateAmountCritSpell, EFFECT_1, SPELL_AURA_MOD_SPELL_CRIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_passive_crit_AuraScript::CalculateAmountCritMelee, EFFECT_0, SPELL_AURA_MOD_WEAPON_CRIT_PERCENT);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_passive_crit_AuraScript();
}
};
class spell_hun_pet_passive_damage_done : public SpellScriptLoader
{
public:
spell_hun_pet_passive_damage_done() : SpellScriptLoader("spell_hun_pet_passive_damage_done") { }
class spell_hun_pet_passive_damage_done_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_pet_passive_damage_done_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountDamageDone(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (GetCaster()->GetOwner()->ToPlayer())
{
// Pet's base damage changes depending on happiness
if (GetCaster()->IsPet() && GetCaster()->ToPet()->IsHunterPet())
{
switch (GetCaster()->ToPet()->GetHappinessState())
{
case HAPPY:
// 125% of normal damage
amount += 25.0f;
break;
case CONTENT:
// 100% of normal damage, nothing to modify
break;
case UNHAPPY:
// 75% of normal damage
amount += -25.0f;
break;
}
}
// Cobra Reflexes
if (AuraEffect* cobraReflexes = GetCaster()->GetAuraEffectOfRankedSpell(61682, EFFECT_0))
amount -= cobraReflexes->GetAmount();
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_pet_passive_damage_done_AuraScript::CalculateAmountDamageDone, EFFECT_0, SPELL_AURA_MOD_DAMAGE_PERCENT_DONE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_pet_passive_damage_done_AuraScript();
}
};
class spell_hun_animal_handler : public SpellScriptLoader
{
public:
spell_hun_animal_handler() : SpellScriptLoader("spell_hun_animal_handler") { }
class spell_hun_animal_handler_AuraScript : public AuraScript
{
PrepareAuraScript(spell_hun_animal_handler_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountDamageDone(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
                    if (AuraEffect* aurEffect = owner->GetAuraEffectOfRankedSpell(SPELL_HUNTER_ANIMAL_HANDLER, EFFECT_1))
                        amount = aurEffect->GetAmount();
else
amount = 0;
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_hun_animal_handler_AuraScript::CalculateAmountDamageDone, EFFECT_0, SPELL_AURA_MOD_ATTACK_POWER_PCT);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_hun_animal_handler_AuraScript();
}
};
class spell_dk_avoidance_passive : public SpellScriptLoader
{
public:
spell_dk_avoidance_passive() : SpellScriptLoader("spell_dk_avoidance_passive") { }
class spell_dk_avoidance_passive_AuraScript : public AuraScript
{
PrepareAuraScript(spell_dk_avoidance_passive_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAvoidanceAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (Unit* owner = pet->GetOwner())
{
// Army of the dead ghoul
if (pet->GetEntry() == ENTRY_ARMY_OF_THE_DEAD_GHOUL)
amount = -90;
// Night of the dead
else if (Aura* aur = owner->GetAuraOfRankedSpell(SPELL_NIGHT_OF_THE_DEAD))
amount = aur->GetSpellInfo()->Effects[EFFECT_2].CalcValue();
}
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_avoidance_passive_AuraScript::CalculateAvoidanceAmount, EFFECT_0, SPELL_AURA_MOD_CREATURE_AOE_DAMAGE_AVOIDANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_dk_avoidance_passive_AuraScript();
}
};
class spell_dk_pet_scaling_01 : public SpellScriptLoader
{
public:
spell_dk_pet_scaling_01() : SpellScriptLoader("spell_dk_pet_scaling_01") { }
class spell_dk_pet_scaling_01_AuraScript : public AuraScript
{
PrepareAuraScript(spell_dk_pet_scaling_01_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
_tempHealth = 0;
return true;
}
void CalculateStaminaAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (pet->IsGuardian())
{
if (Unit* owner = pet->GetOwner())
{
float mod = 0.3f;
                            // Ravenous Dead. Just check whether the owner has Ravenous Dead, since its effect is not an aura
if (AuraEffect const* aurEff = owner->GetAuraEffect(SPELL_AURA_MOD_TOTAL_STAT_PERCENTAGE, SPELLFAMILY_DEATHKNIGHT, 3010, 0))
                                mod += aurEff->GetSpellInfo()->Effects[EFFECT_1].CalcValue() / 100.0f; // Ravenous Dead edits the original scale (float division avoids truncating to zero)
// Glyph of the Ghoul
if (AuraEffect const* aurEff = owner->GetAuraEffect(SPELL_DEATH_KNIGHT_GLYPH_OF_GHOUL, 0))
                                mod += aurEff->GetAmount() / 100.0f; // float division avoids truncating to zero
float ownerBonus = float(owner->GetStat(STAT_STAMINA)) * mod;
amount += ownerBonus;
}
}
}
}
void ApplyEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
if (_tempHealth)
pet->SetHealth(_tempHealth);
}
void RemoveEffect(AuraEffect const* /* aurEff */, AuraEffectHandleModes /*mode*/)
{
if (Unit* pet = GetUnitOwner())
_tempHealth = pet->GetHealth();
}
void CalculateStrengthAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
if (!pet->IsGuardian())
return;
Unit* owner = pet->GetOwner();
if (!owner)
return;
float mod = 0.7f;
// Ravenous Dead
AuraEffect const* aurEff = NULL;
                // Just check whether the owner has Ravenous Dead, since its effect is not an aura
aurEff = owner->GetAuraEffect(SPELL_AURA_MOD_TOTAL_STAT_PERCENTAGE, SPELLFAMILY_DEATHKNIGHT, 3010, 0);
if (aurEff)
{
mod += CalculatePct(mod, aurEff->GetSpellInfo()->Effects[EFFECT_1].CalcValue()); // Ravenous Dead edits the original scale
}
// Glyph of the Ghoul
aurEff = owner->GetAuraEffect(58686, 0);
if (aurEff)
mod += CalculatePct(1.0f, aurEff->GetAmount()); // Glyph of the Ghoul adds a flat value to the scale mod
float ownerBonus = float(owner->GetStat(STAT_STRENGTH)) * mod;
amount += ownerBonus;
}
}
void Register() OVERRIDE
{
OnEffectRemove += AuraEffectRemoveFn(spell_dk_pet_scaling_01_AuraScript::RemoveEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
AfterEffectApply += AuraEffectApplyFn(spell_dk_pet_scaling_01_AuraScript::ApplyEffect, EFFECT_0, SPELL_AURA_MOD_STAT, AURA_EFFECT_HANDLE_CHANGE_AMOUNT_MASK);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_pet_scaling_01_AuraScript::CalculateStaminaAmount, EFFECT_0, SPELL_AURA_MOD_STAT);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_pet_scaling_01_AuraScript::CalculateStrengthAmount, EFFECT_1, SPELL_AURA_MOD_STAT);
}
private:
uint32 _tempHealth;
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_dk_pet_scaling_01_AuraScript();
}
};
class spell_dk_pet_scaling_02 : public SpellScriptLoader
{
public:
spell_dk_pet_scaling_02() : SpellScriptLoader("spell_dk_pet_scaling_02") { }
class spell_dk_pet_scaling_02_AuraScript : public AuraScript
{
PrepareAuraScript(spell_dk_pet_scaling_02_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountMeleeHaste(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HasteMelee = 0.0f;
                    // Convert the owner's base attack speed modifier into a haste percentage for the pet
HasteMelee += (1-owner->m_modAttackSpeedPct[BASE_ATTACK])*100;
amount += int32(HasteMelee);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_pet_scaling_02_AuraScript::CalculateAmountMeleeHaste, EFFECT_1, SPELL_AURA_MELEE_SLOW);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_dk_pet_scaling_02_AuraScript();
}
};
class spell_dk_pet_scaling_03 : public SpellScriptLoader
{
public:
spell_dk_pet_scaling_03() : SpellScriptLoader("spell_dk_pet_scaling_03") { }
class spell_dk_pet_scaling_03_AuraScript : public AuraScript
{
PrepareAuraScript(spell_dk_pet_scaling_03_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
void CalculateAmountMeleeHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitMelee = 0.0f;
// Increase hit from SPELL_AURA_MOD_HIT_CHANCE
HitMelee += owner->GetTotalAuraModifier(SPELL_AURA_MOD_HIT_CHANCE);
                    // Increase melee hit from melee hit ratings
HitMelee += owner->GetRatingBonusValue(CR_HIT_MELEE);
amount += int32(HitMelee);
}
}
void CalculateAmountSpellHit(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HitSpell = 0.0f;
// Increase hit from SPELL_AURA_MOD_SPELL_HIT_CHANCE
HitSpell += owner->GetTotalAuraModifier(SPELL_AURA_MOD_SPELL_HIT_CHANCE);
// Increase hit spell from spell hit ratings
HitSpell += owner->GetRatingBonusValue(CR_HIT_SPELL);
amount += int32(HitSpell);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_pet_scaling_03_AuraScript::CalculateAmountMeleeHit, EFFECT_0, SPELL_AURA_MOD_HIT_CHANCE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_pet_scaling_03_AuraScript::CalculateAmountSpellHit, EFFECT_1, SPELL_AURA_MOD_SPELL_HIT_CHANCE);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_dk_pet_scaling_03_AuraScript();
}
};
class spell_dk_rune_weapon_scaling_02 : public SpellScriptLoader
{
public:
spell_dk_rune_weapon_scaling_02() : SpellScriptLoader("spell_dk_rune_weapon_scaling_02") { }
class spell_dk_rune_weapon_scaling_02_AuraScript : public AuraScript
{
PrepareAuraScript(spell_dk_rune_weapon_scaling_02_AuraScript);
bool Load() OVERRIDE
{
if (!GetCaster() || !GetCaster()->GetOwner() || GetCaster()->GetOwner()->GetTypeId() != TYPEID_PLAYER)
return false;
return true;
}
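        // Mirror the owner's weapon: store the owner's melee attack power as the
        // guardian's bonus damage, then add the owner's calculated base-attack damage.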
void CalculateDamageDoneAmount(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (Unit* pet = GetUnitOwner())
{
Unit* owner = pet->GetOwner();
if (!owner)
return;
if (pet->IsGuardian())
((Guardian*)pet)->SetBonusDamage(owner->GetTotalAttackPowerValue(BASE_ATTACK));
amount += owner->CalculateDamage(BASE_ATTACK, true, true);
}
}
void CalculateAmountMeleeHaste(AuraEffect const* /* aurEff */, int32& amount, bool& /*canBeRecalculated*/)
{
if (!GetCaster() || !GetCaster()->GetOwner())
return;
if (Player* owner = GetCaster()->GetOwner()->ToPlayer())
{
// For others recalculate it from:
float HasteMelee = 0.0f;
                    // Convert the owner's base attack speed modifier into a haste percentage for the pet
HasteMelee += (1-owner->m_modAttackSpeedPct[BASE_ATTACK])*100;
amount += int32(HasteMelee);
}
}
void Register() OVERRIDE
{
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_rune_weapon_scaling_02_AuraScript::CalculateDamageDoneAmount, EFFECT_0, SPELL_AURA_MOD_DAMAGE_DONE);
DoEffectCalcAmount += AuraEffectCalcAmountFn(spell_dk_rune_weapon_scaling_02_AuraScript::CalculateAmountMeleeHaste, EFFECT_1, SPELL_AURA_MELEE_SLOW);
}
};
AuraScript* GetAuraScript() const OVERRIDE
{
return new spell_dk_rune_weapon_scaling_02_AuraScript();
}
};
void AddSC_pet_spell_scripts()
{
new spell_gen_pet_calculate();
}<|fim▁end|> | } |
<|file_name|>headers.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 Uber Technologies, Inc.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package thrift
import (
"fmt"
"io"
"io/ioutil"
"github.com/uber/tchannel/golang/typed"
)
// TODO(prashant): Use a small buffer and then flush it when it's full.
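// Wire format: a uint16 pair count, then each key and value as a
// length-prefixed (uint16) string.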
func writeHeaders(w io.Writer, headers map[string]string) error {
// Calculate the size of the buffer that we need.
size := 2
for k, v := range headers {
size += 4 /* size of key/value lengths */
size += len(k) + len(v)
}
buf := make([]byte, size)
writeBuffer := typed.NewWriteBuffer(buf)
writeBuffer.WriteUint16(uint16(len(headers)))
for k, v := range headers {
writeBuffer.WriteLen16String(k)
writeBuffer.WriteLen16String(v)
}
if err := writeBuffer.Err(); err != nil {
return err
}
// Safety check to ensure the bytes written calculation is correct.
if writeBuffer.BytesWritten() != size {
return fmt.Errorf("writeHeaders size calculation wrong, expected to write %v bytes, only wrote %v bytes",
size, writeBuffer.BytesWritten())
}
<|fim▁hole|> _, err := writeBuffer.FlushTo(w)
return err
}
// TODO(prashant): Allow typed.ReadBuffer to read directly from the reader.
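// readHeaders parses the same wire format written by writeHeaders:
// a uint16 pair count followed by len16-prefixed key/value strings.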
func readHeaders(r io.Reader) (map[string]string, error) {
bs, err := ioutil.ReadAll(r)
if err != nil {
return nil, err
}
buffer := typed.NewReadBuffer(bs)
numHeaders := buffer.ReadUint16()
if numHeaders == 0 {
return nil, buffer.Err()
}
headers := make(map[string]string)
for i := 0; i < int(numHeaders) && buffer.Err() == nil; i++ {
k := buffer.ReadLen16String()
v := buffer.ReadLen16String()
headers[k] = v
}
return headers, buffer.Err()
}<|fim▁end|> | |
<|file_name|>users.rs<|end_file_name|><|fim▁begin|>use failure::format_err;
use log::error;
use rusqlite::types::ToSql;
use serde_derive::{Deserialize, Serialize};
use crate::db::{self, Database};
use crate::errors::*;
#[derive(Deserialize, Serialize, Clone)]
pub struct UserInfo {
pub id: Option<i32>,
pub github: String,
pub slack: String,
pub mute_direct_messages: bool,
}
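/// Persistent mapping from GitHub usernames to Slack handles,
/// backed by the SQLite `users` table.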
#[derive(Clone)]
pub struct UserConfig {
db: Database,
}
impl UserInfo {
pub fn new(git_user: &str, slack_user: &str) -> UserInfo {
UserInfo {
id: None,
github: git_user.to_string(),
slack: slack_user.to_string(),
mute_direct_messages: false,
}
}
}
impl UserConfig {
pub fn new(db: Database) -> UserConfig {
UserConfig { db }
}
pub fn insert(&mut self, git_user: &str, slack_user: &str) -> Result<()> {
self.insert_info(&UserInfo::new(git_user, slack_user))
}
pub fn insert_info(&mut self, user: &UserInfo) -> Result<()> {
let conn = self.db.connect()?;
conn.execute(
"INSERT INTO users (github_name, slack_name, mute_direct_messages) VALUES (?1, ?2, ?3)",
&[
&user.github,
&user.slack,
&db::to_tinyint(user.mute_direct_messages) as &dyn ToSql,
],
)
.map_err(|e| format_err!("Error inserting user {}: {}", user.github, e))?;
Ok(())
}
pub fn update(&mut self, user: &UserInfo) -> Result<()> {
let conn = self.db.connect()?;
conn.execute(
"UPDATE users set github_name = ?1, slack_name = ?2, mute_direct_messages = ?3 where id = ?4",
&[&user.github, &user.slack, &db::to_tinyint(user.mute_direct_messages) as &dyn ToSql, &user.id],
).map_err(|e| format_err!("Error updating user {}: {}", user.github, e))?;
Ok(())
}
pub fn delete(&mut self, user_id: i32) -> Result<()> {
let conn = self.db.connect()?;
conn.execute("DELETE from users where id = ?1", &[&user_id])
.map_err(|e| format_err!("Error deleting user {}: {}", user_id, e))?;
Ok(())<|fim▁hole|> self.lookup_info(github_name).map(|u| u.slack)
}
pub fn slack_user_mention(&self, github_name: &str) -> Option<String> {
self.lookup_info(github_name).and_then(|u| {
if u.mute_direct_messages {
None
} else {
Some(mention(&u.slack))
}
})
}
pub fn get_all(&self) -> Result<Vec<UserInfo>> {
let conn = self.db.connect()?;
let mut stmt = conn.prepare(
"SELECT id, slack_name, github_name, mute_direct_messages FROM users ORDER BY github_name",
)?;
let found = stmt.query_map([], |row| {
Ok(UserInfo {
id: row.get(0)?,
slack: row.get(1)?,
github: row.get(2)?,
mute_direct_messages: db::to_bool(row.get(3)?),
})
})?;
let mut users = vec![];
for user in found {
users.push(user?);
}
Ok(users)
}
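    // Looks up a user by GitHub name; DB errors are logged and treated as "not found".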
pub fn lookup_info(&self, github_name: &str) -> Option<UserInfo> {
match self.do_lookup_info(github_name) {
Ok(u) => u,
Err(e) => {
error!("Error looking up user: {}", e);
None
}
}
}
fn do_lookup_info(&self, github_name: &str) -> Result<Option<UserInfo>> {
let github_name = github_name.to_string();
let conn = self.db.connect()?;
let mut stmt = conn.prepare(
"SELECT id, slack_name, mute_direct_messages FROM users where github_name = ?1",
)?;
let found = stmt.query_map(&[&github_name], |row| {
Ok(UserInfo {
id: row.get(0)?,
slack: row.get(1)?,
github: github_name.clone(),
mute_direct_messages: db::to_bool(row.get(2)?),
})
})?;
let user = found.into_iter().flatten().next();
Ok(user)
}
}
pub fn mention(username: &str) -> String {
format!("@{}", username)
}
#[cfg(test)]
mod tests {
use super::*;
use tempdir::TempDir;
fn new_test() -> (UserConfig, TempDir) {
let temp_dir = TempDir::new("users.rs").unwrap();
let db_file = temp_dir.path().join("db.sqlite3");
let db = Database::new(&db_file.to_string_lossy()).expect("create temp database");
(UserConfig::new(db), temp_dir)
}
#[test]
fn test_slack_user_name_no_defaults() {
let (users, _temp) = new_test();
assert_eq!(None, users.slack_user_name("joe"));
assert_eq!(None, users.slack_user_mention("joe"));
}
#[test]
fn test_slack_user_name() {
let (mut users, _temp) = new_test();
users.insert("some-git-user", "the-slacker").unwrap();
assert_eq!(
Some("the-slacker".into()),
users.slack_user_name("some-git-user")
);
assert_eq!(
Some("@the-slacker".into()),
users.slack_user_mention("some-git-user")
);
assert_eq!(None, users.slack_user_name("some.other.user"));
assert_eq!(None, users.slack_user_mention("some.other.user"));
}
#[test]
fn test_mention() {
assert_eq!("@me", mention("me"));
}
}<|fim▁end|> | }
pub fn slack_user_name(&self, github_name: &str) -> Option<String> { |
<|file_name|>helper_sent.py<|end_file_name|><|fim▁begin|>from helper_sql import sqlExecute
<|fim▁hole|><|fim▁end|> | def insert(t):
sqlExecute('''INSERT INTO sent VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)''', *t) |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Configuration file for the Sphinx documentation builder.
This file does only contain a selection of the most common options. For a
full list see the documentation:
http://www.sphinx-doc.org/en/master/config
"""
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, u'/tmp/lingvo/lingvo')
# -- Project information -----------------------------------------------------
project = u'Lingvo'
copyright = u'2018'
author = u''
# The short X.Y version
version = u''
# The full version, including alpha/beta/rc tags
release = u''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.mathjax',
'sphinx.ext.napoleon', 'sphinx.ext.todo', 'sphinx.ext.viewcode'
]
autodoc_default_flags = [
'members', 'undoc-members', 'private-members', 'show-inheritance'
]
autodoc_member_order = 'bysource'
napoleon_google_docstring = True
default_role = 'py:obj'
intersphinx_mapping = {
'python': ('https://docs.python.org/3.7', None),
'numpy': ('http://numpy.org/doc/stable/', None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
from docutils.transforms import Transform
from recommonmark.parser import CommonMarkParser
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'lingvodoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).<|fim▁hole|>latex_documents = [
(master_doc, 'lingvo.tex', u'Lingvo Documentation', u'', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'lingvo', u'Lingvo Documentation', [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'lingvo', u'Lingvo Documentation', author, 'Lingvo',
'One line description of project.', 'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
class ResetFlags(Transform):
default_priority = 999
def apply(self):
from absl import flags # pylint: disable=g-import-not-at-top
for flag in list(flags.FLAGS):
if flag not in ('showprefixforinfo',):
delattr(flags.FLAGS, flag)
def setup(app):
app.add_transform(ResetFlags)<|fim▁end|> | |
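# Illustrative sketch (not part of the original config; the class name is
# hypothetical): docutils applies transforms in ascending priority order, so
# a transform with a lower `default_priority` would run before ResetFlags
# (999). Kept commented out so it has no effect on the build.
#
# class InspectFlagsFirst(Transform):
#     default_priority = 100  # runs before ResetFlags
#
#     def apply(self):
#         pass  # flags are still populated at this point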
<|file_name|>Atom.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:2c648d581addc08fbd0fe28ad0d26ba08942ebfe6be59756e11ff9abcd9d3bbd<|fim▁hole|><|fim▁end|> | size 23184 |
<|file_name|>_binomtest.py<|end_file_name|><|fim▁begin|>from math import sqrt
import numpy as np
from scipy._lib._util import _validate_int
from scipy.optimize import brentq
from scipy.special import ndtri
from ._discrete_distns import binom
from ._common import ConfidenceInterval
class BinomTestResult:
"""
Result of `scipy.stats.binomtest`.
Attributes
----------
k : int
The number of successes (copied from `binomtest` input).
n : int
The number of trials (copied from `binomtest` input).
alternative : str
Indicates the alternative hypothesis specified in the input
to `binomtest`. It will be one of ``'two-sided'``, ``'greater'``,
or ``'less'``.
pvalue : float
The p-value of the hypothesis test.
proportion_estimate : float
The estimate of the proportion of successes.
Methods
-------
proportion_ci :
Compute the confidence interval for the estimate of the proportion.
"""
def __init__(self, k, n, alternative, pvalue, proportion_estimate):
self.k = k
self.n = n
self.alternative = alternative
self.proportion_estimate = proportion_estimate
self.pvalue = pvalue
def __repr__(self):
s = ("BinomTestResult("
f"k={self.k}, "
f"n={self.n}, "
f"alternative={self.alternative!r}, "
f"proportion_estimate={self.proportion_estimate}, "
f"pvalue={self.pvalue})")
return s
def proportion_ci(self, confidence_level=0.95, method='exact'):
"""
Compute the confidence interval for the estimated proportion.
Parameters
----------
confidence_level : float, optional
Confidence level for the computed confidence interval
of the estimated proportion. Default is 0.95.
method : {'exact', 'wilson', 'wilsoncc'}, optional
Selects the method used to compute the confidence interval
for the estimate of the proportion:
'exact' :
Use the Clopper-Pearson exact method [1]_.
'wilson' :
Wilson's method, without continuity correction ([2]_, [3]_).
'wilsoncc' :
Wilson's method, with continuity correction ([2]_, [3]_).
Default is ``'exact'``.
Returns
-------
ci : ``ConfidenceInterval`` object
The object has attributes ``low`` and ``high`` that hold the
lower and upper bounds of the confidence interval.
References
----------
.. [1] C. J. Clopper and E. S. Pearson, The use of confidence or
fiducial limits illustrated in the case of the binomial,
Biometrika, Vol. 26, No. 4, pp 404-413 (Dec. 1934).
.. [2] E. B. Wilson, Probable inference, the law of succession, and
statistical inference, J. Amer. Stat. Assoc., 22, pp 209-212
(1927).
.. [3] Robert G. Newcombe, Two-sided confidence intervals for the
single proportion: comparison of seven methods, Statistics
in Medicine, 17, pp 857-872 (1998).
Examples
--------
>>> from scipy.stats import binomtest
>>> result = binomtest(k=7, n=50, p=0.1)
>>> result.proportion_estimate
0.14
>>> result.proportion_ci()
ConfidenceInterval(low=0.05819170033997342, high=0.26739600249700846)
"""
if method not in ('exact', 'wilson', 'wilsoncc'):
raise ValueError("method must be one of 'exact', 'wilson' or "
"'wilsoncc'.")
if not (0 <= confidence_level <= 1):
raise ValueError('confidence_level must be in the interval '
'[0, 1].')
if method == 'exact':
low, high = _binom_exact_conf_int(self.k, self.n,
confidence_level,
self.alternative)
else:
# method is 'wilson' or 'wilsoncc'
low, high = _binom_wilson_conf_int(self.k, self.n,
confidence_level,
self.alternative,
correction=method == 'wilsoncc')
return ConfidenceInterval(low=low, high=high)
def _findp(func):
try:
p = brentq(func, 0, 1)
except RuntimeError:
raise RuntimeError('numerical solver failed to converge when '
'computing the confidence limits') from None
except ValueError as exc:
raise ValueError('brentq raised a ValueError; report this to the '<|fim▁hole|> return p
def _binom_exact_conf_int(k, n, confidence_level, alternative):
"""
Compute the estimate and confidence interval for the binomial test.
Returns proportion, prop_low, prop_high
"""
if alternative == 'two-sided':
alpha = (1 - confidence_level) / 2
if k == 0:
plow = 0.0
else:
plow = _findp(lambda p: binom.sf(k-1, n, p) - alpha)
if k == n:
phigh = 1.0
else:
phigh = _findp(lambda p: binom.cdf(k, n, p) - alpha)
elif alternative == 'less':
alpha = 1 - confidence_level
plow = 0.0
if k == n:
phigh = 1.0
else:
phigh = _findp(lambda p: binom.cdf(k, n, p) - alpha)
elif alternative == 'greater':
alpha = 1 - confidence_level
if k == 0:
plow = 0.0
else:
plow = _findp(lambda p: binom.sf(k-1, n, p) - alpha)
phigh = 1.0
return plow, phigh
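# Illustrative sketch (not part of the original module; `_demo_exact_ci` is
# a hypothetical name): the Clopper-Pearson bounds above invert the binomial
# tails, i.e. for a two-sided interval the lower bound solves
# sf(k-1, n, p) = alpha and the upper bound solves cdf(k, n, p) = alpha,
# with alpha = (1 - confidence_level) / 2.
def _demo_exact_ci():
    low, high = _binom_exact_conf_int(7, 50, 0.95, 'two-sided')
    # Matches the proportion_ci() example in the docstring above.
    assert abs(low - 0.05819) < 1e-4 and abs(high - 0.26740) < 1e-4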
def _binom_wilson_conf_int(k, n, confidence_level, alternative, correction):
# This function assumes that the arguments have already been validated.
# In particular, `alternative` must be one of 'two-sided', 'less' or
# 'greater'.
p = k / n
if alternative == 'two-sided':
z = ndtri(0.5 + 0.5*confidence_level)
else:
z = ndtri(confidence_level)
# For reference, the formulas implemented here are from
# Newcombe (1998) (ref. [3] in the proportion_ci docstring).
denom = 2*(n + z**2)
center = (2*n*p + z**2)/denom
q = 1 - p
if correction:
if alternative == 'less' or k == 0:
lo = 0.0
else:
dlo = (1 + z*sqrt(z**2 - 2 - 1/n + 4*p*(n*q + 1))) / denom
lo = center - dlo
if alternative == 'greater' or k == n:
hi = 1.0
else:
dhi = (1 + z*sqrt(z**2 + 2 - 1/n + 4*p*(n*q - 1))) / denom
hi = center + dhi
else:
delta = z/denom * sqrt(4*n*p*q + z**2)
if alternative == 'less' or k == 0:
lo = 0.0
else:
lo = center - delta
if alternative == 'greater' or k == n:
hi = 1.0
else:
hi = center + delta
return lo, hi
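# Illustrative sketch (not part of the original module; `_demo_wilson_cc` is
# a hypothetical name): the continuity correction can only widen the
# interval relative to the uncorrected Wilson bounds.
def _demo_wilson_cc():
    lo, hi = _binom_wilson_conf_int(7, 50, 0.95, 'two-sided',
                                    correction=False)
    lo_cc, hi_cc = _binom_wilson_conf_int(7, 50, 0.95, 'two-sided',
                                          correction=True)
    assert lo_cc <= lo <= hi <= hi_cc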
def binomtest(k, n, p=0.5, alternative='two-sided'):
"""
Perform a test that the probability of success is p.
The binomial test [1]_ is a test of the null hypothesis that the
probability of success in a Bernoulli experiment is `p`.
Details of the test can be found in many texts on statistics, such
as section 24.5 of [2]_.
Parameters
----------
k : int
The number of successes.
n : int
The number of trials.
p : float, optional
The hypothesized probability of success, i.e. the expected
proportion of successes. The value must be in the interval
``0 <= p <= 1``. The default value is ``p = 0.5``.
alternative : {'two-sided', 'greater', 'less'}, optional
Indicates the alternative hypothesis. The default value is
'two-sided'.
Returns
-------
result : `~scipy.stats._result_classes.BinomTestResult` instance
The return value is an object with the following attributes:
k : int
The number of successes (copied from `binomtest` input).
n : int
The number of trials (copied from `binomtest` input).
alternative : str
Indicates the alternative hypothesis specified in the input
to `binomtest`. It will be one of ``'two-sided'``, ``'greater'``,
or ``'less'``.
pvalue : float
The p-value of the hypothesis test.
proportion_estimate : float
The estimate of the proportion of successes.
The object has the following methods:
proportion_ci(confidence_level=0.95, method='exact') :
Compute the confidence interval for ``proportion_estimate``.
Notes
-----
.. versionadded:: 1.7.0
References
----------
.. [1] Binomial test, https://en.wikipedia.org/wiki/Binomial_test
.. [2] Jerrold H. Zar, Biostatistical Analysis (fifth edition),
Prentice Hall, Upper Saddle River, New Jersey USA (2010)
Examples
--------
>>> from scipy.stats import binomtest
A car manufacturer claims that no more than 10% of their cars are unsafe.
15 cars are inspected for safety, 3 were found to be unsafe. Test the
manufacturer's claim:
>>> result = binomtest(3, n=15, p=0.1, alternative='greater')
>>> result.pvalue
0.18406106910639114
The null hypothesis cannot be rejected at the 5% level of significance
because the returned p-value is greater than the critical value of 5%.
The estimated proportion is simply ``3/15``:
>>> result.proportion_estimate
0.2
We can use the `proportion_ci()` method of the result to compute the
confidence interval of the estimate:
>>> result.proportion_ci(confidence_level=0.95)
ConfidenceInterval(low=0.05684686759024681, high=1.0)
"""
k = _validate_int(k, 'k', minimum=0)
n = _validate_int(n, 'n', minimum=1)
if k > n:
raise ValueError('k must not be greater than n.')
if not (0 <= p <= 1):
raise ValueError("p must be in range [0,1]")
if alternative not in ('two-sided', 'less', 'greater'):
raise ValueError("alternative not recognized; \n"
"must be 'two-sided', 'less' or 'greater'")
if alternative == 'less':
pval = binom.cdf(k, n, p)
elif alternative == 'greater':
pval = binom.sf(k-1, n, p)
else:
# alternative is 'two-sided'
d = binom.pmf(k, n, p)
rerr = 1 + 1e-7
if k == p * n:
# special case as shortcut, would also be handled by `else` below
pval = 1.
elif k < p * n:
ix = _binary_search_for_binom_tst(lambda x1: -binom.pmf(x1, n, p),
-d*rerr, np.ceil(p * n), n)
# y is the number of terms between mode and n that are <= d*rerr.
# ix gave us the first term where a(ix) <= d*rerr < a(ix-1)
# if the first equality doesn't hold, y=n-ix. Otherwise, we
# need to include ix as well as the equality holds. Note that
# the equality will hold in very very rare situations due to rerr.
y = n - ix + int(d*rerr == binom.pmf(ix, n, p))
pval = binom.cdf(k, n, p) + binom.sf(n - y, n, p)
else:
ix = _binary_search_for_binom_tst(lambda x1: binom.pmf(x1, n, p),
d*rerr, 0, np.floor(p * n))
# y is the number of terms between 0 and mode that are <= d*rerr.
# we need to add a 1 to account for the 0 index.
# For comparing this with old behavior, see
# tst_binary_srch_for_binom_tst method in test_morestats.
y = ix + 1
pval = binom.cdf(y-1, n, p) + binom.sf(k-1, n, p)
pval = min(1.0, pval)
result = BinomTestResult(k=k, n=n, alternative=alternative,
proportion_estimate=k/n, pvalue=pval)
return result
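# Illustrative sketch (not part of the original module; `_demo_binary_search`
# is a hypothetical name): the helper defined below returns the index ix
# with a(ix) <= d < a(ix+1) for an ascending sequence.
def _demo_binary_search():
    seq = [0.1, 0.2, 0.4, 0.8]
    ix = _binary_search_for_binom_tst(lambda i: seq[i], 0.3, 0, len(seq) - 1)
    assert seq[ix] <= 0.3 < seq[ix + 1]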
def _binary_search_for_binom_tst(a, d, lo, hi):
"""
Conducts an implicit binary search on a function specified by `a`.
Meant to be used on the binomial PMF for the case of two-sided tests
to obtain the value on the other side of the mode where the tail
probability should be computed. The values on either side of
the mode are always in order, meaning binary search is applicable.
Parameters
----------
a : callable
The function over which to perform binary search. Its values
for inputs lo and hi should be in ascending order.
d : float
The value to search for.
lo : int
The lower end of range to search.
hi : int
The higher end of the range to search.
Returns
-------
int
The index i, between lo and hi, such that
a(i) <= d < a(i+1).
"""
while lo < hi:
mid = lo + (hi-lo)//2
midval = a(mid)
if midval < d:
lo = mid+1
elif midval > d:
hi = mid-1
else:
return mid
if a(lo) <= d:
return lo
else:
return lo-1<|fim▁end|> | 'SciPy developers') from exc |
<|file_name|>regexp-17.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | "aaaaaaaaaa,aaaaaaaaaaaaaaa".replace(/^(a+)\1*,\1+$/,"$1") |
<|file_name|>pool.rs<|end_file_name|><|fim▁begin|>use memory::bucket::Bucket;
use memory::bucket::BUCKET_PAGES;
use memory::heap::Heap;
use memory::heapbucket::HeapBucket;
use memory::page::Page;
use memory::page::PAGE_SIZE;
use std::alloc::alloc;
use std::alloc::dealloc;
use std::alloc::Layout;
use std::mem::size_of;
use std::ptr::null_mut;
use std::usize::MAX;
pub struct Pool {
pub map: usize,
heap: *mut Heap,
}
impl Pool {
pub fn create(heap: *mut Heap) -> *mut Pool {
unsafe {
let memory = alloc(Layout::from_size_align_unchecked(
PAGE_SIZE * BUCKET_PAGES * BUCKET_PAGES,
PAGE_SIZE * BUCKET_PAGES,
));
// println!("Pool memory: {:X}.", memory as usize);
if memory == null_mut() {
panic!("Unable to create pool: Out of memory.");
}
let mut pool: *mut Pool = null_mut();
for i in 0..BUCKET_PAGES {
let bucket_page = (memory as usize + PAGE_SIZE * BUCKET_PAGES * i) as *mut Page;
// println!("Bucket page: {}, {:X}.", i, bucket_page as usize);
(*bucket_page).reset();
let bucket = (*bucket_page).allocate(Bucket::Heap(HeapBucket {
pool: null_mut(),
map: MAX,
}));
// println!("Bucket initialized: {:X}.", bucket as usize);
if i == 0 {
pool = (*bucket_page).allocate(Pool {
map: MAX,
heap: heap,
});
// println!("Pool object: {:X}.", pool as usize);
// println!("Pool map after creation: {:X}.", (*pool).map);
}
match *bucket {
Bucket::Stack(ref _s) => (),
Bucket::Heap(ref mut h) => h.pool = pool,
}
}
pool
}
}
pub fn allocate_page(&mut self) -> *mut Page {
// println!("Pool map before allocation: {:X}.", self.map);
if self.map == 0 {
unsafe {
(*self.heap).mark_as_full(self);
return (*self.heap).allocate_page();
};
}
let pool_page_address = Page::get(self as *const Pool as usize) as usize;
// println!("Pool page address: {:X}.", pool_page_address);
let bucket_page_address = {
let position = Bucket::find_least_position(self.map);
// println!("Pool least position: {}.", position);
pool_page_address + PAGE_SIZE * BUCKET_PAGES * (position - 1)
};
// println!("Bucket page address: {:X}.", bucket_page_address);
let bucket = (bucket_page_address + size_of::<Page>()) as *mut Bucket;
// println!("Bucket address: {:X}.", bucket as usize);
unsafe { (*bucket).allocate_page() }
}
fn get_allocation_bit(&self, page: usize) -> usize {
let first_bucket_address = Page::get(self as *const Pool as usize) as usize;
let distance = page - first_bucket_address;
let position = distance / PAGE_SIZE / BUCKET_PAGES;
1 << (BUCKET_PAGES - 1 - position)
}
pub fn mark_as_full(&mut self, page: usize) {
let bit = self.get_allocation_bit(page);
// println!("Pool bit to be marked as full: {:X}", bit);
self.map = self.map & !bit;
}
pub fn mark_as_free(&mut self, page: usize) {
let bit = self.get_allocation_bit(page);
// println!("Pool bit to be marked as free: {:X}", bit);
let old_map = self.map;
self.map = self.map | bit;
if old_map == 0 {
unsafe {
(*self.heap).mark_as_free(self);
}
}
}
pub fn deallocate(&mut self) {
if self.map != MAX {
panic!("Pool is not empty!")
}
unsafe {
// println!(
// "Pool: dealloc {:X}",
// Page::get(self as *const Pool as usize) as usize
// );
dealloc(
Page::get(self as *const Pool as usize) as *mut u8,
Layout::from_size_align(
PAGE_SIZE * BUCKET_PAGES * BUCKET_PAGES,<|fim▁hole|> );
}
}
}<|fim▁end|> | PAGE_SIZE * BUCKET_PAGES,
)
.unwrap(), |
<|file_name|>gphotolistcamerascommand.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of the Photomaton-mobile package.
*
* (c) Romain Lamarche
*
* For the full copyright and license information, please view the LICENSE<|fim▁hole|>
#include "gphotolistcamerascommand.h"
#include "gphoto/gphotocameracontroller.h"
GPhotoListCamerasCommand::GPhotoListCamerasCommand(QObject *parent) :
AbstractGPhotoCommand(parent)
{
}
void GPhotoListCamerasCommand::execute(GPhotoCameraController* controller)
{
QList<GPhotoCameraHandle> detectedCameras = controller->internalDetectCameras();
// emit camerasDetected(detectedCameras);
}<|fim▁end|> | * file that was distributed with this source code.
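// Illustrative note (not part of the original file): commands of this kind
// are presumably queued on the GPhotoCameraController and executed on its
// worker thread; the commented-out `emit` above suggests results were meant
// to be reported back through Qt's signal/slot mechanism.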
*/ |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>"""This module holds the common test code.
.. seealso:: `pytest good practices
<https://pytest.org/latest/goodpractices.html>`__ for why this module exists.
"""
import os
import sys
# sys.path makes knittingpattern importable<|fim▁hole|>__builtins__["HERE"] = HERE<|fim▁end|> | HERE = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(HERE, "../..")) |
<|file_name|>install.py<|end_file_name|><|fim▁begin|>from ceph_deploy.util import templates
from ceph_deploy.lib import remoto
from ceph_deploy.hosts.common import map_components
from ceph_deploy.util.paths import gpg
NON_SPLIT_PACKAGES = ['ceph-osd', 'ceph-mon', 'ceph-mds']
def rpm_dist(distro):
if distro.normalized_name in ['redhat', 'centos', 'scientific'] and distro.normalized_release.int_major >= 6:
return 'el' + distro.normalized_release.major
return 'el6'
def repository_url_part(distro):
"""
Historically everything CentOS, RHEL, and Scientific has been mapped to
`el6` urls, but as we are adding repositories for `rhel`, the URLs should
map correctly to, say, `rhel6` or `rhel7`.
This function looks into the `distro` object and determines the right url
part for the given distro, falling back to `el6` when all else fails.
Specifically to work around the issue of CentOS vs RHEL::
>>> import platform
>>> platform.linux_distribution()
('Red Hat Enterprise Linux Server', '7.0', 'Maipo')
"""
if distro.normalized_release.int_major >= 6:
if distro.normalized_name == 'redhat':
return 'rhel' + distro.normalized_release.major
if distro.normalized_name in ['centos', 'scientific']:
return 'el' + distro.normalized_release.major
return 'el6'
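# Illustrative sketch (not part of the original module; the stand-in classes
# are hypothetical test doubles for the real `distro` object):
def _demo_repository_url_part():
    class _FakeRelease(object):
        int_major = 7
        major = '7'

    class _FakeDistro(object):
        normalized_name = 'redhat'
        normalized_release = _FakeRelease()

    # RHEL 7 maps to 'rhel7', while CentOS 7 would map to 'el7'.
    assert repository_url_part(_FakeDistro()) == 'rhel7'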
def install(distro, version_kind, version, adjust_repos, **kw):
packages = map_components(
NON_SPLIT_PACKAGES,
kw.pop('components', [])
)
logger = distro.conn.logger
release = distro.release
machine = distro.machine_type
repo_part = repository_url_part(distro)
dist = rpm_dist(distro)
distro.packager.clean()
# Get EPEL installed before we continue:
if adjust_repos:
distro.packager.install('epel-release')
distro.packager.install('yum-plugin-priorities')
distro.conn.remote_module.enable_yum_priority_obsoletes()<|fim▁hole|> if version_kind in ['stable', 'testing']:
key = 'release'
else:
key = 'autobuild'
if adjust_repos:
if version_kind != 'dev':
distro.packager.add_repo_gpg_key(gpg.url(key))
if version_kind == 'stable':
url = 'http://ceph.com/rpm-{version}/{repo}/'.format(
version=version,
repo=repo_part,
)
elif version_kind == 'testing':
url = 'http://ceph.com/rpm-testing/{repo}/'.format(repo=repo_part)
remoto.process.run(
distro.conn,
[
'rpm',
'-Uvh',
'--replacepkgs',
'{url}noarch/ceph-release-1-0.{dist}.noarch.rpm'.format(url=url, dist=dist),
],
)
if version_kind == 'dev':
logger.info('skipping install of ceph-release package')
logger.info('repo file will be created manually')
mirror_install(
distro,
'http://gitbuilder.ceph.com/ceph-rpm-centos{release}-{machine}-basic/ref/{version}/'.format(
release=release.split(".", 1)[0],
machine=machine,
version=version),
gpg.url(key),
adjust_repos=True,
extra_installs=False
)
# set the right priority
logger.warning('ensuring that /etc/yum.repos.d/ceph.repo contains a high priority')
distro.conn.remote_module.set_repo_priority(['Ceph', 'Ceph-noarch', 'ceph-source'])
logger.warning('altered ceph.repo priorities to contain: priority=1')
if packages:
distro.packager.install(packages)
def mirror_install(distro, repo_url, gpg_url, adjust_repos, extra_installs=True, **kw):
packages = map_components(
NON_SPLIT_PACKAGES,
kw.pop('components', [])
)
repo_url = repo_url.strip('/') # Remove trailing slashes
distro.packager.clean()
if adjust_repos:
distro.packager.add_repo_gpg_key(gpg_url)
ceph_repo_content = templates.ceph_repo.format(
repo_url=repo_url,
gpg_url=gpg_url
)
distro.conn.remote_module.write_yum_repo(ceph_repo_content)
# set the right priority
if distro.packager.name == 'yum':
distro.packager.install('yum-plugin-priorities')
distro.conn.remote_module.set_repo_priority(['Ceph', 'Ceph-noarch', 'ceph-source'])
distro.conn.logger.warning('altered ceph.repo priorities to contain: priority=1')
if extra_installs and packages:
distro.packager.install(packages)
def repo_install(distro, reponame, baseurl, gpgkey, **kw):
packages = map_components(
NON_SPLIT_PACKAGES,
kw.pop('components', [])
)
logger = distro.conn.logger
# Get some defaults
name = kw.pop('name', '%s repo' % reponame)
enabled = kw.pop('enabled', 1)
gpgcheck = kw.pop('gpgcheck', 1)
install_ceph = kw.pop('install_ceph', False)
proxy = kw.pop('proxy', '') # will get ignored if empty
_type = 'repo-md'
baseurl = baseurl.strip('/') # Remove trailing slashes
distro.packager.clean()
if gpgkey:
distro.packager.add_repo_gpg_key(gpgkey)
repo_content = templates.custom_repo(
reponame=reponame,
name=name,
baseurl=baseurl,
enabled=enabled,
gpgcheck=gpgcheck,
_type=_type,
gpgkey=gpgkey,
proxy=proxy,
**kw
)
distro.conn.remote_module.write_yum_repo(
repo_content,
"%s.repo" % reponame
)
repo_path = '/etc/yum.repos.d/{reponame}.repo'.format(reponame=reponame)
# set the right priority
if kw.get('priority'):
if distro.packager.name == 'yum':
distro.packager.install('yum-plugin-priorities')
distro.conn.remote_module.set_repo_priority([reponame], repo_path)
logger.warning('altered {reponame}.repo priorities to contain: priority=1'.format(
reponame=reponame)
)
# Some custom repos do not need to install ceph
if install_ceph and packages:
distro.packager.install(packages)<|fim▁end|> | logger.warning('check_obsoletes has been enabled for Yum priorities plugin') |
<|file_name|>wallet.rs<|end_file_name|><|fim▁begin|>use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
use indy_api_types::wallet::*;
use crate::commands::{Command, CommandExecutor};
use indy_api_types::domain::wallet::{Config, Credentials, ExportConfig, KeyConfig};
use indy_api_types::errors::prelude::*;
use crate::services::crypto::CryptoService;
use indy_wallet::{KeyDerivationData, WalletService, Metadata};
use indy_utils::crypto::{chacha20poly1305_ietf, randombytes};
use indy_utils::crypto::chacha20poly1305_ietf::Key as MasterKey;
use indy_api_types::{WalletHandle, CallbackHandle};
use rust_base58::ToBase58;
type DeriveKeyResult<T> = IndyResult<T>;
pub enum WalletCommand {
RegisterWalletType(String, // type_
WalletCreate, // create
WalletOpen, // open
WalletClose, // close
WalletDelete, // delete
WalletAddRecord, // add record
WalletUpdateRecordValue, // update record value
WalletUpdateRecordTags, // update record tags
WalletAddRecordTags, // add record tags
WalletDeleteRecordTags, // delete record tags
WalletDeleteRecord, // delete record
WalletGetRecord, // get record
WalletGetRecordId, // get record id
WalletGetRecordType, // get record type
WalletGetRecordValue, // get record value
WalletGetRecordTags, // get record tags
WalletFreeRecord, // free record
WalletGetStorageMetadata, // get storage metadata
WalletSetStorageMetadata, // set storage metadata
WalletFreeStorageMetadata, // free storage metadata
WalletSearchRecords, // search records
WalletSearchAllRecords, // search all records
WalletGetSearchTotalCount, // get search total count
WalletFetchSearchNextRecord, // fetch search next record
WalletFreeSearch, // free search
Box<dyn Fn(IndyResult<()>) + Send>),
Create(Config, // config
Credentials, // credentials
Box<dyn Fn(IndyResult<()>) + Send>),
CreateContinue(Config, // config
Credentials, // credentials
KeyDerivationData,
DeriveKeyResult<MasterKey>, // derive_key_result
CallbackHandle),
Open(Config, // config
Credentials, // credentials
Box<dyn Fn(IndyResult<WalletHandle>) + Send>),
OpenContinue(WalletHandle,
DeriveKeyResult<(MasterKey, Option<MasterKey>)>, // derive_key_result
),
Close(WalletHandle,
Box<dyn Fn(IndyResult<()>) + Send>),
Delete(Config, // config
Credentials, // credentials
Box<dyn Fn(IndyResult<()>) + Send>),
DeleteContinue(Config, // config
Credentials, // credentials
Metadata, // credentials
DeriveKeyResult<MasterKey>,
CallbackHandle),
Export(WalletHandle,
ExportConfig, // export config
Box<dyn Fn(IndyResult<()>) + Send>),
ExportContinue(WalletHandle,
ExportConfig, // export config
KeyDerivationData,
DeriveKeyResult<MasterKey>,
CallbackHandle),
Import(Config, // config
Credentials, // credentials
ExportConfig, // import config
Box<dyn Fn(IndyResult<()>) + Send>),
ImportContinue(Config, // config
Credentials, // credentials
DeriveKeyResult<(MasterKey, MasterKey)>, // derive_key_result
WalletHandle,
CallbackHandle
),
GenerateKey(Option<KeyConfig>, // config
Box<dyn Fn(IndyResult<String>) + Send>),
DeriveKey(KeyDerivationData,
Box<dyn Fn(DeriveKeyResult<MasterKey>) + Send>),
}
macro_rules! get_cb {
($self_:ident, $e:expr) => (match $self_.pending_callbacks.borrow_mut().remove(&$e) {
Some(val) => val,
None => return error!("No pending command for id: {}", $e)
});
}
pub struct WalletCommandExecutor {
wallet_service: Rc<WalletService>,
crypto_service: Rc<CryptoService>,
open_callbacks: RefCell<HashMap<WalletHandle, Box<dyn Fn(IndyResult<WalletHandle>) + Send>>>,
pending_callbacks: RefCell<HashMap<CallbackHandle, Box<dyn Fn(IndyResult<()>) + Send>>>
}
impl WalletCommandExecutor {
pub fn new(wallet_service: Rc<WalletService>, crypto_service: Rc<CryptoService>) -> WalletCommandExecutor {
WalletCommandExecutor {
wallet_service,
crypto_service,
open_callbacks: RefCell::new(HashMap::new()),
pending_callbacks: RefCell::new(HashMap::new())
}
}
pub fn execute(&self, command: WalletCommand) {
match command {
WalletCommand::RegisterWalletType(type_, create, open, close, delete, add_record,
update_record_value, update_record_tags, add_record_tags,
delete_record_tags, delete_record, get_record, get_record_id, get_record_type,
get_record_value, get_record_tags, free_record, get_storage_metadata, set_storage_metadata,
free_storage_metadata, search_records, search_all_records, get_search_total_count,
fetch_search_next_record, free_search, cb) => {
debug!(target: "wallet_command_executor", "RegisterWalletType command received");
cb(self._register_type(&type_, create, open, close, delete, add_record,
update_record_value, update_record_tags, add_record_tags,
delete_record_tags, delete_record, get_record, get_record_id, get_record_type,
get_record_value, get_record_tags, free_record, get_storage_metadata, set_storage_metadata,
free_storage_metadata, search_records, search_all_records, get_search_total_count,
fetch_search_next_record, free_search));
}
WalletCommand::Create(config, credentials, cb) => {
debug!(target: "wallet_command_executor", "Create command received");
self._create(&config, &credentials, cb)
}
WalletCommand::CreateContinue(config, credentials, key_data, key_result, cb_id) => {
debug!(target: "wallet_command_executor", "CreateContinue command received");
self._create_continue(cb_id, &config, &credentials, key_data, key_result)
}
WalletCommand::Open(config, credentials, cb) => {
debug!(target: "wallet_command_executor", "Open command received");
self._open(&config, &credentials, cb);
}
WalletCommand::OpenContinue(wallet_handle, key_result) => {
debug!(target: "wallet_command_executor", "OpenContinue command received");
self._open_continue(wallet_handle, key_result)
}
WalletCommand::Close(handle, cb) => {
debug!(target: "wallet_command_executor", "Close command received");
cb(self._close(handle));
}
WalletCommand::Delete(config, credentials, cb) => {
debug!(target: "wallet_command_executor", "Delete command received");
self._delete(&config, &credentials, cb)
}
WalletCommand::DeleteContinue(config, credentials, metadata, key_result, cb_id) => {
debug!(target: "wallet_command_executor", "DeleteContinue command received");
self._delete_continue(cb_id, &config, &credentials, &metadata, key_result)
}
WalletCommand::Export(wallet_handle, export_config, cb) => {
debug!(target: "wallet_command_executor", "Export command received");
self._export(wallet_handle, &export_config, cb)
}
WalletCommand::ExportContinue(wallet_handle, export_config, key_data, key_result, cb_id) => {
debug!(target: "wallet_command_executor", "ExportContinue command received");
self._export_continue(cb_id, wallet_handle, &export_config, key_data, key_result)
}
WalletCommand::Import(config, credentials, import_config, cb) => {
debug!(target: "wallet_command_executor", "Import command received");
self._import(&config, &credentials, &import_config, cb);
}
WalletCommand::ImportContinue(config, credential, key_result, wallet_handle, cb_id) => {
debug!(target: "wallet_command_executor", "ImportContinue command received");
self._import_continue(cb_id, wallet_handle, &config, &credential, key_result);
}
WalletCommand::GenerateKey(config, cb) => {
debug!(target: "wallet_command_executor", "DeriveKey command received");
cb(self._generate_key(config.as_ref()));
}
WalletCommand::DeriveKey(key_data, cb) => {
debug!(target: "wallet_command_executor", "DeriveKey command received");
self._derive_key(key_data, cb);
}
};
}
fn _register_type(&self,
type_: &str,
create: WalletCreate,
open: WalletOpen,
close: WalletClose,
delete: WalletDelete,
add_record: WalletAddRecord,
update_record_value: WalletUpdateRecordValue,
update_record_tags: WalletUpdateRecordTags,
add_record_tags: WalletAddRecordTags,
delete_record_tags: WalletDeleteRecordTags,
delete_record: WalletDeleteRecord,
get_record: WalletGetRecord,
get_record_id: WalletGetRecordId,
get_record_type: WalletGetRecordType,
get_record_value: WalletGetRecordValue,
get_record_tags: WalletGetRecordTags,
free_record: WalletFreeRecord,
get_storage_metadata: WalletGetStorageMetadata,
set_storage_metadata: WalletSetStorageMetadata,
free_storage_metadata: WalletFreeStorageMetadata,
search_records: WalletSearchRecords,
search_all_records: WalletSearchAllRecords,
get_search_total_count: WalletGetSearchTotalCount,
fetch_search_next_record: WalletFetchSearchNextRecord,
free_search: WalletFreeSearch) -> IndyResult<()> {
trace!("_register_type >>> type_: {:?}", type_);
self
.wallet_service
.register_wallet_storage(
type_, create, open, close, delete, add_record, update_record_value, update_record_tags,
add_record_tags, delete_record_tags, delete_record, get_record, get_record_id, get_record_type,
get_record_value, get_record_tags, free_record, get_storage_metadata, set_storage_metadata,
free_storage_metadata, search_records, search_all_records,
get_search_total_count, fetch_search_next_record, free_search)?;
trace!("_register_type <<< res: ()");
Ok(())
}
fn _create(&self,
config: &Config,
credentials: &Credentials,
cb: Box<dyn Fn(IndyResult<()>) + Send>) {
trace!("_create >>> config: {:?}, credentials: {:?}", config, secret!(credentials));
let key_data = KeyDerivationData::from_passphrase_with_new_salt(&credentials.key, &credentials.key_derivation_method);
let cb_id : CallbackHandle = indy_utils::sequence::get_next_id();
self.pending_callbacks.borrow_mut().insert(cb_id, cb);
let config = config.clone();
let credentials = credentials.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
key_data.clone(),
Box::new(move |master_key_res| {
CommandExecutor::instance().send(
Command::Wallet(
WalletCommand::CreateContinue(
config.clone(),
credentials.clone(),
key_data.clone(),
master_key_res,
cb_id
))).unwrap();
}))
)).unwrap();
trace!("_create <<<");
}
fn _create_continue(&self,
cb_id: CallbackHandle,
config: &Config,
credentials: &Credentials,
key_data: KeyDerivationData,
key_result: DeriveKeyResult<MasterKey>) {
let cb = get_cb!(self, cb_id );
cb(key_result
.and_then(|key| self.wallet_service.create_wallet(config, credentials, (&key_data, &key))))
}
fn _open(&self,
config: &Config,
credentials: &Credentials,
cb: Box<dyn Fn(IndyResult<WalletHandle>) + Send>) {
trace!("_open >>> config: {:?}, credentials: {:?}", config, secret!(credentials));
let (wallet_handle, key_derivation_data, rekey_data) = try_cb!(self.wallet_service.open_wallet_prepare(config, credentials), cb);
self.open_callbacks.borrow_mut().insert(wallet_handle, cb);
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
key_derivation_data,
Box::new(move |key_result| {
match (key_result, rekey_data.clone()) {
(Ok(key_result), Some(rekey_data)) => {
WalletCommandExecutor::_derive_rekey_and_continue(wallet_handle, key_result, rekey_data)
}
(key_result, _) => {
let key_result = key_result.map(|kr| (kr, None));
WalletCommandExecutor::_send_open_continue(wallet_handle, key_result)<|fim▁hole|> }
}),
))
).unwrap();
let res = wallet_handle;
trace!("_open <<< res: {:?}", res);
}
fn _derive_rekey_and_continue(wallet_handle: WalletHandle, key_result: MasterKey, rekey_data: KeyDerivationData) {
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
rekey_data,
Box::new(move |rekey_result| {
let key_result = key_result.clone();
let key_result = rekey_result.map(move |rekey_result| (key_result, Some(rekey_result)));
WalletCommandExecutor::_send_open_continue(wallet_handle, key_result)
}),
))
).unwrap();
}
fn _send_open_continue(wallet_handle: WalletHandle, key_result: DeriveKeyResult<(MasterKey, Option<MasterKey>)>) {
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::OpenContinue(
wallet_handle,
key_result,
))
).unwrap();
}
fn _open_continue(&self,
wallet_handle: WalletHandle,
key_result: DeriveKeyResult<(MasterKey, Option<MasterKey>)>) {
let cb = self.open_callbacks.borrow_mut().remove(&wallet_handle).unwrap();
cb(key_result
.and_then(|(key, rekey)| self.wallet_service.open_wallet_continue(wallet_handle, (&key, rekey.as_ref()))))
}
fn _close(&self,
wallet_handle: WalletHandle) -> IndyResult<()> {
trace!("_close >>> handle: {:?}", wallet_handle);
self.wallet_service.close_wallet(wallet_handle)?;
trace!("_close <<< res: ()");
Ok(())
}
fn _delete(&self,
config: &Config,
credentials: &Credentials,
cb: Box<dyn Fn(IndyResult<()>) + Send>) {
trace!("_delete >>> config: {:?}, credentials: {:?}", config, secret!(credentials));
let (metadata, key_derivation_data) = try_cb!(self.wallet_service.delete_wallet_prepare(&config, &credentials), cb);
let cb_id: CallbackHandle = indy_utils::sequence::get_next_id();
self.pending_callbacks.borrow_mut().insert(cb_id, cb);
let config = config.clone();
let credentials = credentials.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
key_derivation_data,
Box::new(move |key_result| {
let key_result = key_result.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeleteContinue(
config.clone(),
credentials.clone(),
metadata.clone(),
key_result,
cb_id)
)).unwrap()
}),
))
).unwrap();
trace!("_delete <<<");
}
fn _delete_continue(&self,
cb_id: CallbackHandle,
config: &Config,
credentials: &Credentials,
metadata: &Metadata,
key_result: DeriveKeyResult<MasterKey>) {
let cb = get_cb!(self, cb_id);
cb(key_result
.and_then(|key| self.wallet_service.delete_wallet_continue(config, credentials, metadata, &key)))
}
fn _export(&self,
wallet_handle: WalletHandle,
export_config: &ExportConfig,
cb: Box<dyn Fn(IndyResult<()>) + Send>) {
trace!("_export >>> handle: {:?}, export_config: {:?}", wallet_handle, secret!(export_config));
let key_data = KeyDerivationData::from_passphrase_with_new_salt(&export_config.key, &export_config.key_derivation_method);
let cb_id = indy_utils::sequence::get_next_id();
self.pending_callbacks.borrow_mut().insert(cb_id, cb);
let export_config = export_config.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
key_data.clone(),
Box::new(move |master_key_res| {
CommandExecutor::instance().send(Command::Wallet(WalletCommand::ExportContinue(
wallet_handle,
export_config.clone(),
key_data.clone(),
master_key_res,
cb_id,
))).unwrap();
})
))
).unwrap();
trace!("_export <<<");
}
fn _export_continue(&self,
cb_id: CallbackHandle,
wallet_handle: WalletHandle,
export_config: &ExportConfig,
key_data: KeyDerivationData,
key_result: DeriveKeyResult<MasterKey>) {
let cb = get_cb!(self, cb_id);
cb(key_result
.and_then(|key| self.wallet_service.export_wallet(wallet_handle, export_config, 0, (&key_data, &key)))) // TODO - later add proper versioning
}
fn _import(&self,
config: &Config,
credentials: &Credentials,
import_config: &ExportConfig,
cb: Box<dyn Fn(IndyResult<()>) + Send>) {
trace!("_import >>> config: {:?}, credentials: {:?}, import_config: {:?}",
config, secret!(credentials), secret!(import_config));
let (wallet_handle, key_data, import_key_data) = try_cb!(self.wallet_service.import_wallet_prepare(&config, &credentials, &import_config), cb);
let cb_id : CallbackHandle = indy_utils::sequence::get_next_id();
self.pending_callbacks.borrow_mut().insert(cb_id, cb);
let config = config.clone();
let credentials = credentials.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
import_key_data,
Box::new(move |import_key_result| {
let config = config.clone();
let credentials = credentials.clone();
CommandExecutor::instance().send(
Command::Wallet(WalletCommand::DeriveKey(
key_data.clone(),
Box::new(move |key_result| {
let import_key_result = import_key_result.clone();
CommandExecutor::instance().send(Command::Wallet(WalletCommand::ImportContinue(
config.clone(),
credentials.clone(),
import_key_result.and_then(|import_key| key_result.map(|key| (import_key, key))),
wallet_handle,
cb_id
))).unwrap();
}),
))
).unwrap();
}),
))
).unwrap();
trace!("_import <<<");
}
fn _import_continue(&self,
cb_id: CallbackHandle,
wallet_handle: WalletHandle,
config: &Config,
credential: &Credentials,
key_result: DeriveKeyResult<(MasterKey, MasterKey)>) {
let cb = get_cb!(self, cb_id);
cb(key_result
.and_then(|key| self.wallet_service.import_wallet_continue(wallet_handle, &config, &credential, key)))
}
fn _generate_key(&self,
config: Option<&KeyConfig>) -> IndyResult<String> {
trace!("_generate_key >>>config: {:?}", secret!(config));
let seed = config.and_then(|config| config.seed.as_ref().map(String::as_str));
let key = match self.crypto_service.convert_seed(seed)? {
Some(seed) => randombytes::randombytes_deterministic(chacha20poly1305_ietf::KEYBYTES, &randombytes::Seed::from_slice(&seed[..])?),
None => randombytes::randombytes(chacha20poly1305_ietf::KEYBYTES)
};
let res = key[..].to_base58();
trace!("_generate_key <<< res: {:?}", res);
Ok(res)
}
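// Illustrative note (not part of the original code): key derivation is
// CPU-heavy (e.g. Argon2-based methods), so `_derive_key` below offloads it
// to the shared THREADPOOL; its callback then re-enters the single-threaded
// CommandExecutor loop via the *Continue commands handled in `execute`.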
fn _derive_key(&self, key_data: KeyDerivationData, cb: Box<dyn Fn(DeriveKeyResult<MasterKey>) + Send>){
crate::commands::THREADPOOL.lock().unwrap().execute(move || cb(key_data.calc_master_key()));
}
}<|fim▁end|> | } |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Support code for rustc's built in unit-test and micro-benchmarking
//! framework.
//!
//! Almost all user code will only be interested in `Bencher` and
//! `black_box`. All other interactions (such as writing tests and
//! benchmarks themselves) should be done via the `#[test]` and
//! `#[bench]` attributes.
//!
//! See the [Testing Chapter](../book/ch11-00-testing.html) of the book for more details.
// Currently, not much of this is meant for users. It is intended to
// support the simplest interface possible for representing and
// running tests while providing a base that other test frameworks may
// build off of.
// N.B., this is also specified in this crate's Cargo.toml, but librustc_ast contains logic specific to
// this crate, which relies on this attribute (rather than the value of `--crate-name` passed by
// cargo) to detect this crate.
#![crate_name = "test"]
#![unstable(feature = "test", issue = "50297")]
#![doc(test(attr(deny(warnings))))]
#![feature(libc)]
#![feature(rustc_private)]
#![feature(nll)]
#![feature(available_concurrency)]
#![feature(bench_black_box)]
#![feature(internal_output_capture)]
#![feature(panic_unwind)]
#![feature(staged_api)]
#![feature(termination_trait_lib)]
#![feature(test)]
#![feature(total_cmp)]
// Public reexports
pub use self::bench::{black_box, Bencher};
pub use self::console::run_tests_console;
pub use self::options::{ColorConfig, Options, OutputFormat, RunIgnored, ShouldPanic};
pub use self::types::TestName::*;
pub use self::types::*;
pub use self::ColorConfig::*;
pub use cli::TestOpts;
// Module to be used by rustc to compile tests in libtest
pub mod test {
pub use crate::{
assert_test_result,
bench::Bencher,
cli::{parse_opts, TestOpts},
filter_tests,
helpers::metrics::{Metric, MetricMap},
options::{Concurrent, Options, RunIgnored, RunStrategy, ShouldPanic},
run_test, test_main, test_main_static,
test_result::{TestResult, TrFailed, TrFailedMsg, TrIgnored, TrOk},
time::{TestExecTime, TestTimeOptions},
types::{
DynTestFn, DynTestName, StaticBenchFn, StaticTestFn, StaticTestName, TestDesc,
TestDescAndFn, TestId, TestName, TestType,
},
};
}
use std::{
collections::VecDeque,
env, io,
io::prelude::Write,
panic::{self, catch_unwind, AssertUnwindSafe, PanicInfo},
process::{self, Command, Termination},
sync::mpsc::{channel, Sender},
sync::{Arc, Mutex},
thread,
time::{Duration, Instant},
};
pub mod bench;
mod cli;
mod console;
mod event;
mod formatters;
mod helpers;
mod options;
pub mod stats;
mod term;
mod test_result;
mod time;
mod types;
#[cfg(test)]
mod tests;
use event::{CompletedTest, TestEvent};
use helpers::concurrency::get_concurrency;
use helpers::exit_code::get_exit_code;
use options::{Concurrent, RunStrategy};
use test_result::*;
use time::TestExecTime;
// Process exit code to be used to indicate test failures.
const ERROR_EXIT_CODE: i32 = 101;
const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";
// The default console test runner. It accepts the command line
// arguments and a vector of test_descs.
pub fn test_main(args: &[String], tests: Vec<TestDescAndFn>, options: Option<Options>) {
let mut opts = match cli::parse_opts(args) {
Some(Ok(o)) => o,
Some(Err(msg)) => {
eprintln!("error: {}", msg);
process::exit(ERROR_EXIT_CODE);
}
None => return,
};
if let Some(options) = options {
opts.options = options;
}
if opts.list {
if let Err(e) = console::list_tests_console(&opts, tests) {
eprintln!("error: io error when listing tests: {:?}", e);
process::exit(ERROR_EXIT_CODE);
}
} else {
match console::run_tests_console(&opts, tests) {
Ok(true) => {}
Ok(false) => process::exit(ERROR_EXIT_CODE),
Err(e) => {
eprintln!("error: io error when listing tests: {:?}", e);
process::exit(ERROR_EXIT_CODE);
}
}
}
}
/// A variant optimized for invocation with a static test vector.
/// This will panic (intentionally) when fed any dynamic tests.
///
/// This is the entry point for the main function generated by `rustc --test`
/// when panic=unwind.
pub fn test_main_static(tests: &[&TestDescAndFn]) {
let args = env::args().collect::<Vec<_>>();
let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
test_main(&args, owned_tests, None)
}
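// Illustrative sketch (not part of the original file): `rustc --test`
// generates an entry point roughly equivalent to the following, where
// TEST_A and TEST_B stand in for the hypothetical `TestDescAndFn` statics
// emitted for each `#[test]` function:
//
//     fn main() {
//         test::test_main_static(&[&TEST_A, &TEST_B /* one per test */]);
//     }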
/// A variant optimized for invocation with a static test vector.
/// This will panic (intentionally) when fed any dynamic tests.
///
/// Runs tests in panic=abort mode, which involves spawning subprocesses for
/// tests.
///
/// This is the entry point for the main function generated by `rustc --test`
/// when panic=abort.
pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
// If we're being run in SpawnedSecondary mode, run the test here. run_test
// will then exit the process.
if let Ok(name) = env::var(SECONDARY_TEST_INVOKER_VAR) {
env::remove_var(SECONDARY_TEST_INVOKER_VAR);
let test = tests
.iter()
.filter(|test| test.desc.name.as_slice() == name)
.map(make_owned_test)
.next()
.unwrap_or_else(|| panic!("couldn't find a test with the provided name '{}'", name));
let TestDescAndFn { desc, testfn } = test;
let testfn = match testfn {
StaticTestFn(f) => f,
_ => panic!("only static tests are supported"),
};
run_test_in_spawned_subprocess(desc, Box::new(testfn));
}
let args = env::args().collect::<Vec<_>>();
let owned_tests: Vec<_> = tests.iter().map(make_owned_test).collect();
test_main(&args, owned_tests, Some(Options::new().panic_abort(true)))
}
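// Illustrative note (not part of the original file): in panic=abort mode the
// harness re-executes its own binary with __RUST_TEST_INVOKE=<test name> in
// the environment, so a panicking test aborts only the child process; see
// `spawn_test_subprocess` further below for the parent side of this protocol.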
/// Clones static values for putting into a dynamic vector, which test_main()
/// needs to hand out ownership of tests to parallel test runners.
///
/// This will panic when fed any dynamic tests, because they cannot be cloned.
fn make_owned_test(test: &&TestDescAndFn) -> TestDescAndFn {
match test.testfn {
StaticTestFn(f) => TestDescAndFn { testfn: StaticTestFn(f), desc: test.desc.clone() },
StaticBenchFn(f) => TestDescAndFn { testfn: StaticBenchFn(f), desc: test.desc.clone() },
_ => panic!("non-static tests passed to test::test_main_static"),
}
}
/// Invoked when unit tests terminate. Should panic if the unit
/// Tests is considered a failure. By default, invokes `report()`
/// and checks for a `0` result.
pub fn assert_test_result<T: Termination>(result: T) {
let code = result.report();
assert_eq!(
code, 0,
"the test returned a termination value with a non-zero status code ({}) \
which indicates a failure",
code
);
}
pub fn run_tests<F>(
opts: &TestOpts,
tests: Vec<TestDescAndFn>,
mut notify_about_test_event: F,
) -> io::Result<()>
where
F: FnMut(TestEvent) -> io::Result<()>,
{
use std::collections::{self, HashMap};
use std::hash::BuildHasherDefault;
use std::sync::mpsc::RecvTimeoutError;
struct RunningTest {
join_handle: Option<thread::JoinHandle<()>>,
}
// Use a deterministic hasher
type TestMap =
HashMap<TestId, RunningTest, BuildHasherDefault<collections::hash_map::DefaultHasher>>;
struct TimeoutEntry {
id: TestId,
desc: TestDesc,
timeout: Instant,
}
let tests_len = tests.len();
let mut filtered_tests = filter_tests(opts, tests);
if !opts.bench_benchmarks {
filtered_tests = convert_benchmarks_to_tests(filtered_tests);
}
let filtered_tests = {
let mut filtered_tests = filtered_tests;
for test in filtered_tests.iter_mut() {
test.desc.name = test.desc.name.with_padding(test.testfn.padding());
}
filtered_tests
};
let filtered_out = tests_len - filtered_tests.len();
let event = TestEvent::TeFilteredOut(filtered_out);
notify_about_test_event(event)?;
let filtered_descs = filtered_tests.iter().map(|t| t.desc.clone()).collect();
let event = TestEvent::TeFiltered(filtered_descs);
notify_about_test_event(event)?;
let (filtered_tests, filtered_benchs): (Vec<_>, _) = filtered_tests
.into_iter()
.enumerate()
.map(|(i, e)| (TestId(i), e))
.partition(|(_, e)| matches!(e.testfn, StaticTestFn(_) | DynTestFn(_)));
let concurrency = opts.test_threads.unwrap_or_else(get_concurrency);
let mut remaining = filtered_tests;
remaining.reverse();
let mut pending = 0;
let (tx, rx) = channel::<CompletedTest>();
let run_strategy = if opts.options.panic_abort && !opts.force_run_in_process {
RunStrategy::SpawnPrimary
} else {
RunStrategy::InProcess
};
let mut running_tests: TestMap = HashMap::default();
let mut timeout_queue: VecDeque<TimeoutEntry> = VecDeque::new();
fn get_timed_out_tests(
running_tests: &TestMap,
timeout_queue: &mut VecDeque<TimeoutEntry>,
) -> Vec<TestDesc> {
let now = Instant::now();
let mut timed_out = Vec::new();
while let Some(timeout_entry) = timeout_queue.front() {
if now < timeout_entry.timeout {
break;
}
let timeout_entry = timeout_queue.pop_front().unwrap();
if running_tests.contains_key(&timeout_entry.id) {
timed_out.push(timeout_entry.desc);
}
}
timed_out
}
fn calc_timeout(timeout_queue: &VecDeque<TimeoutEntry>) -> Option<Duration> {
timeout_queue.front().map(|&TimeoutEntry { timeout: next_timeout, .. }| {
let now = Instant::now();
if next_timeout >= now { next_timeout - now } else { Duration::new(0, 0) }
})
}
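// Illustrative note (not part of the original file): tests are pushed onto
// `timeout_queue` in spawn order with a uniform timeout, so the front entry
// always holds the earliest deadline; entries whose test has already
// finished (and was removed from `running_tests`) are skipped when popped.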
if concurrency == 1 {
while !remaining.is_empty() {
let (id, test) = remaining.pop().unwrap();
let event = TestEvent::TeWait(test.desc.clone());
notify_about_test_event(event)?;
let join_handle =
run_test(opts, !opts.run_tests, id, test, run_strategy, tx.clone(), Concurrent::No);
assert!(join_handle.is_none());
let completed_test = rx.recv().unwrap();
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
}
} else {
while pending > 0 || !remaining.is_empty() {
while pending < concurrency && !remaining.is_empty() {
let (id, test) = remaining.pop().unwrap();
let timeout = time::get_default_test_timeout();
let desc = test.desc.clone();
let event = TestEvent::TeWait(desc.clone());
notify_about_test_event(event)?; // here no pad
let join_handle = run_test(
opts,
!opts.run_tests,
id,
test,
run_strategy,
tx.clone(),
Concurrent::Yes,
);
running_tests.insert(id, RunningTest { join_handle });
timeout_queue.push_back(TimeoutEntry { id, desc, timeout });
pending += 1;
}
let mut res;
loop {
if let Some(timeout) = calc_timeout(&timeout_queue) {
res = rx.recv_timeout(timeout);
for test in get_timed_out_tests(&running_tests, &mut timeout_queue) {
let event = TestEvent::TeTimeout(test);
notify_about_test_event(event)?;
}
match res {
Err(RecvTimeoutError::Timeout) => {
// Result is not yet ready, continue waiting.
}
_ => {
// We've got a result, stop the loop.
break;
}
}
} else {
res = rx.recv().map_err(|_| RecvTimeoutError::Disconnected);
break;
}
}
let mut completed_test = res.unwrap();
let running_test = running_tests.remove(&completed_test.id).unwrap();
if let Some(join_handle) = running_test.join_handle {
if let Err(_) = join_handle.join() {
if let TrOk = completed_test.result {
completed_test.result =
TrFailedMsg("panicked after reporting success".to_string());
}
}
}
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
pending -= 1;
}
}
if opts.bench_benchmarks {
// All benchmarks run at the end, in serial.
for (id, b) in filtered_benchs {
let event = TestEvent::TeWait(b.desc.clone());
notify_about_test_event(event)?;
run_test(opts, false, id, b, run_strategy, tx.clone(), Concurrent::No);
let completed_test = rx.recv().unwrap();
let event = TestEvent::TeResult(completed_test);
notify_about_test_event(event)?;
}
}
Ok(())
}
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
let mut filtered = tests;
let matches_filter = |test: &TestDescAndFn, filter: &str| {
let test_name = test.desc.name.as_slice();
match opts.filter_exact {
true => test_name == filter,
false => test_name.contains(filter),
}
};
// Remove tests that don't match the test filter
if !opts.filters.is_empty() {
filtered.retain(|test| opts.filters.iter().any(|filter| matches_filter(test, filter)));
}
// Skip tests that match any of the skip filters
filtered.retain(|test| !opts.skip.iter().any(|sf| matches_filter(test, sf)));
// Excludes #[should_panic] tests
if opts.exclude_should_panic {
filtered.retain(|test| test.desc.should_panic == ShouldPanic::No);
}
// maybe unignore tests
match opts.run_ignored {
RunIgnored::Yes => {
filtered.iter_mut().for_each(|test| test.desc.ignore = false);
}
RunIgnored::Only => {
filtered.retain(|test| test.desc.ignore);
filtered.iter_mut().for_each(|test| test.desc.ignore = false);
}
RunIgnored::No => {}
}
// Sort the tests alphabetically
filtered.sort_by(|t1, t2| t1.desc.name.as_slice().cmp(t2.desc.name.as_slice()));
filtered
}
pub fn convert_benchmarks_to_tests(tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
// convert benchmarks to tests, if we're not benchmarking them
tests
.into_iter()
.map(|x| {
let testfn = match x.testfn {
DynBenchFn(bench) => DynTestFn(Box::new(move || {
bench::run_once(|b| __rust_begin_short_backtrace(|| bench.run(b)))
})),
StaticBenchFn(benchfn) => DynTestFn(Box::new(move || {
bench::run_once(|b| __rust_begin_short_backtrace(|| benchfn(b)))
})),
f => f,
};
TestDescAndFn { desc: x.desc, testfn }
})
.collect()
}
pub fn run_test(
opts: &TestOpts,
force_ignore: bool,
id: TestId,
test: TestDescAndFn,
strategy: RunStrategy,
monitor_ch: Sender<CompletedTest>,
concurrency: Concurrent,
) -> Option<thread::JoinHandle<()>> {
let TestDescAndFn { desc, testfn } = test;
// Emscripten can catch panics but other wasm targets cannot
let ignore_because_no_process_support = desc.should_panic != ShouldPanic::No
&& cfg!(target_arch = "wasm32")
&& !cfg!(target_os = "emscripten");
if force_ignore || desc.ignore || ignore_because_no_process_support {
let message = CompletedTest::new(id, desc, TrIgnored, None, Vec::new());
monitor_ch.send(message).unwrap();
return None;
}
struct TestRunOpts {
pub strategy: RunStrategy,
pub nocapture: bool,
pub concurrency: Concurrent,
pub time: Option<time::TestTimeOptions>,
}
fn run_test_inner(
id: TestId,
desc: TestDesc,
monitor_ch: Sender<CompletedTest>,
testfn: Box<dyn FnOnce() + Send>,
opts: TestRunOpts,
) -> Option<thread::JoinHandle<()>> {
let concurrency = opts.concurrency;
let name = desc.name.clone();
let runtest = move || match opts.strategy {
RunStrategy::InProcess => run_test_in_process(
id,
desc,
opts.nocapture,
opts.time.is_some(),
testfn,
monitor_ch,
opts.time,
),
RunStrategy::SpawnPrimary => spawn_test_subprocess(
id,
desc,
opts.nocapture,
opts.time.is_some(),
monitor_ch,
opts.time,
),
};
// If the platform is single-threaded we're just going to run
// the test synchronously, regardless of the concurrency
// level.
let supports_threads = !cfg!(target_os = "emscripten") && !cfg!(target_arch = "wasm32");
if concurrency == Concurrent::Yes && supports_threads {
let cfg = thread::Builder::new().name(name.as_slice().to_owned());
let mut runtest = Arc::new(Mutex::new(Some(runtest)));
let runtest2 = runtest.clone();
match cfg.spawn(move || runtest2.lock().unwrap().take().unwrap()()) {
Ok(handle) => Some(handle),
Err(e) if e.kind() == io::ErrorKind::WouldBlock => {
// `ErrorKind::WouldBlock` means hitting the thread limit on some
// platforms, so run the test synchronously here instead.
Arc::get_mut(&mut runtest).unwrap().get_mut().unwrap().take().unwrap()();
None
}
Err(e) => panic!("failed to spawn thread to run test: {}", e),
}
} else {
runtest();
None
}
}
let test_run_opts =
TestRunOpts { strategy, nocapture: opts.nocapture, concurrency, time: opts.time_options };
match testfn {
DynBenchFn(bencher) => {
// Benchmarks aren't expected to panic, so we run them all in-process.
crate::bench::benchmark(id, desc, monitor_ch, opts.nocapture, |harness| {
bencher.run(harness)
});
None
}
StaticBenchFn(benchfn) => {
// Benchmarks aren't expected to panic, so we run them all in-process.
crate::bench::benchmark(id, desc, monitor_ch, opts.nocapture, benchfn);
None
}
DynTestFn(f) => {
match strategy {
RunStrategy::InProcess => (),
_ => panic!("Cannot run dynamic test fn out-of-process"),
};
run_test_inner(
id,
desc,
monitor_ch,
Box::new(move || __rust_begin_short_backtrace(f)),
test_run_opts,
)
}
StaticTestFn(f) => run_test_inner(
id,
desc,
monitor_ch,
Box::new(move || __rust_begin_short_backtrace(f)),
test_run_opts,
),
}
}
/// Fixed frame used to clean the backtrace with `RUST_BACKTRACE=1`.
#[inline(never)]
fn __rust_begin_short_backtrace<F: FnOnce()>(f: F) {
f();
// prevent this frame from being tail-call optimised away
black_box(());
}
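/// Runs the test function in the current process, capturing its output and
/// catching panics, then reports the outcome on `monitor_ch`.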
fn run_test_in_process(
id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
testfn: Box<dyn FnOnce() + Send>,
monitor_ch: Sender<CompletedTest>,
time_opts: Option<time::TestTimeOptions>,
) {
// Buffer for capturing standard I/O
let data = Arc::new(Mutex::new(Vec::new()));
if !nocapture {
io::set_output_capture(Some(data.clone()));
}
let start = report_time.then(Instant::now);
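// Catch any panic from the test body so a failing test is reported as a
// failure rather than aborting the whole harness.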
let result = catch_unwind(AssertUnwindSafe(testfn));
let exec_time = start.map(|start| {
let duration = start.elapsed();
TestExecTime(duration)
});
io::set_output_capture(None);
let test_result = match result {
Ok(()) => calc_result(&desc, Ok(()), &time_opts, &exec_time),
Err(e) => calc_result(&desc, Err(e.as_ref()), &time_opts, &exec_time),
};
let stdout = data.lock().unwrap_or_else(|e| e.into_inner()).to_vec();
let message = CompletedTest::new(id, desc, test_result, exec_time, stdout);
monitor_ch.send(message).unwrap();
}
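/// Re-invokes the current executable as a child process to run one test in
/// isolation, identifying the test via `SECONDARY_TEST_INVOKER_VAR`, then
/// derives the result from the child's exit code and captured output.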
fn spawn_test_subprocess(
id: TestId,
desc: TestDesc,
nocapture: bool,
report_time: bool,
monitor_ch: Sender<CompletedTest>,
time_opts: Option<time::TestTimeOptions>,
) {
let (result, test_output, exec_time) = (|| {
let args = env::args().collect::<Vec<_>>();
let current_exe = &args[0];
let mut command = Command::new(current_exe);
command.env(SECONDARY_TEST_INVOKER_VAR, desc.name.as_slice());
if nocapture {
command.stdout(process::Stdio::inherit());
command.stderr(process::Stdio::inherit());
}
let start = report_time.then(Instant::now);
let output = match command.output() {
Ok(out) => out,
Err(e) => {
let err = format!("Failed to spawn {} as child for test: {:?}", args[0], e);
return (TrFailed, err.into_bytes(), None);
}
};
let exec_time = start.map(|start| {
let duration = start.elapsed();
TestExecTime(duration)
});
let std::process::Output { stdout, stderr, status } = output;
let mut test_output = stdout;
formatters::write_stderr_delimiter(&mut test_output, &desc.name);<|fim▁hole|> Ok(get_result_from_exit_code(&desc, exit_code, &time_opts, &exec_time))
})() {
Ok(r) => r,
Err(e) => {
write!(&mut test_output, "Unexpected error: {}", e).unwrap();
TrFailed
}
};
(result, test_output, exec_time)
})();
let message = CompletedTest::new(id, desc, result, exec_time, test_output);
monitor_ch.send(message).unwrap();
}
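/// Entry point used inside the spawned child process: runs the test with a
/// panic hook installed so that both panicking and returning normally end in
/// `process::exit` with an exit code encoding the result. Never returns.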
fn run_test_in_spawned_subprocess(desc: TestDesc, testfn: Box<dyn FnOnce() + Send>) -> ! {
let builtin_panic_hook = panic::take_hook();
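// Called either from the panic hook (the test panicked) or directly after
// the test returns; computes the result and exits with the matching code.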
let record_result = Arc::new(move |panic_info: Option<&'_ PanicInfo<'_>>| {
let test_result = match panic_info {
Some(info) => calc_result(&desc, Err(info.payload()), &None, &None),
None => calc_result(&desc, Ok(()), &None, &None),
};
// We don't support serializing TrFailedMsg, so just
// print the message out to stderr.
if let TrFailedMsg(msg) = &test_result {
eprintln!("{}", msg);
}
if let Some(info) = panic_info {
builtin_panic_hook(info);
}
if let TrOk = test_result {
process::exit(test_result::TR_OK);
} else {
process::exit(test_result::TR_FAILED);
}
});
let record_result2 = record_result.clone();
panic::set_hook(Box::new(move |info| record_result2(Some(&info))));
testfn();
record_result(None);
unreachable!("panic=abort callback should have exited the process")
}<|fim▁end|> | test_output.extend_from_slice(&stderr);
let result = match (|| -> Result<TestResult, String> {
let exit_code = get_exit_code(status)?; |