prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>scale.js<|end_file_name|><|fim▁begin|>/**
* Scale Interpolation Function.<|fim▁hole|> * @param {number} b end scale
* @param {number} v progress
* @returns {string} the interpolated scale
*/
export default function scale(a, b, v) {
// eslint-disable-next-line no-bitwise
return `scale(${((a + (b - a) * v) * 1000 >> 0) / 1000})`;
}<|fim▁end|> | *
* @param {number} a start scale |
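The scale.js row above linearly interpolates between two scale factors and truncates the result to three decimal places with the `>> 0` bitwise trick. As an illustration only (not part of the dataset row), a minimal Python sketch of the same arithmetic:

def scale(a: float, b: float, v: float) -> str:
    # Linear interpolation from a to b at progress v, truncated toward zero
    # to 3 decimals; int(...) plays the role of the JS `>> 0` trick.
    value = int((a + (b - a) * v) * 1000) / 1000
    return f"scale({value})"

print(scale(1, 2, 0.5))  # -> "scale(1.5)"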
<|file_name|>PrivateCleverbotCommand.java<|end_file_name|><|fim▁begin|>package com.gmail.hexragon.gn4rBot.command.ai;
import com.gmail.hexragon.gn4rBot.managers.commands.CommandExecutor;
import com.gmail.hexragon.gn4rBot.managers.commands.annotations.Command;
import com.gmail.hexragon.gn4rBot.util.GnarMessage;
import com.google.code.chatterbotapi.ChatterBot;
import com.google.code.chatterbotapi.ChatterBotFactory;<|fim▁hole|>import com.google.code.chatterbotapi.ChatterBotSession;
import com.google.code.chatterbotapi.ChatterBotType;
import net.dv8tion.jda.entities.User;
import org.apache.commons.lang3.StringUtils;
import java.util.Map;
import java.util.WeakHashMap;
@Command(
aliases = {"cbot", "cleverbot"},
usage = "(query)",
description = "Talk to Clever-Bot."
)
public class PrivateCleverbotCommand extends CommandExecutor
{
private ChatterBotFactory factory = new ChatterBotFactory();
private ChatterBotSession session = null;
private Map<User, ChatterBotSession> sessionMap = new WeakHashMap<>();
@Override
public void execute(GnarMessage message, String[] args)
{
try
{
if (!sessionMap.containsKey(message.getAuthor()))
{
ChatterBot bot = factory.create(ChatterBotType.CLEVERBOT);
sessionMap.put(message.getAuthor(), bot.createSession());
}
message.replyRaw(sessionMap.get(message.getAuthor()).think(StringUtils.join(args, " ")));
}
catch (Exception e)
{
message.reply("Chat Bot encountered an exception. Restarting. `:[`");
sessionMap.remove(message.getAuthor());
}
}
}<|fim▁end|> | |
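The Java command above keeps one Cleverbot session per Discord user, creating it lazily and discarding it when the bot throws. A rough Python sketch of that per-user session cache pattern (the `create_session` factory and the session's `think` method are stand-ins for the chatterbot API, not real calls):

import weakref

class SessionCache:
    # One chat session per user; entries disappear once the user object is
    # garbage-collected, mirroring the WeakHashMap in the Java command.
    def __init__(self, create_session):
        self._create = create_session
        self._sessions = weakref.WeakKeyDictionary()

    def think(self, user, text):
        try:
            if user not in self._sessions:
                self._sessions[user] = self._create()
            return self._sessions[user].think(text)
        except Exception:
            self._sessions.pop(user, None)
            return "Chat Bot encountered an exception. Restarting. `:[`"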
<|file_name|>atan.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np # type: ignore
import onnx
from ..base import Base
from . import expect
<|fim▁hole|> def export(): # type: () -> None
node = onnx.helper.make_node(
'Atan',
inputs=['x'],
outputs=['y'],
)
x = np.array([-1, 0, 1]).astype(np.float32)
y = np.arctan(x)
expect(node, inputs=[x], outputs=[y],
name='test_atan_example')
x = np.random.randn(3, 4, 5).astype(np.float32)
y = np.arctan(x)
expect(node, inputs=[x], outputs=[y],
name='test_atan')<|fim▁end|> |
class Atan(Base):
@staticmethod |
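The atan.py row follows the standard ONNX backend-test pattern: build a node with `onnx.helper.make_node`, compute the reference output with NumPy, and pass both to `expect`. A self-contained NumPy-only check of the same reference math (no ONNX dependency, purely illustrative):

import numpy as np

x = np.array([-1, 0, 1]).astype(np.float32)
y = np.arctan(x)                       # reference output a backend must reproduce
assert np.allclose(y, [-np.pi / 4, 0.0, np.pi / 4])

x = np.random.randn(3, 4, 5).astype(np.float32)
assert np.arctan(x).shape == x.shape   # elementwise op preserves shape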
<|file_name|>default.py<|end_file_name|><|fim▁begin|># (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type<|fim▁hole|> callback: default
type: stdout
short_description: default Ansible screen output
version_added: historical
description:
- This is the default output callback for ansible-playbook.
extends_documentation_fragment:
- default_callback
requirements:
- set as stdout in configuration
'''
from ansible import constants as C
from ansible.playbook.task_include import TaskInclude
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
class CallbackModule(CallbackBase):
'''
This is the default callback interface, which simply prints messages
to stdout when new callback events are received.
'''
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'default'
def __init__(self):
self._play = None
self._last_task_banner = None
super(CallbackModule, self).__init__()
def v2_runner_on_failed(self, result, ignore_errors=False):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
self._clean_results(result._result, result._task.action)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
self._handle_exception(result._result)
self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
if delegated_vars:
self._display.display("fatal: [%s -> %s]: FAILED! => %s" % (result._host.get_name(), delegated_vars['ansible_host'],
self._dump_results(result._result)), color=C.COLOR_ERROR)
else:
self._display.display("fatal: [%s]: FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_ERROR)
if ignore_errors:
self._display.display("...ignoring", color=C.COLOR_SKIP)
def v2_runner_on_ok(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
self._clean_results(result._result, result._task.action)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
if isinstance(result._task, TaskInclude):
return
elif result._result.get('changed', False):
if delegated_vars:
msg = "changed: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "changed: [%s]" % result._host.get_name()
color = C.COLOR_CHANGED
else:
if delegated_vars:
msg = "ok: [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg = "ok: [%s]" % result._host.get_name()
color = C.COLOR_OK
self._handle_warnings(result._result)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % (self._dump_results(result._result),)
self._display.display(msg, color=color)
def v2_runner_on_skipped(self, result):
if self._plugin_options.get('show_skipped_hosts', C.DISPLAY_SKIPPED_HOSTS): # fallback on constants for inherited plugins missing docs
self._clean_results(result._result, result._task.action)
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
if result._task.loop and 'results' in result._result:
self._process_items(result)
else:
msg = "skipping: [%s]" % result._host.get_name()
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_runner_on_unreachable(self, result):
if self._play.strategy == 'free' and self._last_task_banner != result._task._uuid:
self._print_task_banner(result._task)
delegated_vars = result._result.get('_ansible_delegated_vars', None)
if delegated_vars:
self._display.display("fatal: [%s -> %s]: UNREACHABLE! => %s" % (result._host.get_name(), delegated_vars['ansible_host'],
self._dump_results(result._result)),
color=C.COLOR_UNREACHABLE)
else:
self._display.display("fatal: [%s]: UNREACHABLE! => %s" % (result._host.get_name(), self._dump_results(result._result)), color=C.COLOR_UNREACHABLE)
def v2_playbook_on_no_hosts_matched(self):
self._display.display("skipping: no hosts matched", color=C.COLOR_SKIP)
def v2_playbook_on_no_hosts_remaining(self):
self._display.banner("NO MORE HOSTS LEFT")
def v2_playbook_on_task_start(self, task, is_conditional):
if self._play.strategy != 'free':
self._print_task_banner(task)
def _print_task_banner(self, task):
# args can be specified as no_log in several places: in the task or in
# the argument spec. We can check whether the task is no_log but the
# argument spec can't be because that is only run on the target
# machine and we haven't run it there yet.
#
# So we give people a config option to affect display of the args so
# that they can secure this if they feel that their stdout is insecure
# (shoulder surfing, logging stdout straight to a file, etc).
args = ''
if not task.no_log and C.DISPLAY_ARGS_TO_STDOUT:
args = u', '.join(u'%s=%s' % a for a in task.args.items())
args = u' %s' % args
self._display.banner(u"TASK [%s%s]" % (task.get_name().strip(), args))
if self._display.verbosity >= 2:
path = task.get_path()
if path:
self._display.display(u"task path: %s" % path, color=C.COLOR_DEBUG)
self._last_task_banner = task._uuid
def v2_playbook_on_cleanup_task_start(self, task):
self._display.banner("CLEANUP TASK [%s]" % task.get_name().strip())
def v2_playbook_on_handler_task_start(self, task):
self._display.banner("RUNNING HANDLER [%s]" % task.get_name().strip())
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = u"PLAY"
else:
msg = u"PLAY [%s]" % name
self._play = play
self._display.banner(msg)
def v2_on_file_diff(self, result):
if result._task.loop and 'results' in result._result:
for res in result._result['results']:
if 'diff' in res and res['diff'] and res.get('changed', False):
diff = self._get_diff(res['diff'])
if diff:
self._display.display(diff)
elif 'diff' in result._result and result._result['diff'] and result._result.get('changed', False):
diff = self._get_diff(result._result['diff'])
if diff:
self._display.display(diff)
def v2_runner_item_on_ok(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
self._clean_results(result._result, result._task.action)
if isinstance(result._task, TaskInclude):
return
elif result._result.get('changed', False):
msg = 'changed'
color = C.COLOR_CHANGED
else:
msg = 'ok'
color = C.COLOR_OK
if delegated_vars:
msg += ": [%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += ": [%s]" % result._host.get_name()
msg += " => (item=%s)" % (self._get_item(result._result),)
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
def v2_runner_item_on_failed(self, result):
delegated_vars = result._result.get('_ansible_delegated_vars', None)
self._clean_results(result._result, result._task.action)
self._handle_exception(result._result)
msg = "failed: "
if delegated_vars:
msg += "[%s -> %s]" % (result._host.get_name(), delegated_vars['ansible_host'])
else:
msg += "[%s]" % (result._host.get_name())
self._handle_warnings(result._result)
self._display.display(msg + " (item=%s) => %s" % (self._get_item(result._result), self._dump_results(result._result)), color=C.COLOR_ERROR)
def v2_runner_item_on_skipped(self, result):
if self._plugin_options.get('show_skipped_hosts', C.DISPLAY_SKIPPED_HOSTS): # fallback on constants for inherited plugins missing docs
self._clean_results(result._result, result._task.action)
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item(result._result))
if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_include(self, included_file):
msg = 'included: %s for %s' % (included_file._filename, ", ".join([h.name for h in included_file._hosts]))
self._display.display(msg, color=C.COLOR_SKIP)
def v2_playbook_on_stats(self, stats):
self._display.banner("PLAY RECAP")
hosts = sorted(stats.processed.keys())
for h in hosts:
t = stats.summarize(h)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t),
colorize(u'ok', t['ok'], C.COLOR_OK),
colorize(u'changed', t['changed'], C.COLOR_CHANGED),
colorize(u'unreachable', t['unreachable'], C.COLOR_UNREACHABLE),
colorize(u'failed', t['failures'], C.COLOR_ERROR)),
screen_only=True
)
self._display.display(u"%s : %s %s %s %s" % (
hostcolor(h, t, False),
colorize(u'ok', t['ok'], None),
colorize(u'changed', t['changed'], None),
colorize(u'unreachable', t['unreachable'], None),
colorize(u'failed', t['failures'], None)),
log_only=True
)
self._display.display("", screen_only=True)
# print custom stats
if self._plugin_options.get('show_custom_stats', C.SHOW_CUSTOM_STATS) and stats.custom: # fallback on constants for inherited plugins missing docs
self._display.banner("CUSTOM STATS: ")
# per host
# TODO: come up with 'pretty format'
for k in sorted(stats.custom.keys()):
if k == '_run':
continue
self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n', '')))
# print per run custom stats
if '_run' in stats.custom:
self._display.display("", screen_only=True)
self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n', ''))
self._display.display("", screen_only=True)
def v2_playbook_on_start(self, playbook):
if self._display.verbosity > 1:
from os.path import basename
self._display.banner("PLAYBOOK: %s" % basename(playbook._file_name))
if self._display.verbosity > 3:
# show CLI options
if self._options is not None:
for option in dir(self._options):
if option.startswith('_') or option in ['read_file', 'ensure_value', 'read_module']:
continue
val = getattr(self._options, option)
if val:
self._display.vvvv('%s: %s' % (option, val))
def v2_runner_retry(self, result):
task_name = result.task_name or result._task
msg = "FAILED - RETRYING: %s (%d retries left)." % (task_name, result._result['retries'] - result._result['attempts'])
if (self._display.verbosity > 2 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += "Result was: %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_DEBUG)<|fim▁end|> |
DOCUMENTATION = ''' |
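One detail worth noting in the callback above: under the `free` strategy, task banners cannot be printed once up front, so each `v2_runner_on_*` hook compares `_last_task_banner` against the task UUID before printing. A stripped-down sketch of that deduplication pattern (illustrative only, not Ansible's actual classes):

class BannerDedup:
    def __init__(self, strategy):
        self.strategy = strategy
        self._last_task_banner = None

    def on_runner_event(self, task_uuid, task_name):
        # Results arrive interleaved across hosts under 'free', so the banner
        # is printed lazily and only once per task.
        if self.strategy == 'free' and self._last_task_banner != task_uuid:
            print("TASK [%s]" % task_name)
            self._last_task_banner = task_uuid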
<|file_name|>docker_test.go<|end_file_name|><|fim▁begin|>package strategy
import (
"reflect"
"strings"
"testing"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/validation"
kapi "k8s.io/kubernetes/pkg/api"
buildapi "github.com/openshift/origin/pkg/build/api"
_ "github.com/openshift/origin/pkg/build/api/install"
)
func TestDockerCreateBuildPod(t *testing.T) {
strategy := DockerBuildStrategy{
Image: "docker-test-image",
Codec: kapi.Codecs.LegacyCodec(buildapi.LegacySchemeGroupVersion),
}
build := mockDockerBuild()
actual, err := strategy.CreateBuildPod(build)
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if expected, actual := buildapi.GetBuildPodName(build), actual.ObjectMeta.Name; expected != actual {
t.Errorf("Expected %s, but got %s!", expected, actual)
}
if !reflect.DeepEqual(map[string]string{buildapi.BuildLabel: buildapi.LabelValue(build.Name)}, actual.Labels) {
t.Errorf("Pod Labels does not match Build Labels!")
}
if !reflect.DeepEqual(nodeSelector, actual.Spec.NodeSelector) {
t.Errorf("Pod NodeSelector does not match Build NodeSelector. Expected: %v, got: %v", nodeSelector, actual.Spec.NodeSelector)
}
container := actual.Spec.Containers[0]
if container.Name != "docker-build" {
t.Errorf("Expected docker-build, but got %s!", container.Name)
}
if container.Image != strategy.Image {
t.Errorf("Expected %s image, got %s!", container.Image, strategy.Image)
}
if container.ImagePullPolicy != kapi.PullIfNotPresent {
t.Errorf("Expected %v, got %v", kapi.PullIfNotPresent, container.ImagePullPolicy)
}
if actual.Spec.RestartPolicy != kapi.RestartPolicyNever {
t.Errorf("Expected never, got %#v", actual.Spec.RestartPolicy)
}
if len(container.Env) != 10 {
var keys []string
for _, env := range container.Env {
keys = append(keys, env.Name)
}
t.Fatalf("Expected 10 elements in Env table, got %d:\n%s", len(container.Env), strings.Join(keys, ", "))
}
if len(container.VolumeMounts) != 4 {
t.Fatalf("Expected 4 volumes in container, got %d", len(container.VolumeMounts))
}
if *actual.Spec.ActiveDeadlineSeconds != 60 {
t.Errorf("Expected ActiveDeadlineSeconds 60, got %d", *actual.Spec.ActiveDeadlineSeconds)
}<|fim▁hole|> for i, expected := range []string{dockerSocketPath, DockerPushSecretMountPath, DockerPullSecretMountPath, sourceSecretMountPath} {
if container.VolumeMounts[i].MountPath != expected {
t.Fatalf("Expected %s in VolumeMount[%d], got %s", expected, i, container.VolumeMounts[i].MountPath)
}
}
if len(actual.Spec.Volumes) != 4 {
t.Fatalf("Expected 4 volumes in Build pod, got %d", len(actual.Spec.Volumes))
}
if !kapi.Semantic.DeepEqual(container.Resources, build.Spec.Resources) {
t.Fatalf("Expected actual=expected, %v != %v", container.Resources, build.Spec.Resources)
}
found := false
foundIllegal := false
for _, v := range container.Env {
if v.Name == "BUILD_LOGLEVEL" && v.Value == "bar" {
found = true
}
if v.Name == "ILLEGAL" {
foundIllegal = true
}
}
if !found {
t.Fatalf("Expected variable BUILD_LOGLEVEL be defined for the container")
}
if foundIllegal {
t.Fatalf("Found illegal environment variable 'ILLEGAL' defined on container")
}
buildJSON, _ := runtime.Encode(kapi.Codecs.LegacyCodec(buildapi.LegacySchemeGroupVersion), build)
errorCases := map[int][]string{
0: {"BUILD", string(buildJSON)},
}
for index, exp := range errorCases {
if e := container.Env[index]; e.Name != exp[0] || e.Value != exp[1] {
t.Errorf("Expected %s:%s, got %s:%s!\n", exp[0], exp[1], e.Name, e.Value)
}
}
}
func TestDockerBuildLongName(t *testing.T) {
strategy := DockerBuildStrategy{
Image: "docker-test-image",
Codec: kapi.Codecs.LegacyCodec(buildapi.LegacySchemeGroupVersion),
}
build := mockDockerBuild()
build.Name = strings.Repeat("a", validation.DNS1123LabelMaxLength*2)
pod, err := strategy.CreateBuildPod(build)
if err != nil {
t.Fatalf("unexpected: %v", err)
}
if pod.Labels[buildapi.BuildLabel] != build.Name[:validation.DNS1123LabelMaxLength] {
t.Errorf("Unexpected build label value: %s", pod.Labels[buildapi.BuildLabel])
}
}
func mockDockerBuild() *buildapi.Build {
timeout := int64(60)
return &buildapi.Build{
ObjectMeta: metav1.ObjectMeta{
Name: "dockerBuild",
Labels: map[string]string{
"name": "dockerBuild",
},
},
Spec: buildapi.BuildSpec{
CommonSpec: buildapi.CommonSpec{
Revision: &buildapi.SourceRevision{
Git: &buildapi.GitSourceRevision{},
},
Source: buildapi.BuildSource{
Git: &buildapi.GitBuildSource{
URI: "http://my.build.com/the/dockerbuild/Dockerfile",
Ref: "master",
},
ContextDir: "my/test/dir",
SourceSecret: &kapi.LocalObjectReference{Name: "secretFoo"},
},
Strategy: buildapi.BuildStrategy{
DockerStrategy: &buildapi.DockerBuildStrategy{
PullSecret: &kapi.LocalObjectReference{Name: "bar"},
Env: []kapi.EnvVar{
{Name: "ILLEGAL", Value: "foo"},
{Name: "BUILD_LOGLEVEL", Value: "bar"},
},
},
},
Output: buildapi.BuildOutput{
To: &kapi.ObjectReference{
Kind: "DockerImage",
Name: "docker-registry/repository/dockerBuild",
},
PushSecret: &kapi.LocalObjectReference{Name: "foo"},
},
Resources: kapi.ResourceRequirements{
Limits: kapi.ResourceList{
kapi.ResourceName(kapi.ResourceCPU): resource.MustParse("10"),
kapi.ResourceName(kapi.ResourceMemory): resource.MustParse("10G"),
},
},
CompletionDeadlineSeconds: &timeout,
NodeSelector: nodeSelector,
},
},
Status: buildapi.BuildStatus{
Phase: buildapi.BuildPhaseNew,
},
}
}<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
setup(
name="simple-crawler",
version="0.1",
url="https://github.com/shonenada/crawler",
author="shonenada",
author_email="[email protected]",<|fim▁hole|> install_requires=["requests==2.2.1"],
)<|fim▁end|> | description="Simple crawler",
zip_safe=True,
platforms="any",
packages=find_packages(), |
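The setup script above pins `requests==2.2.1` and relies on automatic package discovery. A quick, hypothetical sanity check of what `find_packages()` would pick up when run from the project root (assuming a `crawler/` package directory containing an `__init__.py`):

from setuptools import find_packages

# Prints the discovered package list, e.g. ['crawler'] for the layout assumed above.
print(find_packages())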
<|file_name|>_express_route_connections_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteConnectionsOperations(object):
"""ExpressRouteConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2021_02_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
connection_name, # type: str
put_express_route_connection_parameters, # type: "_models.ExpressRouteConnection"
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteConnection"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(put_express_route_connection_parameters, 'ExpressRouteConnection')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections/{connectionName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
connection_name, # type: str
put_express_route_connection_parameters, # type: "_models.ExpressRouteConnection"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExpressRouteConnection"]
"""Creates a connection between an ExpressRoute gateway and an ExpressRoute circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_gateway_name: The name of the ExpressRoute gateway.
:type express_route_gateway_name: str
:param connection_name: The name of the connection subresource.
:type connection_name: str
:param put_express_route_connection_parameters: Parameters required in an
ExpressRouteConnection PUT operation.
:type put_express_route_connection_parameters: ~azure.mgmt.network.v2021_02_01.models.ExpressRouteConnection
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExpressRouteConnection or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_02_01.models.ExpressRouteConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
express_route_gateway_name=express_route_gateway_name,
connection_name=connection_name,
put_express_route_connection_parameters=put_express_route_connection_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections/{connectionName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteConnection"
"""Gets the specified ExpressRouteConnection.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_gateway_name: The name of the ExpressRoute gateway.
:type express_route_gateway_name: str
:param connection_name: The name of the ExpressRoute connection.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_02_01.models.ExpressRouteConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections/{connectionName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections/{connectionName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes a connection to a ExpressRoute circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_gateway_name: The name of the ExpressRoute gateway.
:type express_route_gateway_name: str<|fim▁hole|> :param connection_name: The name of the connection subresource.
:type connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
express_route_gateway_name=express_route_gateway_name,
connection_name=connection_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections/{connectionName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
express_route_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ExpressRouteConnectionList"
"""Lists ExpressRouteConnections.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param express_route_gateway_name: The name of the ExpressRoute gateway.
:type express_route_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteConnectionList, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2021_02_01.models.ExpressRouteConnectionList
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteConnectionList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-02-01"
accept = "application/json"
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'expressRouteGatewayName': self._serialize.url("express_route_gateway_name", express_route_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteConnectionList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteGateways/{expressRouteGatewayName}/expressRouteConnections'} # type: ignore<|fim▁end|> | |
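The operations class above is normally reached through the generated management client rather than instantiated directly, and its `begin_*` methods return `LROPoller` objects. A hedged usage sketch (the `NetworkManagementClient` entry point and `DefaultAzureCredential` follow the usual azure-mgmt-network / azure-identity pattern; resource names are placeholders):

from azure.identity import DefaultAzureCredential
from azure.mgmt.network import NetworkManagementClient

client = NetworkManagementClient(DefaultAzureCredential(), subscription_id="<subscription-id>")

# Long-running operations return an LROPoller; .result() blocks until completion.
poller = client.express_route_connections.begin_delete(
    resource_group_name="my-rg",
    express_route_gateway_name="my-er-gw",
    connection_name="my-connection",
)
poller.result()

# Plain GETs return the deserialized model directly.
conn = client.express_route_connections.get("my-rg", "my-er-gw", "my-connection")
print(conn.name)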
<|file_name|>test_resource_group.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
import six
from heat.common import exception
from heat.common import grouputils
from heat.common import template_format
from heat.engine.resources.openstack.heat import resource_group
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.engine import stack as stackm
from heat.tests import common
from heat.tests import utils
template = {
"heat_template_version": "2013-05-23",
"resources": {
"group1": {
"type": "OS::Heat::ResourceGroup",
"properties": {
"count": 2,
"resource_def": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": "Bar"
}
}
}
}
}
}
template2 = {
"heat_template_version": "2013-05-23",
"resources": {
"dummy": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": "baz"
}
},
"group1": {
"type": "OS::Heat::ResourceGroup",
"properties": {
"count": 2,
"resource_def": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": {"get_attr": ["dummy", "Foo"]}
}
}
}
}
}
}
template_repl = {
"heat_template_version": "2013-05-23",
"resources": {
"group1": {
"type": "OS::Heat::ResourceGroup",
"properties": {
"count": 2,
"resource_def": {
"type": "ResourceWithListProp%index%",
"properties": {
"Foo": "Bar_%index%",
"listprop": [
"%index%_0",
"%index%_1",
"%index%_2"
]
}
}
}
}
}
}
template_attr = {
"heat_template_version": "2014-10-16",
"resources": {
"group1": {
"type": "OS::Heat::ResourceGroup",
"properties": {
"count": 2,
"resource_def": {
"type": "ResourceWithComplexAttributesType",
"properties": {
}
}
}
}
},
"outputs": {
"nested_strings": {
"value": {"get_attr": ["group1", "nested_dict", "string"]}
}
}
}
class ResourceGroupTest(common.HeatTestCase):
def setUp(self):
common.HeatTestCase.setUp(self)
self.m.StubOutWithMock(stackm.Stack, 'validate')
def test_assemble_nested(self):
"""Tests nested stack creation based on props.
Tests that the nested stack that implements the group is created
appropriately based on properties.
"""
stack = utils.parse_stack(template)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
templ = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": "Bar"
}
},
"1": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": "Bar"
}
},
"2": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"Foo": "Bar"
}
}
}
}
self.assertEqual(templ, resg._assemble_nested(['0', '1', '2']).t)
def test_assemble_nested_include(self):
templ = copy.deepcopy(template)
res_def = templ["resources"]["group1"]["properties"]['resource_def']
res_def['properties']['Foo'] = None
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {}
}
}
}
self.assertEqual(expect, resg._assemble_nested(['0']).t)
expect['resources']["0"]['properties'] = {"Foo": None}
self.assertEqual(
expect, resg._assemble_nested(['0'], include_all=True).t)
def test_assemble_nested_include_zero(self):
templ = copy.deepcopy(template)
templ['resources']['group1']['properties']['count'] = 0
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",
}
self.assertEqual(expect, resg._assemble_nested([]).t)
def test_assemble_nested_with_metadata(self):
templ = copy.deepcopy(template)
res_def = templ["resources"]["group1"]["properties"]['resource_def']
res_def['properties']['Foo'] = None
res_def['metadata'] = {
'priority': 'low',
'role': 'webserver'
}
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {},
"metadata": {
'priority': 'low',
'role': 'webserver'
}
}
}
}
self.assertEqual(expect, resg._assemble_nested(['0']).t)
def test_assemble_nested_rolling_update(self):
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "bar"
}
},
"1": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "baz"
}
}
}
}
resource_def = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
{"foo": "baz"})
stack = utils.parse_stack(template)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
resg._nested = get_fake_nested_stack(['0', '1'])
resg.build_resource_definition = mock.Mock(return_value=resource_def)
self.assertEqual(expect, resg._assemble_for_rolling_update(2, 1).t)
def test_assemble_nested_rolling_update_none(self):
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "bar"
}
},
"1": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "bar"
}
}
}
}
resource_def = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
{"foo": "baz"})
stack = utils.parse_stack(template)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
resg._nested = get_fake_nested_stack(['0', '1'])
resg.build_resource_definition = mock.Mock(return_value=resource_def)
self.assertEqual(expect, resg._assemble_for_rolling_update(2, 0).t)
def test_assemble_nested_rolling_update_failed_resource(self):
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "baz"
}
},
"1": {
"type": "OverwrittenFnGetRefIdType",
"properties": {
"foo": "bar"
}
}
}
}
resource_def = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
{"foo": "baz"})
stack = utils.parse_stack(template)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
resg._nested = get_fake_nested_stack(['0', '1'])
res0 = resg._nested['0']
res0.status = res0.FAILED
resg.build_resource_definition = mock.Mock(return_value=resource_def)
self.assertEqual(expect, resg._assemble_for_rolling_update(2, 1).t)
def test_assemble_nested_missing_param(self):
# Setup
# Change the standard testing template to use a get_param lookup
# within the resource definition
templ = copy.deepcopy(template)
res_def = templ['resources']['group1']['properties']['resource_def']
res_def['properties']['Foo'] = {'get_param': 'bar'}
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
# Test - This should not raise a ValueError about "bar" not being
# provided
nested_tmpl = resg._assemble_nested(['0', '1'])
# Verify
expected = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "OverwrittenFnGetRefIdType",
"properties": {}
},
"1": {
"type": "OverwrittenFnGetRefIdType",
"properties": {}
}
}
}
self.assertEqual(expected, nested_tmpl.t)
def test_index_var(self):
stack = utils.parse_stack(template_repl)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "ResourceWithListProp%index%",
"properties": {
"Foo": "Bar_0",
"listprop": [
"0_0", "0_1", "0_2"
]
}
},
"1": {
"type": "ResourceWithListProp%index%",
"properties": {
"Foo": "Bar_1",
"listprop": [
"1_0", "1_1", "1_2"
]
}
},
"2": {
"type": "ResourceWithListProp%index%",
"properties": {
"Foo": "Bar_2",
"listprop": [
"2_0", "2_1", "2_2"
]
}
}
}
}
nested = resg._assemble_nested(['0', '1', '2']).t
for res in nested['resources']:
res_prop = nested['resources'][res]['properties']
res_prop['listprop'] = list(res_prop['listprop'])
self.assertEqual(expect, nested)
def test_custom_index_var(self):
templ = copy.deepcopy(template_repl)
templ['resources']['group1']['properties']['index_var'] = "__foo__"
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",<|fim▁hole|> "0": {
"type": "ResourceWithListProp%index%",
"properties": {
"Foo": "Bar_%index%",
"listprop": [
"%index%_0", "%index%_1", "%index%_2"
]
}
}
}
}
nested = resg._assemble_nested(['0']).t
res_prop = nested['resources']['0']['properties']
res_prop['listprop'] = list(res_prop['listprop'])
self.assertEqual(expect, nested)
props = copy.deepcopy(templ['resources']['group1']['properties'])
res_def = props['resource_def']
res_def['properties']['Foo'] = "Bar___foo__"
res_def['properties']['listprop'] = ["__foo___0",
"__foo___1",
"__foo___2"]
res_def['type'] = "ResourceWithListProp__foo__"
snip = snip.freeze(properties=props)
resg = resource_group.ResourceGroup('test', snip, stack)
expect = {
"heat_template_version": "2015-04-30",
"resources": {
"0": {
"type": "ResourceWithListProp__foo__",
"properties": {
"Foo": "Bar_0",
"listprop": [
"0_0", "0_1", "0_2"
]
}
}
}
}
nested = resg._assemble_nested(['0']).t
res_prop = nested['resources']['0']['properties']
res_prop['listprop'] = list(res_prop['listprop'])
self.assertEqual(expect, nested)
def test_assemble_no_properties(self):
templ = copy.deepcopy(template)
res_def = templ["resources"]["group1"]["properties"]['resource_def']
del res_def['properties']
stack = utils.parse_stack(templ)
resg = stack.resources['group1']
self.assertIsNone(resg.validate())
def test_invalid_res_type(self):
"""Test that error raised for unknown resource type."""
tmp = copy.deepcopy(template)
grp_props = tmp['resources']['group1']['properties']
grp_props['resource_def']['type'] = "idontexist"
stack = utils.parse_stack(tmp)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
exc = self.assertRaises(exception.StackValidationFailed,
resg.validate)
exp_msg = 'The Resource Type (idontexist) could not be found.'
self.assertIn(exp_msg, six.text_type(exc))
def test_reference_attr(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
self.assertIsNone(resgrp.validate())
def test_invalid_removal_policies_nolist(self):
"""Test that error raised for malformed removal_policies."""
tmp = copy.deepcopy(template)
grp_props = tmp['resources']['group1']['properties']
grp_props['removal_policies'] = 'notallowed'
stack = utils.parse_stack(tmp)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
exc = self.assertRaises(exception.StackValidationFailed,
resg.validate)
errstr = "removal_policies: \"'notallowed'\" is not a list"
self.assertIn(errstr, six.text_type(exc))
def test_invalid_removal_policies_nomap(self):
"""Test that error raised for malformed removal_policies."""
tmp = copy.deepcopy(template)
grp_props = tmp['resources']['group1']['properties']
grp_props['removal_policies'] = ['notallowed']
stack = utils.parse_stack(tmp)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
exc = self.assertRaises(exception.StackValidationFailed,
resg.validate)
errstr = '"notallowed" is not a map'
self.assertIn(errstr, six.text_type(exc))
def test_child_template(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
def check_res_names(names):
self.assertEqual(list(names), ['0', '1'])
return 'tmpl'
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp._assemble_nested = mock.Mock()
resgrp._assemble_nested.side_effect = check_res_names
resgrp.properties.data[resgrp.COUNT] = 2
self.assertEqual('tmpl', resgrp.child_template())
self.assertEqual(1, resgrp._assemble_nested.call_count)
def test_child_params(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
self.assertEqual({}, resgrp.child_params())
def test_handle_create(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp.create_with_template = mock.Mock(return_value=None)
self.assertIsNone(resgrp.handle_create())
self.assertEqual(1, resgrp.create_with_template.call_count)
def test_handle_create_with_batching(self):
stack = utils.parse_stack(tmpl_with_default_updt_policy())
defn = stack.t.resource_definitions(stack)['group1']
props = stack.t.t['resources']['group1']['properties'].copy()
props['count'] = 10
update_policy = {'batch_create': {'max_batch_size': 3}}
snip = defn.freeze(properties=props, update_policy=update_policy)
resgrp = resource_group.ResourceGroup('test', snip, stack)
self.patchobject(scheduler.TaskRunner, 'start')
checkers = resgrp.handle_create()
self.assertEqual(4, len(checkers))
def test_run_to_completion(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp._check_status_complete = mock.Mock(side_effect=[False, True])
resgrp.update_with_template = mock.Mock(return_value=None)
next(resgrp._run_to_completion(snip, 200))
self.assertEqual(1, resgrp.update_with_template.call_count)
def test_update_in_failed(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp.state_set('CREATE', 'FAILED')
resgrp._assemble_nested = mock.Mock(return_value='tmpl')
resgrp.properties.data[resgrp.COUNT] = 2
self.patchobject(scheduler.TaskRunner, 'start')
resgrp.handle_update(snip, None, None)
self.assertTrue(resgrp._assemble_nested.called)
def test_handle_delete(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp.delete_nested = mock.Mock(return_value=None)
resgrp.handle_delete()
resgrp.delete_nested.assert_called_once_with()
def test_handle_update_size(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp._assemble_nested = mock.Mock(return_value=None)
resgrp.properties.data[resgrp.COUNT] = 5
self.patchobject(scheduler.TaskRunner, 'start')
resgrp.handle_update(snip, None, None)
self.assertTrue(resgrp._assemble_nested.called)
class ResourceGroupBlackList(common.HeatTestCase):
"""This class tests ResourceGroup._name_blacklist()."""
# 1) no resource_list, empty blacklist
# 2) no resource_list, existing blacklist
# 3) resource_list not in nested()
# 4) resource_list (refid) not in nested()
# 5) resource_list in nested() -> saved
# 6) resource_list (refid) in nested() -> saved
scenarios = [
('1', dict(data_in=None, rm_list=[],
nested_rsrcs=[], expected=[],
saved=False)),
('2', dict(data_in='0,1,2', rm_list=[],
nested_rsrcs=[], expected=['0', '1', '2'],
saved=False)),
('3', dict(data_in='1,3', rm_list=['6'],
nested_rsrcs=['0', '1', '3'],
expected=['1', '3'],
saved=False)),
('4', dict(data_in='0,1', rm_list=['id-7'],
nested_rsrcs=['0', '1', '3'],
expected=['0', '1'],
saved=False)),
('5', dict(data_in='0,1', rm_list=['3'],
nested_rsrcs=['0', '1', '3'],
expected=['0', '1', '3'],
saved=True)),
('6', dict(data_in='0,1', rm_list=['id-3'],
nested_rsrcs=['0', '1', '3'],
expected=['0', '1', '3'],
saved=True)),
]
def test_blacklist(self):
stack = utils.parse_stack(template)
resg = stack['group1']
# mock properties
resg.properties = mock.MagicMock()
resg.properties.__getitem__.return_value = [
{'resource_list': self.rm_list}]
# mock data get/set
resg.data = mock.Mock()
resg.data.return_value.get.return_value = self.data_in
resg.data_set = mock.Mock()
# mock nested access
def stack_contains(name):
return name in self.nested_rsrcs
def by_refid(name):
rid = name.replace('id-', '')
if rid not in self.nested_rsrcs:
return None
res = mock.Mock()
res.name = rid
return res
nested = mock.MagicMock()
nested.__contains__.side_effect = stack_contains
nested.__iter__.side_effect = iter(self.nested_rsrcs)
nested.resource_by_refid.side_effect = by_refid
resg.nested = mock.Mock(return_value=nested)
blacklist = resg._name_blacklist()
self.assertEqual(set(self.expected), blacklist)
if self.saved:
resg.data_set.assert_called_once_with('name_blacklist',
','.join(blacklist))
class ResourceGroupEmptyParams(common.HeatTestCase):
"""This class tests ResourceGroup.build_resource_definition()."""
scenarios = [
('non_empty', dict(value='Bar', expected={'Foo': 'Bar'},
expected_include={'Foo': 'Bar'})),
('empty_None', dict(value=None, expected={},
expected_include={'Foo': None})),
('empty_boolean', dict(value=False, expected={'Foo': False},
expected_include={'Foo': False})),
('empty_string', dict(value='', expected={'Foo': ''},
expected_include={'Foo': ''})),
('empty_number', dict(value=0, expected={'Foo': 0},
expected_include={'Foo': 0})),
('empty_json', dict(value={}, expected={'Foo': {}},
expected_include={'Foo': {}})),
('empty_list', dict(value=[], expected={'Foo': []},
expected_include={'Foo': []}))
]
def test_definition(self):
templ = copy.deepcopy(template)
res_def = templ["resources"]["group1"]["properties"]['resource_def']
res_def['properties']['Foo'] = self.value
stack = utils.parse_stack(templ)
snip = stack.t.resource_definitions(stack)['group1']
resg = resource_group.ResourceGroup('test', snip, stack)
exp1 = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
self.expected)
exp2 = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
self.expected_include)
rdef = resg.get_resource_def()
self.assertEqual(exp1, resg.build_resource_definition('0', rdef))
rdef = resg.get_resource_def(include_all=True)
self.assertEqual(
exp2, resg.build_resource_definition('0', rdef))
class ResourceGroupNameListTest(common.HeatTestCase):
"""This class tests ResourceGroup._resource_names()."""
# 1) no blacklist, 0 count
# 2) no blacklist, x count
    # 3) blacklist (not affecting)
# 4) blacklist with pruning
scenarios = [
('1', dict(blacklist=[], count=0,
expected=[])),
('2', dict(blacklist=[], count=4,
expected=['0', '1', '2', '3'])),
('3', dict(blacklist=['5', '6'], count=3,
expected=['0', '1', '2'])),
('4', dict(blacklist=['2', '4'], count=4,
expected=['0', '1', '3', '5'])),
]
def test_names(self):
stack = utils.parse_stack(template)
resg = stack['group1']
resg.properties = mock.MagicMock()
resg.properties.get.return_value = self.count
resg._name_blacklist = mock.MagicMock(return_value=self.blacklist)
self.assertEqual(self.expected, list(resg._resource_names()))
class ResourceGroupAttrTest(common.HeatTestCase):
def test_aggregate_attribs(self):
"""Test attribute aggregation.
Test attribute aggregation and that we mimic the nested resource's
attributes.
"""
resg = self._create_dummy_stack()
expected = ['0', '1']
self.assertEqual(expected, resg.FnGetAtt('foo'))
self.assertEqual(expected, resg.FnGetAtt('Foo'))
def test_index_dotted_attribs(self):
"""Test attribute aggregation.
Test attribute aggregation and that we mimic the nested resource's
attributes.
"""
resg = self._create_dummy_stack()
self.assertEqual('0', resg.FnGetAtt('resource.0.Foo'))
self.assertEqual('1', resg.FnGetAtt('resource.1.Foo'))
def test_index_path_attribs(self):
"""Test attribute aggregation.
Test attribute aggregation and that we mimic the nested resource's
attributes.
"""
resg = self._create_dummy_stack()
self.assertEqual('0', resg.FnGetAtt('resource.0', 'Foo'))
self.assertEqual('1', resg.FnGetAtt('resource.1', 'Foo'))
def test_index_deep_path_attribs(self):
"""Test attribute aggregation.
Test attribute aggregation and that we mimic the nested resource's
attributes.
"""
resg = self._create_dummy_stack(template_attr,
expect_attrs={'0': 2, '1': 2})
self.assertEqual(2, resg.FnGetAtt('resource.0',
'nested_dict', 'dict', 'b'))
self.assertEqual(2, resg.FnGetAtt('resource.1',
'nested_dict', 'dict', 'b'))
def test_aggregate_deep_path_attribs(self):
"""Test attribute aggregation.
Test attribute aggregation and that we mimic the nested resource's
attributes.
"""
resg = self._create_dummy_stack(template_attr,
expect_attrs={'0': 3, '1': 3})
expected = [3, 3]
self.assertEqual(expected, resg.FnGetAtt('nested_dict', 'list', 2))
def test_aggregate_refs(self):
"""Test resource id aggregation."""
resg = self._create_dummy_stack()
expected = ['ID-0', 'ID-1']
self.assertEqual(expected, resg.FnGetAtt("refs"))
def test_aggregate_refs_with_index(self):
"""Test resource id aggregation with index."""
resg = self._create_dummy_stack()
expected = ['ID-0', 'ID-1']
self.assertEqual(expected[0], resg.FnGetAtt("refs", 0))
self.assertEqual(expected[1], resg.FnGetAtt("refs", 1))
self.assertIsNone(resg.FnGetAtt("refs", 2))
def test_aggregate_refs_map(self):
resg = self._create_dummy_stack()
found = resg.FnGetAtt("refs_map")
expected = {'0': 'ID-0', '1': 'ID-1'}
self.assertEqual(expected, found)
def test_aggregate_outputs(self):
"""Test outputs aggregation."""
expected = {'0': ['foo', 'bar'], '1': ['foo', 'bar']}
resg = self._create_dummy_stack(template_attr, expect_attrs=expected)
self.assertEqual(expected, resg.FnGetAtt('attributes', 'list'))
def test_aggregate_outputs_no_path(self):
"""Test outputs aggregation with missing path."""
resg = self._create_dummy_stack(template_attr)
self.assertRaises(exception.InvalidTemplateAttribute,
resg.FnGetAtt, 'attributes')
def test_index_refs(self):
"""Tests getting ids of individual resources."""
resg = self._create_dummy_stack()
self.assertEqual("ID-0", resg.FnGetAtt('resource.0'))
self.assertEqual("ID-1", resg.FnGetAtt('resource.1'))
self.assertRaises(exception.InvalidTemplateAttribute, resg.FnGetAtt,
'resource.2')
@mock.patch.object(grouputils, 'get_rsrc_id')
def test_get_attribute(self, mock_get_rsrc_id):
stack = utils.parse_stack(template)
mock_get_rsrc_id.side_effect = ['0', '1']
rsrc = stack['group1']
self.assertEqual(['0', '1'], rsrc.FnGetAtt(rsrc.REFS))
def test_get_attribute_convg(self):
cache_data = {'group1': {
'uuid': mock.ANY,
'id': mock.ANY,
'action': 'CREATE',
'status': 'COMPLETE',
'attrs': {'refs': ['rsrc1', 'rsrc2']}
}}
stack = utils.parse_stack(template, cache_data=cache_data)
rsrc = stack['group1']
self.assertEqual(['rsrc1', 'rsrc2'], rsrc.FnGetAtt(rsrc.REFS))
def _create_dummy_stack(self, template_data=template, expect_count=2,
expect_attrs=None):
stack = utils.parse_stack(template_data)
resg = stack['group1']
fake_res = {}
if expect_attrs is None:
expect_attrs = {}
for resc in range(expect_count):
res = str(resc)
fake_res[res] = mock.Mock()
fake_res[res].stack = stack
fake_res[res].FnGetRefId.return_value = 'ID-%s' % res
if res in expect_attrs:
fake_res[res].FnGetAtt.return_value = expect_attrs[res]
else:
fake_res[res].FnGetAtt.return_value = res
resg.nested = mock.Mock(return_value=fake_res)
names = [str(name) for name in range(expect_count)]
resg._resource_names = mock.Mock(return_value=names)
return resg
class ReplaceTest(common.HeatTestCase):
# 1. no min_in_service
# 2. min_in_service > count and existing with no blacklist
# 3. min_in_service > count and existing with blacklist
# 4. existing > count and min_in_service with blacklist
# 5. existing > count and min_in_service with no blacklist
# 6. all existing blacklisted
# 7. count > existing and min_in_service with no blacklist
# 8. count > existing and min_in_service with blacklist
# 9. count < existing - blacklisted
# 10. pause_sec > 0
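    # Each scenario drives _replace(min_in_service, batch_size, pause_sec)
    # against a fake nested stack built from 'existing', with the target size
    # taken from 'count' and the blacklist from 'black_listed'; 'tasks' is
    # the expected number of update tasks returned.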
scenarios = [
('1', dict(min_in_service=0, count=2,
existing=['0', '1'], black_listed=['0'],
batch_size=1, pause_sec=0, tasks=2)),
('2', dict(min_in_service=3, count=2,
existing=['0', '1'], black_listed=[],
batch_size=2, pause_sec=0, tasks=3)),
('3', dict(min_in_service=3, count=2,
existing=['0', '1'], black_listed=['0'],
batch_size=2, pause_sec=0, tasks=3)),
('4', dict(min_in_service=3, count=2,
existing=['0', '1', '2', '3'], black_listed=['2', '3'],
batch_size=1, pause_sec=0, tasks=4)),
('5', dict(min_in_service=2, count=2,
existing=['0', '1', '2', '3'], black_listed=[],
batch_size=2, pause_sec=0, tasks=2)),
('6', dict(min_in_service=2, count=3,
existing=['0', '1'], black_listed=['0', '1'],
batch_size=2, pause_sec=0, tasks=2)),
('7', dict(min_in_service=0, count=5,
existing=['0', '1'], black_listed=[],
batch_size=1, pause_sec=0, tasks=5)),
('8', dict(min_in_service=0, count=5,
existing=['0', '1'], black_listed=['0'],
batch_size=1, pause_sec=0, tasks=5)),
('9', dict(min_in_service=0, count=3,
existing=['0', '1', '2', '3', '4', '5'],
black_listed=['0'],
batch_size=2, pause_sec=0, tasks=2)),
('10', dict(min_in_service=0, count=3,
existing=['0', '1', '2', '3', '4', '5'],
black_listed=['0'],
batch_size=2, pause_sec=10, tasks=3))]
def setUp(self):
super(ReplaceTest, self).setUp()
templ = copy.deepcopy(template)
self.stack = utils.parse_stack(templ)
snip = self.stack.t.resource_definitions(self.stack)['group1']
self.group = resource_group.ResourceGroup('test', snip, self.stack)
self.group.update_with_template = mock.Mock()
self.group.check_update_complete = mock.Mock()
def test_rolling_updates(self):
self.group._nested = get_fake_nested_stack(self.existing)
self.group.get_size = mock.Mock(return_value=self.count)
self.group._name_blacklist = mock.Mock(
return_value=set(self.black_listed))
tasks = self.group._replace(self.min_in_service, self.batch_size,
self.pause_sec)
self.assertEqual(self.tasks,
len(tasks))
def tmpl_with_bad_updt_policy():
t = copy.deepcopy(template)
rg = t['resources']['group1']
rg["update_policy"] = {"foo": {}}
return t
def tmpl_with_default_updt_policy():
t = copy.deepcopy(template)
rg = t['resources']['group1']
rg["update_policy"] = {"rolling_update": {}}
return t
def tmpl_with_updt_policy():
t = copy.deepcopy(template)
rg = t['resources']['group1']
rg["update_policy"] = {"rolling_update": {
"min_in_service": "1",
"max_batch_size": "2",
"pause_time": "1"
}}
return t
def get_fake_nested_stack(names):
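    """Build a nested stack with one OverwrittenFnGetRefIdType per name."""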
nested_t = '''
heat_template_version: 2015-04-30
description: Resource Group
resources:
'''
resource_snip = '''
'%s':
type: OverwrittenFnGetRefIdType
properties:
foo: bar
'''
resources = [nested_t]
for res_name in names:
resources.extend([resource_snip % res_name])
nested_t = ''.join(resources)
return utils.parse_stack(template_format.parse(nested_t))
class RollingUpdatePolicyTest(common.HeatTestCase):
def setUp(self):
super(RollingUpdatePolicyTest, self).setUp()
def test_parse_without_update_policy(self):
stack = utils.parse_stack(template)
stack.validate()
grp = stack['group1']
self.assertFalse(grp.update_policy['rolling_update'])
def test_parse_with_update_policy(self):
tmpl = tmpl_with_updt_policy()
stack = utils.parse_stack(tmpl)
stack.validate()
tmpl_grp = tmpl['resources']['group1']
tmpl_policy = tmpl_grp['update_policy']['rolling_update']
tmpl_batch_sz = int(tmpl_policy['max_batch_size'])
grp = stack['group1']
self.assertTrue(grp.update_policy)
self.assertEqual(2, len(grp.update_policy))
self.assertIn('rolling_update', grp.update_policy)
policy = grp.update_policy['rolling_update']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(1, int(policy['min_in_service']))
self.assertEqual(tmpl_batch_sz, int(policy['max_batch_size']))
self.assertEqual(1, policy['pause_time'])
def test_parse_with_default_update_policy(self):
tmpl = tmpl_with_default_updt_policy()
stack = utils.parse_stack(tmpl)
stack.validate()
grp = stack['group1']
self.assertTrue(grp.update_policy)
self.assertEqual(2, len(grp.update_policy))
self.assertIn('rolling_update', grp.update_policy)
policy = grp.update_policy['rolling_update']
self.assertTrue(policy and len(policy) > 0)
self.assertEqual(0, int(policy['min_in_service']))
self.assertEqual(1, int(policy['max_batch_size']))
self.assertEqual(0, policy['pause_time'])
def test_parse_with_bad_update_policy(self):
tmpl = tmpl_with_bad_updt_policy()
stack = utils.parse_stack(tmpl)
error = self.assertRaises(
exception.StackValidationFailed, stack.validate)
self.assertIn("foo", six.text_type(error))
class RollingUpdatePolicyDiffTest(common.HeatTestCase):
def setUp(self):
super(RollingUpdatePolicyDiffTest, self).setUp()
def validate_update_policy_diff(self, current, updated):
# load current stack
current_stack = utils.parse_stack(current)
current_grp = current_stack['group1']
current_grp_json = current_grp.frozen_definition()
updated_stack = utils.parse_stack(updated)
updated_grp = updated_stack['group1']
updated_grp_json = updated_grp.t.freeze()
# identify the template difference
tmpl_diff = updated_grp.update_template_diff(
updated_grp_json, current_grp_json)
self.assertTrue(tmpl_diff.update_policy_changed())
# test application of the new update policy in handle_update
current_grp._try_rolling_update = mock.Mock()
current_grp._assemble_nested_for_size = mock.Mock()
self.patchobject(scheduler.TaskRunner, 'start')
current_grp.handle_update(updated_grp_json, tmpl_diff, None)
self.assertEqual(updated_grp_json._update_policy or {},
current_grp.update_policy.data)
def test_update_policy_added(self):
self.validate_update_policy_diff(template,
tmpl_with_updt_policy())
def test_update_policy_updated(self):
updt_template = tmpl_with_updt_policy()
grp = updt_template['resources']['group1']
policy = grp['update_policy']['rolling_update']
policy['min_in_service'] = '2'
policy['max_batch_size'] = '4'
policy['pause_time'] = '90'
self.validate_update_policy_diff(tmpl_with_updt_policy(),
updt_template)
def test_update_policy_removed(self):
self.validate_update_policy_diff(tmpl_with_updt_policy(),
template)
class RollingUpdateTest(common.HeatTestCase):
def setUp(self):
super(RollingUpdateTest, self).setUp()
def check_with_update(self, with_policy=False, with_diff=False):
current = copy.deepcopy(template)
self.current_stack = utils.parse_stack(current)
self.current_grp = self.current_stack['group1']
current_grp_json = self.current_grp.frozen_definition()
prop_diff, tmpl_diff = None, None
updated = tmpl_with_updt_policy() if (
with_policy) else copy.deepcopy(template)
if with_diff:
res_def = updated['resources']['group1'][
'properties']['resource_def']
res_def['properties']['Foo'] = 'baz'
prop_diff = dict(
{'count': 2,
'resource_def': {'properties': {'Foo': 'baz'},
'type': 'OverwrittenFnGetRefIdType'}})
updated_stack = utils.parse_stack(updated)
updated_grp = updated_stack['group1']
updated_grp_json = updated_grp.t.freeze()
tmpl_diff = updated_grp.update_template_diff(
updated_grp_json, current_grp_json)
self.current_grp._replace = mock.Mock(return_value=[])
self.current_grp._assemble_nested = mock.Mock()
self.patchobject(scheduler.TaskRunner, 'start')
self.current_grp.handle_update(updated_grp_json, tmpl_diff, prop_diff)
def test_update_without_policy_prop_diff(self):
self.check_with_update(with_diff=True)
self.assertTrue(self.current_grp._assemble_nested.called)
def test_update_with_policy_prop_diff(self):
self.check_with_update(with_policy=True, with_diff=True)
self.current_grp._replace.assert_called_once_with(1, 2, 1)
self.assertTrue(self.current_grp._assemble_nested.called)
def test_update_time_not_sufficient(self):
current = copy.deepcopy(template)
self.stack = utils.parse_stack(current)
self.current_grp = self.stack['group1']
self.stack.timeout_secs = mock.Mock(return_value=200)
err = self.assertRaises(ValueError, self.current_grp._update_timeout,
3, 100)
self.assertIn('The current update policy will result in stack update '
'timeout.', six.text_type(err))
def test_update_time_sufficient(self):
current = copy.deepcopy(template)
self.stack = utils.parse_stack(current)
self.current_grp = self.stack['group1']
self.stack.timeout_secs = mock.Mock(return_value=400)
self.assertEqual(200, self.current_grp._update_timeout(3, 100))
class TestUtils(common.HeatTestCase):
# 1. No existing no blacklist
# 2. Existing with no blacklist
# 3. Existing with blacklist
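    # 4. Existing with blacklist that also names a non-existent resource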
scenarios = [
('1', dict(existing=[], black_listed=[], count=0)),
('2', dict(existing=['0', '1'], black_listed=[], count=0)),
('3', dict(existing=['0', '1'], black_listed=['0'], count=1)),
('4', dict(existing=['0', '1'], black_listed=['1', '2'], count=1))
]
def setUp(self):
super(TestUtils, self).setUp()
def test_count_black_listed(self):
stack = utils.parse_stack(template2)
snip = stack.t.resource_definitions(stack)['group1']
resgrp = resource_group.ResourceGroup('test', snip, stack)
resgrp._nested = get_fake_nested_stack(self.existing)
resgrp._name_blacklist = mock.Mock(return_value=set(self.black_listed))
rcount = resgrp._count_black_listed()
self.assertEqual(self.count, rcount)
class TestGetBatches(common.HeatTestCase):
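    # Scenario names encode targ_cap_init_cap_bat_size_min_serv; each entry
    # in 'batches' is (expected group size, members updated in that batch,
    # names of the members updated).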
scenarios = [
('4_4_1_0', dict(targ_cap=4, init_cap=4, bat_size=1, min_serv=0,
batches=[
(4, 1, ['4']),
(4, 1, ['3']),
(4, 1, ['2']),
(4, 1, ['1']),
])),
('4_4_1_4', dict(targ_cap=4, init_cap=4, bat_size=1, min_serv=4,
batches=[
(5, 1, ['5']),
(5, 1, ['4']),
(5, 1, ['3']),
(5, 1, ['2']),
(5, 1, ['1']),
(4, 0, []),
])),
('4_4_1_5', dict(targ_cap=4, init_cap=4, bat_size=1, min_serv=5,
batches=[
(5, 1, ['5']),
(5, 1, ['4']),
(5, 1, ['3']),
(5, 1, ['2']),
(5, 1, ['1']),
(4, 0, []),
])),
('4_4_2_0', dict(targ_cap=4, init_cap=4, bat_size=2, min_serv=0,
batches=[
(4, 2, ['4', '3']),
(4, 2, ['2', '1']),
])),
('4_4_2_4', dict(targ_cap=4, init_cap=4, bat_size=2, min_serv=4,
batches=[
(6, 2, ['6', '5']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
(4, 0, []),
])),
('5_5_2_0', dict(targ_cap=5, init_cap=5, bat_size=2, min_serv=0,
batches=[
(5, 2, ['5', '4']),
(5, 2, ['3', '2']),
(5, 1, ['1']),
])),
('5_5_2_4', dict(targ_cap=5, init_cap=5, bat_size=2, min_serv=4,
batches=[
(6, 2, ['6', '5']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
(5, 0, []),
])),
('3_3_2_0', dict(targ_cap=3, init_cap=3, bat_size=2, min_serv=0,
batches=[
(3, 2, ['3', '2']),
(3, 1, ['1']),
])),
('3_3_2_4', dict(targ_cap=3, init_cap=3, bat_size=2, min_serv=4,
batches=[
(5, 2, ['5', '4']),
(5, 2, ['3', '2']),
(4, 1, ['1']),
(3, 0, []),
])),
('4_4_4_0', dict(targ_cap=4, init_cap=4, bat_size=4, min_serv=0,
batches=[
(4, 4, ['4', '3', '2', '1']),
])),
('4_4_5_0', dict(targ_cap=4, init_cap=4, bat_size=5, min_serv=0,
batches=[
(4, 4, ['4', '3', '2', '1']),
])),
('4_4_4_1', dict(targ_cap=4, init_cap=4, bat_size=4, min_serv=1,
batches=[
(5, 4, ['5', '4', '3', '2']),
(4, 1, ['1']),
])),
('4_4_6_1', dict(targ_cap=4, init_cap=4, bat_size=6, min_serv=1,
batches=[
(5, 4, ['5', '4', '3', '2']),
(4, 1, ['1']),
])),
('4_4_4_2', dict(targ_cap=4, init_cap=4, bat_size=4, min_serv=2,
batches=[
(6, 4, ['6', '5', '4', '3']),
(4, 2, ['2', '1']),
])),
('4_4_4_4', dict(targ_cap=4, init_cap=4, bat_size=4, min_serv=4,
batches=[
(8, 4, ['8', '7', '6', '5']),
(8, 4, ['4', '3', '2', '1']),
(4, 0, []),
])),
('4_4_5_6', dict(targ_cap=4, init_cap=4, bat_size=5, min_serv=6,
batches=[
(8, 4, ['8', '7', '6', '5']),
(8, 4, ['4', '3', '2', '1']),
(4, 0, []),
])),
('4_7_1_0', dict(targ_cap=4, init_cap=7, bat_size=1, min_serv=0,
batches=[
(4, 1, ['4']),
(4, 1, ['3']),
(4, 1, ['2']),
(4, 1, ['1']),
])),
('4_7_1_4', dict(targ_cap=4, init_cap=7, bat_size=1, min_serv=4,
batches=[
(5, 1, ['4']),
(5, 1, ['3']),
(5, 1, ['2']),
(5, 1, ['1']),
(4, 0, []),
])),
('4_7_1_5', dict(targ_cap=4, init_cap=7, bat_size=1, min_serv=5,
batches=[
(5, 1, ['4']),
(5, 1, ['3']),
(5, 1, ['2']),
(5, 1, ['1']),
(4, 0, []),
])),
('4_7_2_0', dict(targ_cap=4, init_cap=7, bat_size=2, min_serv=0,
batches=[
(4, 2, ['4', '3']),
(4, 2, ['2', '1']),
])),
('4_7_2_4', dict(targ_cap=4, init_cap=7, bat_size=2, min_serv=4,
batches=[
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
(4, 0, []),
])),
('5_7_2_0', dict(targ_cap=5, init_cap=7, bat_size=2, min_serv=0,
batches=[
(5, 2, ['5', '4']),
(5, 2, ['3', '2']),
(5, 1, ['1']),
])),
('5_7_2_4', dict(targ_cap=5, init_cap=7, bat_size=2, min_serv=4,
batches=[
(6, 2, ['5', '4']),
(6, 2, ['3', '2']),
(5, 1, ['1']),
])),
('4_7_4_4', dict(targ_cap=4, init_cap=7, bat_size=4, min_serv=4,
batches=[
(8, 4, ['8', '4', '3', '2']),
(5, 1, ['1']),
(4, 0, []),
])),
('4_7_5_6', dict(targ_cap=4, init_cap=7, bat_size=5, min_serv=6,
batches=[
(8, 4, ['8', '4', '3', '2']),
(5, 1, ['1']),
(4, 0, []),
])),
('6_4_1_0', dict(targ_cap=6, init_cap=4, bat_size=1, min_serv=0,
batches=[
(5, 1, ['5']),
(6, 1, ['6']),
(6, 1, ['4']),
(6, 1, ['3']),
(6, 1, ['2']),
(6, 1, ['1']),
])),
('6_4_1_4', dict(targ_cap=6, init_cap=4, bat_size=1, min_serv=4,
batches=[
(5, 1, ['5']),
(6, 1, ['6']),
(6, 1, ['4']),
(6, 1, ['3']),
(6, 1, ['2']),
(6, 1, ['1']),
])),
('6_4_1_5', dict(targ_cap=6, init_cap=4, bat_size=1, min_serv=5,
batches=[
(5, 1, ['5']),
(6, 1, ['6']),
(6, 1, ['4']),
(6, 1, ['3']),
(6, 1, ['2']),
(6, 1, ['1']),
])),
('6_4_2_0', dict(targ_cap=6, init_cap=4, bat_size=2, min_serv=0,
batches=[
(6, 2, ['5', '6']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
])),
('6_4_2_4', dict(targ_cap=6, init_cap=4, bat_size=2, min_serv=4,
batches=[
(6, 2, ['5', '6']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
])),
('6_5_2_0', dict(targ_cap=6, init_cap=5, bat_size=2, min_serv=0,
batches=[
(6, 2, ['6', '5']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
])),
('6_5_2_4', dict(targ_cap=6, init_cap=5, bat_size=2, min_serv=4,
batches=[
(6, 2, ['6', '5']),
(6, 2, ['4', '3']),
(6, 2, ['2', '1']),
])),
('6_3_2_0', dict(targ_cap=6, init_cap=3, bat_size=2, min_serv=0,
batches=[
(5, 2, ['4', '5']),
(6, 2, ['6', '3']),
(6, 2, ['2', '1']),
])),
('6_3_2_4', dict(targ_cap=6, init_cap=3, bat_size=2, min_serv=4,
batches=[
(5, 2, ['4', '5']),
(6, 2, ['6', '3']),
(6, 2, ['2', '1']),
])),
('6_4_4_0', dict(targ_cap=6, init_cap=4, bat_size=4, min_serv=0,
batches=[
(6, 4, ['5', '6', '4', '3']),
(6, 2, ['2', '1']),
])),
('6_4_5_0', dict(targ_cap=6, init_cap=4, bat_size=5, min_serv=0,
batches=[
(6, 5, ['5', '6', '4', '3', '2']),
(6, 1, ['1']),
])),
('6_4_4_1', dict(targ_cap=6, init_cap=4, bat_size=4, min_serv=1,
batches=[
(6, 4, ['5', '6', '4', '3']),
(6, 2, ['2', '1']),
])),
('6_4_6_1', dict(targ_cap=6, init_cap=4, bat_size=6, min_serv=1,
batches=[
(7, 6, ['5', '6', '7', '4', '3', '2']),
(6, 1, ['1']),
])),
('6_4_4_2', dict(targ_cap=6, init_cap=4, bat_size=4, min_serv=2,
batches=[
(6, 4, ['5', '6', '4', '3']),
(6, 2, ['2', '1']),
])),
('6_4_4_4', dict(targ_cap=6, init_cap=4, bat_size=4, min_serv=4,
batches=[
(8, 4, ['8', '7', '6', '5']),
(8, 4, ['4', '3', '2', '1']),
(6, 0, []),
])),
('6_4_5_6', dict(targ_cap=6, init_cap=4, bat_size=5, min_serv=6,
batches=[
(9, 5, ['9', '8', '7', '6', '5']),
(10, 4, ['10', '4', '3', '2']),
(7, 1, ['1']),
(6, 0, []),
])),
]
def setUp(self):
super(TestGetBatches, self).setUp()
self.stack = utils.parse_stack(template)
self.grp = self.stack['group1']
self.grp._name_blacklist = mock.Mock(return_value={'0'})
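        # '0' is blacklisted, so new member names start at '1' and the
        # expected name sets in the scenarios never include '0'.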
def test_get_batches(self):
batches = list(self.grp._get_batches(self.targ_cap,
self.init_cap,
self.bat_size,
self.min_serv))
self.assertEqual([(s, u) for s, u, n in self.batches], batches)
def test_assemble(self):
old_def = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
{"foo": "baz"})
new_def = rsrc_defn.ResourceDefinition(
None,
"OverwrittenFnGetRefIdType",
{"foo": "bar"})
resources = [(str(i), old_def) for i in range(self.init_cap + 1)]
self.grp.get_size = mock.Mock(return_value=self.targ_cap)
self.patchobject(grouputils, 'get_member_definitions',
return_value=resources)
self.grp.build_resource_definition = mock.Mock(return_value=new_def)
all_updated_names = set()
for size, max_upd, names in self.batches:
template = self.grp._assemble_for_rolling_update(size,
max_upd,
names)
res_dict = template.resource_definitions(self.stack)
expected_names = set(map(str, range(1, size + 1)))
self.assertEqual(expected_names, set(res_dict))
all_updated_names &= expected_names
all_updated_names |= set(names)
updated = set(n for n, v in res_dict.items() if v != old_def)
self.assertEqual(all_updated_names, updated)
resources[:] = sorted(res_dict.items(), key=lambda i: int(i[0]))<|fim▁end|> | "resources": { |
<|file_name|>auth.spec.ts<|end_file_name|><|fim▁begin|>/*!
* @license
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
import * as jwt from 'jsonwebtoken';
import * as _ from 'lodash';
import * as chai from 'chai';
import * as sinon from 'sinon';
import * as sinonChai from 'sinon-chai';
import * as chaiAsPromised from 'chai-as-promised';
import * as utils from '../utils';
import * as mocks from '../../resources/mocks';
import { FirebaseApp } from '../../../src/app/firebase-app';
import {
AuthRequestHandler, TenantAwareAuthRequestHandler, AbstractAuthRequestHandler,
} from '../../../src/auth/auth-api-request';
import { AuthClientErrorCode, FirebaseAuthError } from '../../../src/utils/error';
import * as validator from '../../../src/utils/validator';
import { FirebaseTokenVerifier } from '../../../src/auth/token-verifier';
import {
OIDCConfig, SAMLConfig, OIDCConfigServerResponse, SAMLConfigServerResponse,
} from '../../../src/auth/auth-config';
import { deepCopy } from '../../../src/utils/deep-copy';
import { ServiceAccountCredential } from '../../../src/app/credential-internal';
import { HttpClient } from '../../../src/utils/api-request';
import {
Auth, TenantAwareAuth, BaseAuth, UserRecord, DecodedIdToken,
UpdateRequest, AuthProviderConfigFilter, TenantManager,
} from '../../../src/auth/index';
chai.should();
chai.use(sinonChai);
chai.use(chaiAsPromised);
const expect = chai.expect;
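// Describes one flavor of Auth under test: the concrete class, its request
// handler, and a factory that builds an instance from a FirebaseApp.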
interface AuthTest {
name: string;
supportsTenantManagement: boolean;
Auth: new (...args: any[]) => BaseAuth;
RequestHandler: new (...args: any[]) => AbstractAuthRequestHandler;
init(app: FirebaseApp): BaseAuth;
}
interface EmailActionTest {
api: string;
requestType: string;
requiresSettings: boolean;
}
/**
 * @param {string=} tenantId The optional tenant ID.
* @return {object} A sample valid server response as returned from getAccountInfo
* endpoint.
*/
function getValidGetAccountInfoResponse(tenantId?: string): {kind: string; users: any[]} {
const userResponse: any = {
localId: 'abcdefghijklmnopqrstuvwxyz',
email: '[email protected]',
emailVerified: true,
displayName: 'John Doe',
phoneNumber: '+11234567890',
providerUserInfo: [
{
providerId: 'google.com',
displayName: 'John Doe',
photoUrl: 'https://lh3.googleusercontent.com/1234567890/photo.jpg',
federatedId: '1234567890',
email: '[email protected]',
rawId: '1234567890',
},
{
providerId: 'facebook.com',
displayName: 'John Smith',
photoUrl: 'https://facebook.com/0987654321/photo.jpg',
federatedId: '0987654321',
email: '[email protected]',
rawId: '0987654321',
},
{
providerId: 'phone',
phoneNumber: '+11234567890',
rawId: '+11234567890',
},
],
mfaInfo: [
{
mfaEnrollmentId: 'enrolledSecondFactor1',
phoneInfo: '+16505557348',
displayName: 'Spouse\'s phone number',
enrolledAt: new Date().toISOString(),
},
{
mfaEnrollmentId: 'enrolledSecondFactor2',
phoneInfo: '+16505551000',
},
],
photoUrl: 'https://lh3.googleusercontent.com/1234567890/photo.jpg',
validSince: '1476136676',
lastLoginAt: '1476235905000',
createdAt: '1476136676000',
};
if (typeof tenantId !== 'undefined') {
userResponse.tenantId = tenantId;
}
return {
kind: 'identitytoolkit#GetAccountInfoResponse',
users: [userResponse],
};
}
/**
* Returns a user record corresponding to the getAccountInfo response.
*
* @param {any} serverResponse Raw getAccountInfo response.
* @return {Object} The corresponding user record.
*/
function getValidUserRecord(serverResponse: any): UserRecord {
return new UserRecord(serverResponse.users[0]);
}
/**
* Generates a mock decoded ID token with the provided parameters.
*
* @param {string} uid The uid corresponding to the ID token.
* @param {Date} authTime The authentication time of the ID token.
* @param {string=} tenantId The optional tenant ID.
* @return {DecodedIdToken} The generated decoded ID token.
*/
function getDecodedIdToken(uid: string, authTime: Date, tenantId?: string): DecodedIdToken {
return {
iss: 'https://securetoken.google.com/project123456789',
aud: 'project123456789',
auth_time: Math.floor(authTime.getTime() / 1000),
sub: uid,
iat: Math.floor(authTime.getTime() / 1000),
exp: Math.floor(authTime.getTime() / 1000 + 3600),
firebase: {
identities: {},
sign_in_provider: 'custom',
tenant: tenantId,
},
uid,
};
}
/**
* Generates a mock decoded session cookie with the provided parameters.
*
* @param {string} uid The uid corresponding to the session cookie.
* @param {Date} authTime The authentication time of the session cookie.
* @param {string=} tenantId The optional tenant ID.
* @return {DecodedIdToken} The generated decoded session cookie.
*/
function getDecodedSessionCookie(uid: string, authTime: Date, tenantId?: string): DecodedIdToken {
return {
iss: 'https://session.firebase.google.com/project123456789',
aud: 'project123456789',
auth_time: Math.floor(authTime.getTime() / 1000),
sub: uid,
iat: Math.floor(authTime.getTime() / 1000),
exp: Math.floor(authTime.getTime() / 1000 + 3600),
firebase: {
identities: {},
sign_in_provider: 'custom',
tenant: tenantId,
},
uid,
};
}
/**
* Generates a mock OIDC config server response for the corresponding provider ID.
*
* @param {string} providerId The provider ID whose sample OIDCConfigServerResponse is to be returned.
* @return {OIDCConfigServerResponse} The corresponding sample OIDCConfigServerResponse.
*/
function getOIDCConfigServerResponse(providerId: string): OIDCConfigServerResponse {
return {
name: `projects/project_id/oauthIdpConfigs/${providerId}`,
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
}
/**
* Generates a mock SAML config server response for the corresponding provider ID.
*
* @param {string} providerId The provider ID whose sample SAMLConfigServerResponse is to be returned.
* @return {SAMLConfigServerResponse} The corresponding sample SAMLConfigServerResponse.
*/
function getSAMLConfigServerResponse(providerId: string): SAMLConfigServerResponse {
return {
name: `projects/project_id/inboundSamlConfigs/${providerId}`,
idpConfig: {
idpEntityId: 'IDP_ENTITY_ID',
ssoUrl: 'https://example.com/login',
signRequest: true,
idpCertificates: [
{ x509Certificate: 'CERT1' },
{ x509Certificate: 'CERT2' },
],
},
spConfig: {
spEntityId: 'RP_ENTITY_ID',
callbackUri: 'https://projectId.firebaseapp.com/__/auth/handler',
},
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
};
}
const INVALID_PROVIDER_IDS = [
undefined, null, NaN, 0, 1, true, false, '', [], [1, 'a'], {}, { a: 1 }, _.noop];
const TENANT_ID = 'tenantId';
const AUTH_CONFIGS: AuthTest[] = [
{
name: 'Auth',
Auth,
supportsTenantManagement: true,
RequestHandler: AuthRequestHandler,
init: (app: FirebaseApp) => {
return new Auth(app);
},
},
{
name: 'TenantAwareAuth',
Auth: TenantAwareAuth,
supportsTenantManagement: false,
RequestHandler: TenantAwareAuthRequestHandler,
init: (app: FirebaseApp) => {
return new TenantAwareAuth(app, TENANT_ID);
},
},
];
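// The suite below runs once per AUTH_CONFIGS entry; the TenantAwareAuth run
// mainly adds the requirement that verified ID tokens and session cookies
// carry a matching tenant ID.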
AUTH_CONFIGS.forEach((testConfig) => {
describe(testConfig.name, () => {
let auth: BaseAuth;
let mockApp: FirebaseApp;
let getTokenStub: sinon.SinonStub;
let oldProcessEnv: NodeJS.ProcessEnv;
let nullAccessTokenAuth: BaseAuth;
let malformedAccessTokenAuth: BaseAuth;
let rejectedPromiseAccessTokenAuth: BaseAuth;
beforeEach(() => {
mockApp = mocks.app();
getTokenStub = utils.stubGetAccessToken(undefined, mockApp);
auth = testConfig.init(mockApp);
nullAccessTokenAuth = testConfig.init(mocks.appReturningNullAccessToken());
malformedAccessTokenAuth = testConfig.init(mocks.appReturningMalformedAccessToken());
rejectedPromiseAccessTokenAuth = testConfig.init(mocks.appRejectedWhileFetchingAccessToken());
oldProcessEnv = process.env;
// Project ID not set in the environment.
delete process.env.GOOGLE_CLOUD_PROJECT;
delete process.env.GCLOUD_PROJECT;
});
afterEach(() => {
getTokenStub.restore();
process.env = oldProcessEnv;
return mockApp.delete();
});
if (testConfig.Auth === Auth) {
// Run tests for Auth.
describe('Constructor', () => {
const invalidApps = [null, NaN, 0, 1, true, false, '', 'a', [], [1, 'a'], {}, { a: 1 }, _.noop];
invalidApps.forEach((invalidApp) => {
it('should throw given invalid app: ' + JSON.stringify(invalidApp), () => {
expect(() => {
const authAny: any = Auth;
return new authAny(invalidApp);
}).to.throw('First argument passed to admin.auth() must be a valid Firebase app instance.');
});
});
it('should throw given no app', () => {
expect(() => {
const authAny: any = Auth;
return new authAny();
}).to.throw('First argument passed to admin.auth() must be a valid Firebase app instance.');
});
it('should reject given no project ID', () => {
const authWithoutProjectId = new Auth(mocks.mockCredentialApp());
authWithoutProjectId.getUser('uid')
.should.eventually.be.rejectedWith(
'Failed to determine project ID for Auth. Initialize the SDK with service '
+ 'account credentials or set project ID as an app option. Alternatively set the '
+ 'GOOGLE_CLOUD_PROJECT environment variable.');
});
it('should not throw given a valid app', () => {
expect(() => {
return new Auth(mockApp);
}).not.to.throw();
});
});
describe('app', () => {
it('returns the app from the constructor', () => {
// We expect referential equality here
expect((auth as Auth).app).to.equal(mockApp);
});
it('is read-only', () => {
expect(() => {
(auth as any).app = mockApp;
}).to.throw('Cannot set property app of #<Auth> which has only a getter');
});
});
describe('tenantManager()', () => {
it('should return a TenantManager with the expected attributes', () => {
const tenantManager1 = (auth as Auth).tenantManager();
const tenantManager2 = new TenantManager(mockApp);
expect(tenantManager1).to.deep.equal(tenantManager2);
});
it('should return the same cached instance', () => {
const tenantManager1 = (auth as Auth).tenantManager();
const tenantManager2 = (auth as Auth).tenantManager();
expect(tenantManager1).to.equal(tenantManager2);
});
});
}
describe('createCustomToken()', () => {
it('should return a jwt', async () => {
const token = await auth.createCustomToken('uid1');
const decodedToken = jwt.decode(token, { complete: true });
expect(decodedToken).to.have.property('header').that.has.property('typ', 'JWT');
});
if (testConfig.Auth === TenantAwareAuth) {
it('should contain tenant_id', async () => {
const token = await auth.createCustomToken('uid1');
expect(jwt.decode(token)).to.have.property('tenant_id', TENANT_ID);
});
} else {
it('should not contain tenant_id', async () => {
const token = await auth.createCustomToken('uid1');
expect(jwt.decode(token)).to.not.have.property('tenant_id');
});
}
it('should be eventually rejected if a cert credential is not specified', () => {
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
// Force the service account ID discovery to fail.
getTokenStub = sinon.stub(HttpClient.prototype, 'send').rejects(utils.errorFrom({}));
return mockCredentialAuth.createCustomToken(mocks.uid, mocks.developerClaims)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-credential');
});
it('should be fulfilled given an app which returns null access tokens', () => {
getTokenStub = sinon.stub(ServiceAccountCredential.prototype, 'getAccessToken')
.resolves(null as any);
// createCustomToken() does not rely on an access token and therefore works in this scenario.
return auth.createCustomToken(mocks.uid, mocks.developerClaims)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which returns invalid access tokens', () => {
getTokenStub = sinon.stub(ServiceAccountCredential.prototype, 'getAccessToken')
.resolves('malformed' as any);
// createCustomToken() does not rely on an access token and therefore works in this scenario.
return auth.createCustomToken(mocks.uid, mocks.developerClaims)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which fails to generate access tokens', () => {
getTokenStub = sinon.stub(ServiceAccountCredential.prototype, 'getAccessToken').rejects('error');
// createCustomToken() does not rely on an access token and therefore works in this scenario.
return auth.createCustomToken(mocks.uid, mocks.developerClaims)
.should.eventually.be.fulfilled;
});
});
it('verifyIdToken() should reject when project ID is not specified', () => {
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
const expected = 'Must initialize app with a cert credential or set your Firebase project ID ' +
'as the GOOGLE_CLOUD_PROJECT environment variable to call verifyIdToken().';
return mockCredentialAuth.verifyIdToken(mocks.generateIdToken())
.should.eventually.be.rejectedWith(expected);
});
it('verifySessionCookie() should reject when project ID is not specified', () => {
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
const expected = 'Must initialize app with a cert credential or set your Firebase project ID ' +
'as the GOOGLE_CLOUD_PROJECT environment variable to call verifySessionCookie().';
return mockCredentialAuth.verifySessionCookie(mocks.generateSessionCookie())
.should.eventually.be.rejectedWith(expected);
});
describe('verifyIdToken()', () => {
let stub: sinon.SinonStub;
let mockIdToken: string;
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedUserRecord = getValidUserRecord(getValidGetAccountInfoResponse(tenantId));
// Set auth_time of token to expected user's tokensValidAfterTime.
expect(
expectedUserRecord.tokensValidAfterTime,
"getValidUserRecord didn't properly set tokensValueAfterTime",
).to.exist;
const validSince = new Date(expectedUserRecord.tokensValidAfterTime!);
// Set expected uid to expected user's.
const uid = expectedUserRecord.uid;
// Set expected decoded ID token with expected UID and auth time.
const decodedIdToken = getDecodedIdToken(uid, validSince, tenantId);
let clock: sinon.SinonFakeTimers;
// Stubs used to simulate underlying api calls.
const stubs: sinon.SinonStub[] = [];
beforeEach(() => {
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(decodedIdToken);
stubs.push(stub);
mockIdToken = mocks.generateIdToken();
clock = sinon.useFakeTimers(validSince.getTime());
});
afterEach(() => {
_.forEach(stubs, (s) => s.restore());
clock.restore();
});
it('should forward on the call to the token generator\'s verifyIdToken() method', () => {
// Stub getUser call.
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser');
stubs.push(getUserStub);
return auth.verifyIdToken(mockIdToken).then((result) => {
// Confirm getUser never called.
expect(getUserStub).not.to.have.been.called;
expect(result).to.deep.equal(decodedIdToken);
expect(stub).to.have.been.calledOnce.and.calledWith(mockIdToken);
});
});
it('should reject when underlying idTokenVerifier.verifyJWT() rejects with expected error', () => {
const expectedError = new FirebaseAuthError(
AuthClientErrorCode.INVALID_ARGUMENT, 'Decoding Firebase ID token failed');
// Restore verifyIdToken stub.
stub.restore();
// Simulate ID token is invalid.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.rejects(expectedError);
stubs.push(stub);
return auth.verifyIdToken(mockIdToken)
.should.eventually.be.rejectedWith('Decoding Firebase ID token failed');
});
it('should be rejected with checkRevoked set to true and corresponding user disabled', () => {
const expectedAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
expectedAccountInfoResponse.users[0].disabled = true;
const expectedUserRecordDisabled = getValidUserRecord(expectedAccountInfoResponse);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecordDisabled);
expect(expectedUserRecordDisabled.disabled).to.be.equal(true);
stubs.push(getUserStub);
return auth.verifyIdToken(mockIdToken, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/user-disabled');
});
});
it('verifyIdToken() should reject user disabled before ID tokens revoked', () => {
const expectedAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
const expectedAccountInfoResponseUserDisabled = Object.assign({}, expectedAccountInfoResponse);
expectedAccountInfoResponseUserDisabled.users[0].disabled = true;
const expectedUserRecordDisabled = getValidUserRecord(expectedAccountInfoResponseUserDisabled);
const validSince = new Date(expectedUserRecordDisabled.tokensValidAfterTime!);
// Restore verifyIdToken stub.
stub.restore();
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(getDecodedIdToken(expectedUserRecordDisabled.uid, oneSecBeforeValidSince));
stubs.push(stub);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecordDisabled);
expect(expectedUserRecordDisabled.disabled).to.be.equal(true);
stubs.push(getUserStub);
return auth.verifyIdToken(mockIdToken, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/user-disabled');
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(expectedUserRecordDisabled.uid);
});
});
it('should work with a non-cert credential when the GOOGLE_CLOUD_PROJECT environment variable is present', () => {
process.env.GOOGLE_CLOUD_PROJECT = mocks.projectId;
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
return mockCredentialAuth.verifyIdToken(mockIdToken).then(() => {
expect(stub).to.have.been.calledOnce.and.calledWith(mockIdToken);
});
});
it('should work with a non-cert credential when the GCLOUD_PROJECT environment variable is present', () => {
process.env.GCLOUD_PROJECT = mocks.projectId;
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
return mockCredentialAuth.verifyIdToken(mockIdToken).then(() => {
expect(stub).to.have.been.calledOnce.and.calledWith(mockIdToken);
});
});
it('should be fulfilled given an app which returns null access tokens', () => {
// verifyIdToken() does not rely on an access token and therefore works in this scenario.
return nullAccessTokenAuth.verifyIdToken(mockIdToken)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which returns invalid access tokens', () => {
// verifyIdToken() does not rely on an access token and therefore works in this scenario.
return malformedAccessTokenAuth.verifyIdToken(mockIdToken)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which fails to generate access tokens', () => {
// verifyIdToken() does not rely on an access token and therefore works in this scenario.
return rejectedPromiseAccessTokenAuth.verifyIdToken(mockIdToken)
.should.eventually.be.fulfilled;
});
it('should be fulfilled with checkRevoked set to true using an unrevoked ID token', () => {
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecord);
stubs.push(getUserStub);
// Verify ID token while checking if revoked.
return auth.verifyIdToken(mockIdToken, true)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
expect(result).to.deep.equal(decodedIdToken);
});
});
it('should be rejected with checkRevoked set to true using a revoked ID token', () => {
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
// Restore verifyIdToken stub.
stub.restore();
// Simulate revoked ID token returned with auth_time one second before validSince.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(getDecodedIdToken(uid, oneSecBeforeValidSince));
stubs.push(stub);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecord);
stubs.push(getUserStub);
// Verify ID token while checking if revoked.
return auth.verifyIdToken(mockIdToken, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/id-token-revoked');
});
});
it('should be fulfilled with checkRevoked set to false using a revoked ID token', () => {
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
const oneSecBeforeValidSinceDecodedIdToken =
getDecodedIdToken(uid, oneSecBeforeValidSince, tenantId);
// Restore verifyIdToken stub.
stub.restore();
// Simulate revoked ID token returned with auth_time one second before validSince.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(oneSecBeforeValidSinceDecodedIdToken);
stubs.push(stub);
// Verify ID token without checking if revoked.
// This call should succeed.
return auth.verifyIdToken(mockIdToken, false)
.then((result) => {
expect(result).to.deep.equal(oneSecBeforeValidSinceDecodedIdToken);
});
});
it('should be rejected with checkRevoked set to true if underlying RPC fails', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.rejects(expectedError);
stubs.push(getUserStub);
// Verify ID token while checking if revoked.
// This should fail with the underlying RPC error.
return auth.verifyIdToken(mockIdToken, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
it('should be fulfilled with checkRevoked set to true when no validSince available', () => {
// Simulate no validSince set on the user.
const noValidSinceGetAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
delete (noValidSinceGetAccountInfoResponse.users[0] as any).validSince;
const noValidSinceExpectedUserRecord =
getValidUserRecord(noValidSinceGetAccountInfoResponse);
        // Confirm tokensValidAfterTime is not set on the user.
expect(noValidSinceExpectedUserRecord.tokensValidAfterTime).to.be.undefined;
// Simulate getUser returns the expected user with no validSince.
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(noValidSinceExpectedUserRecord);
stubs.push(getUserStub);
// Verify ID token while checking if revoked.
return auth.verifyIdToken(mockIdToken, true)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
expect(result).to.deep.equal(decodedIdToken);
});
});
it('should be rejected with checkRevoked set to true using an invalid ID token', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CREDENTIAL);
// Restore verifyIdToken stub.
stub.restore();
// Simulate ID token is invalid.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.rejects(expectedError);
stubs.push(stub);
// Verify ID token while checking if revoked.
// This should fail with the underlying token generator verifyIdToken error.
return auth.verifyIdToken(mockIdToken, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
if (testConfig.Auth === TenantAwareAuth) {
it('should be rejected with ID token missing tenant ID', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.MISMATCHING_TENANT_ID);
// Restore verifyIdToken stub.
stub.restore();
// Simulate JWT does not contain tenant ID.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.returns(Promise.resolve(getDecodedIdToken(uid, validSince)));
// Verify ID token.
return auth.verifyIdToken(mockIdToken)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.deep.include(expectedError);
});
});
it('should be rejected with ID token containing mismatching tenant ID', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.MISMATCHING_TENANT_ID);
// Restore verifyIdToken stub.
stub.restore();
// Simulate JWT does not contain matching tenant ID.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.returns(Promise.resolve(getDecodedIdToken(uid, validSince, 'otherTenantId')));
// Verify ID token.
return auth.verifyIdToken(mockIdToken)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.deep.include(expectedError);
});
});
}
});
describe('verifySessionCookie()', () => {
let stub: sinon.SinonStub;
let mockSessionCookie: string;
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedUserRecord = getValidUserRecord(getValidGetAccountInfoResponse(tenantId));
// Set auth_time of token to expected user's tokensValidAfterTime.
if (!expectedUserRecord.tokensValidAfterTime) {
throw new Error("getValidUserRecord didn't properly set tokensValidAfterTime.");
}
const validSince = new Date(expectedUserRecord.tokensValidAfterTime);
// Set expected uid to expected user's.
const uid = expectedUserRecord.uid;
// Set expected decoded session cookie with expected UID and auth time.
const decodedSessionCookie = getDecodedSessionCookie(uid, validSince, tenantId);
let clock: sinon.SinonFakeTimers;
// Stubs used to simulate underlying api calls.
const stubs: sinon.SinonStub[] = [];
beforeEach(() => {
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(decodedSessionCookie);
stubs.push(stub);
mockSessionCookie = mocks.generateSessionCookie();
clock = sinon.useFakeTimers(validSince.getTime());
});
afterEach(() => {
_.forEach(stubs, (s) => s.restore());
clock.restore();
});
it('should forward on the call to the token verifier\'s verifySessionCookie() method', () => {
// Stub getUser call.
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser');
stubs.push(getUserStub);
return auth.verifySessionCookie(mockSessionCookie).then((result) => {
// Confirm getUser never called.
expect(getUserStub).not.to.have.been.called;
expect(result).to.deep.equal(decodedSessionCookie);
expect(stub).to.have.been.calledOnce.and.calledWith(mockSessionCookie);
});
});
it('should reject when underlying sessionCookieVerifier.verifyJWT() rejects with expected error', () => {
const expectedError = new FirebaseAuthError(
AuthClientErrorCode.INVALID_ARGUMENT, 'Decoding Firebase session cookie failed');
// Restore verifySessionCookie stub.
stub.restore();
// Simulate session cookie is invalid.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.rejects(expectedError);
stubs.push(stub);
return auth.verifySessionCookie(mockSessionCookie)
.should.eventually.be.rejectedWith('Decoding Firebase session cookie failed');
});
it('should work with a non-cert credential when the GOOGLE_CLOUD_PROJECT environment variable is present', () => {
process.env.GOOGLE_CLOUD_PROJECT = mocks.projectId;
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
return mockCredentialAuth.verifySessionCookie(mockSessionCookie).then(() => {
expect(stub).to.have.been.calledOnce.and.calledWith(mockSessionCookie);
});
});
it('should work with a non-cert credential when the GCLOUD_PROJECT environment variable is present', () => {
process.env.GCLOUD_PROJECT = mocks.projectId;
const mockCredentialAuth = testConfig.init(mocks.mockCredentialApp());
return mockCredentialAuth.verifySessionCookie(mockSessionCookie).then(() => {
expect(stub).to.have.been.calledOnce.and.calledWith(mockSessionCookie);
});
});
it('should be fulfilled given an app which returns null access tokens', () => {
// verifySessionCookie() does not rely on an access token and therefore works in this scenario.
return nullAccessTokenAuth.verifySessionCookie(mockSessionCookie)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which returns invalid access tokens', () => {
// verifySessionCookie() does not rely on an access token and therefore works in this scenario.
return malformedAccessTokenAuth.verifySessionCookie(mockSessionCookie)
.should.eventually.be.fulfilled;
});
it('should be fulfilled given an app which fails to generate access tokens', () => {
// verifySessionCookie() does not rely on an access token and therefore works in this scenario.
return rejectedPromiseAccessTokenAuth.verifySessionCookie(mockSessionCookie)
.should.eventually.be.fulfilled;
});
it('should be fulfilled with checkRevoked set to true using an unrevoked session cookie', () => {
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecord);
stubs.push(getUserStub);
        // Verify session cookie while checking if revoked.
return auth.verifySessionCookie(mockSessionCookie, true)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
expect(result).to.deep.equal(decodedSessionCookie);
});
});
it('should be rejected with checkRevoked set to true using a revoked session cookie', () => {
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
// Restore verifySessionCookie stub.
stub.restore();
// Simulate revoked session cookie returned with auth_time one second before validSince.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(getDecodedSessionCookie(uid, oneSecBeforeValidSince));
stubs.push(stub);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecord);
stubs.push(getUserStub);
// Verify session cookie while checking if revoked.
return auth.verifySessionCookie(mockSessionCookie, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/session-cookie-revoked');
});
});
it('should be fulfilled with checkRevoked set to false using a revoked session cookie', () => {
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
const oneSecBeforeValidSinceDecodedSessionCookie =
getDecodedSessionCookie(uid, oneSecBeforeValidSince, tenantId);
// Restore verifySessionCookie stub.
stub.restore();
// Simulate revoked session cookie returned with auth_time one second before validSince.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(oneSecBeforeValidSinceDecodedSessionCookie);
stubs.push(stub);
// Verify session cookie without checking if revoked.
// This call should succeed.
return auth.verifySessionCookie(mockSessionCookie, false)
.then((result) => {
expect(result).to.deep.equal(oneSecBeforeValidSinceDecodedSessionCookie);
});
});
it('should be rejected with checkRevoked set to true if underlying RPC fails', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.rejects(expectedError);
stubs.push(getUserStub);
// Verify session cookie while checking if revoked.
// This should fail with the underlying RPC error.
return auth.verifySessionCookie(mockSessionCookie, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
it('should be rejected with checkRevoked set to true and corresponding user disabled', () => {
const expectedAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
expectedAccountInfoResponse.users[0].disabled = true;
const expectedUserRecordDisabled = getValidUserRecord(expectedAccountInfoResponse);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecordDisabled);
expect(expectedUserRecordDisabled.disabled).to.be.equal(true);
stubs.push(getUserStub);
return auth.verifySessionCookie(mockSessionCookie, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/user-disabled');
});
});
it('verifySessionCookie() should reject user disabled before ID tokens revoked', () => {
const expectedAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
const expectedAccountInfoResponseUserDisabled = Object.assign({}, expectedAccountInfoResponse);
expectedAccountInfoResponseUserDisabled.users[0].disabled = true;
const expectedUserRecordDisabled = getValidUserRecord(expectedAccountInfoResponseUserDisabled);
const validSince = new Date(expectedUserRecordDisabled.tokensValidAfterTime!);
// Restore verifySessionCookie stub.
stub.restore();
// One second before validSince.
const oneSecBeforeValidSince = new Date(validSince.getTime() - 1000);
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.resolves(getDecodedIdToken(expectedUserRecordDisabled.uid, oneSecBeforeValidSince));
stubs.push(stub);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(expectedUserRecordDisabled);
expect(expectedUserRecordDisabled.disabled).to.be.equal(true);
stubs.push(getUserStub);
return auth.verifySessionCookie(mockSessionCookie, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(expectedUserRecordDisabled.uid);
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/user-disabled');
});
});
it('should be fulfilled with checkRevoked set to true when no validSince available', () => {
// Simulate no validSince set on the user.
const noValidSinceGetAccountInfoResponse = getValidGetAccountInfoResponse(tenantId);
delete (noValidSinceGetAccountInfoResponse.users[0] as any).validSince;
const noValidSinceExpectedUserRecord =
getValidUserRecord(noValidSinceGetAccountInfoResponse);
// Confirm undefined tokensValidAfterTime on user.
expect(noValidSinceExpectedUserRecord.tokensValidAfterTime).to.be.undefined;
// Simulate getUser returns the expected user with no validSince.
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(noValidSinceExpectedUserRecord);
stubs.push(getUserStub);
// Verify session cookie while checking if revoked.
return auth.verifySessionCookie(mockSessionCookie, true)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
expect(result).to.deep.equal(decodedSessionCookie);
});
});
it('should be rejected with checkRevoked set to true using an invalid session cookie', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CREDENTIAL);
// Restore verifySessionCookie stub.
stub.restore();
// Simulate session cookie is invalid.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.rejects(expectedError);
stubs.push(stub);
// Verify session cookie while checking if revoked.
// This should fail with the underlying token generator verifySessionCookie error.
return auth.verifySessionCookie(mockSessionCookie, true)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
if (testConfig.Auth === TenantAwareAuth) {
it('should be rejected with session cookie missing tenant ID', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.MISMATCHING_TENANT_ID);
// Restore verifySessionCookie stub.
stub.restore();
// Simulate JWT does not contain tenant ID.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.returns(Promise.resolve(getDecodedSessionCookie(uid, validSince)));
// Verify session cookie token.
return auth.verifySessionCookie(mockSessionCookie)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.deep.include(expectedError);
});
});
it('should be rejected with session cookie containing mismatching tenant ID', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.MISMATCHING_TENANT_ID);
// Restore verifySessionCookie stub.
stub.restore();
// Simulate JWT does not contain matching tenant ID.
stub = sinon.stub(FirebaseTokenVerifier.prototype, 'verifyJWT')
.returns(Promise.resolve(getDecodedSessionCookie(uid, validSince, 'otherTenantId')));
// Verify session cookie token.
return auth.verifySessionCookie(mockSessionCookie)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.deep.include(expectedError);
});
});
}
});
describe('getUser()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isUid'));
afterEach(() => {
(validator.isUid as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no uid', () => {
return (auth as any).getUser()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-uid');
});
it('should be rejected given an invalid uid', () => {
const invalidUid = ('a' as any).repeat(129);
return auth.getUser(invalidUid)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-uid');
expect(validator.isUid).to.have.been.calledOnce.and.calledWith(invalidUid);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.getUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.getUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.getUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on success', () => {
// Stub getAccountInfoByUid to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(stub);
return auth.getUser(uid)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getAccountInfoByUid to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.rejects(expectedError);
stubs.push(stub);
return auth.getUser(uid)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('getUserByEmail()', () => {
const email = '[email protected]';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isEmail'));
afterEach(() => {
(validator.isEmail as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no email', () => {
return (auth as any).getUserByEmail()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-email');
});
it('should be rejected given an invalid email', () => {
const invalidEmail = 'name-example-com';
return auth.getUserByEmail(invalidEmail)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-email');
expect(validator.isEmail).to.have.been.calledOnce.and.calledWith(invalidEmail);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.getUserByEmail(email)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.getUserByEmail(email)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.getUserByEmail(email)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on success', () => {
// Stub getAccountInfoByEmail to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByEmail')
.resolves(expectedGetAccountInfoResult);
stubs.push(stub);
return auth.getUserByEmail(email)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(email);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getAccountInfoByEmail to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByEmail')
.rejects(expectedError);
stubs.push(stub);
return auth.getUserByEmail(email)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(email);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('getUserByPhoneNumber()', () => {
const phoneNumber = '+11234567890';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isPhoneNumber'));
afterEach(() => {
(validator.isPhoneNumber as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no phone number', () => {
return (auth as any).getUserByPhoneNumber()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-phone-number');
});
it('should be rejected given an invalid phone number', () => {
const invalidPhoneNumber = 'invalid';
return auth.getUserByPhoneNumber(invalidPhoneNumber)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-phone-number');
expect(validator.isPhoneNumber)
.to.have.been.calledOnce.and.calledWith(invalidPhoneNumber);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.getUserByPhoneNumber(phoneNumber)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.getUserByPhoneNumber(phoneNumber)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.getUserByPhoneNumber(phoneNumber)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on success', () => {
// Stub getAccountInfoByPhoneNumber to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByPhoneNumber')
.resolves(expectedGetAccountInfoResult);
stubs.push(stub);
return auth.getUserByPhoneNumber(phoneNumber)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(phoneNumber);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getAccountInfoByPhoneNumber to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByPhoneNumber')
.rejects(expectedError);
stubs.push(stub);
return auth.getUserByPhoneNumber(phoneNumber)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(phoneNumber);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('getUserByProviderUid()', () => {
const providerId = 'google.com';
const providerUid = 'google_uid';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isEmail'));
afterEach(() => {
(validator.isEmail as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no provider id', () => {
expect(() => (auth as any).getUserByProviderUid())
.to.throw(FirebaseAuthError)
.with.property('code', 'auth/invalid-provider-id');
});
it('should be rejected given an invalid provider id', () => {
expect(() => auth.getUserByProviderUid('', 'uid'))
.to.throw(FirebaseAuthError)
.with.property('code', 'auth/invalid-provider-id');
});
it('should be rejected given an invalid provider uid', () => {
expect(() => auth.getUserByProviderUid('id', ''))
.to.throw(FirebaseAuthError)
.with.property('code', 'auth/invalid-provider-id');
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.getUserByProviderUid(providerId, providerUid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.getUserByProviderUid(providerId, providerUid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.getUserByProviderUid(providerId, providerUid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on success', () => {
// Stub getAccountInfoByFederatedUid to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByFederatedUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(stub);
return auth.getUserByProviderUid(providerId, providerUid)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId, providerUid);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
describe('non-federated providers', () => {
let invokeRequestHandlerStub: sinon.SinonStub;
beforeEach(() => {
invokeRequestHandlerStub = sinon.stub(testConfig.RequestHandler.prototype, 'invokeRequestHandler')
.resolves({
// nothing here is checked; we just need enough to not crash.
users: [{
localId: 1,
}],
});
});
afterEach(() => {
invokeRequestHandlerStub.restore();
});
it('phone lookups should use phoneNumber field', async () => {
await auth.getUserByProviderUid('phone', '+15555550001');
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
phoneNumber: ['+15555550001'],
});
});
it('email lookups should use email field', async () => {
await auth.getUserByProviderUid('email', '[email protected]');
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
email: ['[email protected]'],
});
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getAccountInfoByFederatedUid to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByFederatedUid')
.rejects(expectedError);
stubs.push(stub);
return auth.getUserByProviderUid(providerId, providerUid)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId, providerUid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('getUsers()', () => {
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
stubs.forEach((stub) => stub.restore());
stubs = [];
});
it('should throw when given a non array parameter', () => {
const nonArrayValues = [ null, undefined, 42, 3.14, "i'm not an array", {} ];
nonArrayValues.forEach((v) => {
expect(() => auth.getUsers(v as any))
.to.throw(FirebaseAuthError)
.with.property('code', 'auth/argument-error');
});
});
it('should return no results when given no identifiers', () => {
return auth.getUsers([])
.then((getUsersResult) => {
expect(getUsersResult.users).to.deep.equal([]);
expect(getUsersResult.notFound).to.deep.equal([]);
});
});
it('should return no users when given identifiers that do not exist', () => {
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByIdentifiers')
.resolves({});
stubs.push(stub);
const notFoundIds = [{ uid: 'id that doesnt exist' }];
return auth.getUsers(notFoundIds)
.then((getUsersResult) => {
expect(getUsersResult.users).to.deep.equal([]);
expect(getUsersResult.notFound).to.deep.equal(notFoundIds);
});
});
it('returns users by various identifier types in a single call', async () => {
const mockUsers = [{
localId: 'uid1',
email: '[email protected]',
phoneNumber: '+15555550001',
}, {
localId: 'uid2',
email: '[email protected]',
phoneNumber: '+15555550002',
}, {
localId: 'uid3',
email: '[email protected]',
phoneNumber: '+15555550003',
}, {
localId: 'uid4',
email: '[email protected]',
phoneNumber: '+15555550004',
providerUserInfo: [{
providerId: 'google.com',
rawId: 'google_uid4',
}],
}];
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByIdentifiers')
.resolves({ users: mockUsers });
stubs.push(stub);
const users = await auth.getUsers([
{ uid: 'uid1' },
{ email: '[email protected]' },
{ phoneNumber: '+15555550003' },
{ providerId: 'google.com', providerUid: 'google_uid4' },
{ uid: 'this-user-doesnt-exist' },
]);
expect(users.users).to.have.deep.members(mockUsers.map((u) => new UserRecord(u)));
expect(users.notFound).to.have.deep.members([{ uid: 'this-user-doesnt-exist' }]);
});
});
describe('deleteUser()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const expectedDeleteAccountResult = { kind: 'identitytoolkit#DeleteAccountResponse' };
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isUid'));
afterEach(() => {
(validator.isUid as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no uid', () => {
return (auth as any).deleteUser()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-uid');
});
it('should be rejected given an invalid uid', () => {
const invalidUid = ('a' as any).repeat(129);
return auth.deleteUser(invalidUid)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-uid');
expect(validator.isUid).to.have.been.calledOnce.and.calledWith(invalidUid);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.deleteUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.deleteUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.deleteUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with void on success', () => {
// Stub deleteAccount to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteAccount')
.resolves(expectedDeleteAccountResult);
stubs.push(stub);
return auth.deleteUser(uid)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected result is undefined.
expect(result).to.be.undefined;
});
});
it('should throw an error when the backend returns an error', () => {
// Stub deleteAccount to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteAccount')
.rejects(expectedError);
stubs.push(stub);
return auth.deleteUser(uid)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('deleteUsers()', () => {
it('should succeed given an empty list', () => {
return auth.deleteUsers([])
.then((deleteUsersResult) => {
expect(deleteUsersResult.successCount).to.equal(0);
expect(deleteUsersResult.failureCount).to.equal(0);
expect(deleteUsersResult.errors).to.have.length(0);
});
});
it('should index errors correctly in result', async () => {
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteAccounts')
.resolves({
errors: [{
index: 0,
localId: 'uid1',
message: 'NOT_DISABLED : Disable the account before batch deletion.',
}, {
index: 2,
localId: 'uid3',
message: 'something awful',
}],
});
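// Index 0 maps to auth/user-not-disabled; the unrecognized message at index 2 falls back to auth/internal-error.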
try {
const deleteUsersResult = await auth.deleteUsers(['uid1', 'uid2', 'uid3', 'uid4']);
expect(deleteUsersResult.successCount).to.equal(2);
expect(deleteUsersResult.failureCount).to.equal(2);
expect(deleteUsersResult.errors).to.have.length(2);
expect(deleteUsersResult.errors[0].index).to.equal(0);
expect(deleteUsersResult.errors[0].error).to.have.property('code', 'auth/user-not-disabled');
expect(deleteUsersResult.errors[1].index).to.equal(2);
expect(deleteUsersResult.errors[1].error).to.have.property('code', 'auth/internal-error');
} finally {
stub.restore();
}
});
it('should resolve with a DeleteUsersResult on success', async () => {
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteAccounts')
.resolves({});
try {
await auth.deleteUsers(['uid1', 'uid2', 'uid3'])
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(['uid1', 'uid2', 'uid3']);
expect(result.failureCount).to.equal(0);
expect(result.successCount).to.equal(3);
expect(result.errors).to.be.empty;
});
} finally {
stub.restore();
}
});
});
describe('createUser()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(
AuthClientErrorCode.INTERNAL_ERROR,
'Unable to create the user record provided.');
const unableToCreateUserError = new FirebaseAuthError(
AuthClientErrorCode.INTERNAL_ERROR,
'Unable to create the user record provided.');
const propertiesToCreate = {
displayName: expectedUserRecord.displayName,
photoURL: expectedUserRecord.photoURL,
email: expectedUserRecord.email,
emailVerified: expectedUserRecord.emailVerified,
password: 'password',
phoneNumber: expectedUserRecord.phoneNumber,
};
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => sinon.spy(validator, 'isNonNullObject'));
afterEach(() => {
(validator.isNonNullObject as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no properties', () => {
return (auth as any).createUser()
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given invalid properties', () => {
return auth.createUser(null as any)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/argument-error');
expect(validator.isNonNullObject).to.have.been.calledOnce.and.calledWith(null);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.createUser(propertiesToCreate)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.createUser(propertiesToCreate)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.createUser(propertiesToCreate)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on createNewAccount request success', () => {
// Stub createNewAccount to return expected uid.
const createUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'createNewAccount')
.resolves(uid);
// Stub getAccountInfoByUid to return expected result.
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(createUserStub);
stubs.push(getUserStub);
return auth.createUser(propertiesToCreate)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(createUserStub).to.have.been.calledOnce.and.calledWith(propertiesToCreate);
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
it('should throw an error when createNewAccount returns an error', () => {
// Stub createNewAccount to throw a backend error.
const createUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'createNewAccount')
.rejects(expectedError);
stubs.push(createUserStub);
return auth.createUser(propertiesToCreate)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createUserStub).to.have.been.calledOnce.and.calledWith(propertiesToCreate);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
it('should throw an error when getUser returns a User not found error', () => {
// Stub createNewAccount to return expected uid.
const createUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'createNewAccount')
.resolves(uid);
// Stub getAccountInfoByUid to throw user not found error.
const userNotFoundError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.rejects(userNotFoundError);
stubs.push(createUserStub);
stubs.push(getUserStub);
return auth.createUser(propertiesToCreate)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createUserStub).to.have.been.calledOnce.and.calledWith(propertiesToCreate);
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error.toString()).to.equal(unableToCreateUserError.toString());
});
});
it('should echo getUser error if an error occurs while retrieving the user record', () => {
// Stub createNewAccount to return expected uid.
const createUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'createNewAccount')
.resolves(uid);
// Stub getAccountInfoByUid to throw expected error.
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.rejects(expectedError);
stubs.push(createUserStub);
stubs.push(getUserStub);
return auth.createUser(propertiesToCreate)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createUserStub).to.have.been.calledOnce.and.calledWith(propertiesToCreate);
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned (same error thrown by getUser).
expect(error).to.equal(expectedError);
});
});
});
describe('updateUser()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const expectedGetAccountInfoResult = getValidGetAccountInfoResponse(tenantId);
const expectedUserRecord = getValidUserRecord(expectedGetAccountInfoResult);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
const propertiesToEdit = {
displayName: expectedUserRecord.displayName,
photoURL: expectedUserRecord.photoURL,
email: expectedUserRecord.email,
emailVerified: expectedUserRecord.emailVerified,
password: 'password',
phoneNumber: expectedUserRecord.phoneNumber,
providerToLink: {
providerId: 'google.com',
uid: 'google_uid',
},
};
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => {
sinon.spy(validator, 'isUid');
sinon.spy(validator, 'isNonNullObject');
});
afterEach(() => {
(validator.isUid as any).restore();
(validator.isNonNullObject as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no uid', () => {
return (auth as any).updateUser(undefined, propertiesToEdit)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-uid');
});
it('should be rejected given an invalid uid', () => {
const invalidUid = ('a' as any).repeat(129);
return auth.updateUser(invalidUid, propertiesToEdit)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-uid');
expect(validator.isUid).to.have.been.calledOnce.and.calledWith(invalidUid);
});
});
it('should be rejected given no properties', () => {
return (auth as any).updateUser(uid)
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given invalid properties', () => {
return auth.updateUser(uid, null as unknown as UpdateRequest)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/argument-error');
expect(validator.isNonNullObject).to.have.been.calledWith(null);
});
});
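// providerToLink requires both a non-empty providerId and a non-empty uid; each request below omits or empties one of them.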
const invalidUpdateRequests: UpdateRequest[] = [
{ providerToLink: { uid: 'google_uid' } },
{ providerToLink: { providerId: 'google.com' } },
{ providerToLink: { providerId: 'google.com', uid: '' } },
{ providerToLink: { providerId: '', uid: 'google_uid' } },
];
invalidUpdateRequests.forEach((invalidUpdateRequest) => {
it('should be rejected given an UpdateRequest with an invalid providerToLink parameter', () => {
expect(() => {
auth.updateUser(uid, invalidUpdateRequest);
}).to.throw(FirebaseAuthError).with.property('code', 'auth/argument-error');
});
});
it('should rename providerToLink property to linkProviderUserInfo', async () => {
const invokeRequestHandlerStub = sinon.stub(testConfig.RequestHandler.prototype, 'invokeRequestHandler')
.resolves({
localId: uid,
});
// Stub getAccountInfoByUid to return a valid result (unchecked; we
// just need it to be valid so as to not crash.)
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(invokeRequestHandlerStub);
stubs.push(getUserStub);
await auth.updateUser(uid, {
providerToLink: {
providerId: 'google.com',
uid: 'google_uid',
},
});
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
localId: uid,
linkProviderUserInfo: {
providerId: 'google.com',
rawId: 'google_uid',
},
});
});
INVALID_PROVIDER_IDS.forEach((invalidProviderId) => {
it('should be rejected given a deleteProvider list with an invalid provider ID '
+ JSON.stringify(invalidProviderId), () => {
expect(() => {
auth.updateUser(uid, {
providersToUnlink: [ invalidProviderId as any ],
});
}).to.throw(FirebaseAuthError).with.property('code', 'auth/argument-error');
});
});
it('should merge deletion of phone provider with the providersToUnlink list', async () => {
const invokeRequestHandlerStub = sinon.stub(testConfig.RequestHandler.prototype, 'invokeRequestHandler')
.resolves({
localId: uid,
});
// Stub getAccountInfoByUid to return a valid result (unchecked; we
// just need it to be valid so as to not crash.)
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(invokeRequestHandlerStub);
stubs.push(getUserStub);
await auth.updateUser(uid, {
phoneNumber: null,
providersToUnlink: [ 'google.com' ],
});
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
localId: uid,
deleteProvider: [ 'phone', 'google.com' ],
});
});
describe('non-federated providers', () => {
let invokeRequestHandlerStub: sinon.SinonStub;
let getAccountInfoByUidStub: sinon.SinonStub;
beforeEach(() => {
invokeRequestHandlerStub = sinon.stub(testConfig.RequestHandler.prototype, 'invokeRequestHandler')
.resolves({
// nothing here is checked; we just need enough to not crash.
users: [{
localId: 1,
}],
});
getAccountInfoByUidStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves({
// nothing here is checked; we just need enough to not crash.
users: [{
localId: 1,
}],
});
});
afterEach(() => {
invokeRequestHandlerStub.restore();
getAccountInfoByUidStub.restore();
});
it('specifying both email and providerId=email should be rejected', () => {
expect(() => {
auth.updateUser(uid, {
email: '[email protected]',
providerToLink: {
providerId: 'email',
uid: '[email protected]',
},
});
}).to.throw(FirebaseAuthError).with.property('code', 'auth/argument-error');
});
it('specifying both phoneNumber and providerId=phone should be rejected', () => {
expect(() => {
auth.updateUser(uid, {
phoneNumber: '+15555550001',
providerToLink: {
providerId: 'phone',
uid: '+15555550001',
},
});
}).to.throw(FirebaseAuthError).with.property('code', 'auth/argument-error');
});
it('email linking should use email field', async () => {
await auth.updateUser(uid, {
providerToLink: {
providerId: 'email',
uid: '[email protected]',
},
});
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
localId: uid,
email: '[email protected]',
});
});
it('phone linking should use phoneNumber field', async () => {
await auth.updateUser(uid, {
providerToLink: {
providerId: 'phone',
uid: '+15555550001',
},
});
expect(invokeRequestHandlerStub).to.have.been.calledOnce.and.calledWith(
sinon.match.any, sinon.match.any, {
localId: uid,
phoneNumber: '+15555550001',
});
});
it('specifying both phoneNumber=null and providersToUnlink=phone should be rejected', () => {
expect(() => {
auth.updateUser(uid, {
phoneNumber: null,
providersToUnlink: ['phone'],
});
}).to.throw(FirebaseAuthError).with.property('code', 'auth/argument-error');
});
it('does not mutate the properties parameter', async () => {
const properties: UpdateRequest = {
providerToLink: {
providerId: 'email',
uid: '[email protected]',
},
};
await auth.updateUser(uid, properties);
expect(properties).to.deep.equal({
providerToLink: {
providerId: 'email',
uid: '[email protected]',
},
});
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.updateUser(uid, propertiesToEdit)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.updateUser(uid, propertiesToEdit)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.updateUser(uid, propertiesToEdit)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve with a UserRecord on updateExistingAccount request success', () => {
// Stub updateExistingAccount to return expected uid.
const updateUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateExistingAccount')
.resolves(uid);
// Stub getAccountInfoByUid to return expected result.
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.resolves(expectedGetAccountInfoResult);
stubs.push(updateUserStub);
stubs.push(getUserStub);
return auth.updateUser(uid, propertiesToEdit)
.then((userRecord) => {
// Confirm underlying API called with expected parameters.
expect(updateUserStub).to.have.been.calledOnce.and.calledWith(uid, propertiesToEdit);
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected user record response returned.
expect(userRecord).to.deep.equal(expectedUserRecord);
});
});
it('should throw an error when updateExistingAccount returns an error', () => {
// Stub updateExistingAccount to throw a backend error.
const updateUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateExistingAccount')
.rejects(expectedError);
stubs.push(updateUserStub);
return auth.updateUser(uid, propertiesToEdit)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(updateUserStub).to.have.been.calledOnce.and.calledWith(uid, propertiesToEdit);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
it('should echo getUser error if an error occurs while retrieving the user record', () => {
// Stub updateExistingAccount to return expected uid.
const updateUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateExistingAccount')
.resolves(uid);
// Stub getAccountInfoByUid to throw an expected error.
const getUserStub = sinon.stub(testConfig.RequestHandler.prototype, 'getAccountInfoByUid')
.rejects(expectedError);
stubs.push(updateUserStub);
stubs.push(getUserStub);
return auth.updateUser(uid, propertiesToEdit)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(updateUserStub).to.have.been.calledOnce.and.calledWith(uid, propertiesToEdit);
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned (same error thrown by getUser).
expect(error).to.equal(expectedError);
});
});
});
describe('setCustomUserClaims()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
const customClaims = {
admin: true,
groupId: '123456',
};
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => {
sinon.spy(validator, 'isUid');
sinon.spy(validator, 'isObject');
});
afterEach(() => {
(validator.isUid as any).restore();
(validator.isObject as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no uid', () => {
return (auth as any).setCustomUserClaims(undefined, customClaims)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-uid');
});
it('should be rejected given an invalid uid', () => {
const invalidUid = ('a' as any).repeat(129);
return auth.setCustomUserClaims(invalidUid, customClaims)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-uid');
expect(validator.isUid).to.have.been.calledOnce.and.calledWith(invalidUid);
});
});
it('should be rejected given no custom user claims', () => {
return (auth as any).setCustomUserClaims(uid)
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given invalid custom user claims', () => {
return auth.setCustomUserClaims(uid, 'invalid' as any)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/argument-error');
expect(validator.isObject).to.have.been.calledOnce.and.calledWith('invalid');
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.setCustomUserClaims(uid, customClaims)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.setCustomUserClaims(uid, customClaims)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.setCustomUserClaims(uid, customClaims)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve on setCustomUserClaims request success', () => {
// Stub setCustomUserClaims to return expected uid.
const setCustomUserClaimsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'setCustomUserClaims')
.resolves(uid);
stubs.push(setCustomUserClaimsStub);
return auth.setCustomUserClaims(uid, customClaims)
.then((response) => {
expect(response).to.be.undefined;
// Confirm underlying API called with expected parameters.
expect(setCustomUserClaimsStub)
.to.have.been.calledOnce.and.calledWith(uid, customClaims);
});
});
it('should throw an error when setCustomUserClaims returns an error', () => {
// Stub setCustomUserClaims to throw a backend error.
const setCustomUserClaimsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'setCustomUserClaims')
.rejects(expectedError);
stubs.push(setCustomUserClaimsStub);
return auth.setCustomUserClaims(uid, customClaims)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(setCustomUserClaimsStub)
.to.have.been.calledOnce.and.calledWith(uid, customClaims);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('listUsers()', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INTERNAL_ERROR);
const pageToken = 'PAGE_TOKEN';
const maxResult = 500;
const downloadAccountResponse: any = {
users: [
{ localId: 'UID1' },
{ localId: 'UID2' },
{ localId: 'UID3' },
],
nextPageToken: 'NEXT_PAGE_TOKEN',
};
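// Expected API result: the raw entries wrapped as UserRecords, with nextPageToken surfaced as pageToken.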
const expectedResult: any = {
users: [
new UserRecord({ localId: 'UID1' }),
new UserRecord({ localId: 'UID2' }),
new UserRecord({ localId: 'UID3' }),
],
pageToken: 'NEXT_PAGE_TOKEN',
};
const emptyDownloadAccountResponse: any = {
users: [],
};
const emptyExpectedResult: any = {
users: [],
};
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => {
sinon.spy(validator, 'isNonEmptyString');
sinon.spy(validator, 'isNumber');
});
afterEach(() => {
(validator.isNonEmptyString as any).restore();
(validator.isNumber as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given an invalid page token', () => {
const invalidToken = {};
return auth.listUsers(undefined, invalidToken as any)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-page-token');
});
});
it('should be rejected given an invalid max result', () => {
const invalidResults = 5000;
return auth.listUsers(invalidResults)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/argument-error');
expect(validator.isNumber)
.to.have.been.calledOnce.and.calledWith(invalidResults);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.listUsers(maxResult)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.listUsers(maxResult)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.listUsers(maxResult)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve on downloadAccount request success with users in response', () => {
// Stub downloadAccount to return expected response.
const downloadAccountStub = sinon
.stub(testConfig.RequestHandler.prototype, 'downloadAccount')
.resolves(downloadAccountResponse);
stubs.push(downloadAccountStub);
return auth.listUsers(maxResult, pageToken)
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(downloadAccountStub)
.to.have.been.calledOnce.and.calledWith(maxResult, pageToken);
});
});
it('should resolve on downloadAccount request success with default options', () => {
// Stub downloadAccount to return expected response.
const downloadAccountStub = sinon
.stub(testConfig.RequestHandler.prototype, 'downloadAccount')
.resolves(downloadAccountResponse);
stubs.push(downloadAccountStub);
return auth.listUsers()
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(downloadAccountStub)
.to.have.been.calledOnce.and.calledWith(undefined, undefined);
});
});
it('should resolve on downloadAccount request success with no users in response', () => {
// Stub downloadAccount to return expected response.
const downloadAccountStub = sinon
.stub(testConfig.RequestHandler.prototype, 'downloadAccount')
.resolves(emptyDownloadAccountResponse);
stubs.push(downloadAccountStub);
return auth.listUsers(maxResult, pageToken)
.then((response) => {
expect(response).to.deep.equal(emptyExpectedResult);
// Confirm underlying API called with expected parameters.
expect(downloadAccountStub)
.to.have.been.calledOnce.and.calledWith(maxResult, pageToken);
});
});
it('should throw an error when downloadAccount returns an error', () => {
// Stub downloadAccount to throw a backend error.
const downloadAccountStub = sinon
.stub(testConfig.RequestHandler.prototype, 'downloadAccount')
.rejects(expectedError);
stubs.push(downloadAccountStub);
return auth.listUsers(maxResult, pageToken)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(downloadAccountStub)
.to.have.been.calledOnce.and.calledWith(maxResult, pageToken);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('revokeRefreshTokens()', () => {
const uid = 'abcdefghijklmnopqrstuvwxyz';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => {
sinon.spy(validator, 'isUid');
});
afterEach(() => {
(validator.isUid as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no uid', () => {
return (auth as any).revokeRefreshTokens(undefined)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-uid');
});
it('should be rejected given an invalid uid', () => {
const invalidUid = ('a' as any).repeat(129);
return auth.revokeRefreshTokens(invalidUid)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-uid');
expect(validator.isUid).to.have.been.calledOnce.and.calledWith(invalidUid);
});
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.revokeRefreshTokens(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.revokeRefreshTokens(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.revokeRefreshTokens(uid)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve on underlying revokeRefreshTokens request success', () => {
// Stub revokeRefreshTokens to return expected uid.
const revokeRefreshTokensStub =
sinon.stub(testConfig.RequestHandler.prototype, 'revokeRefreshTokens')
.resolves(uid);
stubs.push(revokeRefreshTokensStub);
return auth.revokeRefreshTokens(uid)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(revokeRefreshTokensStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected response returned.
expect(result).to.be.undefined;
});
});
it('should throw when underlying revokeRefreshTokens request returns an error', () => {
// Stub revokeRefreshTokens to throw a backend error.
const revokeRefreshTokensStub =
sinon.stub(testConfig.RequestHandler.prototype, 'revokeRefreshTokens')
.rejects(expectedError);
stubs.push(revokeRefreshTokensStub);
return auth.revokeRefreshTokens(uid)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(revokeRefreshTokensStub).to.have.been.calledOnce.and.calledWith(uid);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('importUsers()', () => {
const users = [
{ uid: '1234', email: '[email protected]', passwordHash: Buffer.from('password') },
{ uid: '5678', phoneNumber: 'invalid' },
];
const options = {
hash: {
algorithm: 'BCRYPT' as any,
},
};
const expectedUserImportResultError =
new FirebaseAuthError(AuthClientErrorCode.INVALID_PHONE_NUMBER);
const expectedOptionsError =
new FirebaseAuthError(AuthClientErrorCode.INVALID_HASH_ALGORITHM);
const expectedServerError =
new FirebaseAuthError(AuthClientErrorCode.INTERNAL_ERROR);
const expectedUserImportResult = {
successCount: 1,
failureCount: 1,
errors: [
{
index: 1,
error: expectedUserImportResultError,
},
],
};
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given an app which returns null access tokens', () => {
return nullAccessTokenAuth.importUsers(users, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return malformedAccessTokenAuth.importUsers(users, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return rejectedPromiseAccessTokenAuth.importUsers(users, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve on underlying uploadAccount request resolution', () => {
// Stub uploadAccount to return expected result.
const uploadAccountStub =
sinon.stub(testConfig.RequestHandler.prototype, 'uploadAccount')
.resolves(expectedUserImportResult);
stubs.push(uploadAccountStub);
return auth.importUsers(users, options)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(uploadAccountStub).to.have.been.calledOnce.and.calledWith(users, options);
// Confirm expected response returned.
expect(result).to.be.equal(expectedUserImportResult);
});
});
it('should reject when underlying uploadAccount request rejects with an error', () => {
// Stub uploadAccount to reject with expected error.
const uploadAccountStub =
sinon.stub(testConfig.RequestHandler.prototype, 'uploadAccount')
.rejects(expectedServerError);
stubs.push(uploadAccountStub);
return auth.importUsers(users, options)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(uploadAccountStub).to.have.been.calledOnce.and.calledWith(users, options);
// Confirm expected error returned.
expect(error).to.equal(expectedServerError);
});
});
it('should throw and fail quickly when underlying uploadAccount throws', () => {
// Stub uploadAccount to throw with expected error.
const uploadAccountStub =
sinon.stub(testConfig.RequestHandler.prototype, 'uploadAccount')
.throws(expectedOptionsError);
stubs.push(uploadAccountStub);
expect(() => {
return auth.importUsers(users, { hash: { algorithm: 'invalid' as any } });
}).to.throw(expectedOptionsError);
});
if (testConfig.Auth === TenantAwareAuth) {
it('should throw and fail quickly when users provided have mismatching tenant IDs', () => {
const usersCopy = deepCopy(users);
// Simulate one user with mismatching tenant ID.
(usersCopy[0] as any).tenantId = 'otherTenantId';
expect(() => {
return auth.importUsers(usersCopy, options);
}).to.throw('UserRecord of index "0" has mismatching tenant ID "otherTenantId"');
});
it('should resolve when users provided have matching tenant IDs', () => {
// Stub uploadAccount to return expected result.
const uploadAccountStub =
sinon.stub(testConfig.RequestHandler.prototype, 'uploadAccount')
.returns(Promise.resolve(expectedUserImportResult));
const usersCopy = deepCopy(users);
usersCopy.forEach((user) => {
(user as any).tenantId = TENANT_ID;
});
stubs.push(uploadAccountStub);
return auth.importUsers(usersCopy, options)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(uploadAccountStub).to.have.been.calledOnce.and.calledWith(usersCopy, options);
// Confirm expected response returned.
expect(result).to.be.equal(expectedUserImportResult);
});
});
}
});
describe('createSessionCookie()', () => {
const tenantId = testConfig.supportsTenantManagement ? undefined : TENANT_ID;
const idToken = 'ID_TOKEN';
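// 24-hour session duration, expressed in milliseconds.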
const options = { expiresIn: 60 * 60 * 24 * 1000 };
const sessionCookie = 'SESSION_COOKIE';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_ID_TOKEN);
const expectedUserRecord = getValidUserRecord(getValidGetAccountInfoResponse(tenantId));
// Set auth_time of token to expected user's tokensValidAfterTime.
if (!expectedUserRecord.tokensValidAfterTime) {
throw new Error("getValidUserRecord didn't properly set tokensValidAfterTime.");
}
const validSince = new Date(expectedUserRecord.tokensValidAfterTime);
// Set expected uid to expected user's.
const uid = expectedUserRecord.uid;
// Set expected decoded ID token with expected UID and auth time.
const decodedIdToken = getDecodedIdToken(uid, validSince, tenantId);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
beforeEach(() => {
// If verifyIdToken stubbed, restore it.
if (testConfig.Auth.prototype.verifyIdToken.restore) {
testConfig.Auth.prototype.verifyIdToken.restore();
}
sinon.spy(validator, 'isNonEmptyString');
});
afterEach(() => {
(validator.isNonEmptyString as any).restore();
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no ID token', () => {
return (auth as any).createSessionCookie(undefined, options)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-id-token');
});
it('should be rejected given an invalid ID token', () => {
const invalidIdToken = {} as any;
return auth.createSessionCookie(invalidIdToken, options)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-id-token');
});
});
it('should be rejected given no session duration', () => {
// Simulate auth.verifyIdToken() succeeds if called.
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken)));
return (auth as any).createSessionCookie(idToken, undefined)
.should.eventually.be.rejected.and.have.property(
'code', 'auth/invalid-session-cookie-duration');
});
it('should be rejected given an invalid session duration', () => {
// Invalid object.
const invalidOptions = {} as any;
return auth.createSessionCookie(idToken, invalidOptions)
.should.eventually.be.rejected.and.have.property(
'code', 'auth/invalid-session-cookie-duration');
});
it('should be rejected given out of range session duration', () => {
// Simulate auth.verifyIdToken() succeeds if called.
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken)));
// 1 minute duration.
const invalidOptions = { expiresIn: 60 * 1000 };
return auth.createSessionCookie(idToken, invalidOptions)
.should.eventually.be.rejected.and.have.property(
'code', 'auth/invalid-session-cookie-duration');
});
it('should be rejected given an app which returns null access tokens', () => {
// Simulate auth.verifyIdToken() succeeds if called.
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken)));
return nullAccessTokenAuth.createSessionCookie(idToken, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken)));
return malformedAccessTokenAuth.createSessionCookie(idToken, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken)));
return rejectedPromiseAccessTokenAuth.createSessionCookie(idToken, options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve on underlying createSessionCookie request success', () => {
// Simulate auth.verifyIdToken() succeeds if called.
const verifyIdTokenStub = sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.resolve(decodedIdToken));
// Stub createSessionCookie to return expected sessionCookie.
const createSessionCookieStub =
sinon.stub(testConfig.RequestHandler.prototype, 'createSessionCookie')
.resolves(sessionCookie);
stubs.push(createSessionCookieStub);
return auth.createSessionCookie(idToken, options)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(createSessionCookieStub)
.to.have.been.calledOnce.and.calledWith(idToken, options.expiresIn);
// TenantAwareAuth should verify the ID token first.
if (testConfig.Auth === TenantAwareAuth) {
expect(verifyIdTokenStub)
.to.have.been.calledOnce.and.calledWith(idToken);
} else {
expect(verifyIdTokenStub).to.have.not.been.called;
}
// Confirm expected response returned.
expect(result).to.be.equal(sessionCookie);
});
});
it('should throw when underlying createSessionCookie request returns an error', () => {
// Simulate auth.verifyIdToken() succeeds if called.
stubs.push(sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.resolves(decodedIdToken));
// Stub createSessionCookie to throw a backend error.
const createSessionCookieStub =
sinon.stub(testConfig.RequestHandler.prototype, 'createSessionCookie')
.rejects(expectedError);
stubs.push(createSessionCookieStub);
return auth.createSessionCookie(idToken, options)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createSessionCookieStub)
.to.have.been.calledOnce.and.calledWith(idToken, options.expiresIn);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
if (testConfig.Auth === TenantAwareAuth) {
it('should be rejected when ID token provided is invalid', () => {
// Simulate auth.verifyIdToken() fails when called.
const verifyIdTokenStub = sinon.stub(testConfig.Auth.prototype, 'verifyIdToken')
.returns(Promise.reject(expectedError));
stubs.push(verifyIdTokenStub);
return auth.createSessionCookie(idToken, options)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(verifyIdTokenStub)
.to.have.been.calledOnce.and.calledWith(idToken);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
}
});
const emailActionFlows: EmailActionTest[] = [
{ api: 'generatePasswordResetLink', requestType: 'PASSWORD_RESET', requiresSettings: false },
{ api: 'generateEmailVerificationLink', requestType: 'VERIFY_EMAIL', requiresSettings: false },
{ api: 'generateSignInWithEmailLink', requestType: 'EMAIL_SIGNIN', requiresSettings: true },
];
emailActionFlows.forEach((emailActionFlow) => {
describe(`${emailActionFlow.api}()`, () => {
const email = '[email protected]';
const actionCodeSettings = {
url: 'https://www.example.com/path/file?a=1&b=2',
handleCodeInApp: true,
iOS: {
bundleId: 'com.example.ios',
},
android: {
packageName: 'com.example.android',
installApp: true,
minimumVersion: '6',
},
dynamicLinkDomain: 'custom.page.link',
};
const expectedLink = 'https://custom.page.link?link=' +
encodeURIComponent('https://projectId.firebaseapp.com/__/auth/action?oobCode=CODE') +
'&apn=com.example.android&ibi=com.example.ios';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.USER_NOT_FOUND);
// Stubs used to simulate underlying api calls.
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no email', () => {
return (auth as any)[emailActionFlow.api](undefined, actionCodeSettings)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-email');
});
it('should be rejected given an invalid email', () => {
return (auth as any)[emailActionFlow.api]('invalid', actionCodeSettings)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-email');
});
it('should be rejected given an invalid ActionCodeSettings object', () => {
return (auth as any)[emailActionFlow.api](email, 'invalid')
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given an app which returns null access tokens', () => {
return (nullAccessTokenAuth as any)[emailActionFlow.api](email, actionCodeSettings)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return (malformedAccessTokenAuth as any)[emailActionFlow.api](email, actionCodeSettings)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return (rejectedPromiseAccessTokenAuth as any)[emailActionFlow.api](email, actionCodeSettings)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should resolve when called with actionCodeSettings with a generated link on success', () => {
// Stub getEmailActionLink to return expected link.
const getEmailActionLinkStub = sinon.stub(testConfig.RequestHandler.prototype, 'getEmailActionLink')
.resolves(expectedLink);
stubs.push(getEmailActionLinkStub);
return (auth as any)[emailActionFlow.api](email, actionCodeSettings)
.then((actualLink: string) => {
// Confirm underlying API called with expected parameters.
expect(getEmailActionLinkStub).to.have.been.calledOnce.and.calledWith(
emailActionFlow.requestType, email, actionCodeSettings);
// Confirm expected user record response returned.
expect(actualLink).to.equal(expectedLink);
});
});
if (emailActionFlow.requiresSettings) {
it('should reject when called without actionCodeSettings', () => {
return (auth as any)[emailActionFlow.api](email, undefined)
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
} else {
it('should resolve when called without actionCodeSettings with a generated link on success', () => {
// Stub getEmailActionLink to return expected link.
const getEmailActionLinkStub = sinon.stub(testConfig.RequestHandler.prototype, 'getEmailActionLink')
.resolves(expectedLink);
stubs.push(getEmailActionLinkStub);
return (auth as any)[emailActionFlow.api](email)
.then((actualLink: string) => {
// Confirm underlying API called with expected parameters.
expect(getEmailActionLinkStub).to.have.been.calledOnce.and.calledWith(
emailActionFlow.requestType, email, undefined);
// Confirm expected user record response returned.
expect(actualLink).to.equal(expectedLink);
});
});
}
      it('should throw an error when getEmailActionLink returns an error', () => {
// Stub getEmailActionLink to throw a backend error.
const getEmailActionLinkStub = sinon.stub(testConfig.RequestHandler.prototype, 'getEmailActionLink')
.rejects(expectedError);
stubs.push(getEmailActionLinkStub);
return (auth as any)[emailActionFlow.api](email, actionCodeSettings)
.then(() => {
throw new Error('Unexpected success');
}, (error: any) => {
// Confirm underlying API called with expected parameters.
expect(getEmailActionLinkStub).to.have.been.calledOnce.and.calledWith(
emailActionFlow.requestType, email, actionCodeSettings);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('getProviderConfig()', () => {
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no provider ID', () => {
return (auth as any).getProviderConfig()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-provider-id');
});
INVALID_PROVIDER_IDS.forEach((invalidProviderId) => {
it(`should be rejected given an invalid provider ID "${JSON.stringify(invalidProviderId)}"`, () => {
return (auth as Auth).getProviderConfig(invalidProviderId as any)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-provider-id');
});
});
});
it('should be rejected given an app which returns null access tokens', () => {
const providerId = 'oidc.provider';
return (nullAccessTokenAuth as Auth).getProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
const providerId = 'oidc.provider';
return (malformedAccessTokenAuth as Auth).getProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
const providerId = 'oidc.provider';
return (rejectedPromiseAccessTokenAuth as Auth).getProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
describe('using OIDC configurations', () => {
const providerId = 'oidc.provider';
const serverResponse = {
name: `projects/project_id/oauthIdpConfigs/${providerId}`,
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
const expectedConfig = new OIDCConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.CONFIGURATION_NOT_FOUND);
it('should resolve with an OIDCConfig on success', () => {
// Stub getOAuthIdpConfig to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getOAuthIdpConfig')
.resolves(serverResponse);
stubs.push(stub);
return (auth as Auth).getProviderConfig(providerId)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected config returned.
expect(result).to.deep.equal(expectedConfig);
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getOAuthIdpConfig to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getOAuthIdpConfig')
.rejects(expectedError);
stubs.push(stub);
return (auth as Auth).getProviderConfig(providerId)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('using SAML configurations', () => {
const providerId = 'saml.provider';
const serverResponse = {
name: `projects/project_id/inboundSamlConfigs/${providerId}`,
idpConfig: {
idpEntityId: 'IDP_ENTITY_ID',
ssoUrl: 'https://example.com/login',
signRequest: true,
idpCertificates: [
{ x509Certificate: 'CERT1' },
{ x509Certificate: 'CERT2' },
],
},
spConfig: {
spEntityId: 'RP_ENTITY_ID',
callbackUri: 'https://projectId.firebaseapp.com/__/auth/handler',
},
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
};
const expectedConfig = new SAMLConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.CONFIGURATION_NOT_FOUND);
it('should resolve with a SAMLConfig on success', () => {
// Stub getInboundSamlConfig to return expected result.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getInboundSamlConfig')
.resolves(serverResponse);
stubs.push(stub);
return (auth as Auth).getProviderConfig(providerId)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected config returned.
expect(result).to.deep.equal(expectedConfig);
});
});
it('should throw an error when the backend returns an error', () => {
// Stub getInboundSamlConfig to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'getInboundSamlConfig')
.rejects(expectedError);
stubs.push(stub);
return (auth as Auth).getProviderConfig(providerId)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('listProviderConfigs()', () => {
const options: AuthProviderConfigFilter = {
type: 'oidc',
};
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no options', () => {
return (auth as any).listProviderConfigs()
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given an invalid AuthProviderConfigFilter type', () => {
const invalidOptions = {
type: 'unsupported',
};
return (auth as Auth).listProviderConfigs(invalidOptions as any)
.should.eventually.be.rejected.and.have.property('code', 'auth/argument-error');
});
it('should be rejected given an app which returns null access tokens', () => {
return (nullAccessTokenAuth as Auth).listProviderConfigs(options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return (malformedAccessTokenAuth as Auth).listProviderConfigs(options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return (rejectedPromiseAccessTokenAuth as Auth).listProviderConfigs(options)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
describe('using OIDC type filter', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INTERNAL_ERROR);
const pageToken = 'PAGE_TOKEN';
const maxResults = 50;
const filterOptions: AuthProviderConfigFilter = {
type: 'oidc',
pageToken,
maxResults,
};
const listConfigsResponse: any = {
oauthIdpConfigs : [
getOIDCConfigServerResponse('oidc.provider1'),
getOIDCConfigServerResponse('oidc.provider2'),
],
nextPageToken: 'NEXT_PAGE_TOKEN',
};
const expectedResult: any = {
providerConfigs: [
new OIDCConfig(listConfigsResponse.oauthIdpConfigs[0]),
new OIDCConfig(listConfigsResponse.oauthIdpConfigs[1]),
],
pageToken: 'NEXT_PAGE_TOKEN',
};
const emptyListConfigsResponse: any = {
oauthIdpConfigs: [],
};
const emptyExpectedResult: any = {
providerConfigs: [],
};
it('should resolve on success with configs in response', () => {
// Stub listOAuthIdpConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listOAuthIdpConfigs')
.resolves(listConfigsResponse);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
});
});
it('should resolve on success with default options', () => {
// Stub listOAuthIdpConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listOAuthIdpConfigs')
.resolves(listConfigsResponse);
stubs.push(listConfigsStub);
return (auth as Auth).listProviderConfigs({ type: 'oidc' })
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(undefined, undefined);
});
});
it('should resolve on success with no configs in response', () => {
// Stub listOAuthIdpConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listOAuthIdpConfigs')
.resolves(emptyListConfigsResponse);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then((response) => {
expect(response).to.deep.equal(emptyExpectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
});
});
it('should throw an error when listOAuthIdpConfigs returns an error', () => {
// Stub listOAuthIdpConfigs to throw a backend error.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listOAuthIdpConfigs')
.rejects(expectedError);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('using SAML type filter', () => {
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INTERNAL_ERROR);
const pageToken = 'PAGE_TOKEN';
const maxResults = 50;
const filterOptions: AuthProviderConfigFilter = {
type: 'saml',
pageToken,
maxResults,
};
const listConfigsResponse: any = {
inboundSamlConfigs : [
getSAMLConfigServerResponse('saml.provider1'),
getSAMLConfigServerResponse('saml.provider2'),
],
nextPageToken: 'NEXT_PAGE_TOKEN',
};
const expectedResult: any = {
providerConfigs: [
new SAMLConfig(listConfigsResponse.inboundSamlConfigs[0]),
new SAMLConfig(listConfigsResponse.inboundSamlConfigs[1]),
],
pageToken: 'NEXT_PAGE_TOKEN',
};
const emptyListConfigsResponse: any = {
inboundSamlConfigs: [],
};
const emptyExpectedResult: any = {
providerConfigs: [],
};
it('should resolve on success with configs in response', () => {
// Stub listInboundSamlConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listInboundSamlConfigs')
.resolves(listConfigsResponse);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
});
});
it('should resolve on success with default options', () => {
// Stub listInboundSamlConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listInboundSamlConfigs')
.resolves(listConfigsResponse);
stubs.push(listConfigsStub);
return (auth as Auth).listProviderConfigs({ type: 'saml' })
.then((response) => {
expect(response).to.deep.equal(expectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(undefined, undefined);
});
});
it('should resolve on success with no configs in response', () => {
// Stub listInboundSamlConfigs to return expected response.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listInboundSamlConfigs')
.resolves(emptyListConfigsResponse);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then((response) => {
expect(response).to.deep.equal(emptyExpectedResult);
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
});
});
it('should throw an error when listInboundSamlConfigs returns an error', () => {
// Stub listInboundSamlConfigs to throw a backend error.
const listConfigsStub = sinon
.stub(testConfig.RequestHandler.prototype, 'listInboundSamlConfigs')
.rejects(expectedError);
stubs.push(listConfigsStub);
return auth.listProviderConfigs(filterOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(listConfigsStub)
.to.have.been.calledOnce.and.calledWith(maxResults, pageToken);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('deleteProviderConfig()', () => {
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no provider ID', () => {
return (auth as any).deleteProviderConfig()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-provider-id');
});
INVALID_PROVIDER_IDS.forEach((invalidProviderId) => {
it(`should be rejected given an invalid provider ID "${JSON.stringify(invalidProviderId)}"`, () => {
return (auth as Auth).deleteProviderConfig(invalidProviderId as any)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-provider-id');
});
});
});
it('should be rejected given an app which returns null access tokens', () => {
const providerId = 'oidc.provider';
return (nullAccessTokenAuth as Auth).deleteProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
const providerId = 'oidc.provider';
return (malformedAccessTokenAuth as Auth).deleteProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
const providerId = 'oidc.provider';
return (rejectedPromiseAccessTokenAuth as Auth).deleteProviderConfig(providerId)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
describe('using OIDC configurations', () => {
const providerId = 'oidc.provider';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.CONFIGURATION_NOT_FOUND);
it('should resolve with void on success', () => {
// Stub deleteOAuthIdpConfig to resolve.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteOAuthIdpConfig')
.resolves();
stubs.push(stub);
return (auth as Auth).deleteProviderConfig(providerId)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected result returned.
expect(result).to.be.undefined;
});
});
it('should throw an error when the backend returns an error', () => {
// Stub deleteOAuthIdpConfig to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteOAuthIdpConfig')
.rejects(expectedError);
stubs.push(stub);
return (auth as Auth).deleteProviderConfig(providerId)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('using SAML configurations', () => {
const providerId = 'saml.provider';
const expectedError = new FirebaseAuthError(AuthClientErrorCode.CONFIGURATION_NOT_FOUND);
it('should resolve with void on success', () => {
// Stub deleteInboundSamlConfig to resolve.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteInboundSamlConfig')
.resolves();
stubs.push(stub);
return (auth as Auth).deleteProviderConfig(providerId)
.then((result) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected result returned.
expect(result).to.be.undefined;
});
});
it('should throw an error when the backend returns an error', () => {
// Stub deleteInboundSamlConfig to throw a backend error.
const stub = sinon.stub(testConfig.RequestHandler.prototype, 'deleteInboundSamlConfig')
.rejects(expectedError);
stubs.push(stub);
return (auth as Auth).deleteProviderConfig(providerId)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(stub).to.have.been.calledOnce.and.calledWith(providerId);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('updateProviderConfig()', () => {
const oidcConfigOptions = {
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no provider ID', () => {
return (auth as any).updateProviderConfig(undefined, oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-provider-id');
});
INVALID_PROVIDER_IDS.forEach((invalidProviderId) => {
it(`should be rejected given an invalid provider ID "${JSON.stringify(invalidProviderId)}"`, () => {
return (auth as Auth).updateProviderConfig(invalidProviderId as any, oidcConfigOptions)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-provider-id');
});
});
});
it('should be rejected given no options', () => {
const providerId = 'oidc.provider';
return (auth as any).updateProviderConfig(providerId)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error: FirebaseAuthError) => {
expect(error).to.have.property('code', 'auth/invalid-config');
});
});
it('should be rejected given an app which returns null access tokens', () => {
const providerId = 'oidc.provider';
return (nullAccessTokenAuth as Auth).updateProviderConfig(providerId, oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
const providerId = 'oidc.provider';
return (malformedAccessTokenAuth as Auth).updateProviderConfig(providerId, oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
const providerId = 'oidc.provider';
return (rejectedPromiseAccessTokenAuth as Auth).updateProviderConfig(providerId, oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
describe('using OIDC configurations', () => {
const providerId = 'oidc.provider';
const configOptions = {
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
const serverResponse = {
name: `projects/project_id/oauthIdpConfigs/${providerId}`,
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
const expectedConfig = new OIDCConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CONFIG);
it('should resolve with an OIDCConfig on updateOAuthIdpConfig request success', () => {
// Stub updateOAuthIdpConfig to return expected server response.
const updateConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateOAuthIdpConfig')
.resolves(serverResponse);
stubs.push(updateConfigStub);
return auth.updateProviderConfig(providerId, configOptions)
.then((actualConfig) => {
// Confirm underlying API called with expected parameters.
expect(updateConfigStub).to.have.been.calledOnce.and.calledWith(providerId, configOptions);
// Confirm expected config response returned.
expect(actualConfig).to.deep.equal(expectedConfig);
});
});
it('should throw an error when updateOAuthIdpConfig returns an error', () => {
// Stub updateOAuthIdpConfig to throw a backend error.
const updateConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateOAuthIdpConfig')
.rejects(expectedError);
stubs.push(updateConfigStub);
return auth.updateProviderConfig(providerId, configOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(updateConfigStub).to.have.been.calledOnce.and.calledWith(providerId, configOptions);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('using SAML configurations', () => {
const providerId = 'saml.provider';
const configOptions = {
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
idpEntityId: 'IDP_ENTITY_ID',
ssoURL: 'https://example.com/login',
x509Certificates: ['CERT1', 'CERT2'],
rpEntityId: 'RP_ENTITY_ID',
callbackURL: 'https://projectId.firebaseapp.com/__/auth/handler',
enableRequestSigning: true,
};
const serverResponse = {
name: `projects/project_id/inboundSamlConfigs/${providerId}`,
idpConfig: {
idpEntityId: 'IDP_ENTITY_ID',
ssoUrl: 'https://example.com/login',
signRequest: true,
idpCertificates: [
{ x509Certificate: 'CERT1' },
{ x509Certificate: 'CERT2' },
],
},
spConfig: {
spEntityId: 'RP_ENTITY_ID',
callbackUri: 'https://projectId.firebaseapp.com/__/auth/handler',
},
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
};
const expectedConfig = new SAMLConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CONFIG);
it('should resolve with a SAMLConfig on updateInboundSamlConfig request success', () => {
// Stub updateInboundSamlConfig to return expected server response.
const updateConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateInboundSamlConfig')
.resolves(serverResponse);
stubs.push(updateConfigStub);
return auth.updateProviderConfig(providerId, configOptions)
.then((actualConfig) => {
// Confirm underlying API called with expected parameters.
expect(updateConfigStub).to.have.been.calledOnce.and.calledWith(providerId, configOptions);
// Confirm expected config response returned.
expect(actualConfig).to.deep.equal(expectedConfig);
});
});
it('should throw an error when updateInboundSamlConfig returns an error', () => {
// Stub updateInboundSamlConfig to throw a backend error.
const updateConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'updateInboundSamlConfig')
.rejects(expectedError);
stubs.push(updateConfigStub);
return auth.updateProviderConfig(providerId, configOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(updateConfigStub).to.have.been.calledOnce.and.calledWith(providerId, configOptions);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('createProviderConfig()', () => {
const oidcConfigOptions = {
providerId: 'oidc.provider',
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
let stubs: sinon.SinonStub[] = [];
afterEach(() => {
_.forEach(stubs, (stub) => stub.restore());
stubs = [];
});
it('should be rejected given no configuration options', () => {
return (auth as any).createProviderConfig()
.should.eventually.be.rejected.and.have.property('code', 'auth/invalid-config');
});
it('should be rejected given an invalid provider ID', () => {
const invalidConfigOptions = deepCopy(oidcConfigOptions);
invalidConfigOptions.providerId = 'unsupported';
return (auth as Auth).createProviderConfig(invalidConfigOptions)
.then(() => {
throw new Error('Unexpected success');
})
.catch((error) => {
expect(error).to.have.property('code', 'auth/invalid-provider-id');
});
});
it('should be rejected given an app which returns null access tokens', () => {
return (nullAccessTokenAuth as Auth).createProviderConfig(oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which returns invalid access tokens', () => {
return (malformedAccessTokenAuth as Auth).createProviderConfig(oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
it('should be rejected given an app which fails to generate access tokens', () => {
return (rejectedPromiseAccessTokenAuth as Auth).createProviderConfig(oidcConfigOptions)
.should.eventually.be.rejected.and.have.property('code', 'app/invalid-credential');
});
describe('using OIDC configurations', () => {
const providerId = 'oidc.provider';
const configOptions = {
providerId,
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
const serverResponse = {
name: `projects/project_id/oauthIdpConfigs/${providerId}`,
displayName: 'OIDC_DISPLAY_NAME',
enabled: true,
clientId: 'CLIENT_ID',
issuer: 'https://oidc.com/issuer',
};
const expectedConfig = new OIDCConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CONFIG);
it('should resolve with an OIDCConfig on createOAuthIdpConfig request success', () => {
// Stub createOAuthIdpConfig to return expected server response.
const createConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'createOAuthIdpConfig')
.resolves(serverResponse);
stubs.push(createConfigStub);
return (auth as Auth).createProviderConfig(configOptions)
.then((actualConfig) => {
// Confirm underlying API called with expected parameters.
expect(createConfigStub).to.have.been.calledOnce.and.calledWith(configOptions);
// Confirm expected config response returned.
expect(actualConfig).to.deep.equal(expectedConfig);
});
});
it('should throw an error when createOAuthIdpConfig returns an error', () => {
// Stub createOAuthIdpConfig to throw a backend error.
const createConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'createOAuthIdpConfig')
.rejects(expectedError);
stubs.push(createConfigStub);
return (auth as Auth).createProviderConfig(configOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createConfigStub).to.have.been.calledOnce.and.calledWith(configOptions);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
describe('using SAML configurations', () => {
const providerId = 'saml.provider';
const configOptions = {
providerId,
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
idpEntityId: 'IDP_ENTITY_ID',
ssoURL: 'https://example.com/login',
x509Certificates: ['CERT1', 'CERT2'],
rpEntityId: 'RP_ENTITY_ID',
callbackURL: 'https://projectId.firebaseapp.com/__/auth/handler',
enableRequestSigning: true,
};
const serverResponse = {
name: `projects/project_id/inboundSamlConfigs/${providerId}`,
idpConfig: {
idpEntityId: 'IDP_ENTITY_ID',
ssoUrl: 'https://example.com/login',
signRequest: true,
idpCertificates: [
{ x509Certificate: 'CERT1' },
{ x509Certificate: 'CERT2' },
],
},
spConfig: {
spEntityId: 'RP_ENTITY_ID',
callbackUri: 'https://projectId.firebaseapp.com/__/auth/handler',
},
displayName: 'SAML_DISPLAY_NAME',
enabled: true,
};
const expectedConfig = new SAMLConfig(serverResponse);
const expectedError = new FirebaseAuthError(AuthClientErrorCode.INVALID_CONFIG);
it('should resolve with a SAMLConfig on createInboundSamlConfig request success', () => {
// Stub createInboundSamlConfig to return expected server response.
const createConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'createInboundSamlConfig')
.resolves(serverResponse);
stubs.push(createConfigStub);
return (auth as Auth).createProviderConfig(configOptions)
.then((actualConfig) => {
// Confirm underlying API called with expected parameters.
expect(createConfigStub).to.have.been.calledOnce.and.calledWith(configOptions);
// Confirm expected config response returned.
expect(actualConfig).to.deep.equal(expectedConfig);
});
});
it('should throw an error when createInboundSamlConfig returns an error', () => {
// Stub createInboundSamlConfig to throw a backend error.
const createConfigStub = sinon.stub(testConfig.RequestHandler.prototype, 'createInboundSamlConfig')
.rejects(expectedError);
stubs.push(createConfigStub);
return (auth as Auth).createProviderConfig(configOptions)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm underlying API called with expected parameters.
expect(createConfigStub).to.have.been.calledOnce.and.calledWith(configOptions);
// Confirm expected error returned.
expect(error).to.equal(expectedError);
});
});
});
});
describe('auth emulator support', () => {
let mockAuth = testConfig.init(mocks.app());
const userRecord = getValidUserRecord(getValidGetAccountInfoResponse());
const validSince = new Date(userRecord.tokensValidAfterTime!);
const stubs: sinon.SinonStub[] = [];
let clock: sinon.SinonFakeTimers;
beforeEach(() => {
process.env.FIREBASE_AUTH_EMULATOR_HOST = '127.0.0.1:9099';
mockAuth = testConfig.init(mocks.app());
clock = sinon.useFakeTimers(validSince.getTime());
});
afterEach(() => {
_.forEach(stubs, (s) => s.restore());
delete process.env.FIREBASE_AUTH_EMULATOR_HOST;
clock.restore();
});
it('createCustomToken() generates an unsigned token', async () => {
const token = await mockAuth.createCustomToken('uid1');
// Check the decoded token has the right algorithm
const decoded = jwt.decode(token, { complete: true });
expect(decoded).to.have.property('header').that.has.property('alg', 'none');
expect(decoded).to.have.property('payload').that.has.property('uid', 'uid1');
// Make sure this doesn't throw
jwt.verify(token, '', { algorithms: ['none'] });
});
it('verifyIdToken() should reject revoked ID tokens', () => {
const uid = userRecord.uid;
// One second before validSince.
const oneSecBeforeValidSince = Math.floor(validSince.getTime() / 1000 - 1);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(userRecord);
stubs.push(getUserStub);
const unsignedToken = mocks.generateIdToken({
algorithm: 'none',
subject: uid,
}, {
iat: oneSecBeforeValidSince,
auth_time: oneSecBeforeValidSince,
});
// verifyIdToken should force checking revocation in emulator mode,
// even if checkRevoked=false.
return mockAuth.verifyIdToken(unsignedToken, false)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/id-token-revoked');
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
});
});
it('verifySessionCookie() should reject revoked session cookies', () => {
const uid = userRecord.uid;
// One second before validSince.
const oneSecBeforeValidSince = Math.floor(validSince.getTime() / 1000 - 1);
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser')
.resolves(userRecord);
stubs.push(getUserStub);
const unsignedToken = mocks.generateIdToken({
algorithm: 'none',
subject: uid,
issuer: 'https://session.firebase.google.com/' + mocks.projectId,
}, {
iat: oneSecBeforeValidSince,
auth_time: oneSecBeforeValidSince,
});
// verifySessionCookie should force checking revocation in emulator
// mode, even if checkRevoked=false.
return mockAuth.verifySessionCookie(unsignedToken, false)
.then(() => {
throw new Error('Unexpected success');
}, (error) => {
// Confirm expected error returned.
expect(error).to.have.property('code', 'auth/session-cookie-revoked');
// Confirm underlying API called with expected parameters.
expect(getUserStub).to.have.been.calledOnce.and.calledWith(uid);
});
});
it('verifyIdToken() rejects an unsigned token if auth emulator is unreachable', async () => {
const unsignedToken = mocks.generateIdToken({
algorithm: 'none'
});
const errorMessage = 'Error while making request: connect ECONNREFUSED 127.0.0.1. Error code: ECONNREFUSED';
const getUserStub = sinon.stub(testConfig.Auth.prototype, 'getUser').rejects(new Error(errorMessage));
stubs.push(getUserStub);
// Since revocation check is forced on in emulator mode, this will call
// the getUser method and get rejected (instead of succeed locally).
await expect(mockAuth.verifyIdToken(unsignedToken))
.to.be.rejectedWith(errorMessage);
});
});
});
});<|fim▁end|> | });
});
|
<|file_name|>simple.py<|end_file_name|><|fim▁begin|># Copyright (C) 2016 Matt Griswold <[email protected]>
#
# This file is part of bgpfu
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
import ipaddress
from bgpfu.prefixlist import PrefixListBase
def _try_combine(aggregate, current):
"try combining and replacing the last element on the aggregate list"
if aggregate and aggregate[-1]:
supernet = aggregate[-1].supernet()
if supernet == current.supernet():
aggregate[-1] = supernet
return True
return False
def _do_aggregate(prefixlist):
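    "iteratively merge sibling prefixes into their supernets until a pass yields no further reduction"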
if len(prefixlist) <= 1:
return prefixlist
prefixlist = sorted(prefixlist)
# TODO check for default and skip it?
aggregate = []
while True:
current = None
for pfx in prefixlist:
if not current:
current = pfx
continue
if current.overlaps(pfx):
continue
            # try joining the two adjacent prefixes into their common supernet
supernet = current.supernet()
if supernet == pfx.supernet():
current = supernet
continue
# nothing to combine, shift
aggregate.append(current)
current = pfx
if current:
if not _try_combine(aggregate, current):
aggregate.append(current)
if len(aggregate) == len(prefixlist):
return aggregate
prefixlist = aggregate
aggregate = []
class SimplePrefixList(PrefixListBase, collections.abc.MutableSequence):
"""
    Simple PrefixList implementation using collections
*NOTE* loses prefix length info on aggregate
"""
def __init__(self, prefixes=None):
if prefixes:
self._prefixes = list(map(ipaddress.ip_network, list(map(str, prefixes))))
else:
self._prefixes = []
def __getitem__(self, i):
return self._prefixes[i]
def __setitem__(self, i, v):
self._prefixes[i] = self.check_val(v)
def insert(self, i, v):
self._prefixes.insert(i, self.check_val(v))<|fim▁hole|>
def iter_add(self, it):
for v in it:
self._prefixes.append(self.check_val(v))
def __delitem__(self, i):
del self._prefixes[i]
def __len__(self):
return len(self._prefixes)
def __eq__(self, other):
if isinstance(other, self.__class__):
return self._prefixes == other._prefixes
raise TypeError("object not PrefixList type")
def __ne__(self, other):
return not self == other
def __str__(self):
return str(self._prefixes)
@property
def ipv4(self):
return [p for p in self._prefixes if p.version == 4]
@property
def ipv6(self):
return [p for p in self._prefixes if p.version == 6]
def str_list(self):
return list(map(str, self._prefixes))
def aggregate(self):
"returns a PrefixList containing the result of aggregating the list"
if len(self._prefixes) == 1:
return self.__class__(self._prefixes)
# v4 = sorted(self._prefixes)
v4 = [p for p in self._prefixes if p.version == 4]
v6 = [p for p in self._prefixes if p.version == 6]
v4 = _do_aggregate(v4)
v6 = _do_aggregate(v6)
return self.__class__(v4 + v6)<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import,
division)
import json
import re
import six
import sys
channel_name_re = re.compile(r'\A[-a-zA-Z0-9_=@,.;]+\Z')
app_id_re = re.compile(r'\A[0-9]+\Z')
pusher_url_re = re.compile(r'\A(http|https)://(.*):(.*)@(.*)/apps/([0-9]+)\Z')
socket_id_re = re.compile(r'\A\d+\.\d+\Z')
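# Human-readable type name used in ensure_text() error messages; Python 2
# distinguishes unicode strings from byte strings, Python 3 does not.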
if sys.version_info < (3,):
text = 'a unicode string'
else:
text = 'a string'
def ensure_text(obj, name):
if isinstance(obj, six.text_type):
return obj
if isinstance(obj, six.string_types):
return six.text_type(obj)
raise TypeError("%s should be %s" % (name, text))
<|fim▁hole|> channel = ensure_text(channel, "channel")
if len(channel) > 200:
raise ValueError("Channel too long: %s" % channel)
if not channel_name_re.match(channel):
raise ValueError("Invalid Channel: %s" % channel)
return channel
def validate_socket_id(socket_id):
socket_id = ensure_text(socket_id, "socket_id")
if not socket_id_re.match(socket_id):
raise ValueError("Invalid socket ID: %s" % socket_id)
return socket_id<|fim▁end|> | def validate_channel(channel): |
<|file_name|>contribution-opportunities-backend-api.service.spec.ts<|end_file_name|><|fim▁begin|>// Copyright 2018 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Unit tests for ContributionOpportunitiesBackendApiService.
*/
import { HttpClientTestingModule, HttpTestingController } from
'@angular/common/http/testing';
import { TestBed, fakeAsync, flushMicrotasks } from '@angular/core/testing';
import { ContributionOpportunitiesBackendApiService } from
// eslint-disable-next-line max-len
'pages/contributor-dashboard-page/services/contribution-opportunities-backend-api.service';
import { ExplorationOpportunitySummaryObjectFactory } from
'domain/opportunity/ExplorationOpportunitySummaryObjectFactory';
import { SkillOpportunityObjectFactory } from
'domain/opportunity/SkillOpportunityObjectFactory';
import { UrlInterpolationService } from<|fim▁hole|>describe('Contribution Opportunities backend API service', function() {
let contributionOpportunitiesBackendApiService:
ContributionOpportunitiesBackendApiService = null;
let httpTestingController: HttpTestingController;
let explorationOpportunitySummaryObjectFactory:
ExplorationOpportunitySummaryObjectFactory = null;
let skillOpportunityObjectFactory:
SkillOpportunityObjectFactory = null;
let urlInterpolationService:
UrlInterpolationService = null;
const skillOpportunityResponse = {
opportunities: [{
id: 'skill_id',
skill_description: 'A new skill for question',
topic_name: 'A new topic',
question_count: 30
}],
next_cursor: '6',
more: true
};
const skillOpportunity = {
opportunities: [{
id: 'exp_id',
topic_name: 'Topic',
story_title: 'A new story',
chapter_title: 'Introduction',
content_count: 100,
translation_counts: {
hi: 15
}
}],
next_cursor: '6',
more: true
};
let sampleSkillOpportunitiesResponse = null;
let sampleTranslationOpportunitiesResponse = null;
let sampleVoiceoverOpportunitiesResponse = null;
beforeEach(() => {
TestBed.configureTestingModule({
imports: [HttpClientTestingModule]
});
contributionOpportunitiesBackendApiService =
TestBed.get(ContributionOpportunitiesBackendApiService);
explorationOpportunitySummaryObjectFactory =
TestBed.get(ExplorationOpportunitySummaryObjectFactory);
httpTestingController = TestBed.get(HttpTestingController);
skillOpportunityObjectFactory = TestBed.get(SkillOpportunityObjectFactory);
urlInterpolationService = TestBed.get(UrlInterpolationService);
sampleSkillOpportunitiesResponse = [
skillOpportunityObjectFactory.createFromBackendDict(
skillOpportunityResponse.opportunities[0])
];
sampleTranslationOpportunitiesResponse = [
explorationOpportunitySummaryObjectFactory.createFromBackendDict(
skillOpportunity.opportunities[0]
)
];
sampleVoiceoverOpportunitiesResponse = [
explorationOpportunitySummaryObjectFactory.createFromBackendDict(
skillOpportunity.opportunities[0]
)
];
});
afterEach(() => {
httpTestingController.verify();
});
it('should successfully fetch the skill opportunities data',
fakeAsync(() => {
const successHandler = jasmine.createSpy('success');
const failHandler = jasmine.createSpy('fail');
contributionOpportunitiesBackendApiService.fetchSkillOpportunities(
'').then(
successHandler, failHandler
);
const req = httpTestingController.expectOne(
urlInterpolationService.interpolateUrl(
'/opportunitiessummaryhandler/<opportunityType>',
{ opportunityType: 'skill' }
) + '?cursor='
);
expect(req.request.method).toEqual('GET');
req.flush(skillOpportunityResponse);
flushMicrotasks();
expect(successHandler).toHaveBeenCalledWith({
opportunities: sampleSkillOpportunitiesResponse,
nextCursor: skillOpportunityResponse.next_cursor,
more: skillOpportunityResponse.more
});
expect(failHandler).not.toHaveBeenCalled();
})
);
it('should successfully fetch the translation opportunities data',
fakeAsync(() => {
const successHandler = jasmine.createSpy('success');
const failHandler = jasmine.createSpy('fail');
contributionOpportunitiesBackendApiService.fetchTranslationOpportunities(
'hi', '',).then(
successHandler, failHandler
);
const req = httpTestingController.expectOne(
urlInterpolationService.interpolateUrl(
'/opportunitiessummaryhandler/<opportunityType>',
{ opportunityType: 'translation' }
) + '?language_code=hi&cursor='
);
expect(req.request.method).toEqual('GET');
req.flush(skillOpportunity);
flushMicrotasks();
expect(successHandler).toHaveBeenCalledWith({
opportunities: sampleTranslationOpportunitiesResponse,
nextCursor: skillOpportunity.next_cursor,
more: skillOpportunity.more
});
expect(failHandler).not.toHaveBeenCalled();
})
);
it('should successfully fetch the voiceover opportunities data',
fakeAsync(() => {
const successHandler = jasmine.createSpy('success');
const failHandler = jasmine.createSpy('fail');
contributionOpportunitiesBackendApiService.fetchVoiceoverOpportunities(
'hi', '',).then(
successHandler, failHandler
);
const req = httpTestingController.expectOne(
urlInterpolationService.interpolateUrl(
'/opportunitiessummaryhandler/<opportunityType>',
{ opportunityType: 'voiceover' }
) + '?language_code=hi&cursor='
);
expect(req.request.method).toEqual('GET');
req.flush(skillOpportunity);
flushMicrotasks();
expect(successHandler).toHaveBeenCalledWith({
opportunities: sampleVoiceoverOpportunitiesResponse,
nextCursor: skillOpportunity.next_cursor,
more: skillOpportunity.more
});
expect(failHandler).not.toHaveBeenCalled();
})
);
it('should successfully fetch the featured translation languages',
fakeAsync(() => {
const successHandler = jasmine.createSpy('success');
const failHandler = jasmine.createSpy('fail');
const featuredTranslationLanguageObjectFactory = TestBed.get(
FeaturedTranslationLanguageObjectFactory);
contributionOpportunitiesBackendApiService
.fetchFeaturedTranslationLanguages()
.then(successHandler, failHandler);
const req = httpTestingController.expectOne(
'/retrivefeaturedtranslationlanguages'
);
expect(req.request.method).toEqual('GET');
req.flush({
featured_translation_languages:
[{ language_code: 'en', explanation: 'English' }]
});
flushMicrotasks();
expect(successHandler).toHaveBeenCalledWith([
featuredTranslationLanguageObjectFactory.createFromBackendDict(
{ language_code: 'en', explanation: 'English' }
)
]);
expect(failHandler).not.toHaveBeenCalled();
})
);
});<|fim▁end|> | 'domain/utilities/url-interpolation.service';
import { FeaturedTranslationLanguageObjectFactory} from
'domain/opportunity/FeaturedTranslationLanguageObjectFactory';
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from sqlalchemy import create_engine, Column, ForeignKey, Integer, String,\<|fim▁hole|> Boolean, Unicode, Date, DateTime, and_, func
from sqlalchemy.orm import relationship, backref, sessionmaker
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm.exc import NoResultFound
from helpers.utils import convert_to_snake_case, as_client_tz, delta_minutes
from helpers.h_logging import get_logger
from datetime import datetime, timedelta
from config import settings
import urlparse
import pytz
engine = create_engine(URL(**settings.DATABASE))
Session = sessionmaker(bind=engine)
Base = declarative_base()
class BaseMixin(object):
@declared_attr
def __tablename__(cls):
return convert_to_snake_case(cls.__name__)
__mapper_args__ = {'always_refresh': True}
pk = Column(Integer, primary_key=True)
class User(BaseMixin, Base):
vk_id = Column(String, nullable=False, unique=True)
data = relationship("UserData", uselist=False, backref='user')
@property
def url(self):
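        # vk_id may be a numeric ID (rendered as ".../id<vk_id>") or a screen name appended as-is.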
db_session = Session()
db_session.add(self)
if self.vk_id.isdigit():
user_url = urlparse.urljoin(settings.SOURCE_URL, "id" + self.vk_id)
else:
user_url = urlparse.urljoin(settings.SOURCE_URL, self.vk_id)
db_session.close()
return user_url
@property
def last_visit_text(self):
last_log = self.activity_logs[-1]
if last_log.is_online:
last_seen_line = 'Online'
else:
now = pytz.timezone(settings.CLIENT_TZ).localize(datetime.now())
last_visit_in_client_tz = as_client_tz(last_log.last_visit)
minutes_ago = delta_minutes(now, last_visit_in_client_tz)
delta_days = (now.date() - last_visit_in_client_tz.date()).days
if minutes_ago < 60:
last_seen_line = 'last seen {} minutes ago'.format(minutes_ago)
else:
if delta_days == 0:
strftime_tmpl = 'last seen today at %H:%M'
elif delta_days == 1:
strftime_tmpl = 'last seen yesterday at %H:%M'
else:
strftime_tmpl = 'last seen on %B %d at %H:%M'
last_seen_line = last_visit_in_client_tz.strftime(strftime_tmpl)
if last_log.is_mobile:
last_seen_line += ' [Mobile]'
return last_seen_line
@classmethod
def from_vk_id(cls, vk_id):
user = cls.get_by_vk_id(vk_id)
db_session = Session()
if not user:
get_logger('file').debug(
'User with vk_id={} not found. Creating.'.format(vk_id))
user = cls(vk_id=vk_id)
db_session.add(user)
db_session.commit()
else:
db_session.add(user)
if not user.data:
get_logger('file').debug(
'UserData absent. Creating and committing')
user.data = UserData()
db_session.commit()
db_session.close()
return user
@classmethod
def get_by_vk_id(cls, vk_id):
db_session = Session()
try:
user = db_session.query(cls).filter_by(vk_id=vk_id).one()
get_logger('file').debug(
'User with vk_id={} found and retrieved.'.format(vk_id))
        except NoResultFound:
user = None
db_session.close()
return user
def activity_for(self, start, end):
db_session = Session()
query = db_session.query(
func.count(UserActivityLog.status).label('status_count'),
UserActivityLog.status
).filter(UserActivityLog.user_pk == self.pk)\
.filter(and_(
UserActivityLog.timestamp >= start,
UserActivityLog.timestamp <= end
))\
.group_by(UserActivityLog.status)\
.order_by('status_count DESC')
return query.all()
def get_name(self):
db_session = Session()
db_session.add(self)
user_name = self.data.name
db_session.close()
return user_name
class UserData(BaseMixin, Base):
user_pk = Column(Integer, ForeignKey('user.pk'))
name = Column(String)
birthday = Column(String)
photo = Column(String)
hometown = Column(String)
site = Column(String)
instagram = Column(String)
facebook = Column(String)
twitter = Column(String)
skype = Column(String)
phone = Column(String)
university = Column(String)
studied_at = Column(String)
wallposts = Column(Integer)
photos = Column(Integer)
videos = Column(Integer)
followers = Column(Integer)
communities = Column(Integer)
noteworthy_pages = Column(Integer)
current_city = Column(String)
info_1 = Column(String)
info_2 = Column(String)
info_3 = Column(String)
@classmethod
def from_dict(cls, data):
inst = cls()
keys = set(data.keys()) & set(cls.__dict__.keys())
for key in keys:
setattr(inst, key, data[key])
return inst
@staticmethod
def get_diff(old, new):
changes = {}
excluded_attrs = ['pk', 'user_pk', '_sa_instance_state']
keys = [k for k in old.__dict__.keys()
if k not in excluded_attrs and "__" not in k]
for k in keys:
old_val = getattr(old, k)
new_val = getattr(new, k)
if old_val != new_val:
changes[k] = {'old': old_val, 'new': new_val}
return changes
class UserActivityLog(BaseMixin, Base):
user_pk = Column(Integer, ForeignKey('user.pk'))
user = relationship("User", backref='activity_logs')
is_online = Column(Boolean, default=True)
is_mobile = Column(Boolean, default=False)
status = Column(String)
updates = Column(String)
last_visit_lt_an_hour_ago = Column(Boolean, default=False)
last_visit = Column(DateTime(timezone=True))
timestamp = Column(DateTime(timezone=True),
default=datetime.now)
@classmethod
def from_dict(cls, data):
inst = cls()
keys = set(data.keys()) & set(cls.__dict__.keys())
for key in keys:
setattr(inst, key, data[key])
return inst
@staticmethod
def get_diff(old, new):
changes = {}
excluded_attrs = ['pk', 'user_pk', 'user', 'timestamp',
'_sa_instance_state']
keys = [k for k in old.__dict__.keys()
if k not in excluded_attrs and "__" not in k]
for k in keys:
old_val = getattr(old, k)
new_val = getattr(new, k)
if old_val != new_val:
changes[k] = {'old': old_val, 'new': new_val}
return changes
Base.metadata.create_all(engine)<|fim▁end|> | |
<|file_name|>issue-3038.rs<|end_file_name|><|fim▁begin|>impl HTMLTableElement {
fn func() {
if number_of_row_elements == 0 {
if let Some(last_tbody) = node
.rev_children()
.filter_map(DomRoot::downcast::<Element>)
.find(|n| {
n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")
})
{
last_tbody
.upcast::<Node>()
.AppendChild(new_row.upcast::<Node>())
.expect("InsertRow failed to append first row.");
}
}
if number_of_row_elements == 0 {
if let Some(last_tbody) = node.find(|n| {
n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")
}) {
last_tbody
.upcast::<Node>()
.AppendChild(new_row.upcast::<Node>())
.expect("InsertRow failed to append first row.");
}
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>app.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
console.log( "this should be the tablesaw object: ", Tablesaw );<|fim▁end|> | import Tablesaw from '../../dist/tablesaw'; |
<|file_name|>handle.rs<|end_file_name|><|fim▁begin|>use std::{
collections::HashMap,
fmt::{Debug, Display},
fs::File,
hash::Hash,
io::Write,
result::Result as StdResult,
sync::{Arc, Mutex},
time::{Duration, Instant},
};
use dashmap::DashMap;
use libp2p::{identity, kad::QueryId, PeerId};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tokio::sync::mpsc::{error::TrySendError, Receiver, Sender};
use uuid::Uuid;
use crate::{
agent::{AgentId, AgentPolicy},
config::GlobalExecutor,
group::GroupSettings,
group::{Group, GroupPermits},
network::NetworkError,
rpc::{self, Resource, ResourceIdentifier},
Error, Result,
};
/// A handle to a running network connection.
pub struct NetworkHandle<M>
where
M: Serialize + DeserializeOwned,
{
pub stats: NetworkStats,
sender: Sender<HandleCmd<M>>,
local_key: identity::ed25519::Keypair,
pending: Arc<DashMap<OpId, PendingCmd<M>>>,
next_msg_id: usize,
    /// Flag indicating whether the network running in the background is alive or not.
network_is_dead: bool,
msg_buf: Arc<DashMap<PeerId, Vec<M>>>,
}
impl<M> NetworkHandle<M>
where
M: Serialize + DeserializeOwned + Debug + Send + Sync + 'static,
{
    /// Should only be instantiated through the network builder.
pub(crate) fn new(
sender: Sender<HandleCmd<M>>,
rcv: Receiver<StdResult<HandleAnsw<M>, NetworkError>>,
local_key: identity::ed25519::Keypair,
) -> Self {
let pending = Arc::new(DashMap::new());
let msg_buf = Arc::new(DashMap::new());
let net_stats = Arc::new(Mutex::new(Vec::new()));
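        // Process answers from the network task in the background, routing them
        // into the shared pending-command map and per-peer message buffer.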
GlobalExecutor::spawn(Self::process_answer_buffer(
rcv,
pending.clone(),
msg_buf.clone(),
net_stats.clone(),
));
NetworkHandle {
sender,
local_key,
pending,
next_msg_id: 1, // 0 value is reserved
network_is_dead: false,
stats: NetworkStats::new(net_stats),
msg_buf,
}
}
    /// Returns whether an asynchronous operation executed successfully or not.
    /// If the operation is still running it will return a `waiting` error type.
/// Returns the resource identifier in case the operation returns one:
/// - create_group
/// - join_group
/// - register_agent
/// - find_agent.
pub fn op_result(&mut self, op_id: OpId) -> Result<Option<ResourceIdentifier>> {
let answ = match self.pending.get_mut(&op_id) {
None => Err(HandleError::OpNotFound(op_id).into()),
Some(mut pend_cmd) => match pend_cmd.value_mut().answ.as_mut() {
None => return Err(HandleError::AwaitingResponse(op_id).into()),
Some(HandleAnsw::ReqJoinGroupAccepted { group_id, .. }) => {
Ok(Some(ResourceIdentifier::group(&group_id)))
}
Some(HandleAnsw::ReqJoinGroupDenied {
group_id, reason, ..
}) => Err(Error::GroupError {
group_id: *group_id,
reason: reason.clone(),
}),
Some(HandleAnsw::AgentRegistered { rss_key, .. }) => Ok(Some(*rss_key)),
Some(HandleAnsw::AgentFound { rss_key, .. }) => {
log::info!("Found agent {}", rss_key);
Ok(Some(*rss_key))
}
Some(HandleAnsw::HasShutdown { .. })
| Some(HandleAnsw::PropagateGroupChange { .. }) => Ok(None),
Some(HandleAnsw::AwaitingRegistration { .. }) => Ok(None),
Some(HandleAnsw::RcvMsg { .. }) => unreachable!(),
},
};
self.pending.remove(&op_id);
answ
}
/// Blocks the current thread until a resource key is returned from a previous operation.
    /// Returns an error in case the time out is reached (if provided) or the result of the op
/// is an error.
///
/// # Panic
/// Panics if the provided operation does not return a resource key.
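    ///
    /// A minimal usage sketch (not compiled as a doctest); `agent` and `config`
    /// are assumed to have been built elsewhere:
    ///
    /// ```ignore
    /// let op_id = handle.register_agent(&agent, config);
    /// // Wait up to ten seconds for the registration to yield a resource key.
    /// let rss_key = handle
    ///     .get_resource_key(op_id, Some(Duration::from_secs(10)))
    ///     .expect("registration timed out or failed");
    /// ```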
pub fn get_resource_key(
&mut self,
op_id: OpId,
time_out: Option<Duration>,
) -> Result<ResourceIdentifier> {
let start_time = Instant::now();
loop {
match time_out {
Some(time_out) => {
let diff = Instant::now() - start_time;
if diff > time_out {
break Err(HandleError::TimeOut(op_id).into());
}
}
None => {}
}
match self.op_result(op_id) {
Err(Error::OpError(HandleError::AwaitingResponse(_))) => {}
Ok(Some(key)) => return Ok(key),
Err(err) => return Err(err),
Ok(None) => panic!("Wrong OpId provided"),
}
}
}
/// Get all the received messages from a given peer.
pub fn received_messages(&mut self, peer: PeerId) -> Vec<M> {
let mut new_msgs = vec![];
if let Some(mut msgs) = self.msg_buf.get_mut(&peer) {
std::mem::swap(&mut *msgs, &mut new_msgs);
}
new_msgs
}
/// Register an agent with this node. From this point on the node will act as the owner of this
/// agent and handle any communication from/to this agent. Operation is asynchronous.
///
/// This will block the handle until the agent has been correctly registered or an error happens.
pub fn register_agent(&mut self, agent: &simag_core::Agent, config: AgentPolicy) -> OpId {
// TODO: An agent can only be registered with a single node. If you try to register the same agent in
// the same network more than once it will be an error.
let agent_id = AgentId::from(agent.id());
let id = self.next_id();
let msg = HandleCmd::RegisterAgent {
op_id: id,
agent_id,
config,
};
let sender = self.sender.clone();
self.pending.insert(
id,
PendingCmd {
cmd: SentCmd::RegisterAgent,
answ: None,
},
);
GlobalExecutor::spawn(async move {
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
id
}
/// Find an agent in the network and try to open a connection with it. Operation is asynchronous.
pub fn find_agent<ID: Into<AgentId>>(&mut self, agent_id: ID) -> OpId {
let op_id = self.next_id();
let agent_id = agent_id.into();
self.pending.insert(
op_id,
PendingCmd {
cmd: SentCmd::FindAgent,
answ: None,
},
);
let sender = self.sender.clone();
GlobalExecutor::spawn(async move {
let msg = HandleCmd::ConnectToAgent { agent_id, op_id };
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
op_id
}
pub fn send_message_to_agent<ID: Into<AgentId>>(&mut self, agent_id: ID, value: M) -> OpId {
let op_id = self.next_id();
let agent_id = agent_id.into();
self.pending.insert(
op_id,
PendingCmd {
cmd: SentCmd::SentMsg,
answ: None,
},
);
let sender = self.sender.clone();
GlobalExecutor::spawn(async move {
let msg = HandleCmd::<M>::SendMessageToAg {
op_id,
value,
agent_id,
};
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
op_id
}
/// Create a resource of the group kind that belongs to the given agent, which will then be the original
/// owner of the group. Operation is asynchronous.
///
/// Returns the network identifier (key) to the group in case the group has not already been
    /// registered by another agent/peer, or an error otherwise.
pub fn create_group<C, ID>(
&mut self,
group_id: &str,
owners: impl IntoIterator<Item = ID> + Clone,
permits: Option<GroupPermits>,
settings: C,<|fim▁hole|> C: GroupSettings,
{
let op_id = self.next_id();
let (key, mut group) = Resource::group(owners.clone(), group_id, settings);
if let Some(mut permits) = permits {
// ensure that owners are added as readers/writers
owners.into_iter().for_each(|owner| {
permits.write(owner.as_ref());
permits.read(owner.as_ref());
});
group.with_permits(permits);
}
let sender = self.sender.clone();
// FIXME: register op as pending
GlobalExecutor::spawn(async move {
let msg = HandleCmd::RegisterGroup {
op_id,
group_key: key,
group,
};
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
op_id
}
    /// Request joining a given group; returns the network identifier of the group in case
    /// this agent is allowed to join the group. Operation is asynchronous.
///
/// The petitioner must pass the settings used to evaluate if joining is possible when comparing
/// with the settings set by the owners of the group.
///
/// Optionally request a set of group permits, otherwise read permits will be requested.
/// The agent only will be allowed to join if the permits are legal for this agent.
///
/// ## Arguments
/// - agent_id: identifier of the agent making the request.
pub fn join_group<C, AID>(
&mut self,
group_id: &str,
agent_id: AID,
permits: Option<GroupPermits>,
settings: C,
) -> OpId
where
C: GroupSettings,
AID: Into<AgentId>,
{
let op_id = self.next_id();
let agent_id = agent_id.into();
        // unknown owners; this information is to be completed after fetching the initial info
let (group_key, mut group) = Resource::group(Vec::<String>::new(), group_id, settings);
if let Some(permits) = permits {
group.with_permits(permits);
}
let sender = self.sender.clone();
// FIXME: register op as pending
GlobalExecutor::spawn(async move {
let msg = HandleCmd::ReqJoinGroup {
op_id,
group_key,
agent_id,
group,
};
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
op_id
}
/// Operation is asynchronous.
pub fn leave_group(&mut self, group_id: &str, agent_id: &str) -> OpId {
todo!()
}
/// Send a message to the given peer directly. Operation is asynchronous.
pub fn send_message(&mut self, value: M, peer: PeerId) -> OpId {
let op_id = self.next_id();
self.pending.insert(
op_id,
PendingCmd {
cmd: SentCmd::SentMsg,
answ: None,
},
);
let sender = self.sender.clone();
GlobalExecutor::spawn(async move {
let msg = HandleCmd::<M>::SendMessage { op_id, value, peer };
sender.send(msg).await.map_err(|_| ())?;
Ok::<_, ()>(())
});
op_id
}
/// Get this peer id encoded as a Base58 string.
pub fn get_peer_id(&self) -> String {
Self::peer_id_from_ed25519(self.local_key.public()).to_base58()
}
/// Saves this peer secret key to a file in bytes. This file should be kept in a secure location.
pub fn save_secret_key<T: AsRef<std::path::Path>>(&self, path: T) -> Result<()> {
        let encoded_key = self.local_key.encode().to_vec();
        let mut file = File::create(path.as_ref())
            .map_err(|err| crate::Error::from(HandleError::FailedSaving(err)))?;
        file.write_all(encoded_key.as_slice())
.map_err(|err| HandleError::FailedSaving(err).into())
}
/// Returns whether the network connection is running or has shutdown.
pub fn running(&mut self) -> bool {
if self.network_is_dead {
return false;
}
self.sender
.try_send(HandleCmd::IsRunning)
.map(|_| true)
.unwrap_or_else(|err| match err {
TrySendError::Full(_) => true,
TrySendError::Closed(_) => {
self.network_is_dead = true;
false
}
})
}
    /// Commands the network to shut down; returns immediately if the network has shut down
    /// or had already disconnected for any reason, and blocks otherwise.
    ///
    /// The network may not shut down immediately if it is still processing commands.
pub fn shutdown(&mut self) -> Result<bool> {
if self.network_is_dead {
// was previously marked as dead
return Err(HandleError::Disconnected.into());
}
if self.has_shutdown() {
return Ok(true);
}
let msg_id = self.next_id();
let msg = HandleCmd::Shutdown(msg_id);
match self.sender.blocking_send(msg) {
Ok(()) => {}
Err(_err) => {
self.network_is_dead = true;
return Err(HandleError::Disconnected.into());
}
}
Ok(self.has_shutdown())
}
fn has_shutdown(&mut self) -> bool {
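        // Drain completed `Shutdown` commands from the pending map; if any of
        // them reported success, the background connection is considered dead.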
let shutdown_cmds: Vec<_> = self
.pending
.iter()
.filter_map(|p| {
if (*p).shutdown() {
Some(*p.key())
} else {
None
}
})
.collect();
let mut has_shutdown = false;
for k in shutdown_cmds {
if let Some((
_,
PendingCmd {
cmd: SentCmd::ShutDown,
answ: Some(HandleAnsw::HasShutdown { answ, .. }),
},
)) = self.pending.remove(&k)
{
if answ {
                    // one of the sent shutdown commands was successful, so the connection is now dead
self.network_is_dead = true;
has_shutdown = true;
}
}
}
has_shutdown
}
async fn process_answer_buffer(
mut rcv: Receiver<StdResult<HandleAnsw<M>, NetworkError>>,
pending: Arc<DashMap<OpId, PendingCmd<M>>>,
msg_buf: Arc<DashMap<PeerId, Vec<M>>>,
net_stats: Arc<Mutex<Vec<(PeerId, usize)>>>,
) -> Result<()> {
while let Some(answ_res) = rcv.recv().await {
let msg = answ_res?;
match msg {
HandleAnsw::HasShutdown { op_id: id, answ } => {
pending.entry(id).and_modify(|e| {
e.answ = Some(HandleAnsw::HasShutdown { op_id: id, answ });
});
}
HandleAnsw::RcvMsg { peer, msg } => {
log::debug!("Received streaming msg from {}: {:?}", peer, msg);
msg_buf.entry(peer).or_default().push(msg);
let stats = &mut *net_stats.lock().unwrap();
if let Ok(idx) = stats.binary_search_by_key(&peer, |&(p, _)| p) {
stats[idx].1 += 1;
} else {
stats.push((peer, 1));
stats.sort_by_key(|e| e.0);
}
}
HandleAnsw::AgentRegistered { op_id, rss_key } => {
log::info!("Registered {} with {:?}", rss_key, op_id);
pending.entry(op_id).and_modify(|e| {
e.answ = Some(HandleAnsw::AgentRegistered { op_id, rss_key });
});
}
HandleAnsw::PropagateGroupChange { op_id, group_id } => todo!(),
HandleAnsw::ReqJoinGroupAccepted { op_id, group_id } => {
pending.entry(op_id).and_modify(|e| {
e.answ = Some(HandleAnsw::ReqJoinGroupAccepted { op_id, group_id });
});
}
HandleAnsw::ReqJoinGroupDenied {
op_id,
group_id,
reason,
} => {
pending.entry(op_id).and_modify(|e| {
e.answ = Some(HandleAnsw::ReqJoinGroupDenied {
op_id,
group_id,
reason,
});
});
}
HandleAnsw::AgentFound {
op_id,
rss_key: key,
} => {
pending.alter(&op_id, |_, mut e| {
e.answ = Some(HandleAnsw::AgentFound {
op_id,
rss_key: key,
});
e
});
}
HandleAnsw::AwaitingRegistration { .. } => {}
}
}
// all sending halves were closed, meaning that the network connection has been dropped
// communicate back to the main thread
pending.insert(
OpId(usize::MAX),
PendingCmd {
cmd: SentCmd::ShutDown,
answ: Some(HandleAnsw::HasShutdown {
op_id: OpId(usize::MAX),
answ: true,
}),
},
);
Err(HandleError::Disconnected.into())
}
fn next_id(&mut self) -> OpId {
let msg_id = self.next_msg_id;
self.next_msg_id += 1;
OpId(msg_id)
}
fn peer_id_from_ed25519(key: identity::ed25519::PublicKey) -> PeerId {
PeerId::from_public_key(identity::PublicKey::Ed25519(key))
}
}
pub struct NetworkStats {
key_stats: HashMap<ResourceIdentifier, KeyStats>,
msg_stats: Arc<Mutex<Vec<(PeerId, usize)>>>,
}
impl NetworkStats {
fn new(msg_stats: Arc<Mutex<Vec<(PeerId, usize)>>>) -> Self {
NetworkStats {
key_stats: HashMap::new(),
msg_stats,
}
}
pub fn for_key(&self, key: &ResourceIdentifier) -> Option<&KeyStats> {
self.key_stats.get(key)
}
pub fn received_messages(&self) -> Vec<(PeerId, usize)> {
let stats = self.msg_stats.lock().unwrap();
stats.clone()
}
}
#[derive(Default)]
pub struct KeyStats {
pub times_served: usize,
pub times_received: usize,
}
struct PendingCmd<M>
where
M: DeserializeOwned,
{
cmd: SentCmd,
answ: Option<HandleAnsw<M>>,
}
impl<M: DeserializeOwned> PendingCmd<M> {
fn shutdown(&self) -> bool {
match self.cmd {
SentCmd::ShutDown => true,
_ => false,
}
}
}
enum SentCmd {
AddedKey(Vec<u8>),
PullContent(Vec<u8>),
ShutDown,
SentMsg,
RegisterAgent,
FindAgent,
}
#[derive(Debug)]
pub(crate) enum HandleCmd<M> {
/// query the network about current status
IsRunning,
/// put a resource in this node
RegisterGroup {
op_id: OpId,
group_key: ResourceIdentifier,
group: Group,
},
/// issue a shutdown command
Shutdown(OpId),
/// send a serialized message
/// M should be heap allocated ideally to avoid large enum sizes
SendMessage { op_id: OpId, value: M, peer: PeerId },
/// register a peer as the manager of an agent
RegisterAgent {
op_id: OpId,
agent_id: AgentId,
config: AgentPolicy,
},
/// try to open a channel to the specified agent
ConnectToAgent { agent_id: AgentId, op_id: OpId },
/// enroute a message to a given agent
SendMessageToAg {
agent_id: AgentId,
value: M,
op_id: OpId,
},
/// instruct the network handler to send a request
/// for an agent joining a group
ReqJoinGroup {
op_id: OpId,
group_key: ResourceIdentifier,
agent_id: AgentId,
group: Group,
// state: RequestState,
},
/// No manager was found in the local cache. Awaiting peer information for managers
/// so a connection attempt can be initialized.
FindGroupManager {
op_id: OpId,
group_key: ResourceIdentifier,
agent_id: AgentId,
group: Group,
},
/// Awaiting publish confirmation by the kad DHT of a rss.
AwaitingRegistration {
op_id: OpId,
rss_key: ResourceIdentifier,
resource: Resource,
},
}
impl<M: DeserializeOwned> HandleCmd<M> {
fn get_op_id(&self) -> OpId {
use self::HandleCmd::*;
match self {
IsRunning => OpId::IS_RUNNING,
RegisterGroup { op_id, .. } => *op_id,
Shutdown(op_id) => *op_id,
SendMessage { op_id, .. } => *op_id,
RegisterAgent { op_id, .. } => *op_id,
ConnectToAgent { op_id, .. } => *op_id,
SendMessageToAg { op_id, .. } => *op_id,
ReqJoinGroup { op_id, .. } => *op_id,
FindGroupManager { op_id, .. } => *op_id,
AwaitingRegistration { op_id, .. } => *op_id,
}
}
}
impl<M: DeserializeOwned> PartialEq for HandleCmd<M> {
fn eq(&self, other: &Self) -> bool {
self.get_op_id() == other.get_op_id()
}
}
impl<M: DeserializeOwned> Eq for HandleCmd<M> {}
impl<M: DeserializeOwned> PartialOrd for HandleCmd<M> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.get_op_id().partial_cmp(&other.get_op_id())
}
}
impl<M: DeserializeOwned> Ord for HandleCmd<M> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.get_op_id().cmp(&other.get_op_id())
}
}
/// The answer of an operation.
#[derive(Clone, Debug)]
pub(crate) enum HandleAnsw<M>
where
M: DeserializeOwned,
{
AgentRegistered {
op_id: OpId,
rss_key: ResourceIdentifier,
},
HasShutdown {
op_id: OpId,
answ: bool,
},
/// Propagate a change in the configuration or composition of a group.
PropagateGroupChange {
op_id: OpId,
group_id: Uuid,
},
ReqJoinGroupAccepted {
op_id: OpId,
group_id: Uuid,
},
ReqJoinGroupDenied {
op_id: OpId,
group_id: Uuid,
reason: crate::group::GroupError,
},
/// The operation id of received messages is always considered OpId::RCV_MSG.
RcvMsg {
peer: PeerId,
msg: M,
},
AgentFound {
op_id: OpId,
rss_key: ResourceIdentifier,
},
AwaitingRegistration {
op_id: OpId,
qid: QueryId,
},
}
impl<M: DeserializeOwned> HandleAnsw<M> {
fn get_op_id(&self) -> OpId {
use self::HandleAnsw::*;
match self {
AgentRegistered { op_id, .. } => *op_id,
HasShutdown { op_id, .. } => *op_id,
PropagateGroupChange { op_id, .. } => *op_id,
ReqJoinGroupAccepted { op_id, .. } => *op_id,
ReqJoinGroupDenied { op_id, .. } => *op_id,
RcvMsg { .. } => OpId::RCV_MSG,
AgentFound { op_id, .. } => *op_id,
AwaitingRegistration { op_id, .. } => *op_id,
}
}
}
impl<M: DeserializeOwned> PartialEq for HandleAnsw<M> {
fn eq(&self, other: &Self) -> bool {
self.get_op_id() == other.get_op_id()
}
}
impl<M: DeserializeOwned> Eq for HandleAnsw<M> {}
impl<M: DeserializeOwned> PartialOrd for HandleAnsw<M> {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.get_op_id().partial_cmp(&other.get_op_id())
}
}
impl<M: DeserializeOwned> Ord for HandleAnsw<M> {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.get_op_id().cmp(&other.get_op_id())
}
}
#[derive(thiserror::Error, Debug)]
pub enum HandleError {
#[error("awaiting a response for op `{0}`")]
AwaitingResponse(OpId),
#[error("unexpected disconnect")]
Disconnected,
#[error("failed saving secret key")]
FailedSaving(#[from] std::io::Error),
#[error("handle not available")]
HandleNotResponding,
#[error("irrecoverable error in the network")]
IrrecoverableError,
#[error("network op execution failed")]
OpFailed,
#[error("operation `{0}` not found")]
OpNotFound(OpId),
#[error("manager not found for op `{0}`")]
ManagerNotFound(OpId),
#[error("unexpected response")]
UnexpectedResponse(OpId),
#[error("op `{0}` timed out")]
TimeOut(OpId),
}
/// An identifier for an operation executed asynchronously. Used to fetch the results
/// of such operation in an asynchronous fashion.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct OpId(usize);
impl OpId {
const RCV_MSG: OpId = OpId(0);
const IS_RUNNING: OpId = OpId(0);
}
impl Display for OpId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)?;
Ok(())
}
}<|fim▁end|> | ) -> OpId
where
ID: AsRef<str>, |
<|file_name|>gen_table.rs<|end_file_name|><|fim▁begin|>/*
* This file is part of the uutils coreutils package.
*
* (c) Arcterus <[email protected]>
* (c) Michael Gehring <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
use std::io;
static CRC_TABLE_LEN: uint = 256;
<|fim▁hole|> for num in range(0, CRC_TABLE_LEN) {
table.push(crc_entry(num as u8) as u32);
}
let mut file = io::File::open_mode(&Path::new("crc_table.rs"), io::Truncate, io::Write).unwrap();
let output = format!("/* auto-generated (DO NOT EDIT) */
pub static CRC_TABLE: [u32, ..{}] = {};", CRC_TABLE_LEN, table);
file.write_line(output.as_slice()).unwrap();
}
#[inline]
fn crc_entry(input: u8) -> u32 {
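    // One table entry: eight rounds of the bitwise CRC-32 step with the
    // (non-reflected) polynomial 0x04c11db7, as used by the cksum utility.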
let mut crc = input as u32 << 24;
for _ in range(0u, 8) {
if crc & 0x80000000 != 0 {
crc <<= 1;
crc ^= 0x04c11db7;
} else {
crc <<= 1;
}
}
crc
}<|fim▁end|> | fn main() {
let mut table = Vec::with_capacity(CRC_TABLE_LEN); |
<|file_name|>common_words.py<|end_file_name|><|fim▁begin|>#[email protected]
def common_words(first, second):
dd = set()
for s in first.split(","): dd.add(s)
return ",".join(sorted([w for w in second.split(",") if w in dd]))
<|fim▁hole|> assert common_words("one,two,three", "four,five,six") == "", "Too different"
assert common_words("one,two,three", "four,five,one,two,six,three") == "one,three,two", "1 2 3"
print("Coding complete? Click 'Check' to review your tests and earn cool rewards!")<|fim▁end|> |
if __name__ == '__main__':
# These "asserts" using only for self-checking and not necessary for auto-testing
assert common_words("hello,world", "hello,earth") == "hello", "Hello" |
<|file_name|>platform.js<|end_file_name|><|fim▁begin|>/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*<|fim▁hole|> * software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
module.exports = {
id: 'windows',
bootstrap:function() {
var cordova = require('cordova'),
exec = require('cordova/exec'),
channel = cordova.require('cordova/channel'),
modulemapper = require('cordova/modulemapper');
modulemapper.clobbers('cordova/exec/proxy', 'cordova.commandProxy');
channel.onNativeReady.fire();
var onWinJSReady = function () {
var app = WinJS.Application;
var checkpointHandler = function checkpointHandler() {
cordova.fireDocumentEvent('pause',null,true);
};
var resumingHandler = function resumingHandler() {
cordova.fireDocumentEvent('resume',null,true);
};
app.addEventListener("checkpoint", checkpointHandler);
Windows.UI.WebUI.WebUIApplication.addEventListener("resuming", resumingHandler, false);
app.start();
};
if (!window.WinJS) {
var scriptElem = document.createElement("script");
if (navigator.appVersion.indexOf('MSAppHost/3.0') !== -1) {
// Windows 10 UWP
scriptElem.src = '/WinJS/js/base.js';
} else if (navigator.appVersion.indexOf("Windows Phone 8.1;") !== -1) {
// windows phone 8.1 + Mobile IE 11
scriptElem.src = "//Microsoft.Phone.WinJS.2.1/js/base.js";
} else if (navigator.appVersion.indexOf("MSAppHost/2.0;") !== -1) {
// windows 8.1 + IE 11
scriptElem.src = "//Microsoft.WinJS.2.0/js/base.js";
}
scriptElem.addEventListener("load", onWinJSReady);
document.head.appendChild(scriptElem);
}
else {
onWinJSReady();
}
}
};<|fim▁end|> | * Unless required by applicable law or agreed to in writing, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from django.utils.datastructures import SortedDict
from bencode import bencode, bdecode
def sort_dict(D):
result = SortedDict()<|fim▁hole|> D[key] = sort_dict(D[key])
result[key] = D[key]
return result<|fim▁end|> | for key in sorted(D.keys()):
if type(D[key]) is dict: |
<|file_name|>kleroterion.js<|end_file_name|><|fim▁begin|>var Court = artifacts.require("./Court.sol");
<|fim▁hole|>})<|fim▁end|> | contract('Court', (accounts) => {
it("test kleroterion", () => {}) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
class Company(object):
def __init__(self, name=None, code=None, phone=None, digit=None):
# Company's name
self.name = name
# Codename
self.code = code
# The digit of the invoice number<|fim▁hole|> self.digit = digit
# Phone number of the service center
self.phone = phone
def __repr__(self):
return '[%s] %s (%s)' % (
self.code,
self.name,
self.phone
)
class Track(object):
def __init__(self, time=None, location=None, status=None,
phone1=None, phone2=None):
# Time
self.time = time
# Location
self.location = location
# Status
self.status = status
# Phone number 1
self.phone1 = phone1
# Phone number 2
self.phone2 = phone2
def __repr__(self):
return '[%s] %s - %s / %s / %s' % (
self.time,
self.status,
self.location,
self.phone1,
self.phone2
)
class Tracker(object):
def __init__(self):
self._tracks = []
@property
def tracks(self):
return self._tracks
def add_track(self, new_track):
if not isinstance(new_track, Track):
raise TypeError('The new_track must be Track!')
self._tracks.append(new_track)
def track_by_status(self, status):
"""
Find the tracking information matching the status
:param str status: The status to find the tracking information
:return: The tracking information matching the status
"""
tracks = list(filter(lambda x: x.status == status, self._tracks))
if len(tracks) > 0:
return tracks[-1]
raise LookupError("Can't find the track by status %s" % status)
def __iter__(self):
return iter(self._tracks)
class Parcel(object):
def __init__(self, sender=None, receiver=None, invoice_number=None,
address=None, note=None):
# The sender's name
self.sender = sender
# The receiver's name
self.receiver = receiver
# Invoice number
self.invoice_number = invoice_number
# The receiver's address
self.address = address
# Note for the parcel
self.note = note
def __repr__(self):
return '[%s] From: %s, To: %s, %s' % (
self.invoice_number,
self.sender,
self.receiver,
self.note
)<|fim▁end|> | if digit is None:
digit = [] |
<|file_name|>test_smooth_l1_loss_op.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>
import unittest
import numpy as np
from op_test import OpTest
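# The helper below evaluates the smooth L1 (Huber-style) loss for a single
# difference value, with sigma2 = sigma ** 2:
#   0.5 * sigma2 * val ** 2    if |val| < 1 / sigma2
#   |val| - 0.5 / sigma2       otherwise
# For example, with sigma = 3.0 (sigma2 = 9.0) and val = 0.05 the first branch
# applies: 0.5 * 9.0 * 0.05 ** 2 = 0.01125.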
def smooth_l1_loss_forward(val, sigma2):
abs_val = abs(val)
if abs_val < 1.0 / sigma2:
return 0.5 * val * val * sigma2
else:
return abs_val - 0.5 / sigma2
class TestSmoothL1LossOp1(OpTest):
def setUp(self):
self.op_type = "smooth_l1_loss"
dims = (5, 10)
self.inputs = {
'X': np.random.random(dims).astype("float32"),
'Y': np.random.random(dims).astype("float32")
}
sigma = 3.0
self.attrs = {'sigma': sigma}
sigma2 = sigma * sigma
diff = self.inputs['X'] - self.inputs['Y']
loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2).sum(1)
loss = loss.reshape((dims[0], 1))
self.outputs = {
'Diff': diff.astype('float32'),
'Out': loss.astype('float32')
}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.02)
def test_check_grad_ingore_x(self):
self.check_grad(
['Y'], 'Out', max_relative_error=0.03, no_grad_set=set("X"))
def test_check_grad_ingore_y(self):
self.check_grad(
['X'], 'Out', max_relative_error=0.03, no_grad_set=set('Y'))
class TestSmoothL1LossOp2(OpTest):
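    # Same as TestSmoothL1LossOp1, but also exercises the optional InsideWeight
    # (applied to the elementwise difference) and OutsideWeight (applied to the
    # per-element loss) inputs.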
def setUp(self):
self.op_type = "smooth_l1_loss"
dims = (5, 10)
self.inputs = {
'X': np.random.random(dims).astype("float32"),
'Y': np.random.random(dims).astype("float32"),
'InsideWeight': np.random.random(dims).astype("float32"),
'OutsideWeight': np.random.random(dims).astype("float32")
}
sigma = 3.0
self.attrs = {'sigma': sigma}
sigma2 = sigma * sigma
diff = self.inputs['X'] - self.inputs['Y']
diff = diff * self.inputs['InsideWeight']
loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2)
loss = loss * self.inputs['OutsideWeight']
loss = loss.sum(1).reshape((dims[0], 1))
self.outputs = {
'Diff': diff.astype('float32'),
'Out': loss.astype('float32')
}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.03)
def test_check_grad_ingore_x(self):
self.check_grad(
['Y'],
'Out',
max_relative_error=0.03,
no_grad_set=set(['X', 'InsideWeight', 'OutsideWeight']))
def test_check_grad_ingore_y(self):
self.check_grad(
['X'],
'Out',
max_relative_error=0.03,
no_grad_set=set(['Y', 'InsideWeight', 'OutsideWeight']))
if __name__ == '__main__':
unittest.main()<|fim▁end|> | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>DEPS = [
'depot_tools',
'gclient',
'gerrit',
'gitiles',
'recipe_engine/buildbucket',
'recipe_engine/context',
'recipe_engine/commit_position',
'recipe_engine/cq',
'recipe_engine/json',
'recipe_engine/path',<|fim▁hole|> 'recipe_engine/runtime',
'recipe_engine/source_manifest',
'recipe_engine/step',
'tryserver',
]
from recipe_engine.recipe_api import Property
from recipe_engine.config import ConfigGroup, Single
PROPERTIES = {
# Gerrit patches will have all properties about them prefixed with patch_.
'deps_revision_overrides': Property(default={}),
'fail_patch': Property(default=None, kind=str),
'$depot_tools/bot_update': Property(
help='Properties specific to bot_update module.',
param_name='properties',
kind=ConfigGroup(
# Whether we should do the patching in gclient instead of bot_update
apply_patch_on_gclient=Single(bool),
),
default={},
),
}<|fim▁end|> | 'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io', |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from __future__ import unicode_literals
"""
LANG_INFO is a dictionary structure to provide meta information about languages.
About name_local: capitalize it as if your language name was appearing
inside a sentence in your language.
The 'fallback' key can be used to specify a special fallback logic which doesn't
follow the traditional 'fr-ca' -> 'fr' fallback logic.
"""
LANG_INFO = {
'af': {
'bidi': False,
'code': 'af',
'name': 'Afrikaans',
'name_local': 'Afrikaans',
},
'ar': {
'bidi': True,
'code': 'ar',
'name': 'Arabic',
'name_local': 'العربيّة',
},
'ast': {
'bidi': False,
'code': 'ast',
'name': 'Asturian',
'name_local': 'asturianu',
},
'az': {
'bidi': True,
'code': 'az',
'name': 'Azerbaijani',
'name_local': 'Azərbaycanca',
},
'be': {
'bidi': False,
'code': 'be',
'name': 'Belarusian',
'name_local': 'беларуская',
},
'bg': {
'bidi': False,
'code': 'bg',
'name': 'Bulgarian',
'name_local': 'български',
},
'bn': {
'bidi': False,
'code': 'bn',
'name': 'Bengali',
'name_local': 'বাংলা',
},
'br': {
'bidi': False,
'code': 'br',
'name': 'Breton',
'name_local': 'brezhoneg',
},
'bs': {
'bidi': False,
'code': 'bs',
'name': 'Bosnian',
'name_local': 'bosanski',
},
'ca': {
'bidi': False,
'code': 'ca',
'name': 'Catalan',
'name_local': 'català',
},
'cs': {
'bidi': False,
'code': 'cs',
'name': 'Czech',
'name_local': 'česky',
},
'cy': {
'bidi': False,
'code': 'cy',
'name': 'Welsh',
'name_local': 'Cymraeg',
},
'da': {
'bidi': False,
'code': 'da',
'name': 'Danish',
'name_local': 'dansk',
},
'de': {
'bidi': False,
'code': 'de',
'name': 'German',
'name_local': 'Deutsch',
},
'el': {
'bidi': False,
'code': 'el',
'name': 'Greek',
'name_local': 'Ελληνικά',
},
'en': {
'bidi': False,
'code': 'en',
'name': 'English',
'name_local': 'English',
},
'en-au': {
'bidi': False,
'code': 'en-au',
'name': 'Australian English',
'name_local': 'Australian English',
},
'en-gb': {
'bidi': False,
'code': 'en-gb',
'name': 'British English',
'name_local': 'British English',
},
'eo': {
'bidi': False,
'code': 'eo',
'name': 'Esperanto',
'name_local': 'Esperanto',
},
'es': {
'bidi': False,
'code': 'es',
'name': 'Spanish',
'name_local': 'español',
},
'es-ar': {
'bidi': False,
'code': 'es-ar',
'name': 'Argentinian Spanish',
'name_local': 'español de Argentina',
},
'es-co': {
'bidi': False,
'code': 'es-co',
'name': 'Colombian Spanish',
'name_local': 'español de Colombia',
},
'es-mx': {
'bidi': False,
'code': 'es-mx',
'name': 'Mexican Spanish',
'name_local': 'español de Mexico',
},
'es-ni': {
'bidi': False,
'code': 'es-ni',
'name': 'Nicaraguan Spanish',
'name_local': 'español de Nicaragua',
},
'es-ve': {
'bidi': False,
'code': 'es-ve',
'name': 'Venezuelan Spanish',
'name_local': 'español de Venezuela',
},
'et': {
'bidi': False,
'code': 'et',
'name': 'Estonian',
'name_local': 'eesti',
},
'eu': {
'bidi': False,
'code': 'eu',
'name': 'Basque',
'name_local': 'Basque',
},
'fa': {
'bidi': True,
'code': 'fa',
'name': 'Persian',
'name_local': 'فارسی',
},
'fi': {
'bidi': False,
'code': 'fi',
'name': 'Finnish',
'name_local': 'suomi',
},
'fr': {
'bidi': False,
'code': 'fr',
'name': 'French',
'name_local': 'français',
},
'fy': {
'bidi': False,
'code': 'fy',
'name': 'Frisian',
'name_local': 'frysk',
},
'ga': {
'bidi': False,
'code': 'ga',
'name': 'Irish',
'name_local': 'Gaeilge',
},
'gd': {
'bidi': False,
'code': 'gd',
'name': 'Scottish Gaelic',
'name_local': 'Gàidhlig',
},
'gl': {
'bidi': False,
'code': 'gl',
'name': 'Galician',
'name_local': 'galego',
},
'he': {
'bidi': True,
'code': 'he',
'name': 'Hebrew',
'name_local': 'עברית',
},
'hi': {
'bidi': False,
'code': 'hi',
'name': 'Hindi',
'name_local': 'Hindi',
},
'hr': {
'bidi': False,
'code': 'hr',
'name': 'Croatian',
'name_local': 'Hrvatski',
},
'hu': {
'bidi': False,
'code': 'hu',
'name': 'Hungarian',
'name_local': 'Magyar',
},
'ia': {
'bidi': False,
'code': 'ia',
'name': 'Interlingua',
'name_local': 'Interlingua',
},
'io': {
'bidi': False,
'code': 'io',
'name': 'Ido',
'name_local': 'ido',
},
'id': {
'bidi': False,
'code': 'id',
'name': 'Indonesian',
'name_local': 'Bahasa Indonesia',
},
'is': {
'bidi': False,
'code': 'is',
'name': 'Icelandic',
'name_local': 'Íslenska',
},
'it': {
'bidi': False,
'code': 'it',
'name': 'Italian',
'name_local': 'italiano',
},
'ja': {
'bidi': False,
'code': 'ja',
'name': 'Japanese',
'name_local': '日本語',
},
'ka': {
'bidi': False,
'code': 'ka',
'name': 'Georgian',
'name_local': 'ქართული',
},
'kk': {
'bidi': False,
'code': 'kk',
'name': 'Kazakh',
'name_local': 'Қазақ',
},
'km': {
'bidi': False,
'code': 'km',
'name': 'Khmer',
'name_local': 'Khmer',
},
'kn': {
'bidi': False,
'code': 'kn',
'name': 'Kannada',
'name_local': 'Kannada',
},
'ko': {
'bidi': False,
'code': 'ko',
'name': 'Korean',
'name_local': '한국어',
},
'lb': {
'bidi': False,
'code': 'lb',
'name': 'Luxembourgish',
'name_local': 'Lëtzebuergesch',
},
'lt': {
'bidi': False,
'code': 'lt',
'name': 'Lithuanian',
'name_local': 'Lietuviškai',
},
'lv': {
'bidi': False,
'code': 'lv',
'name': 'Latvian',
'name_local': 'latviešu',
},
'mk': {
'bidi': False,
'code': 'mk',
'name': 'Macedonian',
'name_local': 'Македонски',
},
'ml': {
'bidi': False,
'code': 'ml',
'name': 'Malayalam',
'name_local': 'Malayalam',
},
'mn': {
'bidi': False,
'code': 'mn',
'name': 'Mongolian',
'name_local': 'Mongolian',
},
'mr': {
'bidi': False,
'code': 'mr',
'name': 'Marathi',
'name_local': 'मराठी',
},
'my': {
'bidi': False,
'code': 'my',
'name': 'Burmese',
'name_local': 'မြန်မာဘာသာ',
},
'nb': {
'bidi': False,
'code': 'nb',
'name': 'Norwegian Bokmal',
'name_local': 'norsk (bokmål)',
},
'ne': {
'bidi': False,
'code': 'ne',
'name': 'Nepali',
'name_local': 'नेपाली',
},
'nl': {
'bidi': False,
'code': 'nl',
'name': 'Dutch',
'name_local': 'Nederlands',
},
'nn': {
'bidi': False,
'code': 'nn',
'name': 'Norwegian Nynorsk',
'name_local': 'norsk (nynorsk)',
},
'no': {
'bidi': False,
'code': 'no',
'name': 'Norwegian',
'name_local': 'norsk',
},
'os': {
'bidi': False,
'code': 'os',
'name': 'Ossetic',
'name_local': 'Ирон',
},
'pa': {
'bidi': False,
'code': 'pa',
'name': 'Punjabi',
'name_local': 'Punjabi',
},
'pl': {
'bidi': False,
'code': 'pl',
'name': 'Polish',
'name_local': 'polski',
},
'pt': {
'bidi': False,
'code': 'pt',
'name': 'Portuguese',
'name_local': 'Português',
},
'pt-br': {
'bidi': False,
'code': 'pt-br',
'name': 'Brazilian Portuguese',
'name_local': 'Português Brasileiro',
},
'ro': {
'bidi': False,
'code': 'ro',
'name': 'Romanian',
'name_local': 'Română',
},
'ru': {
'bidi': False,
'code': 'ru',
'name': 'Russian',
'name_local': 'Русский',
},
'sk': {
'bidi': False,
'code': 'sk',
'name': 'Slovak',
'name_local': 'Slovensky',
},
'sl': {
'bidi': False,
'code': 'sl',
'name': 'Slovenian',
'name_local': 'Slovenščina',
},
'sq': {
'bidi': False,
'code': 'sq',
'name': 'Albanian',
'name_local': 'shqip',
},
'sr': {
'bidi': False,
'code': 'sr',
'name': 'Serbian',
'name_local': 'српски',
},
'sr-latn': {
'bidi': False,
'code': 'sr-latn',
<|fim▁hole|> 'bidi': False,
'code': 'sv',
'name': 'Swedish',
'name_local': 'svenska',
},
'sw': {
'bidi': False,
'code': 'sw',
'name': 'Swahili',
'name_local': 'Kiswahili',
},
'ta': {
'bidi': False,
'code': 'ta',
'name': 'Tamil',
'name_local': 'தமிழ்',
},
'te': {
'bidi': False,
'code': 'te',
'name': 'Telugu',
'name_local': 'తెలుగు',
},
'th': {
'bidi': False,
'code': 'th',
'name': 'Thai',
'name_local': 'ภาษาไทย',
},
'tr': {
'bidi': False,
'code': 'tr',
'name': 'Turkish',
'name_local': 'Türkçe',
},
'tt': {
'bidi': False,
'code': 'tt',
'name': 'Tatar',
'name_local': 'Татарча',
},
'udm': {
'bidi': False,
'code': 'udm',
'name': 'Udmurt',
'name_local': 'Удмурт',
},
'uk': {
'bidi': False,
'code': 'uk',
'name': 'Ukrainian',
'name_local': 'Українська',
},
'ur': {
'bidi': True,
'code': 'ur',
'name': 'Urdu',
'name_local': 'اردو',
},
'vi': {
'bidi': False,
'code': 'vi',
'name': 'Vietnamese',
'name_local': 'Tiếng Việt',
},
'zh-cn': {
'fallback': ['zh-hans'],
},
'zh-hans': {
'bidi': False,
'code': 'zh-hans',
'name': 'Simplified Chinese',
'name_local': '简体中文',
},
'zh-hant': {
'bidi': False,
'code': 'zh-hant',
'name': 'Traditional Chinese',
'name_local': '繁體中文',
},
'zh-hk': {
'fallback': ['zh-hant'],
},
'zh-mo': {
'fallback': ['zh-hant'],
},
'zh-my': {
'fallback': ['zh-hans'],
},
'zh-sg': {
'fallback': ['zh-hans'],
},
'zh-tw': {
'fallback': ['zh-hant'],
},
}<|fim▁end|> | 'name': 'Serbian Latin',
'name_local': 'srpski (latinica)',
},
'sv': {
|
<|file_name|>home_action.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for moving the workspace to the user's home directory.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from os.path import expanduser
from enthought.traits.api import Bool, Instance
from enthought.pyface.api import ImageResource
from enthought.pyface.action.api import Action
from enthought.envisage.ui.workbench.workbench_window import WorkbenchWindow
from puddle.resource.resource_view import RESOURCE_VIEW
from common import IMAGE_LOCATION
#------------------------------------------------------------------------------
# "HomeAction" class:
#------------------------------------------------------------------------------
<|fim▁hole|> """
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
# A longer description of the action:
description = "Move workspace to the user's home directory"
# The action"s name (displayed on menus/tool bar tools etc):
name = "&Home"
# A short description of the action used for tooltip text etc:
tooltip = "Open home directory"
# Keyboard accelerator:
accelerator = "Alt+Home"
# The action's image (displayed on tool bar tools etc):
image = ImageResource("home_folder", search_path=[IMAGE_LOCATION])
#--------------------------------------------------------------------------
# "UpAction" interface:
#--------------------------------------------------------------------------
window = Instance(WorkbenchWindow)
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
def perform(self, event):
""" Perform the action.
"""
# Note that we always offer the service via its name, but look it up
# via the actual protocol.
from puddle.resource.i_workspace import IWorkspace
workspace = self.window.application.get_service(IWorkspace)
workspace.path = expanduser("~")
view = self.window.get_view_by_id(RESOURCE_VIEW)
if view is not None:
view.tree_viewer.refresh(workspace)
# EOF -------------------------------------------------------------------------<|fim▁end|> | class HomeAction(Action):
""" An action for moving the workspace to the user's home directory. |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""Define and instantiate the configuration class for Robottelo."""
import logging
import os
import sys
from logging import config
from nailgun import entities, entity_mixins
from nailgun.config import ServerConfig
from robottelo.config import casts
from six.moves.urllib.parse import urlunsplit, urljoin
from six.moves.configparser import (
NoOptionError,
NoSectionError,
ConfigParser
)
LOGGER = logging.getLogger(__name__)
SETTINGS_FILE_NAME = 'robottelo.properties'
class ImproperlyConfigured(Exception):
"""Indicates that Robottelo somehow is improperly configured.
For example, if settings file can not be found or some required
configuration is not defined.
"""
def get_project_root():
"""Return the path to the Robottelo project root directory.
:return: A directory path.
:rtype: str
"""
return os.path.realpath(os.path.join(
os.path.dirname(__file__),
os.pardir,
os.pardir,
))
class INIReader(object):
"""ConfigParser wrapper able to cast value when reading INI options."""
# Helper casters
cast_boolean = casts.Boolean()
cast_dict = casts.Dict()
cast_list = casts.List()
cast_logging_level = casts.LoggingLevel()
cast_tuple = casts.Tuple()
cast_webdriver_desired_capabilities = casts.WebdriverDesiredCapabilities()
def __init__(self, path):
self.config_parser = ConfigParser()
with open(path) as handler:
if sys.version_info[0] < 3:
# ConfigParser.readfp is deprecated on Python3, read_file
# replaces it
self.config_parser.readfp(handler)
else:
self.config_parser.read_file(handler)
def get(self, section, option, default=None, cast=None):
"""Read an option from a section of a INI file.
The default value will return if the look up option is not available.
The value will be cast using a callable if specified otherwise a string
will be returned.
:param section: Section to look for.
:param option: Option to look for.
:param default: The value that should be used if the option is not
defined.
:param cast: If provided the value will be cast using the cast
provided.
"""
try:
value = self.config_parser.get(section, option)
if cast is not None:
if cast is bool:
value = self.cast_boolean(value)
elif cast is dict:
value = self.cast_dict(value)
elif cast is list:
value = self.cast_list(value)
elif cast is tuple:
value = self.cast_tuple(value)
else:
value = cast(value)<|fim▁hole|>
def has_section(self, section):
"""Check if section is available."""
return self.config_parser.has_section(section)
class FeatureSettings(object):
"""Settings related to a feature.
    Create an instance of this class and assign attributes to map to the feature
options.
"""
def read(self, reader):
"""Subclasses must implement this method in order to populate itself
with expected settings values.
:param reader: An INIReader instance to read the settings.
"""
raise NotImplementedError('Subclasses must implement read method.')
def validate(self):
"""Subclasses must implement this method in order to validade the
settings and raise ``ImproperlyConfigured`` if any issue is found.
"""
raise NotImplementedError('Subclasses must implement validate method.')
class ServerSettings(FeatureSettings):
"""Satellite server settings definitions."""
def __init__(self, *args, **kwargs):
super(ServerSettings, self).__init__(*args, **kwargs)
self.admin_password = None
self.admin_username = None
self.hostname = None
self.port = None
self.scheme = None
self.ssh_key = None
self.ssh_password = None
self.ssh_username = None
def read(self, reader):
"""Read and validate Satellite server settings."""
self.admin_password = reader.get(
'server', 'admin_password', 'changeme')
self.admin_username = reader.get(
'server', 'admin_username', 'admin')
self.hostname = reader.get('server', 'hostname')
self.port = reader.get('server', 'port', cast=int)
self.scheme = reader.get('server', 'scheme', 'https')
self.ssh_key = reader.get('server', 'ssh_key')
self.ssh_password = reader.get('server', 'ssh_password')
self.ssh_username = reader.get('server', 'ssh_username', 'root')
def validate(self):
validation_errors = []
if self.hostname is None:
validation_errors.append('[server] hostname must be provided.')
if (self.ssh_key is None and self.ssh_password is None):
validation_errors.append(
'[server] ssh_key or ssh_password must be provided.')
return validation_errors
def get_credentials(self):
"""Return credentials for interacting with a Foreman deployment API.
:return: A username-password pair.
:rtype: tuple
"""
return (self.admin_username, self.admin_password)
def get_url(self):
"""Return the base URL of the Foreman deployment being tested.
The following values from the config file are used to build the URL:
* ``[server] scheme`` (default: https)
* ``[server] hostname`` (required)
* ``[server] port`` (default: none)
Setting ``port`` to 80 does *not* imply that ``scheme`` is 'https'. If
``port`` is 80 and ``scheme`` is unset, ``scheme`` will still default
to 'https'.
:return: A URL.
:rtype: str
"""
if not self.scheme:
scheme = 'https'
else:
scheme = self.scheme
# All anticipated error cases have been handled at this point.
if not self.port:
return urlunsplit((scheme, self.hostname, '', '', ''))
else:
return urlunsplit((
scheme, '{0}:{1}'.format(self.hostname, self.port), '', '', ''
))
def get_pub_url(self):
"""Return the pub URL of the server being tested.
The following values from the config file are used to build the URL:
* ``main.server.hostname`` (required)
:return: The pub directory URL.
:rtype: str
"""
return urlunsplit(('http', self.hostname, 'pub/', '', ''))
def get_cert_rpm_url(self):
"""Return the Katello cert RPM URL of the server being tested.
The following values from the config file are used to build the URL:
* ``main.server.hostname`` (required)
:return: The Katello cert RPM URL.
:rtype: str
"""
return urljoin(
self.get_pub_url(), 'katello-ca-consumer-latest.noarch.rpm')
class ClientsSettings(FeatureSettings):
"""Clients settings definitions."""
def __init__(self, *args, **kwargs):
super(ClientsSettings, self).__init__(*args, **kwargs)
self.image_dir = None
self.provisioning_server = None
def read(self, reader):
"""Read clients settings."""
self.image_dir = reader.get(
'clients', 'image_dir', '/opt/robottelo/images')
self.provisioning_server = reader.get(
'clients', 'provisioning_server')
def validate(self):
"""Validate clients settings."""
validation_errors = []
if self.provisioning_server is None:
validation_errors.append(
'[clients] provisioning_server option must be provided.')
return validation_errors
class DockerSettings(FeatureSettings):
"""Docker settings definitions."""
def __init__(self, *args, **kwargs):
super(DockerSettings, self).__init__(*args, **kwargs)
self.unix_socket = None
self.external_url = None
self.external_registry_1 = None
self.external_registry_2 = None
def read(self, reader):
"""Read docker settings."""
self.unix_socket = reader.get(
'docker', 'unix_socket', False, bool)
self.external_url = reader.get('docker', 'external_url')
self.external_registry_1 = reader.get('docker', 'external_registry_1')
self.external_registry_2 = reader.get('docker', 'external_registry_2')
def validate(self):
"""Validate docker settings."""
validation_errors = []
if not any((self.unix_socket, self.external_url)):
validation_errors.append(
'Either [docker] unix_socket or external_url options must '
'be provided or enabled.')
if not all((self.external_registry_1, self.external_registry_2)):
validation_errors.append(
'Both [docker] external_registry_1 and external_registry_2 '
'options must be provided.')
return validation_errors
def get_unix_socket_url(self):
"""Use the unix socket connection to the local docker daemon. Make sure
        that your Satellite server's docker is configured to allow the foreman
        user to access it. This can be done by::
$ groupadd docker
$ usermod -aG docker foreman
# Add -G docker to the options for the docker daemon
$ systemctl restart docker
$ katello-service restart
"""
return (
'unix:///var/run/docker.sock'
if self.unix_socket else None
)
class FakeManifestSettings(FeatureSettings):
"""Fake manifest settings defintitions."""
def __init__(self, *args, **kwargs):
super(FakeManifestSettings, self).__init__(*args, **kwargs)
self.cert_url = None
self.key_url = None
self.url = None
def read(self, reader):
"""Read fake manifest settings."""
self.cert_url = reader.get(
'fake_manifest', 'cert_url')
self.key_url = reader.get(
'fake_manifest', 'key_url')
self.url = reader.get(
'fake_manifest', 'url')
def validate(self):
"""Validate fake manifest settings."""
validation_errors = []
if not all(vars(self).values()):
validation_errors.append(
'All [fake_manifest] cert_url, key_url, url options must '
'be provided.'
)
return validation_errors
class LDAPSettings(FeatureSettings):
"""LDAP settings definitions."""
def __init__(self, *args, **kwargs):
super(LDAPSettings, self).__init__(*args, **kwargs)
self.basedn = None
self.grpbasedn = None
self.hostname = None
self.password = None
self.username = None
def read(self, reader):
"""Read LDAP settings."""
self.basedn = reader.get('ldap', 'basedn')
self.grpbasedn = reader.get('ldap', 'grpbasedn')
self.hostname = reader.get('ldap', 'hostname')
self.password = reader.get('ldap', 'password')
self.username = reader.get('ldap', 'username')
def validate(self):
"""Validate LDAP settings."""
validation_errors = []
if not all(vars(self).values()):
validation_errors.append(
'All [ldap] basedn, grpbasedn, hostname, password, '
'username options must be provided.'
)
return validation_errors
class LibvirtHostSettings(FeatureSettings):
"""Libvirt host settings definitions."""
def __init__(self, *args, **kwargs):
super(LibvirtHostSettings, self).__init__(*args, **kwargs)
self.libvirt_image_dir = None
self.libvirt_hostname = None
def read(self, reader):
"""Read libvirt host settings."""
self.libvirt_image_dir = reader.get(
'compute_resources', 'libvirt_image_dir', '/var/lib/libvirt/images'
)
self.libvirt_hostname = reader.get(
'compute_resources', 'libvirt_hostname')
def validate(self):
"""Validate libvirt host settings."""
validation_errors = []
if self.libvirt_hostname is None:
validation_errors.append(
'[compute_resources] libvirt_hostname option must be provided.'
)
return validation_errors
class FakeCapsuleSettings(FeatureSettings):
"""Fake Capsule settings definitions."""
def __init__(self, *args, **kwargs):
super(FakeCapsuleSettings, self).__init__(*args, **kwargs)
self.port_range = None
def read(self, reader):
"""Read fake capsule settings"""
self.port_range = reader.get(
'fake_capsules', 'port_range', cast=tuple
)
def validate(self):
"""Validate fake capsule settings."""
validation_errors = []
if self.port_range is None:
validation_errors.append(
'[fake_capsules] port_range option must be provided.'
)
return validation_errors
class RHEVSettings(FeatureSettings):
"""RHEV settings definitions."""
def __init__(self, *args, **kwargs):
super(RHEVSettings, self).__init__(*args, **kwargs)
# Compute Resource Information
self.hostname = None
self.username = None
self.password = None
self.datacenter = None
self.vm_name = None
# Image Information
self.image_os = None
self.image_arch = None
self.image_username = None
self.image_password = None
self.image_name = None
def read(self, reader):
"""Read rhev settings."""
# Compute Resource Information
self.hostname = reader.get('rhev', 'hostname')
self.username = reader.get('rhev', 'username')
self.password = reader.get('rhev', 'password')
self.datacenter = reader.get('rhev', 'datacenter')
self.vm_name = reader.get('rhev', 'vm_name')
# Image Information
self.image_os = reader.get('rhev', 'image_os')
self.image_arch = reader.get('rhev', 'image_arch')
self.image_username = reader.get('rhev', 'image_username')
self.image_password = reader.get('rhev', 'image_password')
self.image_name = reader.get('rhev', 'image_name')
def validate(self):
"""Validate rhev settings."""
validation_errors = []
if not all(vars(self).values()):
validation_errors.append(
'All [rhev] hostname, username, password, datacenter, '
'vm_name, image_os, image_arch, image_username, '
'image_password, image_name options must be provided.'
)
return validation_errors
class VmWareSettings(FeatureSettings):
"""VmWare settings definitions."""
def __init__(self, *args, **kwargs):
super(VmWareSettings, self).__init__(*args, **kwargs)
# Compute Resource Information
self.vcenter = None
self.username = None
self.password = None
self.datacenter = None
self.vm_name = None
# Image Information
self.image_os = None
self.image_arch = None
self.image_username = None
self.image_password = None
self.image_name = None
def read(self, reader):
"""Read vmware settings."""
# Compute Resource Information
self.vcenter = reader.get('vmware', 'hostname')
self.username = reader.get('vmware', 'username')
self.password = reader.get('vmware', 'password')
self.datacenter = reader.get('vmware', 'datacenter')
self.vm_name = reader.get('vmware', 'vm_name')
# Image Information
self.image_os = reader.get('vmware', 'image_os')
self.image_arch = reader.get('vmware', 'image_arch')
self.image_username = reader.get('vmware', 'image_username')
self.image_password = reader.get('vmware', 'image_password')
self.image_name = reader.get('vmware', 'image_name')
def validate(self):
"""Validate vmware settings."""
validation_errors = []
if not all(vars(self).values()):
validation_errors.append(
'All [vmware] hostname, username, password, datacenter, '
'vm_name, image_os, image_arch, image_username, '
'image_password, image_name options must be provided.'
)
return validation_errors
class DiscoveryISOSettings(FeatureSettings):
"""Discovery ISO name settings definition."""
def __init__(self, *args, **kwargs):
super(DiscoveryISOSettings, self).__init__(*args, **kwargs)
self.discovery_iso = None
def read(self, reader):
"""Read discovery iso setting."""
self.discovery_iso = reader.get('discovery', 'discovery_iso')
def validate(self):
"""Validate discovery iso name setting."""
validation_errors = []
if self.discovery_iso is None:
validation_errors.append(
'[discovery] discovery iso name must be provided.'
)
return validation_errors
class OscapSettings(FeatureSettings):
"""Oscap settings definitions."""
def __init__(self, *args, **kwargs):
super(OscapSettings, self).__init__(*args, **kwargs)
self.content_path = None
def read(self, reader):
"""Read Oscap settings."""
self.content_path = reader.get('oscap', 'content_path')
def validate(self):
"""Validate Oscap settings."""
validation_errors = []
if self.content_path is None:
validation_errors.append(
'[oscap] content_path option must be provided.'
)
return validation_errors
class PerformanceSettings(FeatureSettings):
"""Performance settings definitions."""
def __init__(self, *args, **kwargs):
super(PerformanceSettings, self).__init__(*args, **kwargs)
self.time_hammer = None
self.cdn_address = None
self.virtual_machines = None
self.fresh_install_savepoint = None
self.enabled_repos_savepoint = None
self.csv_buckets_count = None
self.sync_count = None
self.sync_type = None
self.repos = None
def read(self, reader):
"""Read performance settings."""
self.time_hammer = reader.get(
'performance', 'time_hammer', False, bool)
self.cdn_address = reader.get(
'performance', 'cdn_address')
self.virtual_machines = reader.get(
'performance', 'virtual_machines', cast=list)
self.fresh_install_savepoint = reader.get(
'performance', 'fresh_install_savepoint')
self.enabled_repos_savepoint = reader.get(
'performance', 'enabled_repos_savepoint')
self.csv_buckets_count = reader.get(
'performance', 'csv_buckets_count', 10, int)
self.sync_count = reader.get(
'performance', 'sync_count', 3, int)
self.sync_type = reader.get(
'performance', 'sync_type', 'sync')
self.repos = reader.get(
'performance', 'repos', cast=list)
def validate(self):
"""Validate performance settings."""
validation_errors = []
if self.cdn_address is None:
validation_errors.append(
'[performance] cdn_address must be provided.')
if self.virtual_machines is None:
validation_errors.append(
'[performance] virtual_machines must be provided.')
if self.fresh_install_savepoint is None:
validation_errors.append(
'[performance] fresh_install_savepoint must be provided.')
if self.enabled_repos_savepoint is None:
validation_errors.append(
'[performance] enabled_repos_savepoint must be provided.')
return validation_errors
class RHAISettings(FeatureSettings):
"""RHAI settings definitions."""
def __init__(self, *args, **kwargs):
super(RHAISettings, self).__init__(*args, **kwargs)
self.insights_client_el6repo = None
self.insights_client_el7repo = None
def read(self, reader):
"""Read RHAI settings."""
self.insights_client_el6repo = reader.get(
'rhai', 'insights_client_el6repo')
self.insights_client_el7repo = reader.get(
'rhai', 'insights_client_el7repo')
def validate(self):
"""Validate RHAI settings."""
return []
class TransitionSettings(FeatureSettings):
"""Transition settings definitions."""
def __init__(self, *args, **kwargs):
super(TransitionSettings, self).__init__(*args, **kwargs)
self.exported_data = None
def read(self, reader):
"""Read transition settings."""
self.exported_data = reader.get('transition', 'exported_data')
def validate(self):
"""Validate transition settings."""
validation_errors = []
if self.exported_data is None:
validation_errors.append(
'[transition] exported_data must be provided.')
return validation_errors
class VlanNetworkSettings(FeatureSettings):
"""Vlan Network settings definitions."""
def __init__(self, *args, **kwargs):
super(VlanNetworkSettings, self).__init__(*args, **kwargs)
self.subnet = None
self.netmask = None
self.gateway = None
self.bridge = None
def read(self, reader):
"""Read Vlan Network settings."""
self.subnet = reader.get('vlan_networking', 'subnet')
self.netmask = reader.get('vlan_networking', 'netmask')
self.gateway = reader.get('vlan_networking', 'gateway')
self.bridge = reader.get('vlan_networking', 'bridge')
def validate(self):
"""Validate Vlan Network settings."""
validation_errors = []
if not all(vars(self).values()):
validation_errors.append(
'All [vlan_networking] subnet, netmask, gateway, bridge '
'options must be provided.')
return validation_errors
class UpgradeSettings(FeatureSettings):
"""Satellite upgrade settings definitions."""
def __init__(self, *args, **kwargs):
super(UpgradeSettings, self).__init__(*args, **kwargs)
self.upgrade_data = None
def read(self, reader):
"""Read and validate Satellite server settings."""
self.upgrade_data = reader.get('upgrade', 'upgrade_data')
def validate(self):
validation_errors = []
if self.upgrade_data is None:
validation_errors.append('[upgrade] upgrade_data must be provided.')
return validation_errors
class Settings(object):
"""Robottelo's settings representation."""
def __init__(self):
self._all_features = None
self._configured = False
self._validation_errors = []
self.browser = None
self.locale = None
self.project = None
self.reader = None
self.rhel6_repo = None
self.rhel7_repo = None
self.screenshots_path = None
self.saucelabs_key = None
self.saucelabs_user = None
self.server = ServerSettings()
self.run_one_datapoint = None
self.upstream = None
self.verbosity = None
self.webdriver = None
self.webdriver_binary = None
self.webdriver_desired_capabilities = None
# Features
self.clients = ClientsSettings()
self.compute_resources = LibvirtHostSettings()
self.discovery = DiscoveryISOSettings()
self.docker = DockerSettings()
self.fake_capsules = FakeCapsuleSettings()
self.fake_manifest = FakeManifestSettings()
self.ldap = LDAPSettings()
self.oscap = OscapSettings()
self.performance = PerformanceSettings()
self.rhai = RHAISettings()
self.rhev = RHEVSettings()
self.transition = TransitionSettings()
self.vlan_networking = VlanNetworkSettings()
self.upgrade = UpgradeSettings()
self.vmware = VmWareSettings()
def configure(self):
"""Read the settings file and parse the configuration.
:raises: ImproperlyConfigured if any issue is found during the parsing
or validation of the configuration.
"""
if self.configured:
# TODO: what to do here, raise an exception, just skip or ...?
return
# Expect the settings file to be on the robottelo project root.
settings_path = os.path.join(get_project_root(), SETTINGS_FILE_NAME)
if not os.path.isfile(settings_path):
raise ImproperlyConfigured(
'Not able to find settings file at {}'.format(settings_path))
self.reader = INIReader(settings_path)
self._read_robottelo_settings()
self._validation_errors.extend(
self._validate_robottelo_settings())
self.server.read(self.reader)
self._validation_errors.extend(self.server.validate())
if self.reader.has_section('clients'):
self.clients.read(self.reader)
self._validation_errors.extend(self.clients.validate())
if self.reader.has_section('compute_resources'):
self.compute_resources.read(self.reader)
self._validation_errors.extend(self.compute_resources.validate())
if self.reader.has_section('discovery'):
self.discovery.read(self.reader)
self._validation_errors.extend(self.discovery.validate())
if self.reader.has_section('docker'):
self.docker.read(self.reader)
self._validation_errors.extend(self.docker.validate())
if self.reader.has_section('fake_capsules'):
self.fake_capsules.read(self.reader)
self._validation_errors.extend(self.fake_capsules.validate())
if self.reader.has_section('fake_manifest'):
self.fake_manifest.read(self.reader)
self._validation_errors.extend(self.fake_manifest.validate())
if self.reader.has_section('ldap'):
self.ldap.read(self.reader)
self._validation_errors.extend(self.ldap.validate())
if self.reader.has_section('oscap'):
self.oscap.read(self.reader)
self._validation_errors.extend(self.oscap.validate())
if self.reader.has_section('performance'):
self.performance.read(self.reader)
self._validation_errors.extend(self.performance.validate())
if self.reader.has_section('rhai'):
self.rhai.read(self.reader)
self._validation_errors.extend(self.rhai.validate())
if self.reader.has_section('rhev'):
self.rhev.read(self.reader)
self._validation_errors.extend(self.rhev.validate())
if self.reader.has_section('transition'):
self.transition.read(self.reader)
self._validation_errors.extend(self.transition.validate())
if self.reader.has_section('vlan_networking'):
self.vlan_networking.read(self.reader)
self._validation_errors.extend(self.vlan_networking.validate())
if self.reader.has_section('upgrade'):
self.upgrade.read(self.reader)
self._validation_errors.extend(self.upgrade.validate())
if self.reader.has_section('vmware'):
self.vmware.read(self.reader)
self._validation_errors.extend(self.vmware.validate())
if self._validation_errors:
raise ImproperlyConfigured(
'Failed to validate the configuration, check the message(s):\n'
'{}'.format('\n'.join(self._validation_errors))
)
self._configure_logging()
self._configure_third_party_logging()
self._configure_entities()
self._configured = True
def _read_robottelo_settings(self):
"""Read Robottelo's general settings."""
self.log_driver_commands = self.reader.get(
'robottelo',
'log_driver_commands',
['newSession',
'windowMaximize',
'get',
'findElement',
'sendKeysToElement',
'clickElement',
'mouseMoveTo'],
list
)
self.browser = self.reader.get(
'robottelo', 'browser', 'selenium')
self.locale = self.reader.get('robottelo', 'locale', 'en_US.UTF-8')
self.project = self.reader.get('robottelo', 'project', 'sat')
self.rhel6_repo = self.reader.get('robottelo', 'rhel6_repo', None)
self.rhel7_repo = self.reader.get('robottelo', 'rhel7_repo', None)
self.screenshots_path = self.reader.get(
'robottelo', 'screenshots_path', '/tmp/robottelo/screenshots')
self.run_one_datapoint = self.reader.get(
'robottelo', 'run_one_datapoint', False, bool)
self.cleanup = self.reader.get('robottelo', 'cleanup', False, bool)
self.upstream = self.reader.get('robottelo', 'upstream', True, bool)
self.verbosity = self.reader.get(
'robottelo',
'verbosity',
INIReader.cast_logging_level('debug'),
INIReader.cast_logging_level
)
self.webdriver = self.reader.get(
'robottelo', 'webdriver', 'firefox')
self.saucelabs_user = self.reader.get(
'robottelo', 'saucelabs_user', None)
self.saucelabs_key = self.reader.get(
'robottelo', 'saucelabs_key', None)
self.webdriver_binary = self.reader.get(
'robottelo', 'webdriver_binary', None)
self.webdriver_desired_capabilities = self.reader.get(
'robottelo',
'webdriver_desired_capabilities',
None,
cast=INIReader.cast_webdriver_desired_capabilities
)
self.window_manager_command = self.reader.get(
'robottelo', 'window_manager_command', None)
def _validate_robottelo_settings(self):
"""Validate Robottelo's general settings."""
validation_errors = []
browsers = ('selenium', 'docker', 'saucelabs')
webdrivers = ('chrome', 'firefox', 'ie', 'phantomjs', 'remote')
if self.browser not in browsers:
validation_errors.append(
'[robottelo] browser should be one of {0}.'
.format(', '.join(browsers))
)
if self.webdriver not in webdrivers:
validation_errors.append(
'[robottelo] webdriver should be one of {0}.'
.format(', '.join(webdrivers))
)
if self.browser == 'saucelabs':
if self.saucelabs_user is None:
validation_errors.append(
'[robottelo] saucelabs_user must be provided when '
'browser is saucelabs.'
)
if self.saucelabs_key is None:
validation_errors.append(
'[robottelo] saucelabs_key must be provided when '
'browser is saucelabs.'
)
return validation_errors
@property
def configured(self):
"""Returns True if the settings have already been configured."""
return self._configured
@property
def all_features(self):
"""List all expected feature settings sections."""
if self._all_features is None:
self._all_features = [
name for name, value in vars(self).items()
if isinstance(value, FeatureSettings)
]
return self._all_features
def _configure_entities(self):
"""Configure NailGun's entity classes.
Do the following:
* Set ``entity_mixins.CREATE_MISSING`` to ``True``. This causes method
``EntityCreateMixin.create_raw`` to generate values for empty and
required fields.
* Set ``nailgun.entity_mixins.DEFAULT_SERVER_CONFIG`` to whatever is
returned by :meth:`robottelo.helpers.get_nailgun_config`. See
``robottelo.entity_mixins.Entity`` for more information on the effects
of this.
* Set a default value for ``nailgun.entities.GPGKey.content``.
* Set the default value for
``nailgun.entities.DockerComputeResource.url``
if either ``docker.internal_url`` or ``docker.external_url`` is set in
the configuration file.
"""
entity_mixins.CREATE_MISSING = True
entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(
self.server.get_url(),
self.server.get_credentials(),
verify=False,
)
gpgkey_init = entities.GPGKey.__init__
def patched_gpgkey_init(self, server_config=None, **kwargs):
"""Set a default value on the ``content`` field."""
gpgkey_init(self, server_config, **kwargs)
self._fields['content'].default = os.path.join(
get_project_root(),
'tests', 'foreman', 'data', 'valid_gpg_key.txt'
)
entities.GPGKey.__init__ = patched_gpgkey_init
# NailGun provides a default value for ComputeResource.url. We override
# that value if `docker.internal_url` or `docker.external_url` is set.
docker_url = None
# Try getting internal url
docker_url = self.docker.get_unix_socket_url()
# Try getting external url
if docker_url is None:
docker_url = self.docker.external_url
if docker_url is not None:
dockercr_init = entities.DockerComputeResource.__init__
def patched_dockercr_init(self, server_config=None, **kwargs):
"""Set a default value on the ``docker_url`` field."""
dockercr_init(self, server_config, **kwargs)
self._fields['url'].default = docker_url
entities.DockerComputeResource.__init__ = patched_dockercr_init
def _configure_logging(self):
"""Configure logging for the entire framework.
If a config named ``logging.conf`` exists in Robottelo's root
directory, the logger is configured using the options in that file.
Otherwise, a custom logging output format is set, and default values
are used for all other logging options.
"""
# All output should be made by the logging module, including warnings
logging.captureWarnings(True)
# Set the logging level based on the Robottelo's verbosity
for name in ('nailgun', 'robottelo'):
logging.getLogger(name).setLevel(self.verbosity)
# Allow overriding logging config based on the presence of logging.conf
# file on Robottelo's project root
logging_conf_path = os.path.join(get_project_root(), 'logging.conf')
if os.path.isfile(logging_conf_path):
config.fileConfig(logging_conf_path)
else:
logging.basicConfig(
format='%(levelname)s %(module)s:%(lineno)d: %(message)s'
)
def _configure_third_party_logging(self):
"""Increase the level of third party packages logging."""
loggers = (
'bugzilla',
'easyprocess',
'paramiko',
'requests.packages.urllib3.connectionpool',
'selenium.webdriver.remote.remote_connection',
)
for logger in loggers:
logging.getLogger(logger).setLevel(logging.WARNING)<|fim▁end|> | except (NoSectionError, NoOptionError):
value = default
return value |
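The feature-settings classes in the file above all follow one contract: each [section] of the INI file maps to a class whose read() pulls values through the reader and whose validate() returns a list of error strings that Settings.configure() collects before raising ImproperlyConfigured. The sketch below restates that contract with the standard-library configparser; DemoSettings, the [demo] section and demo.ini are illustrative names only, not part of robottelo.

# Hedged sketch of the read()/validate() contract; the names below are made up.
from configparser import ConfigParser, NoOptionError, NoSectionError


def ini_get(reader, section, option, default=None, cast=None):
    """Same fallback pattern as the INIReader.get completion shown above."""
    try:
        value = reader.get(section, option)
        if cast is not None:
            value = cast(value)
    except (NoSectionError, NoOptionError):
        value = default
    return value


class DemoSettings(object):
    """Mimics a FeatureSettings subclass: read values, then validate them."""

    def __init__(self):
        self.hostname = None
        self.port = None

    def read(self, reader):
        self.hostname = ini_get(reader, 'demo', 'hostname')
        self.port = ini_get(reader, 'demo', 'port', 443, int)

    def validate(self):
        return [] if self.hostname else ['[demo] hostname must be provided.']


if __name__ == '__main__':
    parser = ConfigParser()
    parser.read('demo.ini')  # hypothetical settings file
    demo = DemoSettings()
    demo.read(parser)
    errors = demo.validate()
    if errors:
        raise SystemExit('\n'.join(errors))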
<|file_name|>gdb_maloader.py<|end_file_name|><|fim▁begin|># Copyright 2011 Shinichiro Hamaji. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY Shinichiro Hamaji ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Shinichiro Hamaji OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
import gdb
import os
import re
import sys
def bt(demangle=True):
# Find the newest frame.
frame = gdb.selected_frame()
while True:<|fim▁hole|> break
frame = next
if demangle:
pipe = os.popen('c++filt', 'w')
else:
pipe = sys.stdout
i = 0
while frame:
s = gdb.execute('p dumpSymbol((void*)0x%x)' % frame.pc(),
to_string=True)
m = re.match(r'.*"(.*)"$', s)
if m:
pipe.write("#%-2d %s\n" % (i, m.group(1)))
else:
sal = frame.find_sal()
lineno = ''
if sal.symtab:
lineno = 'at %s:%d' % (sal.symtab, sal.line)
else:
soname = gdb.solib_name(frame.pc())
if soname:
lineno = 'from %s' % (soname)
framename = frame.name()
if not framename:
framename = '??'
pipe.write("#%-2d 0x%016x in %s () %s\n" %
(i, frame.pc(), framename, lineno))
frame = frame.older()
i += 1
pipe.close()<|fim▁end|> | next = frame.newer()
if not next: |
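A hedged usage note for the helper above (the exact invocation is an assumption, not shown in the file): the script targets GDB's embedded Python, so it is typically loaded and called from a GDB session,

    (gdb) source gdb_maloader.py
    (gdb) python bt()

which prints one frame per line, demangled through the c++filt pipe when demangle=True.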
<|file_name|>cholesky.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate linxal;
extern crate ndarray;
extern crate num_traits;
extern crate rand;
use ndarray::{Array, ArrayBase, Data, Ix2};
use rand::thread_rng;
use linxal::types::{LinxalScalar, LinxalMatrix, Symmetric, c32, c64};
use linxal::types::error::{ CholeskyError};
use linxal::generate::{RandomSemiPositive};
fn check_cholesky<T, D1, D2>(mat: &ArrayBase<D1, Ix2>, chol: &ArrayBase<D2, Ix2>, uplo: Symmetric)
where T: LinxalScalar, D1: Data<Elem=T>, D2: Data<Elem=T> {
// Check the dimension
assert_eq!(mat.dim(), chol.dim());
// The matrix must be triangular
assert!(chol.is_triangular(uplo, None));
// The factorization must match the original matrix.
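// For Symmetric::Lower the check below verifies, within tolerance, that
// L * L^H == A; for Symmetric::Upper it verifies that U^H * U == A, where
// ^H is the conjugate transpose produced by conj_t().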
match uplo {<|fim▁hole|> let u = chol.conj_t();
assert_eq_within_tol!(chol.dot(&u), mat, 1e-4.into());
},
Symmetric::Upper => {
let l = chol.conj_t();
println!("{:?} {:?} {:?} {:?}", chol, l, l.dot(chol), mat);
assert_eq_within_tol!(l.dot(chol), mat, 1e-4.into());
}
}
}
fn cholesky_identity_generic<T: LinxalScalar>() {
for n in 1..11 {
let m: Array<T, Ix2> = Array::eye(n);
let l = m.cholesky(Symmetric::Upper).ok().unwrap();
assert_eq_within_tol!(l, m, 1e-5.into());
}
}
fn cholesky_generate_generic<T: LinxalScalar>(uplo: Symmetric) {
let mut rng = thread_rng();
for n in 1..11 {
let m: Array<T, Ix2> = RandomSemiPositive::new(n, &mut rng).generate().unwrap();
let res = m.cholesky(uplo);
let chol = res.ok().unwrap();
check_cholesky(&m, &chol, uplo);
}
}
fn cholesky_fail_zero_ev<T: LinxalScalar>() {
let mut rng = thread_rng();
for n in 4..11 {
let mut gen: RandomSemiPositive<T> = RandomSemiPositive::new(n, &mut rng);
let r = gen.rank(0).generate_with_sv();
let m = r.ok().unwrap();
let res = m.0.cholesky(Symmetric::Upper);
assert_eq!(res.err().unwrap(), CholeskyError::NotPositiveDefinite);
}
}
#[test]
fn cholesky_identity() {
cholesky_identity_generic::<f32>();
cholesky_identity_generic::<c32>();
cholesky_identity_generic::<f64>();
cholesky_identity_generic::<c64>();
}
#[test]
fn cholesky_generate() {
cholesky_generate_generic::<f32>(Symmetric::Upper);
cholesky_generate_generic::<f32>(Symmetric::Lower);
cholesky_generate_generic::<f64>(Symmetric::Upper);
cholesky_generate_generic::<f64>(Symmetric::Lower);
cholesky_generate_generic::<c32>(Symmetric::Upper);
cholesky_generate_generic::<c32>(Symmetric::Lower);
cholesky_generate_generic::<c64>(Symmetric::Upper);
cholesky_generate_generic::<c64>(Symmetric::Lower);
}
#[test]
fn cholesky_zero() {
cholesky_fail_zero_ev::<f32>();
}
#[test]
fn cholesky_fail_not_square() {
for r in 1..11 {
for c in 1..11 {
if r == c {
continue;
}
let m: Array<f32, Ix2> = Array::linspace(1.0, 2.0, r*c).into_shape((r, c)).unwrap();
let res = m.cholesky(Symmetric::Upper);
assert_eq!(res.err().unwrap(), CholeskyError::NotSquare);
}
}
}<|fim▁end|> | Symmetric::Lower => { |
<|file_name|>HistJobunit.js<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////////////
//
// Jobbox WebGUI
// Copyright (C) 2014-2015 Komatsu Yuji(Zheng Chuyu)
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
/////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////
//
// Function:
//
// Purpose:
//
// Parameters:
//
// Return value:
//
// Author: Komatsu Yuji(Zheng Chuyu)
//
/////////////////////////////////////////////////////////////////////////////////
Ext.define('Jobbox.store.HistJobunit', {
extend: 'Ext.data.Store',
model: 'Jobbox.model.HistJobunit',
autoLoad: true,
autoDestroy: false,<|fim▁hole|> autoSync: false,
remoteSort: true,
proxy: {
type: 'rest',
url: location.pathname + '/hist_jobunits',
appendId: true,
format: 'json',
reader: {
root: 'hist_jobunits',
totalProperty: 'total_count',
},
},
});<|fim▁end|> | |
<|file_name|>Cisco_IOS_XR_ipv4_acl_cfg.py<|end_file_name|><|fim▁begin|>""" Cisco_IOS_XR_ipv4_acl_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv4\-acl package configuration.
This module contains definitions
for the following management objects\:
ipv4\-acl\-and\-prefix\-list\: IPv4 ACL configuration data
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class NextHopTypeEnum(Enum):
"""
NextHopTypeEnum
Next\-hop type.
.. data:: none_next_hop = 0
None next-hop.
.. data:: regular_next_hop = 1
Regular next-hop.
.. data:: default_next_hop = 2
Default next-hop.
"""
none_next_hop = 0
regular_next_hop = 1
default_next_hop = 2
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['NextHopTypeEnum']
class Ipv4AclAndPrefixList(object):
"""
IPv4 ACL configuration data
.. attribute:: accesses
Table of access lists. Entries in this table and the AccessListExistenceTable table must be kept consistent
**type**\: :py:class:`Accesses <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses>`
.. attribute:: log_update
Control access lists log updates
**type**\: :py:class:`LogUpdate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.LogUpdate>`
.. attribute:: prefixes
Table of ACL prefix lists. Entries in this table and the PrefixListExistenceTable table must be kept consistent
**type**\: :py:class:`Prefixes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.accesses = Ipv4AclAndPrefixList.Accesses()
self.accesses.parent = self
self.log_update = Ipv4AclAndPrefixList.LogUpdate()
self.log_update.parent = self
self.prefixes = Ipv4AclAndPrefixList.Prefixes()
self.prefixes.parent = self
class Accesses(object):
"""
Table of access lists. Entries in this table
and the AccessListExistenceTable table must be
kept consistent
.. attribute:: access
An ACL
**type**\: list of :py:class:`Access <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.access = YList()
self.access.parent = self
self.access.name = 'access'
class Access(object):
"""
An ACL
.. attribute:: access_list_name <key>
Access list name \- 64 characters max
**type**\: str
.. attribute:: access_list_entries
ACL entry table; contains list of ACEs
**type**\: :py:class:`AccessListEntries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.access_list_name = None
self.access_list_entries = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries()
self.access_list_entries.parent = self
class AccessListEntries(object):
"""
ACL entry table; contains list of ACEs
.. attribute:: access_list_entry
An ACL entry; either a description (remark) or an ACE to match against
**type**\: list of :py:class:`AccessListEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.access_list_entry = YList()
self.access_list_entry.parent = self
self.access_list_entry.name = 'access_list_entry'
class AccessListEntry(object):
"""
An ACL entry; either a description (remark)
or an ACE to match against
.. attribute:: sequence_number <key>
Sequence number for this entry
**type**\: int
**range:** 1..2147483646
.. attribute:: capture
Enable capture
**type**\: bool
.. attribute:: counter_name
Counter name
**type**\: str
.. attribute:: destination_network
Destination network settings
**type**\: :py:class:`DestinationNetwork <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork>`
.. attribute:: destination_port
Destination port settings
**type**\: :py:class:`DestinationPort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort>`
.. attribute:: destination_port_group
Destination port object group name
**type**\: str
**length:** 1..64
.. attribute:: destination_prefix_group
IPv4 destination network object group name
**type**\: str
**length:** 1..64
.. attribute:: dscp
DSCP settings
**type**\: :py:class:`Dscp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp>`
.. attribute:: fragment_offset
Fragment\-offset settings
**type**\: :py:class:`FragmentOffset <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.FragmentOffset>`
.. attribute:: fragments
Check non\-initial fragments. Item is mutually exclusive with TCP, SCTP, UDP, IGMP and ICMP comparisons and with logging
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: grant
Whether to forward or drop packets matching the ACE
**type**\: :py:class:`Ipv4AclGrantEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclGrantEnumEnum>`
.. attribute:: icmp
ICMP settings
**type**\: :py:class:`Icmp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Icmp>`
.. attribute:: icmp_off
To turn off ICMP generation for deny ACEs
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: igmp_message_type
IGMP message type to match. Leave unspecified if no message type comparison is to be done
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclIgmpNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclIgmpNumberEnum>`
----
**type**\: int
**range:** 0..255
----
.. attribute:: log_option
Whether and how to log matches against this entry
**type**\: :py:class:`Ipv4AclLoggingEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclLoggingEnumEnum>`
.. attribute:: next_hop
Next\-hop settings
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop>`
.. attribute:: packet_length
Packet length settings
**type**\: :py:class:`PacketLength <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.PacketLength>`
.. attribute:: precedence
Precedence value to match (if a protocol was specified), leave unspecified if precedence comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclPrecedenceNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclPrecedenceNumberEnum>`
----
**type**\: int
**range:** 0..7
----
.. attribute:: protocol
Protocol to match
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclProtocolNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclProtocolNumberEnum>`
----
**type**\: int
**range:** 0..255
----
.. attribute:: protocol2
Protocol2 to match
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclProtocolNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclProtocolNumberEnum>`
----
**type**\: int
**range:** 0..255
----
.. attribute:: protocol_operator
Protocol operator. Leave unspecified if no protocol comparison is to be done
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
.. attribute:: qos_group
Set qos\-group number
**type**\: int
**range:** 0..512
.. attribute:: remark
Comments or a description for the access list
**type**\: str
.. attribute:: sequence_str
Sequence String for the ace
**type**\: str
**length:** 1..64
.. attribute:: source_network
Source network settings
**type**\: :py:class:`SourceNetwork <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourceNetwork>`
.. attribute:: source_port
Source port settings
**type**\: :py:class:`SourcePort <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourcePort>`
.. attribute:: source_port_group
Source port object group name
**type**\: str
**length:** 1..64
.. attribute:: source_prefix_group
IPv4 source network object group name
**type**\: str
**length:** 1..64
.. attribute:: tcp
TCP settings
**type**\: :py:class:`Tcp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Tcp>`
.. attribute:: time_to_live
TTL settings
**type**\: :py:class:`TimeToLive <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.sequence_number = None
self.capture = None
self.counter_name = None
self.destination_network = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork()
self.destination_network.parent = self
self.destination_port = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort()
self.destination_port.parent = self
self.destination_port_group = None
self.destination_prefix_group = None
self.dscp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp()
self.dscp.parent = self
self.fragment_offset = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.FragmentOffset()
self.fragment_offset.parent = self
self.fragments = None
self.grant = None
self.icmp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Icmp()
self.icmp.parent = self
self.icmp_off = None
self.igmp_message_type = None
self.log_option = None
self.next_hop = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop()
self.next_hop.parent = self
self.packet_length = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.PacketLength()
self.packet_length.parent = self
self.precedence = None
self.protocol = None
self.protocol2 = None
self.protocol_operator = None
self.qos_group = None
self.remark = None
self.sequence_str = None
self.source_network = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourceNetwork()
self.source_network.parent = self
self.source_port = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourcePort()
self.source_port.parent = self
self.source_port_group = None
self.source_prefix_group = None
self.tcp = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Tcp()
self.tcp.parent = self
self.time_to_live = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive()
self.time_to_live.parent = self
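# Illustrative sketch (not part of the generated bindings): an ACE is
# typically built by filling the sub-objects instantiated above, for example:
#   ace = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry()
#   ace.sequence_number = 10
#   ace.grant = 'permit'  # hypothetical value; see Ipv4AclGrantEnumEnum
#   ace.source_network.source_address = '10.0.0.0'
#   ace.source_network.source_wild_card_bits = '0.0.0.255'
# and appending it to access.access_list_entries.access_list_entry.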
class SourceNetwork(object):
"""
Source network settings.
.. attribute:: source_address
Source IPv4 address to match, leave unspecified for any
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: source_prefix_length
Prefix length to apply to source address (if specified), leave unspecified for no wildcarding
**type**\: int
**range:** 0..32
.. attribute:: source_wild_card_bits
Wildcard bits to apply to source address (if specified), leave unspecified for no wildcarding
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.source_address = None
self.source_prefix_length = None
self.source_wild_card_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:source-network'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.source_address is not None:
return True
if self.source_prefix_length is not None:
return True
if self.source_wild_card_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourceNetwork']['meta_info']
class DestinationNetwork(object):
"""
Destination network settings.
.. attribute:: destination_address
Destination IPv4 address to match (if a protocol was specified), leave unspecified for any
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: destination_prefix_length
Prefix length to apply to destination address (if specified), leave unspecified for no wildcarding
**type**\: int
**range:** 0..32
.. attribute:: destination_wild_card_bits
Wildcard bits to apply to destination address (if specified), leave unspecified for no wildcarding
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.destination_address = None
self.destination_prefix_length = None
self.destination_wild_card_bits = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:destination-network'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.destination_address is not None:
return True
if self.destination_prefix_length is not None:
return True
if self.destination_wild_card_bits is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationNetwork']['meta_info']
class SourcePort(object):
"""
Source port settings.
.. attribute:: first_source_port
First source port for comparison, leave unspecified if source port comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclPortNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclPortNumberEnum>`
----
**type**\: int
**range:** 0..65535
----
.. attribute:: second_source_port
Second source port for comparison, leave unspecified if source port comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclPortNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclPortNumberEnum>`
----
**type**\: int
**range:** 0..65535
----
.. attribute:: source_operator
Source comparison operator . Leave unspecified if no source port comparison is to be done
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.first_source_port = None
self.second_source_port = None
self.source_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:source-port'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.first_source_port is not None:
return True
if self.second_source_port is not None:
return True
if self.source_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.SourcePort']['meta_info']
class DestinationPort(object):
"""
Destination port settings.
.. attribute:: destination_operator
Destination comparison operator. Leave unspecified if no destination port comparison is to be done
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
.. attribute:: first_destination_port
First destination port for comparison, leave unspecified if destination port comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclPortNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclPortNumberEnum>`
----
**type**\: int
**range:** 0..65535
----
.. attribute:: second_destination_port
Second destination port for comparison, leave unspecified if destination port comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclPortNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclPortNumberEnum>`
----
**type**\: int
**range:** 0..65535
----
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.destination_operator = None
self.first_destination_port = None
self.second_destination_port = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:destination-port'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.destination_operator is not None:
return True
if self.first_destination_port is not None:
return True
if self.second_destination_port is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.DestinationPort']['meta_info']
class Icmp(object):
"""
ICMP settings.
.. attribute:: icmp_type_code
Well known ICMP message code types to match, leave unspecified if ICMP message code type comparison is not to be performed
**type**\: :py:class:`Ipv4AclIcmpTypeCodeEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclIcmpTypeCodeEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.icmp_type_code = None<|fim▁hole|> if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:icmp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.icmp_type_code is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Icmp']['meta_info']
class Tcp(object):
"""
TCP settings.
.. attribute:: tcp_bits
TCP bits to match. Leave unspecified if comparison of TCP bits is not required
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclTcpBitsNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclTcpBitsNumberEnum>`
----
**type**\: int
**range:** 0..63
----
.. attribute:: tcp_bits_mask
TCP bits mask to use for flexible TCP matching. Leave unspecified if tcp\-bits\-match\-operator is unspecified
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclTcpBitsNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclTcpBitsNumberEnum>`
----
**type**\: int
**range:** 0..63
----
.. attribute:: tcp_bits_match_operator
TCP Bits match operator. Leave unspecified if flexible comparison of TCP bits is not required
**type**\: :py:class:`Ipv4AclTcpMatchOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclTcpMatchOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.tcp_bits = None
self.tcp_bits_mask = None
self.tcp_bits_match_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:tcp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.tcp_bits is not None:
return True
if self.tcp_bits_mask is not None:
return True
if self.tcp_bits_match_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Tcp']['meta_info']
class PacketLength(object):
"""
Packet length settings.
.. attribute:: packet_length_max
Maximum packet length for comparison, leave unspecified if packet length comparison is not to be performed or if only the minimum packet length should be considered
**type**\: int
**range:** 0..65535
.. attribute:: packet_length_min
Minimum packet length for comparison, leave unspecified if packet length comparison is not to be performed or if only the maximum packet length should be considered
**type**\: int
**range:** 0..65535
.. attribute:: packet_length_operator
Packet length operator applicable if Packet length is to be compared. Leave unspecified if no packet length comparison is to be done
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.packet_length_max = None
self.packet_length_min = None
self.packet_length_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:packet-length'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.packet_length_max is not None:
return True
if self.packet_length_min is not None:
return True
if self.packet_length_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.PacketLength']['meta_info']
class TimeToLive(object):
"""
TTL settings.
.. attribute:: time_to_live_max
Maximum TTL for comparison, leave unspecified if TTL comparison is not to be performed or if only the minimum TTL should be considered
**type**\: int
**range:** 0..255
.. attribute:: time_to_live_min
TTL value for comparison OR Minimum TTL value for TTL range comparison, leave unspecified if TTL classification is not required
**type**\: int
**range:** 0..255
.. attribute:: time_to_live_operator
TTL operator is applicable if TTL is to be compared. Leave unspecified if TTL classification is not required
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.time_to_live_max = None
self.time_to_live_min = None
self.time_to_live_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:time-to-live'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.time_to_live_max is not None:
return True
if self.time_to_live_min is not None:
return True
if self.time_to_live_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.TimeToLive']['meta_info']
class FragmentOffset(object):
"""
Fragment\-offset settings.
.. attribute:: fragment_offset_1
Fragment\-offset value for comparison or first fragment\-offset value for fragment\-offset range comparison, leave unspecified if fragment\-offset classification is not required
**type**\: int
**range:** 0..8191
.. attribute:: fragment_offset_2
Second fragment\-offset value for comparison, leave unspecified if fragment\-offset comparison is not to be performed or if only the first fragment\-offset should be considered
**type**\: int
**range:** 0..8191
.. attribute:: fragment_offset_operator
Fragment\-offset operator if fragment\-offset is to be compared. Leave unspecified if fragment\-offset classification is not required
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.fragment_offset_1 = None
self.fragment_offset_2 = None
self.fragment_offset_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:fragment-offset'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.fragment_offset_1 is not None:
return True
if self.fragment_offset_2 is not None:
return True
if self.fragment_offset_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.FragmentOffset']['meta_info']
class NextHop(object):
"""
Next\-hop settings.
.. attribute:: next_hop_1
The first next\-hop settings
**type**\: :py:class:`NextHop1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop1>`
.. attribute:: next_hop_2
The second next\-hop settings
**type**\: :py:class:`NextHop2 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop2>`
.. attribute:: next_hop_3
The third next\-hop settings
**type**\: :py:class:`NextHop3 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop3>`
.. attribute:: next_hop_type
The nexthop type
**type**\: :py:class:`NextHopTypeEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.NextHopTypeEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.next_hop_1 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop1()
self.next_hop_1.parent = self
self.next_hop_2 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop2()
self.next_hop_2.parent = self
self.next_hop_3 = Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop3()
self.next_hop_3.parent = self
self.next_hop_type = None
class NextHop1(object):
"""
The first next\-hop settings.
.. attribute:: next_hop
The IPv4 address of the next\-hop
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: track_name
The object tracking name for the next\-hop
**type**\: str
.. attribute:: vrf_name
The VRF name of the next\-hop
**type**\: str
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.next_hop = None
self.track_name = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-1'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.next_hop is not None:
return True
if self.track_name is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop1']['meta_info']
class NextHop2(object):
"""
The second next\-hop settings.
.. attribute:: next_hop
The IPv4 address of the next\-hop
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: track_name
The object tracking name for the next\-hop
**type**\: str
.. attribute:: vrf_name
The VRF name of the next\-hop
**type**\: str
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.next_hop = None
self.track_name = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-2'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.next_hop is not None:
return True
if self.track_name is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop2']['meta_info']
class NextHop3(object):
"""
The third next\-hop settings.
.. attribute:: next_hop
The IPv4 address of the next\-hop
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: track_name
The object tracking name for the next\-hop
**type**\: str
.. attribute:: vrf_name
The VRF name of the next\-hop
**type**\: str
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.next_hop = None
self.track_name = None
self.vrf_name = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:next-hop-3'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.next_hop is not None:
return True
if self.track_name is not None:
return True
if self.vrf_name is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop.NextHop3']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:next-hop'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.next_hop_1 is not None and self.next_hop_1._has_data():
return True
if self.next_hop_2 is not None and self.next_hop_2._has_data():
return True
if self.next_hop_3 is not None and self.next_hop_3._has_data():
return True
if self.next_hop_type is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.NextHop']['meta_info']
class Dscp(object):
"""
DSCP settings.
.. attribute:: dscp_max
Maximum DSCP value for comparison, leave unspecified if DSCP comparison is not to be performed or if only the minimum DSCP should be considered
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclDscpNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclDscpNumberEnum>`
----
**type**\: int
**range:** 0..63
----
.. attribute:: dscp_min
DSCP value to match or minimum DSCP value for DSCP range comparison, leave unspecified if DSCP comparison is not to be performed
**type**\: one of the below types:
**type**\: :py:class:`Ipv4AclDscpNumberEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclDscpNumberEnum>`
----
**type**\: int
**range:** 0..63
----
.. attribute:: dscp_operator
DSCP operator is applicable only when DSCP range is configured. Leave unspecified if DSCP range is not required
**type**\: :py:class:`Ipv4AclOperatorEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclOperatorEnumEnum>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.dscp_max = None
self.dscp_min = None
self.dscp_operator = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:dscp'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.dscp_max is not None:
return True
if self.dscp_min is not None:
return True
if self.dscp_operator is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry.Dscp']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.sequence_number is None:
raise YPYModelError('Key property sequence_number is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:access-list-entry[Cisco-IOS-XR-ipv4-acl-cfg:sequence-number = ' + str(self.sequence_number) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.sequence_number is not None:
return True
if self.capture is not None:
return True
if self.counter_name is not None:
return True
if self.destination_network is not None and self.destination_network._has_data():
return True
if self.destination_port is not None and self.destination_port._has_data():
return True
if self.destination_port_group is not None:
return True
if self.destination_prefix_group is not None:
return True
if self.dscp is not None and self.dscp._has_data():
return True
if self.fragment_offset is not None and self.fragment_offset._has_data():
return True
if self.fragments is not None:
return True
if self.grant is not None:
return True
if self.icmp is not None and self.icmp._has_data():
return True
if self.icmp_off is not None:
return True
if self.igmp_message_type is not None:
return True
if self.log_option is not None:
return True
if self.next_hop is not None and self.next_hop._has_data():
return True
if self.packet_length is not None and self.packet_length._has_data():
return True
if self.precedence is not None:
return True
if self.protocol is not None:
return True
if self.protocol2 is not None:
return True
if self.protocol_operator is not None:
return True
if self.qos_group is not None:
return True
if self.remark is not None:
return True
if self.sequence_str is not None:
return True
if self.source_network is not None and self.source_network._has_data():
return True
if self.source_port is not None and self.source_port._has_data():
return True
if self.source_port_group is not None:
return True
if self.source_prefix_group is not None:
return True
if self.tcp is not None and self.tcp._has_data():
return True
if self.time_to_live is not None and self.time_to_live._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries.AccessListEntry']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:access-list-entries'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.access_list_entry is not None:
for child_ref in self.access_list_entry:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access.AccessListEntries']['meta_info']
@property
def _common_path(self):
if self.access_list_name is None:
raise YPYModelError('Key property access_list_name is None')
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:accesses/Cisco-IOS-XR-ipv4-acl-cfg:access[Cisco-IOS-XR-ipv4-acl-cfg:access-list-name = ' + str(self.access_list_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.access_list_name is not None:
return True
if self.access_list_entries is not None and self.access_list_entries._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses.Access']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:accesses'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.access is not None:
for child_ref in self.access:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Accesses']['meta_info']
class Prefixes(object):
"""
Table of ACL prefix lists. Entries in this
table and the PrefixListExistenceTable table
must be kept consistent
.. attribute:: prefix
Name of a prefix list
**type**\: list of :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.prefix = YList()
self.prefix.parent = self
self.prefix.name = 'prefix'
class Prefix(object):
"""
Name of a prefix list
.. attribute:: prefix_list_name <key>
Prefix list name \- max 32 characters
**type**\: str
.. attribute:: prefix_list_entries
Sequence of entries forming a prefix list
**type**\: :py:class:`PrefixListEntries <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries>`
**presence node**\: True
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.prefix_list_name = None
self.prefix_list_entries = None
class PrefixListEntries(object):
"""
Sequence of entries forming a prefix list
.. attribute:: prefix_list_entry
A prefix list entry; either a description (remark) or a prefix to match against
**type**\: list of :py:class:`PrefixListEntry <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg.Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries.PrefixListEntry>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self._is_presence = True
self.prefix_list_entry = YList()
self.prefix_list_entry.parent = self
self.prefix_list_entry.name = 'prefix_list_entry'
class PrefixListEntry(object):
"""
A prefix list entry; either a description
(remark) or a prefix to match against
.. attribute:: sequence_number <key>
Sequence number of prefix list
**type**\: int
**range:** 1..2147483646
.. attribute:: exact_prefix_length
If exact prefix length matching specified, set the length of prefix to be matched
**type**\: int
**range:** 0..32
.. attribute:: grant
Whether to forward or drop packets matching the prefix list
**type**\: :py:class:`Ipv4AclGrantEnumEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_datatypes.Ipv4AclGrantEnumEnum>`
.. attribute:: match_exact_length
Set to perform an exact prefix length match. Item is mutually exclusive with minimum and maximum length match items
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: match_max_length
Set to perform a maximum length prefix match. Item is mutually exclusive with the exact length match item
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: match_min_length
Set to perform a minimum length prefix match. Item is mutually exclusive with the exact length match item
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: max_prefix_length
If maximum length prefix matching specified, set the maximum length of prefix to be matched
**type**\: int
**range:** 0..32
.. attribute:: min_prefix_length
If minimum length prefix matching specified, set the minimum length of prefix to be matched
**type**\: int
**range:** 0..32
.. attribute:: netmask
Mask of IPv4 address prefix
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: prefix
IPv4 address prefix to match
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remark
Comments or a description for the prefix list. Item is mutually exclusive with all others in the object
**type**\: str
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.sequence_number = None
self.exact_prefix_length = None
self.grant = None
self.match_exact_length = None
self.match_max_length = None
self.match_min_length = None
self.max_prefix_length = None
self.min_prefix_length = None
self.netmask = None
self.prefix = None
self.remark = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
if self.sequence_number is None:
raise YPYModelError('Key property sequence_number is None')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-entry[Cisco-IOS-XR-ipv4-acl-cfg:sequence-number = ' + str(self.sequence_number) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.sequence_number is not None:
return True
if self.exact_prefix_length is not None:
return True
if self.grant is not None:
return True
if self.match_exact_length is not None:
return True
if self.match_max_length is not None:
return True
if self.match_min_length is not None:
return True
if self.max_prefix_length is not None:
return True
if self.min_prefix_length is not None:
return True
if self.netmask is not None:
return True
if self.prefix is not None:
return True
if self.remark is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries.PrefixListEntry']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set. Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-entries'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self._is_presence:
return True
if self.prefix_list_entry is not None:
for child_ref in self.prefix_list_entry:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix.PrefixListEntries']['meta_info']
@property
def _common_path(self):
if self.prefix_list_name is None:
raise YPYModelError('Key property prefix_list_name is None')
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:prefixes/Cisco-IOS-XR-ipv4-acl-cfg:prefix[Cisco-IOS-XR-ipv4-acl-cfg:prefix-list-name = ' + str(self.prefix_list_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.prefix_list_name is not None:
return True
if self.prefix_list_entries is not None and self.prefix_list_entries._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Prefixes.Prefix']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:prefixes'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.prefix is not None:
for child_ref in self.prefix:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.Prefixes']['meta_info']
class LogUpdate(object):
"""
Control access lists log updates
.. attribute:: rate
Log update rate (log msgs per second)
**type**\: int
**range:** 1..1000
.. attribute:: threshold
Log update threshold (number of hits)
**type**\: int
**range:** 1..2147483647
"""
_prefix = 'ipv4-acl-cfg'
_revision = '2016-11-07'
def __init__(self):
self.parent = None
self.rate = None
self.threshold = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list/Cisco-IOS-XR-ipv4-acl-cfg:log-update'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.rate is not None:
return True
if self.threshold is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList.LogUpdate']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-ipv4-acl-cfg:ipv4-acl-and-prefix-list'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.accesses is not None and self.accesses._has_data():
return True
if self.log_update is not None and self.log_update._has_data():
return True
if self.prefixes is not None and self.prefixes._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_acl_cfg as meta
return meta._meta_table['Ipv4AclAndPrefixList']['meta_info']<|fim▁end|> |
@property
def _common_path(self): |
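# A brief construction sketch for the generated classes above, assuming the
# usual ydk-py conventions this module follows (top-level child containers such
# as `prefixes` instantiated by the model class, YList children, and presence
# containers assigned explicitly).  The concrete names and values below are
# illustrative only, not taken from the model.
from ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_acl_cfg import Ipv4AclAndPrefixList

acl_cfg = Ipv4AclAndPrefixList()

# Build one prefix list holding a single entry.
prefix = acl_cfg.prefixes.Prefix()
prefix.prefix_list_name = 'PFX-EXAMPLE'
prefix.prefix_list_entries = prefix.PrefixListEntries()   # presence container

entry = prefix.prefix_list_entries.PrefixListEntry()
entry.sequence_number = 10
entry.prefix = '10.0.0.0'
entry.netmask = '255.255.255.0'
entry.remark = 'example entry'
prefix.prefix_list_entries.prefix_list_entry.append(entry)

acl_cfg.prefixes.prefix.append(prefix)

# _has_data() walks the children populated above, so the tree reports True.
assert acl_cfg._has_data()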
<|file_name|>test_review_xblock.py<|end_file_name|><|fim▁begin|>"""
Test scenarios for the review xblock.
"""
import ddt
import unittest
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from nose.plugins.attrib import attr
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.helpers import LoginEnrollmentTestCase
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from review import get_review_ids
import crum
class TestReviewXBlock(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Create the test environment with the review xblock.
"""
STUDENTS = [
{'email': '[email protected]', 'password': 'foo'},
]
XBLOCK_NAMES = ['review']
URL_BEGINNING = settings.LMS_ROOT_URL + \
'/xblock/block-v1:DillonX/DAD101x_review/3T2017+type@'
@classmethod
def setUpClass(cls):
# Nose runs setUpClass methods even if a class decorator says to skip
# the class: https://github.com/nose-devs/nose/issues/946
# So, skip the test class here if we are not in the LMS.
if settings.ROOT_URLCONF != 'lms.urls':
raise unittest.SkipTest('Test only valid in lms')
super(TestReviewXBlock, cls).setUpClass()
# Set up for the actual course
cls.course_actual = CourseFactory.create(
display_name='Review_Test_Course_ACTUAL',
org='DillonX',
number='DAD101x',
run='3T2017'
)
# There are multiple sections so the learner can load different
# problems, but should only be shown review problems from what they have loaded
with cls.store.bulk_operations(cls.course_actual.id, emit_signals=False):
cls.chapter_actual = ItemFactory.create(
parent=cls.course_actual, display_name='Overview'
)
cls.section1_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 1'
)
cls.unit1_actual = ItemFactory.create(
parent=cls.section1_actual, display_name='New Unit 1'
)
cls.xblock1_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 1'
)
cls.xblock2_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 2'
)
cls.xblock3_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 3'
)
cls.xblock4_actual = ItemFactory.create(
parent=cls.unit1_actual,
category='problem',
display_name='Problem 4'
)
cls.section2_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 2'
)
cls.unit2_actual = ItemFactory.create(
parent=cls.section2_actual, display_name='New Unit 2'
)
cls.xblock5_actual = ItemFactory.create(
parent=cls.unit2_actual,
category='problem',
display_name='Problem 5'
)
cls.section3_actual = ItemFactory.create(
parent=cls.chapter_actual, display_name='Section 3'
)
cls.unit3_actual = ItemFactory.create(
parent=cls.section3_actual, display_name='New Unit 3'
)
cls.xblock6_actual = ItemFactory.create(
parent=cls.unit3_actual,
category='problem',
display_name='Problem 6'
)
cls.course_actual_url = reverse(
'courseware_section',
kwargs={
'course_id': unicode(cls.course_actual.id),
'chapter': 'Overview',
'section': 'Welcome',
}
)
# Set up for the review course where the review problems are hosted
cls.course_review = CourseFactory.create(
display_name='Review_Test_Course_REVIEW',
org='DillonX',
number='DAD101x_review',
run='3T2017'
)
with cls.store.bulk_operations(cls.course_review.id, emit_signals=True):
cls.chapter_review = ItemFactory.create(
parent=cls.course_review, display_name='Overview'
)
cls.section_review = ItemFactory.create(
parent=cls.chapter_review, display_name='Welcome'
)
cls.unit1_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 1'
)
cls.xblock1_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 1'
)
cls.xblock2_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 2'
)
cls.xblock3_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 3'
)
cls.xblock4_review = ItemFactory.create(
parent=cls.unit1_review,
category='problem',
display_name='Problem 4'
)
cls.unit2_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 2'
)
cls.xblock5_review = ItemFactory.create(
parent=cls.unit2_review,
category='problem',
display_name='Problem 5'
)
cls.unit3_review = ItemFactory.create(
parent=cls.section_review, display_name='New Unit 3'
)
cls.xblock6_review = ItemFactory.create(
parent=cls.unit3_review,
category='problem',
display_name='Problem 6'
)
cls.course_review_url = reverse(
'courseware_section',
kwargs={
'course_id': unicode(cls.course_review.id),
'chapter': 'Overview',
'section': 'Welcome',
}
)
def setUp(self):
super(TestReviewXBlock, self).setUp()
for idx, student in enumerate(self.STUDENTS):
username = 'u{}'.format(idx)
self.create_account(username, student['email'], student['password'])
self.activate_user(student['email'])
self.staff_user = GlobalStaffFactory()
def enroll_student(self, email, password, course):
"""
Student login and enroll for the course
"""
self.login(email, password)
self.enroll(course, verify=True)
@attr(shard=1)
@ddt.ddt
class TestReviewFunctions(TestReviewXBlock):
"""
Check that the essential functions of the Review xBlock work as expected.
Tests cover the basic flow of loading problems in the source course and
then being served review problems drawn from what the learner has seen.
"""
def test_no_review_problems(self):
"""
If a user has not seen any problems, they should
receive a response to go out and try more problems so they have
material to review.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool'
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': review_section_actual.location.name,
}
))
expected_h2 = 'Nothing to review'
self.assertIn(expected_h2, response.content)
@ddt.data(5, 7)
def test_too_few_review_problems(self, num_desired):
"""
If a user does not have enough problems to review, they should
receive a response to go out and try more problems so they have
material to review.
Testing loading 4 problems and asking for 5 and then loading every
problem and asking for more than that.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Want to load fewer problems than num_desired
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
if num_desired > 6:
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool',
num_desired=num_desired
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,<|fim▁hole|> expected_h2 = 'Nothing to review'
self.assertIn(expected_h2, response.content)
@ddt.data(2, 6)
def test_review_problems(self, num_desired):
"""
If a user has enough problems to review, they should
receive a response where there are review problems for them to try.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
with self.store.bulk_operations(self.course_actual.id, emit_signals=False):
review_section_actual = ItemFactory.create(
parent=self.chapter_actual, display_name='Review Subsection'
)
review_unit_actual = ItemFactory.create(
parent=review_section_actual, display_name='Review Unit'
)
review_xblock_actual = ItemFactory.create( # pylint: disable=unused-variable
parent=review_unit_actual,
category='review',
display_name='Review Tool',
num_desired=num_desired
)
# Loading the review section
response = self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': review_section_actual.location.name,
}
))
expected_header_text = 'Review Problems'
# The problems are defaulted to correct upon load
# This happens because the problems "raw_possible" field is 0 and the
# "raw_earned" field is also 0.
expected_correctness_text = 'correct'
expected_problems = ['Review Problem 1', 'Review Problem 2', 'Review Problem 3',
'Review Problem 4', 'Review Problem 5', 'Review Problem 6']
self.assertIn(expected_header_text, response.content)
self.assertEqual(response.content.count(expected_correctness_text), num_desired)
# Since the problems are randomly selected, we have to check
# the correct number of problems are returned.
count = 0
for problem in expected_problems:
if problem in response.content:
count += 1
self.assertEqual(count, num_desired)
self.assertEqual(response.content.count(self.URL_BEGINNING), num_desired)
@ddt.data(2, 6)
def test_review_problem_urls(self, num_desired):
"""
Verify that the URLs returned from the Review xBlock are valid and
correct URLs for the problems the learner has seen.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section2_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_urls = get_review_ids.get_problems(num_desired, self.course_actual.id)
expected_urls = [
(self.URL_BEGINNING + 'problem+block@Problem_1', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_2', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_3', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_4', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_5', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_6', True, 0)
]
# Since the problems are randomly selected, we have to check
# the correct number of urls are returned.
count = 0
for url in expected_urls:
if url in result_urls:
count += 1
self.assertEqual(count, num_desired)
@ddt.data(2, 5)
def test_review_problem_urls_unique_problem(self, num_desired):
"""
Verify that the URLs returned from the Review xBlock are valid and
correct URLs for the problems the learner has seen. This test will give
a unique problem to a learner and verify only that learner sees
it as a review. It will also ensure that if a learner has not loaded a
problem, it should never show up as a review problem
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has enough problems in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section3_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_urls = get_review_ids.get_problems(num_desired, self.course_actual.id)
expected_urls = [
(self.URL_BEGINNING + 'problem+block@Problem_1', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_2', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_3', True, 0),
(self.URL_BEGINNING + 'problem+block@Problem_4', True, 0),
# This is the unique problem when num_desired == 5
(self.URL_BEGINNING + 'problem+block@Problem_6', True, 0)
]
expected_not_loaded_problem = (self.URL_BEGINNING + 'problem+block@Problem_5', True, 0)
# Since the problems are randomly selected, we have to check
# the correct number of urls are returned.
count = 0
for url in expected_urls:
if url in result_urls:
count += 1
self.assertEqual(count, num_desired)
self.assertNotIn(expected_not_loaded_problem, result_urls)
# NOTE: This test is failing because when I grab the problem from the CSM,
# it is unable to find its parents. This is some issue with the BlockStructure
# and it not being populated the way we want. For now, this is being left out
# since the first course I'm working with does not use this function.
# TODO: Fix get_vertical from get_review_ids to have the block structure for this test
# or fix something in this file to make sure it populates the block structure for the CSM
@unittest.skip
def test_review_vertical_url(self):
"""
Verify that the URL returned from the Review xBlock is a valid and
correct URL for the vertical the learner has seen.
"""
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_actual)
self.enroll_student(self.STUDENTS[0]['email'], self.STUDENTS[0]['password'], self.course_review)
# Loading problems so the learner has problems and thus a vertical in the CSM
self.client.get(reverse(
'courseware_section',
kwargs={
'course_id': self.course_actual.id,
'chapter': self.chapter_actual.location.name,
'section': self.section1_actual.location.name,
}
))
user = User.objects.get(email=self.STUDENTS[0]['email'])
crum.set_current_user(user)
result_url = get_review_ids.get_vertical(self.course_actual.id)
expected_url = self.URL_BEGINNING + 'vertical+block@New_Unit_1'
self.assertEqual(result_url, expected_url)<|fim▁end|> | 'section': review_section_actual.location.name,
}
))
|
<|file_name|>router_spec.js<|end_file_name|><|fim▁begin|>var should = require('should'),
sinon = require('sinon'),
rewire = require('rewire'),
path = require('path'),
Promise = require('bluebird'),
ampController = rewire('../lib/router'),
errors = require('../../../errors'),
configUtils = require('../../../../test/utils/configUtils'),
themes = require('../../../themes'),
sandbox = sinon.sandbox.create();
// Helper function to prevent unit tests
// from failing via timeout when they
// should just immediately fail
function failTest(done) {
return function (err) {
done(err);
};
}
describe('AMP Controller', function () {
var res,
req,
defaultPath,
setResponseContextStub,
hasTemplateStub;
beforeEach(function () {
hasTemplateStub = sandbox.stub().returns(false);
hasTemplateStub.withArgs('index').returns(true);
sandbox.stub(themes, 'getActive').returns({
hasTemplate: hasTemplateStub
});
res = {
render: sandbox.spy(),
locals: {
context: ['amp', 'post']
}
};
req = {
route: {path: '/'},
query: {r: ''},
params: {},
amp: {}
};
defaultPath = path.join(configUtils.config.get('paths').appRoot, '/core/server/apps/amp/lib/views/amp.hbs');
configUtils.set({
theme: {
permalinks: '/:slug/',
amp: true
}
});
});
afterEach(function () {
sandbox.restore();
configUtils.restore();
});
it('should render default amp page when theme has no amp template', function (done) {
setResponseContextStub = sandbox.stub();
ampController.__set__('setResponseContext', setResponseContextStub);
res.render = function (view) {
view.should.eql(defaultPath);
done();
};
ampController.controller(req, res, failTest(done));
});
it('should render theme amp page when theme has amp template', function (done) {
hasTemplateStub.withArgs('amp').returns(true);
setResponseContextStub = sandbox.stub();
ampController.__set__('setResponseContext', setResponseContextStub);
res.render = function (view) {
view.should.eql('amp');
done();
};
ampController.controller(req, res, failTest(done));
});
it('should render with error when error is passed in', function (done) {
res.error = 'Test Error';
setResponseContextStub = sandbox.stub();
ampController.__set__('setResponseContext', setResponseContextStub);
res.render = function (view, context) {
view.should.eql(defaultPath);
context.should.eql({error: 'Test Error'});
done();
};
ampController.controller(req, res, failTest(done));
});
it('does not render amp page when amp context is missing', function (done) {
var renderSpy;
setResponseContextStub = sandbox.stub();
ampController.__set__('setResponseContext', setResponseContextStub);
res.locals.context = ['post'];
res.render = sandbox.spy(function () {
done();
});
renderSpy = res.render;
ampController.controller(req, res, failTest(done));
renderSpy.called.should.be.false();
});
it('does not render amp page when context is other than amp and post', function (done) {
var renderSpy;
setResponseContextStub = sandbox.stub();
ampController.__set__('setResponseContext', setResponseContextStub);
res.locals.context = ['amp', 'page'];
res.render = sandbox.spy(function () {
done();
});
renderSpy = res.render;
ampController.controller(req, res, failTest(done));
renderSpy.called.should.be.false();
});
});
describe('AMP getPostData', function () {
var res, req, postLookupStub, next;
beforeEach(function () {
res = {
locals: {
relativeUrl: '/welcome-to-ghost/amp/'
}
};
req = {
amp: {
post: {}
}
};
next = function () {};
});
afterEach(function () {
sandbox.restore();
});
it('should successfully get the post data from slug', function (done) {
postLookupStub = sandbox.stub();
postLookupStub.returns(Promise.resolve({
post: {
id: '1',
slug: 'welcome-to-ghost'
}
}));
ampController.__set__('postLookup', postLookupStub);
ampController.getPostData(req, res, function () {
req.body.post.should.be.eql({
id: '1',
slug: 'welcome-to-ghost'
}
);
done();
});
});
it('should return error if postlookup returns NotFoundError', function (done) {
postLookupStub = sandbox.stub();
postLookupStub.returns(Promise.reject(new errors.NotFoundError({message: 'not found'})));
ampController.__set__('postLookup', postLookupStub);
ampController.getPostData(req, res, function (err) {
should.exist(err);
should.exist(err.message);
should.exist(err.statusCode);
should.exist(err.errorType);
err.message.should.be.eql('not found');
err.statusCode.should.be.eql(404);
err.errorType.should.be.eql('NotFoundError');
req.body.should.be.eql({});
done();
});
});
it('should return error if postlookup returns error', function (done) {
postLookupStub = sandbox.stub();
postLookupStub.returns(Promise.reject('not found'));
<|fim▁hole|> ampController.__set__('postLookup', postLookupStub);
ampController.getPostData(req, res, function (err) {
should.exist(err);
err.should.be.eql('not found');
req.body.should.be.eql({});
done();
});
});
});<|fim▁end|> | |
<|file_name|>0007_fieldvalue_column_name.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-10-05 09:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('explorer', '0006_auto_20181004_1159'),<|fim▁hole|>
operations = [
migrations.AddField(
model_name='fieldvalue',
name='column_name',
field=models.CharField(blank=True, max_length=80, null=True),
),
]<|fim▁end|> | ] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__productname__ = 'dotinstall'
__version__ = '0.1'
__copyright__ = "Copyright (C) 2014 Cinghio Pinghio"
__author__ = "Cinghio Pinghio"<|fim▁hole|>__long_description__ = "Install dofile based on some rules"
__url__ = "cinghiopinghio...."
__license__ = "Licensed under the GNU GPL v3+."<|fim▁end|> | __author_email__ = "[email protected]"
__description__ = "Install dotfiles" |
<|file_name|>extraction_way.hpp<|end_file_name|><|fim▁begin|>#ifndef EXTRACTION_WAY_HPP
#define EXTRACTION_WAY_HPP
#include "extractor/guidance/road_classification.hpp"
#include "extractor/travel_mode.hpp"
#include "util/guidance/turn_lanes.hpp"
#include "util/typedefs.hpp"
#include <string>
#include <vector>
namespace osrm
{
namespace extractor
{
namespace detail
{
inline void maybeSetString(std::string &str, const char *value)
{
if (value == nullptr)
{
str.clear();
}<|fim▁hole|> {
str = std::string(value);
}
}
}
/**
* This struct is the direct result of the call to ```way_function```
* in the lua based profile.
*
* It is split into multiple edge segments in the ExtractorCallback.
*/
struct ExtractionWay
{
ExtractionWay() { clear(); }
void clear()
{
forward_speed = -1;
backward_speed = -1;
forward_rate = -1;
backward_rate = -1;
duration = -1;
weight = -1;
name.clear();
ref.clear();
pronunciation.clear();
destinations.clear();
exits.clear();
turn_lanes_forward.clear();
turn_lanes_backward.clear();
road_classification = guidance::RoadClassification();
forward_travel_mode = TRAVEL_MODE_INACCESSIBLE;
backward_travel_mode = TRAVEL_MODE_INACCESSIBLE;
roundabout = false;
circular = false;
is_startpoint = true;
forward_restricted = false;
backward_restricted = false;
is_left_hand_driving = false;
}
// wrappers to allow assigning nil (nullptr) to string values
void SetName(const char *value) { detail::maybeSetString(name, value); }
const char *GetName() const { return name.c_str(); }
void SetRef(const char *value) { detail::maybeSetString(ref, value); }
const char *GetRef() const { return ref.c_str(); }
void SetDestinations(const char *value) { detail::maybeSetString(destinations, value); }
const char *GetDestinations() const { return destinations.c_str(); }
void SetExits(const char *value) { detail::maybeSetString(exits, value); }
const char *GetExits() const { return exits.c_str(); }
void SetPronunciation(const char *value) { detail::maybeSetString(pronunciation, value); }
const char *GetPronunciation() const { return pronunciation.c_str(); }
void SetTurnLanesForward(const char *value)
{
detail::maybeSetString(turn_lanes_forward, value);
}
const char *GetTurnLanesForward() const { return turn_lanes_forward.c_str(); }
void SetTurnLanesBackward(const char *value)
{
detail::maybeSetString(turn_lanes_backward, value);
}
const char *GetTurnLanesBackward() const { return turn_lanes_backward.c_str(); }
// markers for determining user-defined classes for each way
std::unordered_map<std::string, bool> forward_classes;
std::unordered_map<std::string, bool> backward_classes;
// speed in km/h
double forward_speed;
double backward_speed;
// weight per meter
double forward_rate;
double backward_rate;
// duration of the whole way in both directions
double duration;
// weight of the whole way in both directions
double weight;
std::string name;
std::string ref;
std::string pronunciation;
std::string destinations;
std::string exits;
std::string turn_lanes_forward;
std::string turn_lanes_backward;
guidance::RoadClassification road_classification;
TravelMode forward_travel_mode : 4;
TravelMode backward_travel_mode : 4;
// Boolean flags
bool roundabout : 1;
bool circular : 1;
bool is_startpoint : 1;
bool forward_restricted : 1;
bool backward_restricted : 1;
bool is_left_hand_driving : 1;
bool : 2;
};
}
}
#endif // EXTRACTION_WAY_HPP<|fim▁end|> | else |
<|file_name|>day4.rs<|end_file_name|><|fim▁begin|>extern crate crypto;
extern crate clap;
use clap::App;
use crypto::md5::Md5;
use crypto::digest::Digest;
fn main() {
let matches = App::new("day4")
.version("v1.0")
.author("Andrew Rink <[email protected]>")
.args_from_usage("<KEY> 'Secret key for MD5 hash'")
.get_matches();
let key = matches.value_of("KEY").unwrap();
println!("For key {}, found {}", key, find_number_leading_zeroes(key, 5));
println!("For key {}, found {}", key, find_number_leading_zeroes(key, 6));
}
fn find_number_leading_zeroes(key : &str, num_zeroes : usize) -> u64 {
let mut md5 = Md5::new();
let mut res = 0;
let target_string : String = (vec!['0'; num_zeroes]).into_iter().collect();<|fim▁hole|> let mut tst = String::from(key);
tst.push_str(&i.to_string());
md5.input_str(&tst);
if md5.result_str().starts_with(&target_string) {
res = i;
break;
}
md5.reset();
}
res
}
#[cfg(test)]
mod tests {
use find_number_leading_zeroes;
#[test]
fn number_check() {
assert_eq!(609043, find_number_leading_zeroes("abcdef", 5));
assert_eq!(1048970, find_number_leading_zeroes("pqrstuv", 5));
}
}<|fim▁end|> | for i in 0..std::u64::MAX { |
<|file_name|>bomOverload.js<|end_file_name|><|fim▁begin|>function bomOverload() {
if(settings.verbose) console.log("RubberGlove: Creating PluginArray");
function PluginArray() { // native(PluginArray)
if(window.navigator.plugins.constructor === PluginArray)
throw new TypeError("Illegal constructor");
if(settings.verbose) console.log("RubberGlove: Creating PluginArray instance");
Object.defineProperty(this, 'length', {
enumerable: true,
get: (function(eventNode) {
return function() {
// native()
console.error('RubberGlove: Iteration of window.navigator.plugins blocked for ' + window.location.href + ' (Informational, not an error.)');
window.postMessage({
type: 'RubberGlove',
text: 'window.navigator.plugins',
url: window.location.href
}, '*');
return 0;
};
})(document.currentScript.parentNode)
});
// Add hidden named plugins
var plugins = window.navigator.plugins;
for(var i = 0; i < plugins.length; i++) {
var plugin = plugins[i];
if(typeof plugin != 'undefined' && typeof plugin.name != 'undefined' && plugin.name != null) {
Object.defineProperty(this, plugin.name, {
configurable: true,
value: plugin
});
}
}
}
Object.defineProperty(PluginArray, "toString", {
enumerable: true,
value: function toString() { // native(toString)
return "function PluginArray() { [native code] }";
}
});
if(settings.verbose) console.log("RubberGlove: Creating PluginArray.prototype.item()");
PluginArray.prototype.item = function item() { // native(item)
return this[arguments[0]];
};
if(settings.verbose) console.log("RubberGlove: Creating PluginArray.prototype.namedItem()");
PluginArray.prototype.namedItem = function namedItem() { // native(namedItem)
return this[arguments[0]];
};
if(settings.verbose) console.log("RubberGlove: Creating PluginArray.prototype.refresh()");
PluginArray.prototype.refresh = (function(plugins) {
if(settings.verbose) console.log("RubberGlove: Returning our custom PluginArray.refresh()");
return function refresh() { // native(refresh)
// Refresh the real plugins list
plugins.refresh.apply(plugins, Array.prototype.slice.apply(arguments));<|fim▁hole|> var property = propertyNames[i];
if(property != 'length') delete this[property];
}
// Add hidden named plugins
for(var i = 0; i < plugins.length; i++) {
var plugin = plugins[i];
if(typeof plugin.name != 'undefined' && plugin.name != null) {
Object.defineProperty(this, plugin.name, {
configurable: true,
value: plugin
});
}
}
}
})(window.navigator.plugins);
if(settings.verbose) console.log("RubberGlove: Replacing window.PluginArray");
Object.defineProperty(window, 'PluginArray', {
enumerable: false,
configurable: false,
writable: true,
value: PluginArray
});
// TODO: This should refresh as well when PluginArray.refresh() is called.
if(settings.verbose) console.log("RubberGlove: Creating MimeTypeArray");
function MimeTypeArray() { // native(MimeTypeArray)
if(window.navigator.mimeTypes.constructor === MimeTypeArray)
throw new TypeError("Illegal constructor");
if(settings.verbose) console.log("RubberGlove: Creating MimeTypeArray instance");
Object.defineProperty(this, 'length', {
enumerable: true,
get: (function(eventNode) {
return function() {
// native()
console.error('RubberGlove: Iteration of window.navigator.mimeTypes blocked for ' + window.location.href + ' (Informational, not an error.)');
window.postMessage({
type: 'RubberGlove',
text: 'window.navigator.mimeTypes',
url: window.location.href
}, '*');
return 0;
};
})(document.currentScript.parentNode)
});
// Add hidden named mimeTypes
var mimeTypes = window.navigator.mimeTypes;
for(var i = 0; i < mimeTypes.length; i++) {
var mimeType = mimeTypes[i];
if(typeof mimeType != 'undefined' && typeof mimeType.type != 'undefined' && mimeType.type != null) {
Object.defineProperty(this, mimeType.type, {
configurable: true,
value: mimeType
});
}
}
}
Object.defineProperty(MimeTypeArray, "toString", {
enumerable: true,
value: function toString() { // native(toString)
return "function MimeTypeArray() { [native code] }";
}
});
// Yes, these duplicate the ones for PluginArray. No, they should
// not use the same functions as they shouldn't test as equal.
if(settings.verbose) console.log("RubberGlove: Creating MimeTypeArray.prototype.item()");
MimeTypeArray.prototype.item = function item(index) { // native(item)
return this[arguments[0]];
};
if(settings.verbose) console.log("RubberGlove: Creating MimeTypeArray.prototype.namedItem()");
MimeTypeArray.prototype.namedItem = function namedItem(name) { // native(namedItem)
return this[arguments[0]];
};
if(settings.verbose) console.log("RubberGlove: Replacing window.MimeTypeArray");
Object.defineProperty(window, 'MimeTypeArray', {
enumerable: false,
configurable: false,
writable: true,
value: MimeTypeArray
});
if(settings.verbose) console.log("RubberGlove: Creating Navigator");
function Navigator() { // native(Navigator)
if(window.navigator.constructor === Navigator)
throw new TypeError("Illegal constructor");
if(settings.verbose) console.log("RubberGlove: Creating Navigator instance");
var propertyNames = Object.getOwnPropertyNames(window.navigator);
for(var propertyIndex = 0; propertyIndex < propertyNames.length; propertyIndex++) {
var propertyName = propertyNames[propertyIndex];
var descriptor = Object.getOwnPropertyDescriptor(window.navigator, propertyName);
var writable = descriptor.writable == true || typeof descriptor.set == 'function';
delete descriptor.value;
delete descriptor.get;
delete descriptor.set;
delete descriptor.writable;
switch(propertyName) {
case 'plugins':
console.log('RubberGlove: Cloaking plugins for ' + window.location.href);
descriptor.value = new PluginArray();
break;
case 'mimeTypes':
console.log('RubberGlove: Cloaking mimeTypes for ' + window.location.href);
descriptor.value = new MimeTypeArray();
break;
default:
//console.log("RubberGlove: wrapping " + propertyName);
descriptor.get = (function(propertyName, navigator) {
return function() { /* native() */ return navigator[propertyName] };
})(propertyName, window.navigator);
if(writable) {
descriptor.set = (function(propertyName, navigator) {
return function(value) { /* native(item) */ navigator[propertyName] = value; };
})(propertyName, window.navigator);
}
break;
}
Object.defineProperty(this, propertyName, descriptor);
}
}
Object.defineProperty(Navigator, "toString", {
enumerable: true,
value: function toString() { // native(toString)
return "function Navigator() { [native code] }";
}
});
if(settings.verbose) console.log("RubberGlove: Replacing Navigator.prototype");
for(var property in window.Navigator.prototype) {
Navigator.prototype[property] = window.Navigator.prototype[property];
}
if(settings.verbose) console.log("RubberGlove: Replacing window.Navigator");
Object.defineProperty(window, 'Navigator', {
enumerable: false,
configurable: false,
writable: true,
value: Navigator
});
if(settings.verbose) console.log("RubberGlove: Constructing Navigator");
var navigatorProxy = new Navigator();
if(settings.verbose) console.log("RubberGlove: Replacing window.navigator");
Object.defineProperty(window, 'navigator', {
enumerable: true,
configurable: false,
writable: true,
value: navigatorProxy
});
if(settings.verbose) console.log("RubberGlove: Replacing window.clientInformation");
Object.defineProperty(window, 'clientInformation', {
enumerable: true,
configurable: false,
writable: true,
value: navigatorProxy
});
// Hides source code when it contains "// native(functionName)" or
// "/* native(functionName)" at the beginning of the function body.
if(settings.verbose) console.log("RubberGlove: Replacing Function.prototype.toString()");
Function.prototype.toString = (function(oldToString) {
return function toString() { // native(toString) <-- yes, it handles itself
var result = oldToString.apply(this, Array.prototype.slice.apply(arguments));
var match = result.match(/^\s*?function.*?\(.*?\)\s*?{\s*?\/[\*\/]\s*?native\((.*?)\)/);
if(match != null && match.length > 1)
return 'function ' + match[1] + '() { [native code] }';
return result;
};
})(Function.prototype.toString);
// Hides named plugins and mimeTypes
if(settings.verbose) console.log("RubberGlove: Replacing Object.getOwnPropertyNames()");
Object.getOwnPropertyNames = (function(oldGetOwnPropertyNames) {
return function getOwnPropertyNames() { // native(getOwnPropertyNames)
var propertyNames = oldGetOwnPropertyNames.apply(this, Array.prototype.slice.apply(arguments));
if(arguments[0] === window.navigator.plugins || arguments[0] === window.navigator.mimeTypes) {
var filteredNames = [];
for(var i=0; i < propertyNames.length; i++) {
var propertyName = propertyNames[i];
if(propertyName == 'item' || propertyName == 'namedItem' || propertyName == 'length') {
filteredNames.push(propertyName);
}
}
return filteredNames;
}
return propertyNames;
}
})(Object.getOwnPropertyNames);
// Makes our objects look like first class objects
if(settings.verbose) console.log("RubberGlove: Replacing Object.prototype.toString()");
Object.prototype.toString = (function(oldToString) {
return function toString() { // native(toString)
if(this === window.navigator) return "[object Navigator]";
if(this === window.navigator.plugins) return "[object PluginArray]";
if(this === window.navigator.mimeTypes) return "[object MimeTypeArray]";
return oldToString.apply(this, Array.prototype.slice.apply(arguments));
};
})(Object.prototype.toString);
}<|fim▁end|> |
// Delete our existing set of plugins
var propertyNames = Object.getOwnPropertyNames(this);
for(var i = 0; i < propertyNames.length; i++) { |
<|file_name|>get_cov_per_ind.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
#
# This file reads through a vcf file and prints a space-separated text file, containing the coverage for each SNV (rows) and individual (columns). Alt and ref alleles are summed to total coverage at each SNV and locus.
# Usage: ~/hts_tools/get_cov_per_ind.py fltrd_pubRetStri_dipUG35_200bp.vcf > outfile
from sys import argv
with open(argv[1], 'rb') as file:
for line in file:
if line[0:2] == '##':
continue #print line.split('\n')[0]
elif line[0:2] == "#C":
line_list = line.split('\t')
inds = line_list[9:len(line_list)]
last_ind = inds[-1].split('\n')[0]<|fim▁hole|> inds[-1] = last_ind
print ' '.join(inds)
else:
line_list = line.split('\t')
ref_count = dict()
if len(line_list[4]) > 1:
continue
else:
scaf, bp = line.split('\t')[0:2]
vcf_inds = line_list[9:len(line_list)]
count_list = list()
for i, ind in enumerate(inds):
count = int()
if vcf_inds[i] == './.':
count += 0
else:
ad = vcf_inds[i].split(':')[1]
if ad == '.':
count += 0
else:
ad = ad.split(',')
count += int(ad[0]) + int(ad[1])
count_list.append(str(count))
print ' '.join(count_list)
file.close()<|fim▁end|> | |
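# A short follow-on sketch for consuming the matrix printed above: the first
# output line holds the individual names and every later line is one SNV with
# a summed ref+alt count per individual.  Assumes numpy; the file name below
# is hypothetical.
import numpy as np

with open('coverage_matrix.txt') as fh:
    individuals = fh.readline().split()
    counts = np.loadtxt(fh, dtype=int, ndmin=2)   # shape: (n_SNVs, n_individuals)

mean_cov = counts.mean(axis=0)
for name, cov in zip(individuals, mean_cov):
    print('%s\t%.2f' % (name, cov))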
<|file_name|>chromstat.py<|end_file_name|><|fim▁begin|>import sys
import numpy as np
class ChromStats(object):
def __init__(self):
self.n = 0
self.n_nan = 0
self.sum = 0
self.min = None
self.max = None
def mean(self):
"""Calculates mean of sites that are not nan
on this chromosome"""
n = self.n - self.n_nan
if n == 0:
return np.inf
return self.sum / float(n)
def set_from_vals(self, vals):
self.n = vals.size
if str(vals.dtype).startswith('float'):
nan_vals = np.isnan(vals)
self.n_nan = np.sum(nan_vals)
if self.n_nan < self.n:
self.min = np.min(vals[~nan_vals])
self.max = np.max(vals[~nan_vals])
self.sum = np.sum(vals[~nan_vals])
else:
self.min = np.min(vals)
self.max = np.max(vals)
self.sum = np.sum(vals)
def add(self, other):
self.n += other.n
self.n_nan += other.n_nan
self.sum += other.sum
if (self.min is None) or (other.min is not None and
self.min > other.min):
self.min = other.min
if (self.max is None) or (other.max is not None and
self.max < other.max):
self.max = other.max
def __str__(self):
return "n=%d n_nan=%s min=%s max=%s sum=%s" % \
(self.n, str(self.n_nan), str(self.min), str(self.max),
str(self.sum))
<|fim▁hole|>def calc_stats(h5f, chrom_list, verbose=False):
"""Calculates stats for each chromosome in provided list as well
as combined stats."""
combined = ChromStats()
for chrom in chrom_list:
chrom_stat = ChromStats()
node_name = "/%s" % chrom.name
if node_name in h5f:
node = h5f.getNode("/%s" % chrom.name)
vals = node[:]
chrom_stat.set_from_vals(vals)
if verbose:
sys.stderr.write("%s %s\n" % (str(chrom), str(chrom_stat)))
else:
sys.stderr.write("skipping chromosome %s because "
"not present in HDF5 file" % chrom.name)
combined.add(chrom_stat)
return combined
def set_stats(h5f, chrom_list, verbose=False):
"""Calculates stats for each chromosome and entire track and
stores them as attributes on the chromosome nodes. The
provided HDF5 file handle must have been opened in append mode"""
combined = ChromStats()
for chrom in chrom_list:
node_name = "/%s" % chrom.name
if node_name in h5f:
chrom_stat = ChromStats()
node = h5f.getNode(node_name)
chrom_stat.set_from_vals(node[:])
node.attrs.n = chrom_stat.n
node.attrs.n_nan = chrom_stat.n_nan
node.attrs.min = chrom_stat.min
node.attrs.max = chrom_stat.max
node.attrs.sum = chrom_stat.sum
node.flush()
if verbose:
sys.stderr.write("%s %s\n" % (str(chrom), str(chrom_stat)))
combined.add(chrom_stat)
else:
sys.stderr.write("skipping chromosome %s because "
"not present in HDF5 file\n" % chrom.name)
return combined
def get_stats(h5f, chrom_list, verbose=False):
"""Retrieves stats that are stored as attributes for the specified
set of chromosomes."""
combined = ChromStats()
chrom_stat = ChromStats()
for chrom in chrom_list:
node_name = "/%s" % chrom.name
if node_name in h5f:
node = h5f.getNode(node_name)
if 'n' not in node.attrs:
raise ValueError("Stat attributes are not set for track %s"
% track.name)
chrom_stat.n = node.attrs.n
chrom_stat.n_nan = node.attrs.n_nan
chrom_stat.min = node.attrs.min
chrom_stat.max = node.attrs.max
chrom_stat.sum = node.attrs.sum
if verbose:
sys.stderr.write("%s %s\n" % (str(chrom), str(chrom_stat)))
combined.add(chrom_stat)
else:
sys.stderr.write("skipping chromosome %s because "
"not present in HDF5 file\n" % chrom.name)
return combined<|fim▁end|> | |
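# A compact usage sketch for the helpers above, assuming it runs alongside them
# (same module or imported), a PyTables 2.x-style handle matching the getNode
# calls, one array node per chromosome, and chromosome objects exposing a
# .name attribute.  The file and chromosome names are illustrative.
import collections
import sys
import tables

Chromosome = collections.namedtuple('Chromosome', ['name'])
chromosomes = [Chromosome('chr1'), Chromosome('chr2')]

# set_stats writes per-chromosome attributes, so open the file in append mode.
h5f = tables.openFile('track.h5', 'a')
combined = set_stats(h5f, chromosomes, verbose=True)
sys.stderr.write("combined: %s mean=%g\n" % (combined, combined.mean()))

# Later runs can read the cached attributes back without rescanning the data.
cached = get_stats(h5f, chromosomes)
h5f.close()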
<|file_name|>where-clauses-no-bounds-or-predicates.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only
fn equal1<T>(_: &T, _: &T) -> bool where {
//~^ ERROR a `where` clause must have at least one predicate in it
true
}
fn equal2<T>(_: &T, _: &T) -> bool where T: {
//~^ ERROR each predicate in a `where` clause must have at least one bound
true
}<|fim▁hole|>}<|fim▁end|> |
fn main() { |
<|file_name|>events.js<|end_file_name|><|fim▁begin|>;(function() {
function ToerismeApp(id, parentContainer) {
this.API_URL = 'https://datatank.stad.gent/4/toerisme/visitgentevents.json';
this.id = id;
this.parentContainer = parentContainer;
this.loadData = function() {
var that = this;
var xhr = new XMLHttpRequest();
xhr.open('get', this.API_URL, true);
xhr.responseType = 'json';
xhr.onload = function() {
if(xhr.status == 200) {
var data = (!xhr.responseType)?JSON.parse(xhr.response):xhr.response;
var id = 1;
var tempStr = '';
                for(var i=0; i<20; i++) {
var title = data[i].title;
var contact = data[i].contact[0];
//var website = contact.website[0];
//var weburl = website.url;
var images = data[i].images[0];
var language = data[i].language;
var website = '';
if(contact.website){
website = contact.website[0].url;
//console.log(website);
}
if(language == 'nl'){
tempStr += '<div class="row row_events">';
tempStr += '<a href="http://' + website +'" target="_blank";>';
tempStr += '<div class="col-xs-6"><div class="div_image" style="background: url(' + images +') no-repeat center ;background-size:cover;"></div></div>';
tempStr += '<div class="col-xs-6"><h4>' + title + '</h4>';
tempStr += '<p>Adres: ' + contact.street + ' nr ' + contact.number + '<br> Stad: ' + contact.city +'</p>';
tempStr += '</div>'; /* einde adres */
tempStr += '</a>';
tempStr += '<a class="link_heart" id="myDIV'+[i]+'" alt="Add to favorites" title="Add to favorites" onclick="myFunction('+[i]+')" ><span class="glyphicon glyphicon-heart-empty"></span></a>';
tempStr += '</div>';/* einde row */
}else{};
}
that.parentContainer.innerHTML = tempStr;
} else {
                console.log(xhr.status);
}
}<|fim▁hole|> console.log('Error');
}
xhr.send();
};
this.updateUI = function() {
};
this.toString = function() {
return `ToerismeApp with id: ${this.id}`;
};
};
var ww1 = new ToerismeApp(1, document.querySelector('.sidebar'));
ww1.loadData();
console.log(ww1.toString());
})();
function myFunction(id){
document.getElementById("myDIV"+id).classList.toggle("link_heart-select");
}<|fim▁end|> | xhr.onerror = function() { |
<|file_name|>forensic_artifacts.py<|end_file_name|><|fim▁begin|># Rekall Memory Forensics
# Copyright 2016 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""This module implements plugins related to forensic artifacts.
https://github.com/ForensicArtifacts
"""
from future import standard_library
standard_library.install_aliases()
from builtins import str
from past.builtins import basestring
from builtins import object
from future.utils import with_metaclass
__author__ = "Michael Cohen <[email protected]>"
import csv
import datetime
import json
import platform
import os
import io
import sys
import zipfile
import yaml
from artifacts import definitions
from artifacts import errors
from rekall import plugin
from rekall import obj
from rekall_lib import yaml_utils
from rekall.ui import text
from rekall.ui import json_renderer
from rekall.plugins.response import common
from rekall_lib import registry
class ArtifactResult(object):
"""Bundle all the results from an artifact."""
def __init__(self, artifact_name=None, result_type=None, fields=None):
self.artifact_name = artifact_name
self.result_type = result_type
self.results = []
self.fields = fields or []
def __iter__(self):
return iter(self.results)
def add_result(self, **data):
if data:
self.results.append(data)
def merge(self, other):
self.results.extend(other)
def as_dict(self):
return dict(fields=self.fields,
results=self.results,
artifact_name=self.artifact_name,
result_type=self.result_type)
class BaseArtifactResultWriter(with_metaclass(registry.MetaclassRegistry, object)):
"""Writes the results of artifacts."""
__abstract = True
def __init__(self, session=None, copy_files=False,
create_timeline=False):
self.session = session
self.copy_files = copy_files
self.create_timeline = create_timeline
def write_result(self, result):
"""Writes the artifact result."""
def _create_timeline(self, artifact_result):
"""Create a new timeline result from the given result.
We use the output format suitable for the timesketch tool:
https://github.com/google/timesketch/wiki/UserGuideTimelineFromFile
"""
artifact_fields = artifact_result.fields
fields = [
dict(name="message", type="unicode"),
dict(name="timestamp", type="int"),
dict(name="datetime", type="unicode"),
dict(name="timestamp_desc", type="unicode"),
] + artifact_fields
new_result = ArtifactResult(
artifact_name=artifact_result.artifact_name,
result_type="timeline",
fields=fields)
for field in artifact_fields:
# This field is a timestamp - copy the entire row into the timeline.
if field["type"] == "epoch":
for row in artifact_result.results:
new_row = row.copy()
timestamp = row.get(field["name"])
if timestamp is None:
continue
new_row["timestamp"] = int(timestamp)
new_row["datetime"] = datetime.datetime.utcfromtimestamp(
timestamp).strftime("%Y-%m-%dT%H:%M:%S+00:00")
new_row["timestamp_desc"] = artifact_result.artifact_name
new_row["message"] = " ".join(
str(row[field["name"]]) for field in artifact_fields
if field["name"] in row)
new_result.add_result(**new_row)
return new_result
def __enter__(self):
return self
def __exit__(self, unused_type, unused_value, unused_traceback):
return
class DirectoryBasedWriter(BaseArtifactResultWriter):
name = "Directory"
def __init__(self, output=None, **kwargs):
super(DirectoryBasedWriter, self).__init__(**kwargs)
self.dump_dir = output
# Check if the directory already exists.
if not os.path.isdir(self.dump_dir):
raise plugin.PluginError("%s is not a directory" % self.dump_dir)
def write_file(self, result):
"""Writes a FileInformation object."""
for row in result.results:
filename = row["filename"]
with open(filename, "rb") as in_fd:
with self.session.GetRenderer().open(
directory=self.dump_dir,
filename=filename, mode="wb") as out_fd:
while 1:
data = in_fd.read(1024*1024)
if not data:
break
out_fd.write(data)
def _write_csv_file(self, out_fd, result):
fieldnames = [x["name"] for x in result.fields]
writer = csv.DictWriter(
out_fd, dialect="excel",
fieldnames=fieldnames)
writer.writeheader()
for row in result.results:
writer.writerow(row)
def write_result(self, result):
"""Writes the artifact result."""
if self.copy_files and result.result_type == "file_information":
try:
self.write_file(result)
except (IOError, OSError) as e:
self.session.logging.warn("Unable to copy file: %s", e)
with self.session.GetRenderer().open(
directory=self.dump_dir,
filename="artifacts/%s.json" % result.artifact_name,
mode="wb") as out_fd:
out_fd.write(json.dumps(result.as_dict(), sort_keys=True))
with self.session.GetRenderer().open(
directory=self.dump_dir,
filename="artifacts/%s.csv" % result.artifact_name,
mode="wb") as out_fd:
self._write_csv_file(out_fd, result)
if self.create_timeline:
with self.session.GetRenderer().open(
directory=self.dump_dir,
filename="artifacts/%s.timeline.csv" %
result.artifact_name,
mode="wb") as out_fd:
self._write_csv_file(out_fd, self._create_timeline(result))
class ZipBasedWriter(BaseArtifactResultWriter):
name = "Zip"
def __init__(self, output=None, **kwargs):
super(ZipBasedWriter, self).__init__(**kwargs)
self.output = output
def __enter__(self):
self.out_fd = self.session.GetRenderer().open(
filename=self.output, mode="wb").__enter__()
self.outzip = zipfile.ZipFile(self.out_fd, mode="w",
compression=zipfile.ZIP_DEFLATED)
return self
def __exit__(self, *args):
self.outzip.close()
self.out_fd.__exit__(*args)
def _write_csv_file(self, out_fd, result):
fieldnames = [x["name"] for x in result.fields]
writer = csv.DictWriter(
out_fd, dialect="excel",
fieldnames=fieldnames)
writer.writeheader()
for row in result.results:
writer.writerow(row)
def write_file(self, result):
for row in result.results:
filename = row["filename"]
self.outzip.write(filename)
def write_result(self, result):
"""Writes the artifact result."""
if self.copy_files and result.result_type == "file_information":
try:
self.write_file(result)
except (IOError, OSError) as e:
self.session.logging.warn(
"Unable to copy file %s into output: %s",
result["filename"], e)
self.outzip.writestr("artifacts/%s.json" % result.artifact_name,
json.dumps(result.as_dict(), sort_keys=True),
zipfile.ZIP_DEFLATED)
tmp_fd = io.StringIO()
self._write_csv_file(tmp_fd, result)
self.outzip.writestr("artifacts/%s.csv" % result.artifact_name,
tmp_fd.getvalue(),
zipfile.ZIP_DEFLATED)
if self.create_timeline:
tmp_fd = io.StringIO()
self._write_csv_file(tmp_fd, self._create_timeline(result))
self.outzip.writestr("artifacts/%s.timeline.csv" %
result.artifact_name,
tmp_fd.getvalue(),
zipfile.ZIP_DEFLATED)
# Rekall defines a new artifact type.
TYPE_INDICATOR_REKALL = "REKALL_EFILTER"
class _FieldDefinitionValidator(object):
"""Loads and validates fields in a dict.
We check their name, types and if they are optional according to a template
in _field_definitions.
"""
_field_definitions = []
def _LoadFieldDefinitions(self, data, field_definitions):
for field in field_definitions:
name = field["name"]
default = field.get("default")
required_type = field.get("type")
if required_type in (str, str):
required_type = basestring
if default is None and required_type is not None:
                # basestring can't be instantiated.
if required_type is basestring:
default = ""
else:
default = required_type()
if required_type is None and default is not None:
required_type = type(default)
if not field.get("optional"):
if name not in data:
raise errors.FormatError(
u'Missing fields {}.'.format(name))
value = data.get(name, default)
if default is not None and not isinstance(value, required_type):
raise errors.FormatError(
u'field {} has type {} should be {}.'.format(
name, type(data[name]), required_type))
if field.get("checker"):
value = field["checker"](self, data)
setattr(self, name, value)
class SourceType(_FieldDefinitionValidator):
"""All sources inherit from this."""
# Common fields for all sources.
_common_fields = [
dict(name="type", optional=False),
dict(name="supported_os", optional=True, type=list,
default=list(definitions.SUPPORTED_OS)),
]
def __init__(self, source_definition, artifact=None):
attributes = source_definition["attributes"]
# The artifact that owns us.
self.artifact = artifact
self.source_definition = source_definition
self.type_indicator = source_definition["type"]
self._LoadFieldDefinitions(attributes, self._field_definitions)
self._LoadFieldDefinitions(source_definition, self._common_fields)
def is_active(self, **_):
"""Indicates if the source is applicable to the environment."""
return True
def apply(self, artifact_name=None, fields=None, result_type=None, **_):
"""Generate ArtifactResult instances."""
return ArtifactResult(artifact_name=artifact_name,
result_type=result_type,
fields=fields)
# These are the valid types of Rekall images. They can be used to restrict
# REKALL_EFILTER artifacts to specific types of images. The types which end in
# API refer to the API only version of the similar plugins.
REKALL_IMAGE_TYPES = [
"Windows", "WindowsAPI",
"Linux", "LinuxAPI",
"Darwin", "DarwinAPI"
]
class RekallEFilterArtifacts(SourceType):
"""Class to support Rekall Efilter artifact types."""
allowed_types = {
"int": int,
"unicode": str, # Unicode data.
"str": str, # Used for binary data.
"float": float,
"epoch": float, # Dates as epoch timestamps.
"any": str # Used for opaque types that can not be further processed.
}
_field_definitions = [
dict(name="query", type=basestring),
dict(name="query_parameters", default=[], optional=True),
dict(name="fields", type=list),
dict(name="type_name", type=basestring),
dict(name="image_type", type=list, optional=True,
default=REKALL_IMAGE_TYPES),
]
def __init__(self, source_definition, **kw):
super(RekallEFilterArtifacts, self).__init__(source_definition, **kw)
for column in self.fields:
if "name" not in column or "type" not in column:
raise errors.FormatError(
u"Field definition should have both name and type.")
mapped_type = column["type"]
if mapped_type not in self.allowed_types:
raise errors.FormatError(
u"Unsupported type %s." % mapped_type)
def GetImageType(self, session):
"""Returns one of the standard image types based on the session."""
result = session.profile.metadata("os").capitalize()
if session.GetParameter("live_mode") == "API":
result += "API"
return result
def is_active(self, session=None):
"""Determine if this source is active."""
return (self.image_type and
self.GetImageType(session) in self.image_type)
def apply(self, session=None, **kwargs):
result = super(RekallEFilterArtifacts, self).apply(
fields=self.fields, result_type=self.type_name, **kwargs)
if not self.is_active(session):
return
search = session.plugins.search(
query=self.query,
query_parameters=self.query_parameters)
for match in search.solve():
row = {}
for column in self.fields:
name = column["name"]
type = column["type"]
value = match.get(name)
if value is None:
continue
row[name] = RekallEFilterArtifacts.allowed_types[
type](value)
result.add_result(**row)
yield result
class LiveModeSourceMixin(object):
def is_active(self, session=None):
"""Determine if this source is active."""
# We are only active in Live mode (API or Memory).
return (session.GetParameter("live_mode") != None and
session.profile.metadata("os").capitalize() in
self.supported_os)
class FileSourceType(LiveModeSourceMixin, SourceType):
_field_definitions = [
dict(name="paths", default=[]),
dict(name="separator", default="/", type=basestring,
optional=True),
]
# These fields will be present in the ArtifactResult object we return.
_FIELDS = [
dict(name="st_mode", type="unicode"),
dict(name="st_nlink", type="int"),
dict(name="st_uid", type="unicode"),
dict(name="st_gid", type="unicode"),
dict(name="st_size", type="int"),
dict(name="st_mtime", type="epoch"),
dict(name="filename", type="unicode"),
]
def apply(self, session=None, **kwargs):
result = super(FileSourceType, self).apply(
fields=self._FIELDS, result_type="file_information", **kwargs)
for hits in session.plugins.glob(
self.paths, path_sep=self.separator,
root=self.separator).collect():
# Hits are FileInformation objects, and we just pick some of the
# important fields to report.
info = hits["path"]
row = {}
for field in self._FIELDS:
name = field["name"]
type = RekallEFilterArtifacts.allowed_types[field["type"]]
row[name] = type(getattr(info, name))
result.add_result(**row)
yield result
class ArtifactGroupSourceType(SourceType):
_field_definitions = [
dict(name="names", type=list),
dict(name="supported_os", optional=True,
default=definitions.SUPPORTED_OS),
]
def apply(self, collector=None, **_):
for name in self.names:
for result in collector.collect_artifact(name):
yield result
class WMISourceType(LiveModeSourceMixin, SourceType):
_field_definitions = [
dict(name="query", type=basestring),
dict(name="fields", type=list, optional=True, default=[]),
dict(name="type_name", type=basestring, optional=True),
dict(name="supported_os", optional=True,
default=definitions.SUPPORTED_OS),
]
fields = None
def _guess_returned_fields(self, sample):
result = []
for key, value in sample.items():
field_type = type(value)
if field_type is int:
field_type = "int"
elif field_type is str:
field_type = "unicode"
else:
field_type = "unicode"
result.append(dict(name=key, type=field_type))
return result
def apply(self, session=None, **kwargs):
result = super(WMISourceType, self).apply(
result_type=self.type_name, **kwargs)
wmi = session.plugins.wmi(query=self.query)
# The wmi plugin may not exist on non-windows systems.
if wmi == None:
return
for collected in wmi.collect():
match = collected["Result"]
row = {}
# If the user did not specify the fields, we must
# deduce them from the first returned row.
if not self.fields:
self.fields = self._guess_returned_fields(match)
result.fields = self.fields
for column in self.fields:
name = column["name"]
type = column["type"]
value = match.get(name)
if value is None:
continue
row[name] = RekallEFilterArtifacts.allowed_types[
type](value)
result.add_result(**row)
yield result
class RegistryKeySourceType(LiveModeSourceMixin, SourceType):
_field_definitions = [
dict(name="keys", default=[]),
dict(name="supported_os", optional=True,
default=["Windows"]),
]
_FIELDS = [
dict(name="st_mtime", type="epoch"),
dict(name="hive", type="unicode"),
dict(name="key_name", type="unicode"),
dict(name="value", type="str"),
dict(name="value_type", type="str"),
]
def apply(self, session=None, **kwargs):
result = super(RegistryKeySourceType, self).apply(
fields=self._FIELDS, result_type="registry_key", **kwargs)
for hits in session.plugins.glob(
self.keys, path_sep="\\", filesystem="Reg",
root="\\").collect():
# Hits are FileInformation objects, and we just pick some of the
# important fields to report.
info = hits["path"]
row = {}
for field in self._FIELDS:
name = field["name"]
field_type = RekallEFilterArtifacts.allowed_types[field["type"]]
data = info.get(name)
if data is not None:
row[name] = field_type(data)
result.add_result(**row)
yield result
class RegistryValueSourceType(LiveModeSourceMixin, SourceType):
def CheckKeyValuePairs(self, source):
key_value_pairs = source["key_value_pairs"]
for pair in key_value_pairs:
if (not isinstance(pair, dict) or "key" not in pair or
"value" not in pair):
raise errors.FormatError(
u"key_value_pairs should consist of dicts with key and "
"value items.")
return key_value_pairs
_field_definitions = [
dict(name="key_value_pairs", default=[],
checker=CheckKeyValuePairs),
dict(name="supported_os", optional=True,
default=["Windows"]),
]
_FIELDS = [
dict(name="st_mtime", type="epoch"),
dict(name="hive", type="unicode"),
dict(name="key_name", type="unicode"),
dict(name="value_name", type="unicode"),
dict(name="value_type", type="str"),
dict(name="value", type="str"),
]
def apply(self, session=None, **kwargs):
result = super(RegistryValueSourceType, self).apply(
fields=self._FIELDS, result_type="registry_value", **kwargs)
globs = [u"%s\\%s" % (x["key"], x["value"])
for x in self.key_value_pairs]
for hits in session.plugins.glob(
globs, path_sep="\\", filesystem="Reg",
root="\\").collect():
info = hits["path"]
row = {}
for field in self._FIELDS:
name = field["name"]
field_type = RekallEFilterArtifacts.allowed_types[field["type"]]
data = info.get(name)
if data is not None:
row[name] = field_type(data)
result.add_result(**row)
yield result
# This lookup table maps between source type name and concrete implementations
# that we support. Artifacts which contain sources which are not implemented
# will be ignored.
SOURCE_TYPES = {
TYPE_INDICATOR_REKALL: RekallEFilterArtifacts,
definitions.TYPE_INDICATOR_FILE: FileSourceType,
definitions.TYPE_INDICATOR_ARTIFACT_GROUP: ArtifactGroupSourceType,
definitions.TYPE_INDICATOR_WMI_QUERY: WMISourceType,
definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY: RegistryKeySourceType,
definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE: RegistryValueSourceType,
}
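# Illustrative sketch (not from the upstream artifact repository): a YAML
# definition whose single source uses the Rekall-specific REKALL_EFILTER type
# registered above. The artifact name, query and field list are invented for
# illustration; the attribute names follow RekallEFilterArtifacts._field_definitions.
#
#   name: ExampleProcessList
#   doc: Example process listing collected through an EFILTER query.
#   supported_os: [Windows]
#   sources:
#     - type: REKALL_EFILTER
#       attributes:
#         query: select proc.name, proc.pid from pslist()
#         type_name: example_process
#         fields:
#           - {name: name, type: unicode}
#           - {name: pid, type: int}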
class ArtifactDefinition(_FieldDefinitionValidator):
"""The main artifact class."""
def CheckLabels(self, art_definition):
"""Ensure labels are defined."""
labels = art_definition.get("labels", [])
# Keep unknown labels around in case callers want to check for complete
# label coverage. In most cases it is desirable to allow users to extend
# labels but when super strict validation is required we want to make
        # sure that users don't typo a label.
self.undefined_labels = set(labels).difference(definitions.LABELS)
return labels
def BuildSources(self, art_definition):
sources = art_definition["sources"]
result = []
self.unsupported_source_types = []
for source in sources:
if not isinstance(source, dict):
raise errors.FormatError("Source is not a dict.")
source_type_name = source.get("type")
if source_type_name is None:
raise errors.FormatError("Source has no type.")
source_cls = self.source_types.get(source_type_name)
if source_cls:
result.append(source_cls(source, artifact=self))
else:
self.unsupported_source_types.append(source_type_name)
if not result:
if self.unsupported_source_types:
raise errors.FormatError(
"No supported sources: %s" % (
self.unsupported_source_types,))
raise errors.FormatError("No available sources.")
return result
def SupportedOS(self, art_definition):
supported_os = art_definition.get(
"supported_os", definitions.SUPPORTED_OS)
undefined_supported_os = set(supported_os).difference(
definitions.SUPPORTED_OS)
if undefined_supported_os:
raise errors.FormatError(
u'supported operating system: {} '
u'not defined.'.format(
u', '.join(undefined_supported_os)))
return supported_os
_field_definitions = [
dict(name="name", type=basestring),
dict(name="doc", type=basestring),
dict(name="labels", default=[],
checker=CheckLabels, optional=True),
dict(name="sources", default=[],
checker=BuildSources),
dict(name="supported_os",
checker=SupportedOS, optional=True),
dict(name="conditions", default=[], optional=True),
dict(name="returned_types", default=[], optional=True),
dict(name="provides", type=list, optional=True),
dict(name="urls", type=list, optional=True)
]
name = "unknown"
source_types = SOURCE_TYPES
def __init__(self, data, source_types=None):
self.source_types = source_types or SOURCE_TYPES
self.data = data
try:
self._LoadDefinition(data)
except Exception as e:
exc_info = sys.exc_info()
raise errors.FormatError(
"Definition %s: %s" % (self.name, e))
def set_implementations(self, source_types):
return self.__class__(self.data, source_types)
def _LoadDefinition(self, data):
if not isinstance(data, dict):
raise errors.FormatError(
"Artifact definition must be a dict.")
different_keys = set(data) - definitions.TOP_LEVEL_KEYS
if different_keys:
raise errors.FormatError(u'Undefined keys: {}'.format(
different_keys))
self._LoadFieldDefinitions(data, self._field_definitions)
class ArtifactDefinitionProfileSectionLoader(obj.ProfileSectionLoader):
"""Loads artifacts from the artifact profiles."""
name = "$ARTIFACTS"
def LoadIntoProfile(self, session, profile, art_definitions):
for definition in art_definitions:
try:
profile.AddDefinition(definition)
except errors.FormatError as e:
session.logging.debug(
"Skipping Artifact %s: %s", definition.get("name"), e)
return profile
class ArtifactProfile(obj.Profile):
"""A profile containing artifact definitions."""
# This will contain the definitions.
def __init__(self, *args, **kwargs):
super(ArtifactProfile, self).__init__(*args, **kwargs)
self.definitions = []
self.definitions_by_name = {}
def AddDefinition(self, definition):
"""Add a new definition from a dict."""
self.definitions.append(definition)
self.definitions_by_name[definition["name"]] = definition
def GetDefinitionByName(self, name, source_types=None):
if source_types is None:
source_types = SOURCE_TYPES
definition = self.definitions_by_name[name]
return ArtifactDefinition(definition, source_types)
def GetDefinitions(self, source_types=None):
if source_types is None:
source_types = SOURCE_TYPES
for definition in self.definitions:
try:
yield ArtifactDefinition(definition, source_types)
except errors.FormatError:
pass
class ArtifactsCollector(plugin.TypedProfileCommand,
plugin.Command):
"""Collects artifacts."""
name = "artifact_collector"
__args = [
dict(name="artifacts", positional=True, required=True,
type="ArrayStringParser",
help="A list of artifact names to collect."),
dict(name="artifact_files", type="ArrayStringParser",
help="A list of additional yaml files to load which contain "
"artifact definitions."),
dict(name="definitions", type="ArrayStringParser",
help="An inline artifact definition in yaml format."),
dict(name="create_timeline", type="Bool", default=False,
help="Also generate a timeline file."),
dict(name="copy_files", type="Bool", default=False,
help="Copy files into the output."),
dict(name="writer", type="Choices",
choices=lambda: (
x.name for x in list(BaseArtifactResultWriter.classes.values())),
help="Writer for artifact results."),
dict(name="output_path",
help="Path suitable for dumping files."),
]
table_header = [
dict(name="divider", type="Divider"),
dict(name="result"),
]
table_options = dict(
suppress_headers=True
)
def column_types(self):
return dict(path=common.FileInformation(filename="/etc"))
def __init__(self, *args, **kwargs):
super(ArtifactsCollector, self).__init__(*args, **kwargs)
self.artifact_profile = self.session.LoadProfile("artifacts")
extra_definitions = [
open(x).read() for x in self.plugin_args.artifact_files]
extra_definitions.extend(self.plugin_args.definitions or [])
# Make a copy of the artifact registry.
if extra_definitions:
self.artifact_profile = self.artifact_profile.copy()
for definition in extra_definitions:
for definition_data in yaml.safe_load_all(definition):
self.artifact_profile.AddDefinition(definition_data)
self.seen = set()
self.supported_os = self.get_supported_os(self.session)
if self.supported_os is None:
raise plugin.PluginError(
"Unable to determine running environment.")
# Make sure the args make sense.
if self.plugin_args.output_path is None:
if self.plugin_args.copy_files:
raise plugin.PluginError(
"Can only copy files when an output file is specified.")
if self.plugin_args.create_timeline:
raise plugin.PluginError(
"Can only create timelines when an output file "
"is specified.")
@classmethod
def get_supported_os(cls, session):
# Determine which context we are running in. If we are running in live
# mode, we use the platform to determine the supported OS, otherwise we
# determine it from the profile.
if session.GetParameter("live"):
return platform.system()
elif session.profile.metadata("os") == "linux":
return "Linux"
elif session.profile.metadata("os") == "windows":
return "Windows"
elif session.profile.metadata("os") == "darwin":
return "Darwin"
def _evaluate_conditions(self, conditions):
# TODO: Implement an expression parser for these. For now we just return
# True always.
return True
def collect_artifact(self, artifact_name):
if artifact_name in self.seen:
return
self.seen.add(artifact_name)
try:
definition = self.artifact_profile.GetDefinitionByName(
artifact_name)
except KeyError:
self.session.logging.error("Unknown artifact %s" % artifact_name)
return
# This artifact is not for us.
if self.supported_os not in definition.supported_os:
self.session.logging.debug(
"Skipping artifact %s: Supported OS: %s, but we are %s",
definition.name, definition.supported_os,
self.supported_os)
return
if not self._evaluate_conditions(definition.conditions):
return
yield dict(divider="Artifact: %s" % definition.name)
for source in definition.sources:
# This source is not for us.
if not source.is_active(session=self.session):
continue
for result in source.apply(
artifact_name=definition.name,
session=self.session,
collector=self):
if isinstance(result, dict):
yield result
else:
yield dict(result=result)
def collect(self):
# Figure out a sensible default for the output writer.
if (self.plugin_args.output_path is not None and
self.plugin_args.writer is None):
if os.path.isdir(self.plugin_args.output_path):
self.plugin_args.writer = "Directory"
else:
self.plugin_args.writer = "Zip"
if self.plugin_args.writer:
impl = BaseArtifactResultWriter.ImplementationByName(
self.plugin_args.writer)
with impl(session=self.session,
copy_files=self.plugin_args.copy_files,
create_timeline=self.plugin_args.create_timeline,
output=self.plugin_args.output_path) as writer:
for x in self._collect(writer=writer):
yield x
else:
for x in self._collect():
yield x
def _collect(self, writer=None):
for artifact_name in self.plugin_args.artifacts:
for hit in self.collect_artifact(artifact_name):
if "result" in hit and writer:
writer.write_result(hit["result"])
yield hit
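# Illustrative sketch (not part of the original plugin): collecting artifacts
# from an interactive Rekall session. The artifact name and output path are
# assumptions; the keyword arguments mirror the __args declared above.
#
#   plugin = session.plugins.artifact_collector(
#       artifacts=["ExampleProcessList"],
#       output_path="/tmp/artifacts.zip",
#       create_timeline=True)
#   for hit in plugin.collect():
#       print(hit)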
class ArtifactsView(plugin.TypedProfileCommand,
plugin.Command):
name = "artifact_view"
__args = [
dict(name="artifacts", type="ArrayStringParser", positional=True,
help="A list of artifacts to display")
]
table_header = [
dict(name="divider", type="Divider"),
dict(name="Message")
]
def collect(self):
artifact_profile = self.session.LoadProfile("artifacts")
for artifact in self.plugin_args.artifacts:
definition = artifact_profile.definitions_by_name.get(artifact)
if definition:
yield dict(divider=artifact)
yield dict(Message=yaml_utils.safe_dump(definition))
class ArtifactsList(plugin.TypedProfileCommand,
plugin.Command):
"""List details about all known artifacts."""
name = "artifact_list"
__args = [<|fim▁hole|> dict(name="supported_os", type="ArrayStringParser", required=False,
help="If specified show for these OSs, otherwise autodetect "
"based on the current image."),
dict(name="labels", type="ArrayStringParser",
help="Filter by these labels."),
dict(name="all", type="Bool",
help="Show all artifacts."),
]
table_header = [
dict(name="Name", width=30),
dict(name="OS", width=8),
dict(name="Labels", width=20),
dict(name="Types", width=20),
dict(name="Description", width=50),
]
def collect(self):
# Empty means autodetect based on the image.
if not self.plugin_args.supported_os:
supported_os = set([
ArtifactsCollector.get_supported_os(self.session)])
else:
supported_os = set(self.plugin_args.supported_os)
for definition in self.session.LoadProfile(
"artifacts").GetDefinitions():
if (not self.plugin_args.all and
not supported_os.intersection(definition.supported_os)):
continue
# Determine the type:
types = set()
for source in definition.sources:
if self.plugin_args.all or source.is_active(
session=self.session):
types.add(source.type_indicator)
if self.plugin_args.regex.match(definition.name):
yield (definition.name, definition.supported_os,
definition.labels, sorted(types), definition.doc)
class ArtifactResult_TextObjectRenderer(text.TextObjectRenderer):
renders_type = "ArtifactResult"
def render_row(self, target, **_):
column_names = [x["name"] for x in target.fields]
table = text.TextTable(
columns=target.fields,
renderer=self.renderer,
session=self.session)
if not target.results:
return text.Cell("")
result = [
text.JoinedCell(*[text.Cell(x) for x in column_names]),
text.JoinedCell(*[text.Cell("-" * len(x)) for x in column_names])]
for row in target.results:
ordered_row = []
for column in column_names:
ordered_row.append(row.get(column))
result.append(table.get_row(*ordered_row))
result = text.StackedCell(*result)
return result
class ArtifactResult_DataExportObjectRenderer(
json_renderer.StateBasedObjectRenderer):
renders_type = "ArtifactResult"
renderers = ["DataExportRenderer"]
def GetState(self, item, **_):
return dict(artifact_name=item.artifact_name,
result_type=item.result_type,
fields=item.fields,
results=item.results)<|fim▁end|> | dict(name="regex", type="RegEx",
default=".",
help="Filter the artifact name."), |
<|file_name|>test_template_tags.py<|end_file_name|><|fim▁begin|>"""
Almost all test cases covers both tag calling and template using.
"""
from __future__ import print_function, unicode_literals
from django.conf import settings as django_settings
from django.contrib.contenttypes.models import ContentType
from django.http import HttpRequest
from django.utils.six import assertCountEqual
from wiki.conf import settings
from wiki.forms import CreateRootForm
from wiki.models import Article, ArticleForObject, ArticleRevision
from wiki.templatetags.wiki_tags import (article_for_object, login_url,
wiki_form, wiki_render)
from ..base import TemplateTestCase
if not django_settings.configured:
django_settings.configure()
# XXX article_for_object accepts context, but does not use it
class ArticleForObjectTemplatetagTest(TemplateTestCase):
template = """
{% load wiki_tags %}
{% article_for_object obj as anything %}
{{ anything }}
"""
def setUp(self):
super(ArticleForObjectTemplatetagTest, self).setUp()
from wiki.templatetags import wiki_tags
wiki_tags._cache = {}
def test_obj_arg_is_not_a_django_model(self):
from wiki.templatetags import wiki_tags
with self.assertRaises(TypeError):
article_for_object({}, '')
with self.assertRaises(TypeError):
article_for_object({'request': 100500}, {})
with self.assertRaises(TypeError):
self.render({'obj': 'tiger!'})
self.assertEqual(len(wiki_tags._cache), 0)
def test_obj_is_not_in__cache_and_articleforobject_is_not_exist(self):
from wiki.templatetags.wiki_tags import _cache as cache
obj = Article.objects.create()
article_for_object({}, obj)
self.assertIn(obj, cache)
self.assertIsNone(cache[obj])
self.assertEqual(len(cache), 1)
self.render({'obj': obj})
self.assertIn(obj, cache)
self.assertIsNone(cache[obj])
self.assertEqual(len(cache), 1)
    def test_obj_is_not_in__cache_and_articleforobject_is_exist(self):
from wiki.templatetags.wiki_tags import _cache as cache
a = Article.objects.create()
content_type = ContentType.objects.get_for_model(a)
ArticleForObject.objects.create(
article=a,
content_type=content_type,
object_id=1
)
output = article_for_object({}, a)
self.assertEqual(output, a)
self.assertIn(a, cache)
self.assertEqual(cache[a], a)
self.assertEqual(len(cache), 1)
self.render({'obj': a})
self.assertIn(a, cache)
self.assertEqual(cache[a], a)
self.assertEqual(len(cache), 1)
def test_obj_in__cache_and_articleforobject_is_not_exist(self):
model = Article.objects.create()
from wiki.templatetags import wiki_tags
wiki_tags._cache = {model: 'spam'}
article_for_object({}, model)
self.assertIn(model, wiki_tags._cache)
self.assertIsNone(wiki_tags._cache[model])
self.assertEqual(len(wiki_tags._cache), 1)
self.render({'obj': model})
self.assertIn(model, wiki_tags._cache)
self.assertIsNone(wiki_tags._cache[model])
self.assertEqual(len(wiki_tags._cache), 1)
self.assertNotIn('spam', wiki_tags._cache.values())
    def test_obj_in__cache_and_articleforobject_is_exist(self):
article = Article.objects.create()
content_type = ContentType.objects.get_for_model(article)
ArticleForObject.objects.create(
article=article,
content_type=content_type,
object_id=1
)
from wiki.templatetags import wiki_tags
wiki_tags._cache = {article: 'spam'}
output = article_for_object({}, article)
self.assertEqual(output, article)
self.assertIn(article, wiki_tags._cache)
self.assertEqual(wiki_tags._cache[article], article)
output = self.render({'obj': article})
self.assertIn(article, wiki_tags._cache)
self.assertEqual(wiki_tags._cache[article], article)
expected = 'Article without content (1)'
self.assertIn(expected, output)
# TODO manage plugins in template
class WikiRenderTest(TemplateTestCase):
template = """
{% load wiki_tags %}
{% wiki_render article pc %}
"""
def tearDown(self):
from wiki.core.plugins import registry
registry._cache = {}
super(WikiRenderTest, self).tearDown()
keys = ['article',
'content',
'preview',
'plugins',
'STATIC_URL',
'CACHE_TIMEOUT'
]
def test_if_preview_content_is_none(self):
# monkey patch
from wiki.core.plugins import registry
registry._cache = {'ham': 'spam'}
article = Article.objects.create()
output = wiki_render({}, article)
assertCountEqual(self, self.keys, output)
self.assertEqual(output['article'], article)
self.assertIsNone(output['content'])
self.assertIs(output['preview'], False)
self.assertEqual(output['plugins'], {'ham': 'spam'})
self.assertEqual(output['STATIC_URL'], django_settings.STATIC_URL)
self.assertEqual(output['CACHE_TIMEOUT'], settings.CACHE_TIMEOUT)
# Additional check
self.render({'article': article, 'pc': None})
def test_called_with_preview_content_and_article_have_current_revision(self):
article = Article.objects.create()
ArticleRevision.objects.create(
article=article,
title="Test title",
content="Some beauty test text"
)
content = (
"""This is a normal paragraph\n"""
"""\n"""
"""Headline\n"""
"""========\n"""
)
expected_markdown = (
"""<p>This is a normal paragraph</p>\n"""
"""<h1 id="wiki-toc-headline">Headline</h1>"""
)
# monkey patch
from wiki.core.plugins import registry
registry._cache = {'spam': 'eggs'}
output = wiki_render({}, article, preview_content=content)
assertCountEqual(self, self.keys, output)
self.assertEqual(output['article'], article)
self.assertMultiLineEqual(output['content'], expected_markdown)
self.assertIs(output['preview'], True)
self.assertEqual(output['plugins'], {'spam': 'eggs'})
self.assertEqual(output['STATIC_URL'], django_settings.STATIC_URL)
self.assertEqual(output['CACHE_TIMEOUT'], settings.CACHE_TIMEOUT)
output = self.render({'article': article, 'pc': content})
self.assertIn(expected_markdown, output)
def test_called_with_preview_content_and_article_dont_have_current_revision(
self):
article = Article.objects.create()
content = (
"""This is a normal paragraph\n"""
"""\n"""
"""Headline\n"""
"""========\n"""
)
# monkey patch
from wiki.core.plugins import registry
registry._cache = {'spam': 'eggs'}
output = wiki_render({}, article, preview_content=content)
assertCountEqual(self, self.keys, output)
self.assertEqual(output['article'], article)
self.assertMultiLineEqual(output['content'], '')
self.assertIs(output['preview'], True)
self.assertEqual(output['plugins'], {'spam': 'eggs'})
self.assertEqual(output['STATIC_URL'], django_settings.STATIC_URL)
self.assertEqual(output['CACHE_TIMEOUT'], settings.CACHE_TIMEOUT)
self.render({'article': article, 'pc': content})
class WikiFormTest(TemplateTestCase):
template = """
{% load wiki_tags %}
{% wiki_form form_obj %}
"""
def test_form_obj_is_not_baseform_instance(self):
context = {'test_key': 'test_value'}
form_obj = 'ham'
with self.assertRaises(TypeError):<|fim▁hole|> with self.assertRaises(TypeError):
self.render({'test_key': 100500})
self.assertEqual(context, {'test_key': 'test_value'})
def test_form_obj_is_baseform_instance(self):
context = {'test_key': 'test_value'}
# not by any special reasons, just a form
form_obj = CreateRootForm()
wiki_form(context, form_obj)
self.assertEqual(context, {'test_key': 'test_value', 'form': form_obj})
self.render({'form_obj': form_obj})
self.assertEqual(context, {'test_key': 'test_value', 'form': form_obj})
class LoginUrlTest(TemplateTestCase):
template = """
{% load wiki_tags %}
{% login_url as some_url %}
{{ some_url }}
"""
def test_no_request_in_context(self):
with self.assertRaises(KeyError):
login_url({})
with self.assertRaises(KeyError):
self.render({})
def test_login_url_if_no_query_string_in_request(self):
r = HttpRequest()
r.META = {}
r.path = 'best/test/page/ever/'
output = login_url({'request': r})
expected = '/_accounts/login/?next=best/test/page/ever/'
self.assertEqual(output, expected)
output = self.render({'request': r})
self.assertIn(expected, output)
def test_login_url_if_query_string_is_empty(self):
r = HttpRequest()
r.META = {'QUERY_STRING': ''}
r.path = 'best/test/page/ever/'
output = login_url({'request': r})
expected = '/_accounts/login/?next=best/test/page/ever/'
self.assertEqual(output, expected)
output = self.render({'request': r})
self.assertIn(expected, output)
def test_login_url_if_query_string_is_not_empty(self):
r = HttpRequest()
r.META = {'QUERY_STRING': 'title=Main_page&action=raw'}
r.path = 'best/test/page/ever/'
context = {'request': r}
output = login_url(context)
expected = (
'/_accounts/login/'
'?next=best/test/page/ever/%3Ftitle%3DMain_page%26action%3Draw'
)
self.assertEqual(output, expected)
output = self.render({'request': r})
self.assertIn(expected, output)<|fim▁end|> | wiki_form(context, form_obj)
self.assertEqual(context, {'test_key': 'test_value'})
|
<|file_name|>voxel_tree.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(test, feature(test))]
use cgmath::Ray3;
use std::mem;
use std::ops::{Deref, DerefMut};
use raycast;
use voxel;
use voxel::Voxel;
#[derive(Debug)]
pub struct VoxelTree {
/// The log_2 of the tree's size.
lg_size: u8,
/// Force the top level to always be branches;
/// it saves a branch in the grow logic.
contents: Branches,
}
#[derive(Debug, PartialEq, Eq)]
#[repr(C)]
pub struct Branches {
// xyz ordering
// This isn't an array because we can't move out of an array.
lll: TreeBody,
llh: TreeBody,
lhl: TreeBody,
lhh: TreeBody,
hll: TreeBody,
hlh: TreeBody,
hhl: TreeBody,
hhh: TreeBody,
}
/// The main, recursive, tree-y part of the `VoxelTree`.
#[derive(Debug, PartialEq, Eq)]
pub enum TreeBody {
Empty,
Leaf(Voxel),
Branch(Box<Branches>),
}
impl Branches {
pub fn empty() -> Branches {
Branches {
lll: TreeBody::Empty,
llh: TreeBody::Empty,
lhl: TreeBody::Empty,
lhh: TreeBody::Empty,
hll: TreeBody::Empty,
hlh: TreeBody::Empty,
hhl: TreeBody::Empty,
hhh: TreeBody::Empty,
}
}
pub fn get<'a>(&'a self, x: usize, y: usize, z: usize) -> &'a TreeBody {
let this: &'a [[[TreeBody; 2]; 2]; 2] = unsafe {
mem::transmute(self)
};
&this[x][y][z]
}
pub fn get_mut<'a>(&'a mut self, x: usize, y: usize, z: usize) -> &'a mut TreeBody {
let this: &'a mut [[[TreeBody; 2]; 2]; 2] = unsafe {
mem::transmute(self)
};
&mut this[x][y][z]
}
}
impl VoxelTree {
pub fn new() -> VoxelTree {
VoxelTree {
lg_size: 0,
contents: Branches::empty(),
}
}
/// Is this voxel (non-strictly) within an origin-centered voxel with
/// size `2^lg_size`?
pub fn contains_bounds(&self, voxel: &voxel::Bounds) -> bool {
let high;
if voxel.lg_size >= 0 {
high = (1 << self.lg_size) >> voxel.lg_size;
} else {
high = (1 << self.lg_size) << (-voxel.lg_size);
}
voxel.x < high &&
voxel.y < high &&
voxel.z < high &&
{
let low = -high;
voxel.x >= low &&
voxel.y >= low &&
voxel.z >= low &&
true
}
}
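  // Worked example (illustrative, not in the original source): with
  // `self.lg_size = 2` the tree spans [-4, 4) on each axis. For a query voxel
  // with `lg_size = 1`, `high = (1 << 2) >> 1 = 2`, so coordinates in -2..=1
  // are accepted -- exactly the size-2 voxels that fit inside [-4, 4).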
/// Ensure that this tree can hold the provided voxel.
pub fn grow_to_hold(&mut self, voxel: &voxel::Bounds) {
while !self.contains_bounds(voxel) {
// Double the bounds in every direction.
self.lg_size += 1;
// Pull out `self.contents` so we can move out of it.
let contents = mem::replace(&mut self.contents, Branches::empty());
// We re-construct the tree with bounds twice the size (but still centered
// around the origin) by deconstructing the top level of branches,
// creating a new doubly-sized top level, and moving the old branches back
// in as the new top level's children. e.g. in 2D:
//
// ---------------------------
// | | |0| | |
// | | |0| | |
// --------------- ------------|0|------------
// | 1 |0| 2 | | | 1 |0| 2 | |
// | |0| | | | |0| | |
// |------0------| |------------0------------|
// 000000000000000 ==> |0000000000000000000000000|
// |------0------| |------------0------------|
// | |0| | | | |0| | |
// | 3 |0| 4 | | | 3 |0| 4 | |
// --------------- |------------0------------|
// | | |0| | |
// | | |0| | |
// ---------------------------
macro_rules! at(
($c_idx:ident, $b_idx:ident) => {{
let mut branches = Branches::empty();
branches.$b_idx = contents.$c_idx;
TreeBody::Branch(Box::new(branches))
}}
);
self.contents =
Branches {
lll: at!(lll, hhh),
llh: at!(llh, hhl),
lhl: at!(lhl, hlh),
lhh: at!(lhh, hll),
hll: at!(hll, lhh),
hlh: at!(hlh, lhl),
hhl: at!(hhl, llh),
hhh: at!(hhh, lll),
};
}
}
fn find_mask(&self, voxel: &voxel::Bounds) -> i32 {
// When we compare the voxel position to octree bounds to choose subtrees
// for insertion, we'll be comparing voxel position to values of 2^n and
// -2^n, so we can just use the position bits to branch directly.
// This actually works for negative values too, without much wrestling:
// we need to branch on the sign bit up front, but after that, two's
// complement magic means the branching on bits works regardless of sign.
let mut mask = (1 << self.lg_size) >> 1;
// Shift everything by the voxel's lg_size, so we can compare the mask to 0
// to know whether we're done.
if voxel.lg_size >= 0 {
mask = mask >> voxel.lg_size;
} else {
// TODO: Check for overflow.
mask = mask << -voxel.lg_size;
}
mask
}
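  // Worked example (illustrative, not in the original source): for a tree with
  // `lg_size = 3` and a unit voxel (`lg_size = 0`), the initial mask is
  // `(1 << 3) >> 1 = 4`. After the sign-bit branch, `find`/`find_mut` test the
  // coordinate bits against masks 4, 2, 1 and stop once the mask shifts to 0.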
fn find_mut<'a, Step, E>(
&'a mut self,
voxel: &voxel::Bounds,
mut step: Step,
) -> Result<&'a mut TreeBody, E> where
Step: FnMut(&'a mut TreeBody) -> Result<&'a mut Branches, E>,
{
let mut mask = self.find_mask(voxel);
let mut branches = &mut self.contents;
macro_rules! iter(
($select:expr, $step:block) => {{
let branches_temp = branches;
let x = $select(voxel.x);
let y = $select(voxel.y);
let z = $select(voxel.z);
let branch = branches_temp.get_mut(x, y, z);
$step;
// We've reached the voxel.
if mask == 0 {
return Ok(branch)
}
branches = try!(step(branch));
}}
);
iter!(|x| (x >= 0) as usize, {});
loop {
iter!(
|x| ((x & mask) != 0) as usize,
// Branch through half this size next time.
{ mask = mask >> 1; }
);
}
}
fn find<'a, Step, E>(
&'a self,
voxel: &voxel::Bounds,
mut step: Step,
) -> Result<&'a TreeBody, E> where
Step: FnMut(&'a TreeBody) -> Result<&'a Branches, E>,
{
let mut mask = self.find_mask(voxel);
let mut branches = &self.contents;
macro_rules! iter(
($select:expr, $step:block) => {{
let branches_temp = branches;
let x = $select(voxel.x);
let y = $select(voxel.y);
let z = $select(voxel.z);
let branch = branches_temp.get(x, y, z);
$step;
// We've reached the voxel.
if mask == 0 {
return Ok(branch)
}
branches = try!(step(branch));
}}
);
iter!(|x| (x >= 0) as usize, {});
loop {
iter!(
|x| { ((x & mask) != 0) as usize },
// Branch through half this size next time.
{ mask = mask >> 1; }
);
}
}
/// Find a voxel inside this tree.
/// If it doesn't exist, it will be created as empty.
pub fn get_mut_or_create<'a>(&'a mut self, voxel: &voxel::Bounds) -> &'a mut TreeBody {
self.grow_to_hold(voxel);
let branch: Result<_, ()> =
self.find_mut(voxel, |branch| { Ok(VoxelTree::get_mut_or_create_step(branch)) });
branch.unwrap()
}
fn get_mut_or_create_step<'a>(
branch: &'a mut TreeBody,
) -> &'a mut Branches {
// "Step down" the tree.
match *branch {
// Branches; we can go straight to the branching logic.
TreeBody::Branch(ref mut b) => b,
// Otherwise, keep going, but we need to insert a voxel inside the
// space occupied by the current branch.
TreeBody::Empty => {
// Replace this branch with 8 empty sub-branches - who's gonna notice?
*branch = TreeBody::Branch(Box::new(Branches::empty()));
match *branch {
TreeBody::Branch(ref mut b) => b,
_ => unreachable!(),
}
},
TreeBody::Leaf(_) => {
// Erase this leaf and replace it with 8 empty sub-branches.
// This behavior is pretty debatable, but we need to do something,
// and it's easier to debug accidentally replacing a big chunk
// with a smaller one than to debug a nop.
*branch = TreeBody::Branch(Box::new(Branches::empty()));
match *branch {
TreeBody::Branch(ref mut b) => b,
_ => unreachable!(),
}
},
}
}
/// Find a voxel inside this tree.
pub fn get<'a>(&'a self, voxel: &voxel::Bounds) -> Option<&'a Voxel> {
if !self.contains_bounds(voxel) {
return None
}
let get_step = |branch| {<|fim▁hole|> match branch {
&TreeBody::Branch(ref branches) => Ok(branches.deref()),
_ => Err(()),
}
};
match self.find(voxel, get_step) {
Ok(&TreeBody::Leaf(ref t)) => Some(t),
_ => None,
}
}
/// Find a voxel inside this tree.
pub fn get_mut<'a>(&'a mut self, voxel: &voxel::Bounds) -> Option<&'a mut Voxel> {
if !self.contains_bounds(voxel) {
return None
}
let get_step = |branch| {
match branch {
&mut TreeBody::Branch(ref mut branches) => Ok(branches.deref_mut()),
_ => Err(()),
}
};
match self.find_mut(voxel, get_step) {
Ok(&mut TreeBody::Leaf(ref mut t)) => Some(t),
_ => None,
}
}
pub fn cast_ray<'a, Act, R>(
&'a self,
ray: &Ray3<f32>,
act: &mut Act,
) -> Option<R>
where
// TODO: Does this *have* to be callback-based?
Act: FnMut(voxel::Bounds, &'a Voxel) -> Option<R>
{
let coords = [
if ray.origin.x >= 0.0 {1} else {0},
if ray.origin.y >= 0.0 {1} else {0},
if ray.origin.z >= 0.0 {1} else {0},
];
// NB: The children are half the size of the tree itself,
// but tree.lg_size=0 means it extends tree.lg_size=0 in *each direction*,
// so the "actual" size of the tree as a voxel would be tree.lg_size+1.
let child_lg_size = self.lg_size as i16;
let mut make_bounds = |coords: [usize; 3]| {
voxel::Bounds {
x: coords[0] as i32 - 1,
y: coords[1] as i32 - 1,
z: coords[2] as i32 - 1,
lg_size: child_lg_size,
}
};
match raycast::cast_ray_branches(
&self.contents,
ray,
None,
coords,
&mut make_bounds,
act,
) {
Ok(r) => Some(r),
Err(_) => None,
}
}
}
#[cfg(test)]
mod tests {
extern crate test;
use voxel;
use super::{VoxelTree, TreeBody};
#[test]
fn insert_and_lookup() {
let mut tree: VoxelTree<i32> = VoxelTree::new();
*tree.get_mut_or_create(voxel::Bounds::new(1, 1, 1, 0)) = TreeBody::Leaf(1);
*tree.get_mut_or_create(voxel::Bounds::new(8, -8, 4, 0)) = TreeBody::Leaf(2);
*tree.get_mut_or_create(voxel::Bounds::new(2, 0, 4, 4)) = TreeBody::Leaf(3);
*tree.get_mut_or_create(voxel::Bounds::new(9, 0, 16, 2)) = TreeBody::Leaf(4);
*tree.get_mut_or_create(voxel::Bounds::new(9, 0, 16, 2)) = TreeBody::Leaf(5);
assert_eq!(tree.get(voxel::Bounds::new(1, 1, 1, 0)), Some(&1));
assert_eq!(tree.get(voxel::Bounds::new(8, -8, 4, 0)), Some(&2));
assert_eq!(tree.get(voxel::Bounds::new(9, 0, 16, 2)), Some(&5));
assert_eq!(tree.get(voxel::Bounds::new(2, 0, 4, 4)), None);
}
#[test]
fn wrong_voxel_size_is_not_found() {
let mut tree: VoxelTree<i32> = VoxelTree::new();
*tree.get_mut_or_create(voxel::Bounds::new(4, 4, -4, 1)) = TreeBody::Leaf(1);
assert_eq!(tree.get(voxel::Bounds::new(4, 4, -4, 0)), None);
assert_eq!(tree.get(voxel::Bounds::new(4, 4, -4, 2)), None);
}
#[test]
fn grow_is_transparent() {
let mut tree: VoxelTree<i32> = VoxelTree::new();
*tree.get_mut_or_create(voxel::Bounds::new(1, 1, 1, 0)) = TreeBody::Leaf(1);
tree.grow_to_hold(voxel::Bounds::new(0, 0, 0, 1));
tree.grow_to_hold(voxel::Bounds::new(0, 0, 0, 2));
tree.grow_to_hold(voxel::Bounds::new(-32, 32, -128, 3));
assert_eq!(tree.get(voxel::Bounds::new(1, 1, 1, 0)), Some(&1));
}
#[test]
fn simple_cast_ray() {
let mut tree: VoxelTree<i32> = VoxelTree::new();
*tree.get_mut_or_create(voxel::Bounds::new(1, 1, 1, 0)) = TreeBody::Leaf(1);
*tree.get_mut_or_create(voxel::Bounds::new(4, 4, 4, 0)) = TreeBody::Leaf(2);
let actual = tree.cast_ray(
[4.5, 3.0, 4.5],
[0.1, 0.8, 0.1],
// Return the first voxel we hit.
&mut |bounds, v| Some((bounds, v)),
);
assert_eq!(actual, Some((voxel::Bounds::new(4, 4, 4, 0), &2)));
}
#[bench]
fn simple_inserts(bencher: &mut test::Bencher) {
let mut tree: VoxelTree<i32> = VoxelTree::new();
tree.grow_to_hold(voxel::Bounds::new(0, 0, 0, 30));
bencher.iter(|| {
*tree.get_mut_or_create(voxel::Bounds::new(0, 0, 0, 0)) = TreeBody::Leaf(0);
});
test::black_box(tree);
}
#[bench]
fn bench_cast_ray(bencher: &mut test::Bencher) {
let mut tree: VoxelTree<i32> = VoxelTree::new();
tree.grow_to_hold(voxel::Bounds::new(0, 0, 0, 30));
*tree.get_mut_or_create(voxel::Bounds::new(1, 1, 1, 0)) = TreeBody::Leaf(1);
*tree.get_mut_or_create(voxel::Bounds::new(4, 4, 4, 0)) = TreeBody::Leaf(2);
bencher.iter(|| {
let r = tree.cast_ray(
[4.5, 3.0, 4.5],
[0.1, 0.8, 0.1],
// Return the first voxel we hit.
&mut |bounds, v| Some((bounds, v)),
);
test::black_box(r);
});
}
}<|fim▁end|> | |
<|file_name|>IndexController.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import com.unidev.platform.web.WebUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Map;
/**
* Frontend controller
*/
@Controller
public class IndexController {
@Autowired
private HttpServletRequest request;
@Autowired
private MyIPService myIPService;
@RequestMapping("/")
public ModelAndView index() {
String ip = myIPService.extractClinetIp(request);
List<Map.Entry<String, Object>> headers = myIPService.extractHeaders(request);
ModelAndView modelAndView = new ModelAndView("index");
modelAndView.addObject("ip", ip);
modelAndView.addObject("headers", headers);
return modelAndView;
}
}<|fim▁end|> | package com.unidev.myip.web;
import com.unidev.myip.MyIPService; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='MapperTools',
packages=['MapperTools'],
version='0.1',
description='A python 2.7 implementation of Mapper algorithm for Topological Data Analysis',<|fim▁hole|> long_description=readme(),
url='http://github.com/alpatania',
author='Alice Patania',
author_email='[email protected]',
license='MIT',
classifiers=['Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7'],
      install_requires=['hdbscan', 'scikit-learn', 'pandas'],
include_package_data=True,
zip_safe=False)<|fim▁end|> | keywords='mapper TDA python', |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># encoding: utf8<|fim▁end|> | |
<|file_name|>MatrixDiagPart.java<|end_file_name|><|fim▁begin|>/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/
// This class has been generated, DO NOT EDIT!
package org.tensorflow.op.linalg;
import java.util.Arrays;
import org.tensorflow.GraphOperation;
import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.Output;
import org.tensorflow.op.RawOp;
import org.tensorflow.op.RawOpInputs;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.op.annotation.OpInputsMetadata;
import org.tensorflow.op.annotation.OpMetadata;
import org.tensorflow.op.annotation.Operator;
import org.tensorflow.proto.framework.DataType;
import org.tensorflow.types.TInt32;
import org.tensorflow.types.family.TType;
/**
* Returns the batched diagonal part of a batched tensor.
* Returns a tensor with the {@code k[0]}-th to {@code k[1]}-th diagonals of the batched
* {@code input}.
* <p>Assume {@code input} has {@code r} dimensions {@code [I, J, ..., L, M, N]}.
* Let {@code max_diag_len} be the maximum length among all diagonals to be extracted,
* {@code max_diag_len = min(M + min(k[1], 0), N + min(-k[0], 0))}
* Let {@code num_diags} be the number of diagonals to extract,
* {@code num_diags = k[1] - k[0] + 1}.
* <p>If {@code num_diags == 1}, the output tensor is of rank {@code r - 1} with shape
* {@code [I, J, ..., L, max_diag_len]} and values:
* <pre>
* diagonal[i, j, ..., l, n]
* = input[i, j, ..., l, n+y, n+x] ; if 0 <= n+y < M and 0 <= n+x < N,
* padding_value ; otherwise.
* </pre>
* <p>where {@code y = max(-k[1], 0)}, {@code x = max(k[1], 0)}.
* <p>Otherwise, the output tensor has rank {@code r} with dimensions
* {@code [I, J, ..., L, num_diags, max_diag_len]} with values:
* <pre>
* diagonal[i, j, ..., l, m, n]
* = input[i, j, ..., l, n+y, n+x] ; if 0 <= n+y < M and 0 <= n+x < N,
* padding_value ; otherwise.
* </pre>
* <p>where {@code d = k[1] - m}, {@code y = max(-d, 0)}, and {@code x = max(d, 0)}.
* <p>The input must be at least a matrix.
* <p>For example:
* <pre>
* input = np.array([[[1, 2, 3, 4], # Input shape: (2, 3, 4)
* [5, 6, 7, 8],
* [9, 8, 7, 6]],
* [[5, 4, 3, 2],
* [1, 2, 3, 4],
* [5, 6, 7, 8]]])
*
* # A main diagonal from each batch.
* tf.matrix_diag_part(input) ==> [[1, 6, 7], # Output shape: (2, 3)
* [5, 2, 7]]
*
* # A superdiagonal from each batch.
* tf.matrix_diag_part(input, k = 1)
* ==> [[2, 7, 6], # Output shape: (2, 3)
* [4, 3, 8]]
*
* # A tridiagonal band from each batch.
* tf.matrix_diag_part(input, k = (-1, 1))
* ==> [[[2, 7, 6], # Output shape: (2, 3, 3)
* [1, 6, 7],
* [5, 8, 0]],
* [[4, 3, 8],
* [5, 2, 7],
* [1, 6, 0]]]
*
* # Padding value = 9
* tf.matrix_diag_part(input, k = (1, 3), padding_value = 9)
* ==> [[[4, 9, 9], # Output shape: (2, 3, 3)
* [3, 8, 9],
* [2, 7, 6]],
* [[2, 9, 9],
* [3, 4, 9],
* [4, 3, 8]]]
* </pre>
*
* @param <T> data type for {@code diagonal} output
*/
@OpMetadata(
opType = MatrixDiagPart.OP_NAME,
inputsClass = MatrixDiagPart.Inputs.class
)
@Operator(
group = "linalg"
)
public final class MatrixDiagPart<T extends TType> extends RawOp implements Operand<T> {
/**
* The name of this op, as known by TensorFlow core engine
*/
public static final String OP_NAME = "MatrixDiagPartV2";
private Output<T> diagonal;
public MatrixDiagPart(Operation operation) {
super(operation, OP_NAME);
int outputIdx = 0;
diagonal = operation.output(outputIdx++);
}
/**
* Factory method to create a class wrapping a new MatrixDiagPartV2 operation.
*
* @param scope current scope
* @param input Rank {@code r} tensor where {@code r >= 2}.
* @param k Diagonal offset(s). Positive value means superdiagonal, 0 refers to the main
* diagonal, and negative value means subdiagonals. {@code k} can be a single integer
* (for a single diagonal) or a pair of integers specifying the low and high ends
* of a matrix band. {@code k[0]} must not be larger than {@code k[1]}.
* @param paddingValue The value to fill the area outside the specified diagonal band with.
* Default is 0.
* @param <T> data type for {@code MatrixDiagPartV2} output and operands
* @return a new instance of MatrixDiagPart
*/
@Endpoint(
describeByClass = true
)
public static <T extends TType> MatrixDiagPart<T> create(Scope scope, Operand<T> input,
Operand<TInt32> k, Operand<T> paddingValue) {
OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "MatrixDiagPart");
opBuilder.addInput(input.asOutput());
opBuilder.addInput(k.asOutput());
opBuilder.addInput(paddingValue.asOutput());
return new MatrixDiagPart<>(opBuilder.build());
}
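  // Illustrative usage sketch (not part of the generated file). The Ops handle and the
  // generated "tf.linalg" endpoint below are assumptions based on the @Operator(group = "linalg")
  // annotation; exact names can differ between TensorFlow Java versions.
  //
  //   Ops tf = Ops.create();
  //   Operand<TFloat32> input = tf.constant(new float[][] {{1f, 2f, 3f}, {4f, 5f, 6f}});
  //   MatrixDiagPart<TFloat32> diag =
  //       MatrixDiagPart.create(tf.scope(), input, tf.constant(0), tf.constant(0f));
  //   // or, via the generated group accessor: tf.linalg.matrixDiagPart(input, k, padding)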
/**
* Gets diagonal.
* The extracted diagonal(s).
* @return diagonal.
*/
public Output<T> diagonal() {
return diagonal;
}
@Override
public Output<T> asOutput() {
return diagonal;
}
@OpInputsMetadata(
outputsClass = MatrixDiagPart.class<|fim▁hole|> * Rank {@code r} tensor where {@code r >= 2}.
*/
public final Operand<T> input;
/**
* Diagonal offset(s). Positive value means superdiagonal, 0 refers to the main
* diagonal, and negative value means subdiagonals. {@code k} can be a single integer
* (for a single diagonal) or a pair of integers specifying the low and high ends
* of a matrix band. {@code k[0]} must not be larger than {@code k[1]}.
*/
public final Operand<TInt32> k;
/**
* The value to fill the area outside the specified diagonal band with.
* Default is 0.
*/
public final Operand<T> paddingValue;
/**
* The T attribute
*/
public final DataType T;
public Inputs(GraphOperation op) {
super(new MatrixDiagPart<>(op), op, Arrays.asList("T"));
int inputIndex = 0;
input = (Operand<T>) op.input(inputIndex++);
k = (Operand<TInt32>) op.input(inputIndex++);
paddingValue = (Operand<T>) op.input(inputIndex++);
T = op.attributes().getAttrType("T");
}
}
}<|fim▁end|> | )
public static class Inputs<T extends TType> extends RawOpInputs<MatrixDiagPart<T>> {
/** |
<|file_name|>skelp.go<|end_file_name|><|fim▁begin|>package cmd
import (
"fmt"
"io"
"github.com/brainicorn/skelp/generator"
"github.com/brainicorn/skelp/skelputil"
"github.com/mgutz/ansi"
"github.com/spf13/cobra"
)
// Flags that are to be added to commands.
var (
quietFlag bool
noColorFlag bool
homedirFlag string
skelpdirFlag string
)
func NewSkelpCommand() *cobra.Command {
skelpCmd := &cobra.Command{
Use: "skelp",
Short: "A commandline tool for generating skeleton projects",
Long: `skelp is a commandline tool for applying templates to a directory.
Skelp can be used to generate full project skeletons and/or apply templates to
an existing project.`,
SilenceErrors: true,
SilenceUsage: true,
PersistentPreRunE: validateRootFlags,
}
skelpCmd.PersistentFlags().BoolVar(&quietFlag, "quiet", false, "run in 'quiet mode'")
skelpCmd.PersistentFlags().BoolVar(&noColorFlag, "no-color", false, "turn off terminal colors")
skelpCmd.PersistentFlags().StringVar(&homedirFlag, "homedir", "", "path to override user's home directory where skelp stores data")
skelpCmd.PersistentFlags().StringVar(&skelpdirFlag, "skelpdir", "", "override name of skelp folder within the user's home directory")
addCommandsToRoot(skelpCmd)
return skelpCmd
}
func validateRootFlags(cmd *cobra.Command, args []string) error {
if noColorFlag {
ansi.DisableColors(true)
}
if !skelputil.IsBlank(homedirFlag) && !skelputil.PathExists(homedirFlag) {
return newUserError(fmt.Sprintf("%s is not a valid path for --homedir flag", homedirFlag))
}
return nil
}
func addCommandsToRoot(cmd *cobra.Command) {
cmd.AddCommand(newApplyCommand())
cmd.AddCommand(newAliasCommand())
cmd.AddCommand(newBashmeCommand())
}
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute(args []string, out io.Writer) int {
var cmd *cobra.Command
var err error
exitcode := 0
skelpCmd := NewSkelpCommand()
skelpCmd.SetArgs(args)
if out != nil {
skelpCmd.SetOutput(out)
}
if cmd, err = skelpCmd.ExecuteC(); err != nil {
exitcode = 1
if isUserError(err) {
cmd.Println(colorError(err.Error()))<|fim▁hole|> } else {
cmd.Println(colorError(err.Error()))
}
}
return exitcode
}
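// Illustrative sketch (not in this file): a minimal main package that feeds os.Args and
// os.Stdout into Execute; the import path for this package is assumed.
//
//	func main() {
//		os.Exit(cmd.Execute(os.Args[1:], os.Stdout))
//	}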
func getBaseOptions() generator.SkelpOptions {
opts := generator.DefaultOptions()
if !skelputil.IsBlank(homedirFlag) {
opts.HomeDirOverride = homedirFlag
}
if !skelputil.IsBlank(skelpdirFlag) {
opts.SkelpDirOverride = skelpdirFlag
}
return opts
}
func colorError(s string) string {
return ansi.Color(s, "red+b")
}<|fim▁end|> | cmd.Println(cmd.UsageString()) |
<|file_name|>libraries.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012-2016 Seafile Ltd.
import logging
from rest_framework.authentication import SessionAuthentication
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from django.template.defaultfilters import filesizeformat
from django.utils.translation import ugettext as _
from seaserv import ccnet_api, seafile_api
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error
from seahub.base.accounts import User
from seahub.signals import repo_deleted
from seahub.views import get_system_default_repo_id
from seahub.admin_log.signals import admin_operation
from seahub.admin_log.models import REPO_CREATE, REPO_DELETE, REPO_TRANSFER
from seahub.share.models import FileShare, UploadLinkShare
from seahub.base.templatetags.seahub_tags import email2nickname, email2contact_email
from seahub.group.utils import is_group_member, group_id_to_name
from seahub.utils.repo import get_related_users_by_repo, normalize_repo_status_code, normalize_repo_status_str
from seahub.utils import is_valid_dirent_name, is_valid_email
from seahub.utils.timeutils import timestamp_to_isoformat_timestr
from seahub.api2.endpoints.group_owned_libraries import get_group_id_by_repo_owner
try:
from seahub.settings import MULTI_TENANCY
except ImportError:
MULTI_TENANCY = False
logger = logging.getLogger(__name__)
def get_repo_info(repo):
repo_owner = seafile_api.get_repo_owner(repo.repo_id)
if not repo_owner:
try:
org_repo_owner = seafile_api.get_org_repo_owner(repo.repo_id)
except Exception:
org_repo_owner = None
owner = repo_owner or org_repo_owner or ''
result = {}
result['id'] = repo.repo_id
result['name'] = repo.repo_name
result['owner'] = owner
result['owner_email'] = owner
result['owner_contact_email'] = email2contact_email(owner)
result['size'] = repo.size
result['size_formatted'] = filesizeformat(repo.size)
result['encrypted'] = repo.encrypted
result['file_count'] = repo.file_count
result['status'] = normalize_repo_status_code(repo.status)
result['last_modified'] = timestamp_to_isoformat_timestr(repo.last_modified)
if '@seafile_group' in owner:
group_id = get_group_id_by_repo_owner(owner)
result['group_name'] = group_id_to_name(group_id)
result['owner_name'] = group_id_to_name(group_id)
else:
result['owner_name'] = email2nickname(owner)
return result
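# Illustrative example (not part of the original module; values are made up): the dict
# returned by get_repo_info() for a personally-owned, unencrypted library looks roughly like
#
#   {'id': 'a1b2c3d4', 'name': 'Docs', 'owner': '[email protected]',
#    'owner_email': '[email protected]', 'owner_contact_email': '[email protected]',
#    'owner_name': 'A User', 'size': 2048, 'size_formatted': '2.0 KB',
#    'encrypted': False, 'file_count': 3, 'status': 'normal',
#    'last_modified': '2016-01-01T00:00:00+00:00'}
#
# Department (group-owned) libraries additionally carry a 'group_name' key.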
class AdminLibraries(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, format=None):
""" List 'all' libraries (by name/owner/page)
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
order_by = request.GET.get('order_by', '').lower().strip()
if order_by and order_by not in ('size', 'file_count'):
error_msg = 'order_by invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# search libraries (by name/owner)
repo_name = request.GET.get('name', '')
owner = request.GET.get('owner', '')
repos = []
if repo_name and owner:
# search by name and owner
orgs = ccnet_api.get_orgs_by_user(owner)
if orgs:
org_id = orgs[0].org_id
owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
else:
owned_repos = seafile_api.get_owned_repo_list(owner)
for repo in owned_repos:
if not repo.name or repo.is_virtual:
continue
if repo_name in repo.name:
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": repo_name, "owner": owner, "repos": repos})
elif repo_name:
# search by name(keyword in name)
repos_all = seafile_api.get_repo_list(-1, -1)
for repo in repos_all:
if not repo.name or repo.is_virtual:
continue
if repo_name in repo.name:
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": repo_name, "owner": '', "repos": repos})
elif owner:
# search by owner
orgs = ccnet_api.get_orgs_by_user(owner)
if orgs:
org_id = orgs[0].org_id
owned_repos = seafile_api.get_org_owned_repo_list(org_id, owner)
else:
owned_repos = seafile_api.get_owned_repo_list(owner)
for repo in owned_repos:
if repo.is_virtual:
continue
repo_info = get_repo_info(repo)
repos.append(repo_info)
return Response({"name": '', "owner": owner, "repos": repos})
# get libraries by page
try:
current_page = int(request.GET.get('page', '1'))
per_page = int(request.GET.get('per_page', '100'))
except ValueError:
current_page = 1
per_page = 100
start = (current_page - 1) * per_page
limit = per_page + 1
if order_by:
repos_all = seafile_api.get_repo_list(start, limit, order_by)
else:
repos_all = seafile_api.get_repo_list(start, limit)
if len(repos_all) > per_page:
repos_all = repos_all[:per_page]
has_next_page = True
else:
has_next_page = False
default_repo_id = get_system_default_repo_id()
repos_all = [r for r in repos_all if not r.is_virtual]
repos_all = [r for r in repos_all if r.repo_id != default_repo_id]
return_results = []
for repo in repos_all:
repo_info = get_repo_info(repo)
return_results.append(repo_info)
page_info = {
'has_next_page': has_next_page,
'current_page': current_page
}
return Response({"page_info": page_info, "repos": return_results})
def post(self, request):
""" Admin create library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
repo_name = request.data.get('name', None)
if not repo_name:
error_msg = 'name invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
<|fim▁hole|> username = request.user.username
repo_owner = request.data.get('owner', None)
if repo_owner:
try:
User.objects.get(email=repo_owner)
except User.DoesNotExist:
error_msg = 'User %s not found.' % repo_owner
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
else:
repo_owner = username
try:
repo_id = seafile_api.create_repo(repo_name, '', repo_owner)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo_name,
"owner": repo_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_CREATE, detail=admin_op_detail)
repo = seafile_api.get_repo(repo_id)
repo_info = get_repo_info(repo)
return Response(repo_info)
class AdminLibrary(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, repo_id, format=None):
""" get info of a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
repo_info = get_repo_info(repo)
return Response(repo_info)
def delete(self, request, repo_id, format=None):
""" delete a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
if get_system_default_repo_id() == repo_id:
error_msg = _('System library can not be deleted.')
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
repo = seafile_api.get_repo(repo_id)
if not repo:
# for case of `seafile-data` has been damaged
# no `repo object` will be returned from seafile api
# delete the database record anyway
try:
seafile_api.remove_repo(repo_id)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
return Response({'success': True})
repo_name = repo.name
repo_owner = seafile_api.get_repo_owner(repo_id)
if not repo_owner:
repo_owner = seafile_api.get_org_repo_owner(repo_id)
try:
seafile_api.remove_repo(repo_id)
try:
org_id = seafile_api.get_org_id_by_repo_id(repo_id)
related_usernames = get_related_users_by_repo(repo_id,
org_id if org_id and org_id > 0 else None)
except Exception as e:
logger.error(e)
org_id = -1
related_usernames = []
# send signal for seafevents
repo_deleted.send(sender=None, org_id=-1, operator=request.user.username,
usernames=related_usernames, repo_owner=repo_owner,
repo_id=repo_id, repo_name=repo.name)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo_name,
"owner": repo_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_DELETE, detail=admin_op_detail)
return Response({'success': True})
def put(self, request, repo_id, format=None):
""" update a library status, transfer a library, rename a library
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
# argument check
new_status = request.data.get('status', None)
if new_status:
if new_status not in ('normal', 'read-only'):
error_msg = 'status invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
new_repo_name = request.data.get('name', None)
if new_repo_name:
if not is_valid_dirent_name(new_repo_name):
error_msg = 'name invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
new_owner = request.data.get('owner', None)
if new_owner:
if not is_valid_email(new_owner):
error_msg = 'owner invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# resource check
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if new_status:
try:
seafile_api.set_repo_status(repo_id, normalize_repo_status_str(new_status))
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if new_repo_name:
try:
res = seafile_api.edit_repo(repo_id, new_repo_name, '', None)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if res == -1:
e = 'Admin rename failed: ID of library is %s, edit_repo api call failed.' % \
repo_id
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if new_owner:
try:
new_owner_obj = User.objects.get(email=new_owner)
except User.DoesNotExist:
error_msg = 'User %s not found.' % new_owner
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if not new_owner_obj.permissions.can_add_repo():
error_msg = _('Transfer failed: role of %s is %s, can not add library.') % \
(new_owner, new_owner_obj.role)
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if MULTI_TENANCY:
try:
if seafile_api.get_org_id_by_repo_id(repo_id) > 0:
error_msg = 'Can not transfer organization library.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
if ccnet_api.get_orgs_by_user(new_owner):
error_msg = 'Can not transfer library to organization user %s' % new_owner
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
repo_owner = seafile_api.get_repo_owner(repo_id)
if new_owner == repo_owner:
error_msg = _("Library can not be transferred to owner.")
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
# get repo shared to user/group list
shared_users = seafile_api.list_repo_shared_to(
repo_owner, repo_id)
shared_groups = seafile_api.list_repo_shared_group_by_user(
repo_owner, repo_id)
# get all pub repos
pub_repos = []
if not request.cloud_mode:
pub_repos = seafile_api.list_inner_pub_repos_by_owner(repo_owner)
# transfer repo
seafile_api.set_repo_owner(repo_id, new_owner)
# reshare repo to user
for shared_user in shared_users:
shared_username = shared_user.user
if new_owner == shared_username:
continue
seafile_api.share_repo(repo_id, new_owner,
shared_username, shared_user.perm)
# reshare repo to group
for shared_group in shared_groups:
shared_group_id = shared_group.group_id
if not is_group_member(shared_group_id, new_owner):
continue
seafile_api.set_group_repo(repo_id, shared_group_id,
new_owner, shared_group.perm)
# reshare repo to links
try:
UploadLinkShare.objects.filter(username=repo_owner, repo_id=repo_id).update(username=new_owner)
FileShare.objects.filter(username=repo_owner, repo_id=repo_id).update(username=new_owner)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
# check if current repo is pub-repo
# if YES, reshare current repo to public
for pub_repo in pub_repos:
if repo_id != pub_repo.id:
continue
seafile_api.add_inner_pub_repo(repo_id, pub_repo.permission)
break
# send admin operation log signal
admin_op_detail = {
"id": repo_id,
"name": repo.name,
"from": repo_owner,
"to": new_owner,
}
admin_operation.send(sender=None, admin_name=request.user.username,
operation=REPO_TRANSFER, detail=admin_op_detail)
repo = seafile_api.get_repo(repo_id)
repo_info = get_repo_info(repo)
return Response(repo_info)
class AdminSearchLibrary(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser,)
def get(self, request, format=None):
""" Search library by name.
Permission checking:
1. only admin can perform this action.
"""
if not request.user.admin_permissions.can_manage_library():
return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
query_str = request.GET.get('query', '').lower().strip()
if not query_str:
error_msg = 'query invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
repos = seafile_api.search_repos_by_name(query_str)
default_repo_id = get_system_default_repo_id()
repos = [r for r in repos if not r.is_virtual]
repos = [r for r in repos if r.repo_id != default_repo_id]
email_dict = {}
name_dict = {}
contact_email_dict = {}
for repo in repos:
# get owner email
repo_id = repo.repo_id
repo_owner = seafile_api.get_repo_owner(repo_id)
if not repo_owner:
try:
org_repo_owner = seafile_api.get_org_repo_owner(repo_id)
except Exception:
org_repo_owner = ''
owner_email = repo_owner or org_repo_owner or ''
if repo_id not in email_dict:
email_dict[repo_id] = owner_email
# get owner name
if repo_id not in name_dict:
# is department library
if '@seafile_group' in owner_email:
group_id = get_group_id_by_repo_owner(owner_email)
owner_name = group_id_to_name(group_id)
else:
owner_name = email2nickname(owner_email)
name_dict[repo_id] = owner_name
# get owner contact_email
if repo_id not in contact_email_dict:
if '@seafile_group' in owner_email:
owner_contact_email = ''
else:
owner_contact_email = email2contact_email(owner_email)
contact_email_dict[repo_id] = owner_contact_email
result = []
for repo in repos:
info = {}
info['id'] = repo.repo_id
info['name'] = repo.repo_name
info['owner_email'] = email_dict.get(repo.repo_id, '')
info['owner_name'] = name_dict.get(repo.repo_id, '')
info['owner_contact_email'] = contact_email_dict.get(repo.repo_id, '')
info['size'] = repo.size
info['encrypted'] = repo.encrypted
info['file_count'] = repo.file_count
info['status'] = normalize_repo_status_code(repo.status)
result.append(info)
return Response({"repo_list": result})<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^test_click/$', 'django.views.generic.simple.direct_to_template',
{'template': 'test_app/wm_test_click.html'}, name='wm_test_click')
)<|fim▁end|> | |
<|file_name|>shell.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""<|fim▁hole|>from __future__ import print_function, division
from topology.platforms.shell import PExpectShell, PExpectBashShell
class DockerExecMixin(object):
"""
Docker ``exec`` connection mixin for the Topology shell API.
This class implements a ``_get_connect_command()`` method that allows to
interact with a shell through a ``docker exec`` interactive command, and
extends the constructor to request for container related parameters.
:param str container: Container unique identifier.
:param str command: Command to be executed with the ``docker exec`` that
will launch an interactive session.
"""
def __init__(self, container, command, *args, **kwargs):
self._container = container
self._command = command
super(DockerExecMixin, self).__init__(*args, **kwargs)
def _get_connect_command(self):
return 'docker exec -i -t {} {}'.format(
self._container, self._command
)
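    # For example (illustrative identifiers), DockerBashShell(container='3f2a9c', command='bash')
    # connects with: docker exec -i -t 3f2a9c bash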
class DockerShell(DockerExecMixin, PExpectShell):
"""
Generic ``docker exec`` shell for unspecified interactive session.
"""
class DockerBashShell(DockerExecMixin, PExpectBashShell):
"""
Specialized ``docker exec`` shell that will run and setup a bash
interactive session.
"""
__all__ = ['DockerShell', 'DockerBashShell']<|fim▁end|> | Docker shell helper class module.
"""
from __future__ import unicode_literals, absolute_import |
<|file_name|>public_currencies.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
__author__ = 'ivan.shynkarenka'
import argparse
from TTWebClient.TickTraderWebClient import TickTraderWebClient
def main():
parser = argparse.ArgumentParser(description='TickTrader Web API sample')
parser.add_argument('web_api_address', help='TickTrader Web API address')
args = parser.parse_args()
# Create instance of the TickTrader Web API client
client = TickTraderWebClient(args.web_api_address)<|fim▁hole|> currencies = client.get_public_all_currencies()
for c in currencies:
print('Currency: {0}'.format(c['Name']))
currency = client.get_public_currency(currencies[0]['Name'])
print("{0} currency precision: {1}".format(currency[0]['Name'], currency[0]['Precision']))
if __name__ == '__main__':
main()<|fim▁end|> |
# Public currencies |
<|file_name|>pyircbot.py<|end_file_name|><|fim▁begin|>import socket, threading, thread, os, sys, time
def create(settings):
return IrcBot(settings)
# Revision, yolo
class IrcBot:
_settings = {}
_debug = None
_client = "ArmedGuys IRC Bot"
_version = "0.5"
_env = "Python"
_socket = None
# Channels the bot is in
_channels = []
# Message Loop
_messageThreadRunning = True
_messageThread = None
# Event Queue Loop
_queueThreadRunning = True
_queueThread = None
_queue = None
def __init__(self, settings):
self._settings = settings
self._queue = IrcEventQueue()
if "debug" in settings:
self._debug = DebugLog(settings['nick'])
def connect(self):
if "host" in self._settings and "port" in self._settings:
self._socket = socket.create_connection((self._settings['host'], self._settings['port']))
# Register events
self._queue.RegisterHandler(IrcEvent.PacketRecieved, self.onpacket)
self._queue.RegisterHandler(IrcEvent.MessageRecieved, self.onmessage)
self._queue.RegisterHandler(IrcEvent.PingRecieved, self.onping)
# before sending, create message & queue loops
self.startQueueThread() # start event queue thread
self.startMessageThread() # start message queue thread
# begin connection
if "serverpassword" in self._settings:
self.out("PASS %s\r\n" % self._settings['serverpassword'])
self.out("NICK %s\r\n" % self._settings['nick'])
self.out("USER %s %s bla :%s\r\n" % (self._settings['ident'], self._settings['host'], self._settings['realname']))
def reconnect(self): # reconnect assumes events are all intact, that socket is closed and that queue thread is still running
if self._messageThreadRunning == False and self._queueThreadRunning == True:
self._socket = socket.create_connection((self._settings['host'], self._settings['port']))
# before sending, create message & queue loops
self._messageThreadRunning = True # reset msgthread state
self.startMessageThread() # start message queue thread
# begin connection
if "serverpassword" in self._settings:
self.out("PASS %s\r\n" % self._settings['serverpassword'])
self.out("NICK %s\r\n" % self._settings['nick'])
self.out("USER %s %s bla :%s\r\n" % (self._settings['ident'], self._settings['host'], self._settings['realname']))
def startMessageThread(self):
try:
self._messageThread = threading.Thread(target=self.messageThread)
self._messageThread.start()
except:
print "exception: %s" % str(sys.exec_info())
def startQueueThread(self):
try:
self._queueThread = threading.Thread(target=self.queueThread)
self._queueThread.start()
except:
print "exception: %s" % str(sys.exec_info())
def messageThread(self):
tempbuf = ""
while self._messageThreadRunning == True:
try:
sockbuf = self._socket.recv(4096)
if sockbuf == "": # dead connection
self._messageThreadRunning = False
self._queue.event(IrcEvent.BotLostConnection, None)
self._socket.close()
if "debug" in self._settings:
self._debug.write("BOT LOST CONNECTION", "Unknown reason")
else:
sockbuf = tempbuf + sockbuf
if "\n" in sockbuf: # should always happen
pcks = sockbuf.split("\n") # Splits the buffer into full IRC commands; any command cut off by the buffer size goes into a temp buffer and is used on the next loop
tempbuf = pcks.pop()
for pck in pcks:
pck = pck.rstrip()
if "debug" in self._settings:
self._debug.write("GOT PACKET", pck)
packet = IrcPacket(pck)
self._queue.event(IrcEvent.PacketRecieved, packet)
except:
print "exception: %s\n" % str(sys.exc_info())
self._messageThreadRunning = False
self._socket.close()
self._queue.event(IrcEvent.BotLostConnection, None)
if "debug" in self._settings:
self._debug.write("MESSAGETHREAD EXCEPTION", str(sys.exc_info()))
def queueThread(self):
while self._queueThreadRunning == True:
next = self._queue.next()
self._queue.Handle(next)
time.sleep(0.001)
######################################### EVENT HANDLER HANDLING HANDLE HANDLING HANDLE HAND #############
def RegisterEventHandler(self, type, handler):
self._queue.RegisterHandler(type, handler)
def UnregisterEventHandler(self, type, handler):
self._queue.UnregisterHandler(type, handler)
######################################### EVENT HANDLING #################################################
def onpacket(self, type, data):
if type == IrcEvent.PacketRecieved:
if data.command == "PING":
self._queue.event(IrcEvent.PingRecieved, data.message)
if data.command == "ERROR":
self._queue.event(IrcEvent.IrcError, data)
else: # can't say this is the best implementation, but hey, it woerkz
self._queue.event(IrcEvent.MessageRecieved, data)
def onping(self, type, data):
if type == IrcEvent.PingRecieved:
self.out("PONG :%s\r\n" % data)
def onmessage(self, type, data):
# print "Recieved message of type: %s from %s" % (data.command, data.sender)
if type == IrcEvent.MessageRecieved:
if data.command == "PRIVMSG":
self._queue.event(IrcEvent.PrivmsgRecieved, data)
#print "privmsg reciever: %s" % data.params[0]
if data.params[0][0] != "#":
self._queue.event(IrcEvent.QueryRecieved, data)
else:
self._queue.event(IrcEvent.ChanmsgRecieved, data)
if data.command == "NOTICE":
self._queue.event(IrcEvent.NoticeRecieved, data)
if data.command == "TOPIC":
self._queue.event(IrcEvent.TopicChanged, data)
if data.command == "JOIN":
self._queue.event(IrcEvent.UserJoined, data)
if data.command == "PART":
self._queue.event(IrcEvent.UserLeft, data)
if data.command == "NICK":
self._queue.event(IrcEvent.NickChanged, data)
######################################### BOT CONTROL ####################################################
def exit(self, message):
self.out("QUIT :%s" % message)
self._queueThreadRunning = False
self._messageThreadRunning = False
self._socket.close()
# basic send types
def out(self, data):
if len(data) == 0: return
if "debug" in self._settings:
self._debug.write("SENT PACKET", data.rstrip())
if "\r\n" not in data:
data = data + "\r\n"
if self._socket:
self._socket.send(data)
def msg(self, target, message):
self.out("PRIVMSG %s :%s\r\n" % (target,message))
def notice(self, target, message):
self.out("NOTICE %s :%s\r\n" % (target, message))
# Channel stuff
def join(self, channel):
self._channels.append(channel)<|fim▁hole|> def leave(self, channel):
self.out("PART :%s\r\n" % channel)
try:
self._channels.remove(channel)
except:
pass
# Other stuff
def status(self, status):
if status == "":
self.out("NICK %s\r\n" % self._settings['nick'])
else:
self.out("NICK %s|%s\r\n" % (self._settings['nick'], status))
########################### EVENT QUEUE #########################################
class IrcEvent:
PacketRecieved = 0
MessageRecieved = 1
PingRecieved = 2
NoticeRecieved = 3
PrivmsgRecieved = 4
ChanmsgRecieved = 5
QueryRecieved = 6
TopicChanged = 7
UserJoined = 8
UserLeft = 9
NickChanged = 10
BotLostConnection = 11
IrcError = 12
class IrcEventQueue:
EventHandlers = {}
next = None
_queue = None
def RegisterHandler(self, event, handler):
if event in self.EventHandlers:
self.EventHandlers[event].append(handler)
else:
self.EventHandlers[event] = [handler]
def UnregisterHandler(self, event, handler):
if event in IrcEventQueue.EventHandlers:
try:
self.EventHandlers[event].remove(handler)
except:
pass
def Handle(self, event):
if event[0] in self.EventHandlers:
for e in self.EventHandlers[event[0]]:
e(event[0], event[1])
# Constructor
def __init__(self):
self._queue = self.ThreadsafeQueue()
self.next = self._queue.get
def event(self, type, data): # queue an event
self._queue.enqueue((type, data))
class ThreadsafeQueue:
def __init__(self):
self._eventList = []
self._newEventCondition = threading.Condition()
def enqueue(self, event): # adds an event to the queue
with self._newEventCondition:
self._eventList.append(event)
self._newEventCondition.notify()
def empty(self): # returns True if list is empty
with self._newEventCondition:
return len(self._eventList) == 0
def get(self):
with self._newEventCondition:
while self.empty():
self._newEventCondition.wait()
return self._eventList.pop(0)
########################### BOT COMPONENTS ######################################
class IrcPacket:
sender = ""
command = "" # command, numerical or text
params = None # any numerical reply params
message = "" # after "last" :
def __init__(self, buf):
self.params = []
if buf[0] == ":": # events generally
self.sender = ""
if ":" in buf[1:]:
d = buf[1:].split(":",1)
cm = d[0].strip()
if " " in cm: # must probably always happen, else will "never" happen
cmpar = cm.split(" ")
self.sender = cmpar[0]
self.command = cmpar[1]
self.params = cmpar[2:]
else:
self.command = cm
self.message = d[1]
else:
cm = buf[1:].strip()
if " " in cm: # must probably always happen, else will "never" happen
cmpar = cm.split(" ")
self.sender = cmpar[0]
self.command = cmpar[1]
self.params = cmpar[2:]
else:
self.command = cm
else:
self.sender = None
if ":" in buf:
d = buf.split(":",1)
cm = d[0].strip()
if " " in cm: # must probably always happen, else will "never" happen
cmpar = cm.split(" ")
self.command = cmpar[0]
self.params = cmpar[1:]
else:
self.command = cm
self.message = d[1]
else:
cm = buf.strip()
if " " in cm: # must probably always happen, else will "never" happen
cmpar = cm.split(" ")
self.command = cmpar[0]
self.params = cmpar[1:]
else:
self.command = cm
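# Illustrative example (hypothetical line): how IrcPacket splits a raw IRC message.
#
#   p = IrcPacket(":nick!ident@host PRIVMSG #chan :hello there")
#   p.sender -> "nick!ident@host"
#   p.command -> "PRIVMSG"
#   p.params -> ["#chan"]
#   p.message -> "hello there"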
class IrcUser:
nick = ""
ident = ""
host = ""
def __init__(self, userstring):
if "!" in userstring:
d = userstring.split('!')
self.nick = d[0]
d = d[1].split("@")
self.ident = d[0]
self.host = d[1]
class DebugLog:
f = None
def __init__(self, prefix):
self.f = open("%s_irc.log" % prefix, "w")
def write(self, prefix, data):
self.f.write("[%s] [%s]: %s\r\n" % (time.time(), prefix, data))
self.f.flush()
############# STANDARD BOT ROUTINES ##############
class StandardBotRoutines:
_bot = None
_botSettings = None
# channels to join
_channels = []
# nickserv password to use
_nickservpassword = None
def __init__(self, bot, settings):
self._bot = bot
self._botSettings = settings
self._bot.RegisterEventHandler(IrcEvent.MessageRecieved, self.onMsgRecieved)
# join channel and nickserv auth
def queueJoinChannels(self, channels):
self._channels = channels
def queueNickServAuth(self, password):
self._nickservpassword = password
# automatic reconnect after internet connection issue
def autoReconnect(self):
self._bot.RegisterEventHandler(IrcEvent.BotLostConnection, self.onLostConn)
def onLostConn(self, type, data):
time.sleep(5)
print "reconnecting..."
self._bot.reconnect()
# handles join and nickserv pw
def onMsgRecieved(self, type, data):
if type == IrcEvent.MessageRecieved and data.command == "376": # end MOTD, auth w/ NickServ and join channels
if self._nickservpassword != None:
self._bot.msg("NickServ", "IDENTIFY %s" % self._nickservpassword)
for channel in self._channels:
self._bot.join(channel)
############# TEST CODE ###############
if __name__ == "__main__":
def bot_lost_connection_test(data1, data2):
print str(data2)
def user_joined(data1, data2):
bot.notice("#Pie-Studios", "Travis CI build currently running!")
bot.exit("Tests complete!")
settings = {
'host': "irc.rizon.net",
'port': 6667,
'nick': 'pyircbot',
'ident': 'pyircbot',
'realname': 'TheLeagueSpecialist',
'debug': False,
}
bot = create(settings)
standard = StandardBotRoutines(bot, settings)
standard.queueJoinChannels(["#Pie-Studios"])
standard.autoReconnect()
bot.RegisterEventHandler(IrcEvent.UserJoined, user_joined)
bot.RegisterEventHandler(IrcEvent.BotLostConnection, bot_lost_connection_test)
bot.connect()<|fim▁end|> | self.out("JOIN :%s\r\n" % channel)
|
<|file_name|>data_pipe.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::marker;
use std::mem;
use std::ops;
use std::ptr;
use std::slice;
use std::vec;
use crate::system::ffi;
// This full import is intentional; nearly every type in mojo_types needs to be used.
use crate::system::handle;
use crate::system::handle::{CastHandle, Handle};
use crate::system::mojo_types::*;
#[repr(u32)]
/// Create flags for data pipes
pub enum Create {
None = 0,
}
#[repr(u32)]
/// Write flags for data pipes
pub enum Write {
None = 0,
/// Write all the data to the pipe if possible or none at all
AllOrNone = 1 << 0,
}
<|fim▁hole|>/// Read flags for data pipes
pub enum Read {
None = 0,
/// Read all the data from the pipe if possible, or none at all
AllOrNone = 1 << 0,
/// Dequeue the message received rather than reading it
Discard = 1 << 1,
/// Get information about the queue on the pipe but do not perform the
/// read
Query = 1 << 2,
/// Read data off the pipe's queue but do not dequeue it
Peek = 1 << 3,
}
/// Intermediary structure in a two-phase read.
/// Reads of the requested buffer must be done directly
/// through this data structure which must then be committed.
pub struct ReadDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
buffer: &'b [T],
/// Contains a reference to parent to end commit
/// and prevent it from outliving its parent handle.
parent: &'p Consumer<T>,
}
impl<'b, 'p, T> ReadDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
/// Attempts to commit the read, that is, end the two-phase read
/// started by the parent Consumer<T> object. On a successful
/// commit, consumes self, otherwise returns self to try again.
pub fn commit(self, bytes_read: usize) -> Option<(Self, MojoResult)> {
let result = unsafe { self.parent.end_read(bytes_read) };
if result == MojoResult::Okay { None } else { Some((self, result)) }
}
/// Returns the length of the underlying buffer
pub fn len(&self) -> usize {
self.buffer.len()
}
}
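// Illustrative sketch (assumed call site): a two-phase read driven through ReadDataBuffer.
// `consumer` is a Consumer<u8> as obtained from create_default() below.
//
//     let buf = consumer.begin().unwrap();
//     let n = buf.len();
//     // ... copy out buf[0], buf[1], ..., buf[n - 1] ...
//     buf.commit(n);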
impl<'b, 'p, T> ops::Index<usize> for ReadDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
type Output = T;
/// Overloads the indexing ([]) operator for reads.
///
/// Part of reimplementing the array interface to be
/// able to use the structure naturally.
fn index(&self, index: usize) -> &T {
&self.buffer[index]
}
}
/// Intermediary structure in a two-phase write.
/// Writes to the requested buffer must be done directly
/// through this data structure which must then be committed.
pub struct WriteDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
buffer: &'b mut [T],
/// Contains a reference to parent to end commit
/// and prevent it from outliving its parent handle.
parent: &'p Producer<T>,
}
impl<'b, 'p, T> WriteDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
/// Attempts to commit the write, that is, end the two-phase
/// write started by a Producer. On a successful
/// commit, consumes self, otherwise returns self to try again.
pub fn commit(self, bytes_written: usize) -> Option<(Self, MojoResult)> {
let result = unsafe { self.parent.end_write(bytes_written) };
if result == MojoResult::Okay { None } else { Some((self, result)) }
}
/// Returns the length of the underlying buffer
pub fn len(&self) -> usize {
self.buffer.len()
}
}
impl<'b, 'p, T> ops::Index<usize> for WriteDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
type Output = T;
/// Overloads the indexing ([]) operator for reads.
///
/// Part of reimplementing the array interface to be
/// able to use the structure naturally.
fn index(&self, index: usize) -> &T {
&self.buffer[index]
}
}
impl<'b, 'p, T> ops::IndexMut<usize> for WriteDataBuffer<'b, 'p, T>
where
'p: 'b,
T: 'p,
{
/// Overloads the indexing ([]) operator for writes.
///
/// Part of reimplementing the array interface to be
/// able to use the structure naturally.
fn index_mut(&mut self, index: usize) -> &mut T {
&mut self.buffer[index]
}
}
/// Creates a data pipe, represented as a consumer
/// and a producer. Additionally, we associate a type
/// T with the data pipe, as data pipes operate in terms
/// of elements. In this way we can enforce type safety.
///
/// Capacity, as an input, must be given in number of elements.
/// Use a capacity of 0 in order to use some system-dependent
/// default capacity.
pub fn create<T>(
flags: CreateFlags,
capacity: u32,
) -> Result<(Consumer<T>, Producer<T>), MojoResult> {
let elem_size = mem::size_of::<T>() as u32;
let opts = ffi::MojoCreateDataPipeOptions::new(flags, elem_size, capacity * elem_size);
// TODO(mknyszek): Make sure handles are valid
let mut chandle: MojoHandle = 0;
let mut phandle: MojoHandle = 0;
let raw_opts = &opts as *const ffi::MojoCreateDataPipeOptions;
let r = MojoResult::from_code(unsafe {
ffi::MojoCreateDataPipe(
raw_opts,
&mut phandle as *mut MojoHandle,
&mut chandle as *mut MojoHandle,
)
});
if r != MojoResult::Okay {
Err(r)
} else {
Ok((
Consumer::<T> {
handle: unsafe { handle::acquire(chandle) },
_elem_type: marker::PhantomData,
},
Producer::<T> {
handle: unsafe { handle::acquire(phandle) },
_elem_type: marker::PhantomData,
},
))
}
}
/// Creates a data pipe, represented as a consumer
/// and a producer, using the default Mojo options.
pub fn create_default() -> Result<(Consumer<u8>, Producer<u8>), MojoResult> {
create::<u8>(Create::None as u32, 0)
}
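// Illustrative round trip (assumed call site; error handling elided): write through the
// producer and read the same bytes back from the consumer of a default pipe.
//
//     let (consumer, producer) = create_default().unwrap();
//     let written = producer.write(b"hi", Write::None as WriteFlags).unwrap();
//     assert_eq!(written, 2);
//     let data = consumer.read(Read::None as ReadFlags).unwrap();
//     assert_eq!(&data[..], b"hi");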
/// Represents the consumer half of a data pipe.
/// This data structure wraps a handle and acts
/// effectively as a typed handle.
///
/// The purpose of the _elem_type field is to associate
/// a type with the consumer, as a data pipe works
/// in elements.
pub struct Consumer<T> {
handle: handle::UntypedHandle,
_elem_type: marker::PhantomData<T>,
}
impl<T> Consumer<T> {
/// Perform a read operation on the consumer end of the data pipe. As
/// a result, we get an std::vec::Vec filled with whatever was written.
pub fn read(&self, flags: ReadFlags) -> Result<vec::Vec<T>, MojoResult> {
let mut options = ffi::MojoReadDataOptions::new(Read::Query as ReadFlags);
let mut num_bytes: u32 = 0;
let r_prelim = unsafe {
ffi::MojoReadData(
self.handle.get_native_handle(),
&options as *const _,
ptr::null_mut() as *mut ffi::c_void,
&mut num_bytes as *mut u32,
)
};
if r_prelim != 0 || num_bytes == 0 {
return Err(MojoResult::from_code(r_prelim));
}
options.flags = flags;
let elem_size: u32 = mem::size_of::<T>() as u32;
// TODO(mknyszek): make sure elem_size divides into num_bytes
let mut buf: vec::Vec<T> = vec::Vec::with_capacity((num_bytes / elem_size) as usize);
let r = MojoResult::from_code(unsafe {
ffi::MojoReadData(
self.handle.get_native_handle(),
&options as *const _,
buf.as_mut_ptr() as *const ffi::c_void,
&mut num_bytes as *mut u32,
)
});
unsafe { buf.set_len((num_bytes / elem_size) as usize) }
if r != MojoResult::Okay { Err(r) } else { Ok(buf) }
}
/// Start two-phase read and return a ReadDataBuffer to perform
/// read and commit.
pub fn begin(&self) -> Result<ReadDataBuffer<T>, MojoResult> {
let wrapped_result = unsafe { self.begin_read() };
match wrapped_result {
Ok(arr) => Ok(ReadDataBuffer::<T> { buffer: arr, parent: self }),
Err(r) => Err(r),
}
}
/// A private function that performs the first half of two-phase reading.
/// Kept private because it is unsafe to use (the array received may not
/// be valid if end_read is performed).
unsafe fn begin_read(&self) -> Result<&[T], MojoResult> {
let mut buf_num_bytes: u32 = 0;
let mut pbuf: *mut ffi::c_void = ptr::null_mut();
let r = MojoResult::from_code(ffi::MojoBeginReadData(
self.handle.get_native_handle(),
ptr::null(),
&mut pbuf,
&mut buf_num_bytes as *mut u32,
));
if r != MojoResult::Okay {
Err(r)
} else {
let buf_elems = (buf_num_bytes as usize) / mem::size_of::<T>();
let buf = slice::from_raw_parts(pbuf as *mut T, buf_elems);
Ok(buf)
}
}
/// A private function that performs the second half of two-phase reading.
/// Kept private because it is unsafe to use (the array received from start_read
/// may not be valid if end_read is performed).
///
/// Also assumes loads/stores aren't reordered such that a load/store may be
/// optimized to be run AFTER MojoEndReadData(). In general, this is true as long
/// as raw pointers are used, but Rust's memory model is still undefined. If you're
/// getting a bad/strange runtime error, it might be for this reason.
unsafe fn end_read(&self, elems_read: usize) -> MojoResult {
let elem_size = mem::size_of::<T>();
MojoResult::from_code(ffi::MojoEndReadData(
self.handle.get_native_handle(),
(elems_read * elem_size) as u32,
ptr::null(),
))
}
}
impl<T> CastHandle for Consumer<T> {
/// Generates a Consumer from an untyped handle wrapper
/// See mojo::system::handle for information on untyped vs. typed
unsafe fn from_untyped(handle: handle::UntypedHandle) -> Self {
Consumer::<T> { handle: handle, _elem_type: marker::PhantomData }
}
/// Consumes this object and produces a plain handle wrapper
/// See mojo::system::handle for information on untyped vs. typed
fn as_untyped(self) -> handle::UntypedHandle {
self.handle
}
}
impl<T> Handle for Consumer<T> {
/// Returns the native handle wrapped by this structure.
///
/// See mojo::system::handle for information on handle wrappers
fn get_native_handle(&self) -> MojoHandle {
self.handle.get_native_handle()
}
}
/// Represents the producer half of a data pipe.
/// This data structure wraps a handle and acts
/// effectively as a typed handle.
///
/// The purpose of the _elem_type field is to associate
/// a type with the producer, as a data pipe works
/// in elements.
pub struct Producer<T> {
handle: handle::UntypedHandle,
_elem_type: marker::PhantomData<T>,
}
impl<T> Producer<T> {
/// Perform a write operation on the producer end of the data pipe.
/// Returns the number of elements actually written.
pub fn write(&self, data: &[T], flags: WriteFlags) -> Result<usize, MojoResult> {
let mut num_bytes = (data.len() * mem::size_of::<T>()) as u32;
let options = ffi::MojoWriteDataOptions::new(flags);
let r = MojoResult::from_code(unsafe {
ffi::MojoWriteData(
self.handle.get_native_handle(),
data.as_ptr() as *const ffi::c_void,
&mut num_bytes as *mut u32,
&options as *const _,
)
});
if r != MojoResult::Okay { Err(r) } else { Ok(num_bytes as usize) }
}
/// Start two-phase write and return a WriteDataBuffer to perform
/// write and commit.
///
/// Borrows self as mutable so that no other operation may happen on
/// the producer until the two-phase write is committed.
pub fn begin(&self) -> Result<WriteDataBuffer<T>, MojoResult> {
let wrapped_result = unsafe { self.begin_write() };
match wrapped_result {
Ok(arr) => Ok(WriteDataBuffer::<T> { buffer: arr, parent: self }),
Err(r) => Err(r),
}
}
/// A private function that performs the first half of two-phase writing.
/// Kept private because it is unsafe to use (the array received may not
/// be valid if end_write is performed).
unsafe fn begin_write(&self) -> Result<&mut [T], MojoResult> {
let mut buf_num_bytes: u32 = 0;
let mut pbuf: *mut ffi::c_void = ptr::null_mut();
let r = MojoResult::from_code(ffi::MojoBeginWriteData(
self.handle.get_native_handle(),
ptr::null(),
&mut pbuf,
&mut buf_num_bytes as *mut u32,
));
if r != MojoResult::Okay {
Err(r)
} else {
let buf_elems = (buf_num_bytes as usize) / mem::size_of::<T>();
let buf = slice::from_raw_parts_mut(pbuf as *mut T, buf_elems);
Ok(buf)
}
}
/// A private function that performs the second half of two-phase writing.
/// Kept private because it is unsafe to use (the array received from start_write
/// may not be valid if end_write is performed).
///
/// Also assumes loads/stores aren't reordered such that a load/store may be
/// optimized to be run AFTER MojoEndWriteData(). In general, this is true as long
/// as raw pointers are used, but Rust's memory model is still undefined. If you're
/// getting a bad/strange runtime error, it might be for this reason.
unsafe fn end_write(&self, elems_written: usize) -> MojoResult {
let elem_size = mem::size_of::<T>();
MojoResult::from_code(ffi::MojoEndWriteData(
self.handle.get_native_handle(),
(elems_written * elem_size) as u32,
ptr::null(),
))
}
}
impl<T> CastHandle for Producer<T> {
/// Generates a Consumer from an untyped handle wrapper
/// See mojo::system::handle for information on untyped vs. typed
unsafe fn from_untyped(handle: handle::UntypedHandle) -> Self {
Producer::<T> { handle: handle, _elem_type: marker::PhantomData }
}
/// Consumes this object and produces a plain handle wrapper
/// See mojo::system::handle for information on untyped vs. typed
fn as_untyped(self) -> handle::UntypedHandle {
self.handle
}
}
impl<T> Handle for Producer<T> {
/// Returns the native handle wrapped by this structure.
///
/// See mojo::system::handle for information on handle wrappers
fn get_native_handle(&self) -> MojoHandle {
self.handle.get_native_handle()
}
}<|fim▁end|> | #[repr(u32)] |
<|file_name|>xauth.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import time
import urllib
from urlparse import parse_qs
from oauth_provider.tests.auth import BaseOAuthTestCase, METHOD_URL_QUERY, METHOD_AUTHORIZATION_HEADER, METHOD_POST_REQUEST_BODY
class XAuthTestCase(BaseOAuthTestCase):
def setUp(self):
super(XAuthTestCase, self).setUp()
self.consumer.xauth_allowed = True
self.consumer.save()
def _accesss_token(self, method=METHOD_URL_QUERY):
parameters = {
"oauth_consumer_key": self.CONSUMER_KEY,
"oauth_consumer_secret": self.CONSUMER_SECRET,
"oauth_nonce": "12981230918711",
'oauth_signature_method': 'PLAINTEXT',
'oauth_signature': "%s&%s" % (self.CONSUMER_SECRET, ""),
'oauth_timestamp': str(int(time.time())),
'oauth_version': '1.0',
'x_auth_mode': "client_auth",
'x_auth_password': self.password,
'x_auth_username': self.username,
}
if method==METHOD_AUTHORIZATION_HEADER:
header = self._get_http_authorization_header(parameters)
response = self.c.get("/oauth/access_token/", HTTP_AUTHORIZATION=header)
elif method==METHOD_URL_QUERY:
response = self.c.get("/oauth/access_token/", parameters)
elif method==METHOD_POST_REQUEST_BODY:
body = urllib.urlencode(parameters)
response = self.c.post("/oauth/access_token/", body, content_type="application/x-www-form-urlencoded")
else:
raise NotImplementedError
self.assertEqual(response.status_code, 200)<|fim▁hole|> self.ACCESS_TOKEN_KEY = response_params['oauth_token'][0]
self.ACCESS_TOKEN_SECRET = response_params['oauth_token_secret'][0]
def test_xauth(self):
self._access_token(x_auth_mode="client_auth",
x_auth_password=self.password,
x_auth_username=self.username)
assert self.ACCESS_TOKEN_KEY
assert self.ACCESS_TOKEN_SECRET
def test_xauth_using_email(self):
self._access_token(x_auth_mode="client_auth",
x_auth_password=self.password,
x_auth_username=self.email)
assert self.ACCESS_TOKEN_KEY
assert self.ACCESS_TOKEN_SECRET<|fim▁end|> | response_params = parse_qs(response.content)
|
<|file_name|>calculate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Calculate controller"""
from __future__ import division
import collections
import copy
import itertools
import os
import time
from openfisca_core.legislations import ParameterNotFound
from .. import conf, contexts, conv, environment, model, wsgihelpers
def N_(message):
return message
def build_output_variables(simulations, use_label, variables):
return [
{
variable: simulation.get_holder(variable).to_value_json(use_label = use_label)
for variable in variables
}
for simulation in simulations
]
def fill_test_cases_with_values(intermediate_variables, scenarios, simulations, use_label, variables):
output_test_cases = []
for scenario, simulation in itertools.izip(scenarios, simulations):
if intermediate_variables:
holders = []
for step in simulation.traceback.itervalues():
holder = step['holder']
if holder not in holders:
holders.append(holder)
else:
holders = [
simulation.get_holder(variable)
for variable in variables
]
test_case = scenario.to_json()['test_case']
for holder in holders:
variable_value_json = holder.to_value_json(use_label = use_label)
if variable_value_json is None:
continue
variable_name = holder.column.name
entity_members = test_case[holder.entity.key_plural]
if isinstance(variable_value_json, dict):
for entity_member_index, entity_member in enumerate(entity_members):
entity_member[variable_name] = {}
for period, array_or_dict_json in variable_value_json.iteritems():
if type(array_or_dict_json) == dict:
if len(array_or_dict_json) == 1:
entity_member[variable_name][period] = \
array_or_dict_json[array_or_dict_json.keys()[0]][entity_member_index]
else:
entity_member[variable_name][period] = {}
for key, array in array_or_dict_json.iteritems():
entity_member[variable_name][period][key] = array[entity_member_index]
else:
entity_member[variable_name][period] = array_or_dict_json[entity_member_index]
else:
for entity_member, cell_json in itertools.izip(entity_members, variable_value_json):
entity_member[variable_name] = cell_json
output_test_cases.append(test_case)
return output_test_cases
@wsgihelpers.wsgify
def api1_calculate(req):
def calculate_simulations(scenarios, variables, trace):
simulations = []
for scenario_index, scenario in enumerate(scenarios):
simulation = scenario.new_simulation(trace = trace)
for variable_name in variables:
try:
simulation.calculate_output(variable_name)
except ParameterNotFound as exc:
raise wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = 1,
context = inputs.get('context'),
error = collections.OrderedDict(sorted(dict(
code = 500,
errors = [{"scenarios": {scenario_index: exc.to_json()}}],
).iteritems())),
method = req.script_name,
params = inputs,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
simulations.append(simulation)
return simulations
total_start_time = time.time()
ctx = contexts.Ctx(req)
headers = wsgihelpers.handle_cross_origin_resource_sharing(ctx)
assert req.method == 'POST', req.method
if conf['load_alert']:
try:
load_average = os.getloadavg()
except (AttributeError, OSError):
# When load average is not available, always accept request.
pass
else:
if load_average[0] / environment.cpu_count > 1:
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = 1,
error = collections.OrderedDict(sorted(dict(
code = 503, # Service Unavailable
message = ctx._(u'Server is overloaded: {} {} {}').format(*load_average),
).iteritems())),
method = req.script_name,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
content_type = req.content_type
if content_type is not None:
content_type = content_type.split(';', 1)[0].strip()
if content_type != 'application/json':
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = 1,
error = collections.OrderedDict(sorted(dict(
code = 400, # Bad Request
message = ctx._(u'Bad content-type: {}').format(content_type),
).iteritems())),
method = req.script_name,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
inputs, error = conv.pipe(
conv.make_input_to_json(object_pairs_hook = collections.OrderedDict),
conv.test_isinstance(dict),
conv.not_none,
)(req.body, state = ctx)
if error is not None:
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = 1,
error = collections.OrderedDict(sorted(dict(
code = 400, # Bad Request
errors = [conv.jsonify_value(error)],
message = ctx._(u'Invalid JSON in request POST body'),
).iteritems())),
method = req.script_name,
params = req.body,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
str_list_to_reforms = conv.make_str_list_to_reforms()
data, errors = conv.struct(
dict(
base_reforms = str_list_to_reforms,
context = conv.test_isinstance(basestring), # For asynchronous calls
intermediate_variables = conv.pipe(
conv.test_isinstance((bool, int)),
conv.anything_to_bool,
conv.default(False),
),
labels = conv.pipe( # Return labels (of enumerations) instead of numeric values.
conv.test_isinstance((bool, int)),
conv.anything_to_bool,
conv.default(False),
),
output_format = conv.pipe(
conv.test_isinstance(basestring),
conv.test_in(['test_case', 'variables']),
conv.default('test_case'),
),
reforms = str_list_to_reforms,
scenarios = conv.pipe(
conv.test_isinstance(list),
conv.uniform_sequence(
conv.not_none, # Real conversion is done once tax-benefit system is known.
),
conv.test(lambda scenarios: len(scenarios) >= 1, error = N_(u'At least one scenario is required')),
conv.test(lambda scenarios: len(scenarios) <= 100,
error = N_(u"There can't be more than 100 scenarios")),
conv.not_none,
),
time = conv.pipe(
conv.test_isinstance((bool, int)),
conv.anything_to_bool,
conv.default(False),
),
trace = conv.pipe(
conv.test_isinstance((bool, int)),
conv.anything_to_bool,
conv.default(False),
),
validate = conv.pipe(
conv.test_isinstance((bool, int)),
conv.anything_to_bool,
conv.default(False),
),
variables = conv.pipe(
conv.test_isinstance(list),
conv.uniform_sequence(
conv.pipe(
conv.test_isinstance(basestring),
conv.empty_to_none,
# Remaining of conversion is done once tax-benefit system is known.
conv.not_none,
),
constructor = set,
),
conv.test(lambda variables: len(variables) >= 1, error = N_(u'At least one variable is required')),
conv.not_none,
),
),
)(inputs, state = ctx)
if errors is None:
compose_reforms_start_time = time.time()
country_tax_benefit_system = model.tax_benefit_system
base_tax_benefit_system = model.get_cached_composed_reform(
reform_keys = data['base_reforms'],
tax_benefit_system = country_tax_benefit_system,
) if data['base_reforms'] is not None else country_tax_benefit_system
if data['reforms'] is not None:
reform_tax_benefit_system = model.get_cached_composed_reform(
reform_keys = data['reforms'],
tax_benefit_system = base_tax_benefit_system,
)
compose_reforms_end_time = time.time()
compose_reforms_time = compose_reforms_end_time - compose_reforms_start_time
build_scenarios_start_time = time.time()
base_scenarios, base_scenarios_errors = conv.uniform_sequence(
base_tax_benefit_system.Scenario.make_json_to_cached_or_new_instance(
ctx = ctx,
repair = data['validate'],
tax_benefit_system = base_tax_benefit_system,
)
)(data['scenarios'], state = ctx)
errors = {'scenarios': base_scenarios_errors} if base_scenarios_errors is not None else None
if errors is None and data['reforms'] is not None:
reform_scenarios, reform_scenarios_errors = conv.uniform_sequence(
reform_tax_benefit_system.Scenario.make_json_to_cached_or_new_instance(
ctx = ctx,
repair = data['validate'],
tax_benefit_system = reform_tax_benefit_system,
)
)(data['scenarios'], state = ctx)
errors = {'scenarios': reform_scenarios_errors} if reform_scenarios_errors is not None else None
build_scenarios_end_time = time.time()
build_scenarios_time = build_scenarios_end_time - build_scenarios_start_time
if errors is None:
data, errors = conv.struct(
dict(
variables = conv.uniform_sequence(
conv.make_validate_variable(
base_tax_benefit_system = base_tax_benefit_system,
reform_tax_benefit_system = reform_tax_benefit_system if data['reforms'] else None,
reforms = data['reforms'],
),
),
),
default = conv.noop,
)(data, state = ctx)
if errors is not None:
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = 1,
context = inputs.get('context'),
error = collections.OrderedDict(sorted(dict(
code = 400, # Bad Request
errors = [conv.jsonify_value(errors)],
message = ctx._(u'Bad parameters in request'),
).iteritems())),
method = req.script_name,
params = inputs,
url = req.url.decode('utf-8'),
).iteritems())),
headers = headers,
)
scenarios = base_scenarios if data['reforms'] is None else reform_scenarios
suggestions = {}
for scenario_index, scenario in enumerate(scenarios):
if data['validate']:
original_test_case = scenario.test_case
scenario.test_case = copy.deepcopy(original_test_case)
suggestion = scenario.suggest() # This modifies scenario.test_case!
if data['validate']:
scenario.test_case = original_test_case
if suggestion is not None:
suggestions.setdefault('scenarios', {})[scenario_index] = suggestion
if not suggestions:
suggestions = None
if data['validate']:
# Only a validation is requested. Don't launch simulation
total_end_time = time.time()
total_time = total_end_time - total_start_time
response_data = dict(
apiVersion = 1,
context = inputs.get('context'),
method = req.script_name,
params = inputs,
repaired_scenarios = [
scenario.to_json()
for scenario in scenarios
],
suggestions = suggestions,
url = req.url.decode('utf-8'),
)
if data['time']:
response_data['time'] = collections.OrderedDict(sorted(dict(
build_scenarios = build_scenarios_time,
compose_reforms = compose_reforms_time,
total = total_time,
                    ).iteritems()))
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(response_data.iteritems())),
headers = headers,
)
calculate_simulation_start_time = time.time()
trace_simulations = data['trace'] or data['intermediate_variables']
base_simulations = calculate_simulations(scenarios, data['variables'], trace = trace_simulations)
if data['reforms'] is not None:
reform_simulations = calculate_simulations(reform_scenarios, data['variables'], trace = trace_simulations)
calculate_simulation_end_time = time.time()
calculate_simulation_time = calculate_simulation_end_time - calculate_simulation_start_time
if data['output_format'] == 'test_case':
base_value = fill_test_cases_with_values(
intermediate_variables = data['intermediate_variables'],
scenarios = base_scenarios,
simulations = base_simulations,
use_label = data['labels'],
variables = data['variables'],
)
if data['reforms'] is not None:
reform_value = fill_test_cases_with_values(
intermediate_variables = data['intermediate_variables'],
scenarios = reform_scenarios,
simulations = reform_simulations,
use_label = data['labels'],
variables = data['variables'],
)
else:
assert data['output_format'] == 'variables'
base_value = build_output_variables(
simulations = base_simulations,
use_label = data['labels'],
variables = data['variables'],
)
if data['reforms'] is not None:
reform_value = build_output_variables(
simulations = reform_simulations,
use_label = data['labels'],
variables = data['variables'],
)
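        # When tracing was requested, export for each simulation the JSON value of every
        # computed variable, plus one traceback entry per (variable, period) computation step.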
if data['trace']:
simulations_variables_json = []
tracebacks_json = []
simulations = reform_simulations if data['reforms'] is not None else base_simulations
for simulation in simulations:
simulation_variables_json = {}
traceback_json = []
for (variable_name, period), step in simulation.traceback.iteritems():
holder = step['holder']
if variable_name not in simulation_variables_json:
variable_value_json = holder.to_value_json()
if variable_value_json is not None:
simulation_variables_json[variable_name] = variable_value_json
column = holder.column
input_variables_infos = step.get('input_variables_infos')
parameters_infos = step.get('parameters_infos')
traceback_json.append(collections.OrderedDict(sorted(dict(
cell_type = column.val_type, # Unification with OpenFisca Julia name.
default_input_variables = step.get('default_input_variables', False),
entity = column.entity,
input_variables = [
(input_variable_name, str(input_variable_period))
for input_variable_name, input_variable_period in input_variables_infos
] if input_variables_infos else None,
is_computed = step.get('is_computed', False),<|fim▁hole|> period = str(period) if period is not None else None,
).iteritems())))
simulations_variables_json.append(simulation_variables_json)
tracebacks_json.append(traceback_json)
else:
simulations_variables_json = None
tracebacks_json = None
response_data = collections.OrderedDict(sorted(dict(
apiVersion = 1,
context = data['context'],
method = req.script_name,
params = inputs,
suggestions = suggestions,
tracebacks = tracebacks_json,
url = req.url.decode('utf-8'),
value = reform_value if data['reforms'] is not None else base_value,
variables = simulations_variables_json,
).iteritems()))
if data['reforms'] is not None:
response_data['base_value'] = base_value
total_end_time = time.time()
total_time = total_end_time - total_start_time
if data['time']:
response_data['time'] = collections.OrderedDict(sorted(dict(
build_scenarios = build_scenarios_time,
compose_reforms = compose_reforms_time,
calculate_simulation = calculate_simulation_time,
total = total_time,
).iteritems()))
return wsgihelpers.respond_json(ctx, response_data, headers = headers)<|fim▁end|> | label = column.label if column.label != variable_name else None,
name = variable_name,
parameters = parameters_infos or None, |
<|file_name|>formatters.py<|end_file_name|><|fim▁begin|>#!/bin/env python
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/formatters.py
__all__=('Formatter','DecimalFormatter')
__version__=''' $Id: formatters.py 3959 2012-09-27 14:39:39Z robin $ '''
__doc__="""
These help format numbers and dates in a user friendly way.
Used by the graphics framework.
"""
import string, sys, os, re
class Formatter:
"Base formatter - simply applies python format strings"
def __init__(self, pattern):
self.pattern = pattern
def format(self, obj):
return self.pattern % obj
def __repr__(self):
return "%s('%s')" % (self.__class__.__name__, self.pattern)
def __call__(self, x):
return self.format(x)
_ld_re=re.compile(r'^\d*\.')
_tz_re=re.compile('0+$')
class DecimalFormatter(Formatter):
"""lets you specify how to build a decimal.
A future NumberFormatter class will take Microsoft-style patterns
instead - "$#,##0.00" is WAY easier than this."""
def __init__(self, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
if places=='auto':
self.calcPlaces = self._calcPlaces
else:
self.places = places
self.dot = decimalSep
self.comma = thousandSep
self.prefix = prefix
self.suffix = suffix<|fim▁hole|> def _calcPlaces(self,V):
'''called with the full set of values to be formatted so we can calculate places'''
self.places = max([len(_tz_re.sub('',_ld_re.sub('',str(v)))) for v in V])
def format(self, num):
# positivize the numbers
sign=num<0
if sign:
num = -num
places, sep = self.places, self.dot
strip = places<=0
if places and strip: places = -places
strInt = ('%.' + str(places) + 'f') % num
if places:
strInt, strFrac = strInt.split('.')
strFrac = sep + strFrac
if strip:
while strFrac and strFrac[-1] in ['0',sep]: strFrac = strFrac[:-1]
else:
strFrac = ''
if self.comma is not None:
strNew = ''
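            # Walk the integer part from the right, splitting off groups of three digits
            # and prepending the thousands separator between groups.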
while strInt:
left, right = strInt[0:-3], strInt[-3:]
if left == '':
#strNew = self.comma + right + strNew
strNew = right + strNew
else:
strNew = self.comma + right + strNew
strInt = left
strInt = strNew
strBody = strInt + strFrac
if sign: strBody = '-' + strBody
if self.prefix:
strBody = self.prefix + strBody
if self.suffix:
strBody = strBody + self.suffix
return strBody
def __repr__(self):
return "%s(places=%d, decimalSep=%s, thousandSep=%s, prefix=%s, suffix=%s)" % (
self.__class__.__name__,
self.places,
repr(self.dot),
repr(self.comma),
repr(self.prefix),
repr(self.suffix)
)
if __name__=='__main__':
def t(n, s, places=2, decimalSep='.', thousandSep=None, prefix=None, suffix=None):
f=DecimalFormatter(places,decimalSep,thousandSep,prefix,suffix)
r = f(n)
print("places=%2d dot=%-4s comma=%-4s prefix=%-4s suffix=%-4s result=%10s %s" %(f.places, f.dot, f.comma, f.prefix, f.suffix,r, r==s and 'OK' or 'BAD'))
t(1000.9,'1,000.9',1,thousandSep=',')
t(1000.95,'1,001.0',1,thousandSep=',')
t(1000.95,'1,001',-1,thousandSep=',')
t(1000.9,'1,001',0,thousandSep=',')
t(1000.9,'1000.9',1)
t(1000.95,'1001.0',1)
t(1000.95,'1001',-1)
t(1000.9,'1001',0)
t(1000.1,'1000.1',1)
t(1000.55,'1000.6',1)
t(1000.449,'1000.4',-1)
t(1000.45,'1000',0)<|fim▁end|> | |
<|file_name|>mandate_pdfs_service.py<|end_file_name|><|fim▁begin|># WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class MandatePdfsService(base_service.BaseService):
"""Service class that provides access to the mandate_pdfs
endpoints of the GoCardless Pro API.
"""
RESOURCE_CLASS = resources.MandatePdf
RESOURCE_NAME = 'mandate_pdfs'
def create(self,params=None, headers=None):<|fim▁hole|> """Create a mandate PDF.
Generates a PDF mandate and returns its temporary URL.
Customer and bank account details can be left blank (for a blank
mandate), provided manually, or inferred from the ID of an existing
[mandate](#core-endpoints-mandates).
By default, we'll generate PDF mandates in English.
To generate a PDF mandate in another language, set the
`Accept-Language` header when creating the PDF mandate to the relevant
[ISO 639-1](http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes)
language code supported for the scheme.
        | Scheme           | Supported languages |
        | :--------------- | :------------------ |
        | ACH              | English (`en`) |
        | Autogiro         | English (`en`), Swedish (`sv`) |
        | Bacs             | English (`en`) |
        | BECS             | English (`en`) |
        | BECS NZ          | English (`en`) |
        | Betalingsservice | Danish (`da`), English (`en`) |
        | PAD              | English (`en`) |
        | SEPA Core        | Danish (`da`), Dutch (`nl`), English (`en`), French (`fr`), German (`de`), Italian (`it`), Portuguese (`pt`), Spanish (`es`), Swedish (`sv`) |
Args:
params (dict, optional): Request body.
Returns:
MandatePdf
"""
path = '/mandate_pdfs'
if params is not None:
params = {self._envelope_key(): params}
response = self._perform_request('POST', path, params, headers,
retry_failures=True)
return self._resource_for(response)<|fim▁end|> | |
<|file_name|>tmpdir.go<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: Apache-2.0
// Copyright 2018 Authors of Cilium
package test
import (
"os"<|fim▁hole|>var Tmpdir string
func init() {
var err error
Tmpdir, err = os.MkdirTemp("", "cilium_envoy_go_test")
if err != nil {
		log.Fatal("Failed to create a temporary directory for testing")
}
}<|fim▁end|> |
log "github.com/sirupsen/logrus"
)
|
<|file_name|>reportsectiondetail.cpp<|end_file_name|><|fim▁begin|>/*
* OpenRPT report writer and rendering engine
* Copyright (C) 2001-2007 by OpenMFG, LLC ([email protected])
* Copyright (C) 2007-2008 by Adam Pigg ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*/
#include "reportsectiondetail.h"
#include "reportsectiondetailgroup.h"
#include "reportsection.h"
#include <QDomDocument>
#include <kdebug.h>
//
// ReportSectionDetail
//
ReportSectionDetail::ReportSectionDetail(KoReportDesigner * rptdes)
: QWidget(rptdes)
{
setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
m_pageBreak = BreakNone;
m_vboxlayout = new QVBoxLayout(this);
m_vboxlayout->setSpacing(0);
m_vboxlayout->setMargin(0);
m_reportDesigner = rptdes;
m_detail = new ReportSection(rptdes /*, this*/);
m_vboxlayout->addWidget(m_detail);
this->setLayout(m_vboxlayout);
}
ReportSectionDetail::~ReportSectionDetail()
{
// Qt should be handling everything for us
m_reportDesigner = 0;
}
int ReportSectionDetail::pageBreak() const
{
return m_pageBreak;
}
void ReportSectionDetail::setPageBreak(int pb)
{
m_pageBreak = pb;
}
ReportSection * ReportSectionDetail::detailSection() const
{
return m_detail;
}
void ReportSectionDetail::buildXML(QDomDocument & doc, QDomElement & section)
{
if (pageBreak() != ReportSectionDetail::BreakNone) {
QDomElement spagebreak = doc.createElement("pagebreak");
if (pageBreak() == ReportSectionDetail::BreakAtEnd)
spagebreak.setAttribute("when", "at end");
section.appendChild(spagebreak);
}
foreach(ReportSectionDetailGroup* rsdg, groupList) {
rsdg->buildXML(doc, section);
}
// detail section
QDomElement gdetail = doc.createElement("report:section");
gdetail.setAttribute("report:section-type", "detail");
m_detail->buildXML(doc, gdetail);
section.appendChild(gdetail);
}
void ReportSectionDetail::initFromXML(QDomNode & section)
{
QDomNodeList nl = section.childNodes();
QDomNode node;
QString n;
for (int i = 0; i < nl.count(); i++) {
node = nl.item(i);
n = node.nodeName();
//kDebug() << n;
if (n == "pagebreak") {
QDomElement eThis = node.toElement();
if (eThis.attribute("when") == "at end")
setPageBreak(BreakAtEnd);
} else if (n == "report:group") {
ReportSectionDetailGroup * rsdg = new ReportSectionDetailGroup("unnamed", this, this);
rsdg->initFromXML( node.toElement() );
insertGroupSection(groupSectionCount(), rsdg);
} else if (n == "report:section" && node.toElement().attribute("report:section-type") == "detail") {
//kDebug() << "Creating detail section";
m_detail->initFromXML(node);
} else {
// unknown element
            kWarning() << "while parsing section encountered an unknown element: " << n;
}
}
}
KoReportDesigner * ReportSectionDetail::reportDesigner() const
{
return m_reportDesigner;
}
int ReportSectionDetail::groupSectionCount() const
{
return groupList.count();
}
ReportSectionDetailGroup * ReportSectionDetail::groupSection(int i) const
{
return groupList.at(i);
}
void ReportSectionDetail::insertGroupSection(int idx, ReportSectionDetailGroup * rsd)
{
groupList.insert(idx, rsd);
rsd->groupHeader()->setParent(this);
rsd->groupFooter()->setParent(this);
idx = 0;
int gi = 0;
for (gi = 0; gi < (int) groupList.count(); gi++) {
rsd = groupList.at(gi);
m_vboxlayout->removeWidget(rsd->groupHeader());
m_vboxlayout->insertWidget(idx, rsd->groupHeader());
idx++;
}
m_vboxlayout->removeWidget(m_detail);
m_vboxlayout->insertWidget(idx, m_detail);
idx++;
for (gi = ((int) groupList.count() - 1); gi >= 0; --gi) {
rsd = groupList.at(gi);
m_vboxlayout->removeWidget(rsd->groupFooter());
m_vboxlayout->insertWidget(idx, rsd->groupFooter());
idx++;
}
if (m_reportDesigner) m_reportDesigner->setModified(true);
adjustSize();
}
int ReportSectionDetail::indexOfGroupSection(const QString & column) const
{
// find the item by its name
for (uint i = 0; i < (uint)groupList.count(); i++) {
ReportSectionDetailGroup * rsd = groupList.at(i);
if (column == rsd->column()) return i;
}
return -1;
}
void ReportSectionDetail::removeGroupSection(int idx, bool del)
{
ReportSectionDetailGroup * rsd = groupList.at(idx);
m_vboxlayout->removeWidget(rsd->groupHeader());
m_vboxlayout->removeWidget(rsd->groupFooter());
groupList.removeAt(idx);
if (m_reportDesigner) m_reportDesigner->setModified(true);
if (del) delete rsd;<|fim▁hole|>{
QSize s;
foreach(ReportSectionDetailGroup* rsdg, groupList) {
if (rsdg->groupHeaderVisible()) s += rsdg->groupHeader()->size();
if (rsdg->groupFooterVisible()) s += rsdg->groupFooter()->size();
}
return s += m_detail->size();
}
void ReportSectionDetail::setSectionCursor(const QCursor& c)
{
if (m_detail)
m_detail->setSectionCursor(c);
foreach(ReportSectionDetailGroup* rsdg, groupList) {
if (rsdg->groupHeader())
rsdg->groupHeader()->setSectionCursor(c);
if (rsdg->groupFooter())
rsdg->groupFooter()->setSectionCursor(c);
}
}
void ReportSectionDetail::unsetSectionCursor()
{
if (m_detail)
m_detail->unsetSectionCursor();
foreach(ReportSectionDetailGroup* rsdg, groupList) {
if (rsdg->groupHeader())
rsdg->groupHeader()->unsetSectionCursor();
if (rsdg->groupFooter())
rsdg->groupFooter()->unsetSectionCursor();
}
}<|fim▁end|> | adjustSize();
}
QSize ReportSectionDetail::sizeHint() const |
<|file_name|>networks.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, 2014 Computational Molecular Biology Group, Free University
# Berlin, 14195 Berlin, Germany.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import math
import numpy as np
from matplotlib import pylab as plt
from matplotlib import rcParams
from six.moves import range
__author__ = 'noe, marscher'
# taken from networkx.drawing.layout and added hold_dim
def _fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None,
iterations=50, hold_dim=None):
# Position nodes in adjacency matrix A using Fruchterman-Reingold
# Entry point for NetworkX graph is fruchterman_reingold_layout()
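    # hold_dim: if 0 or 1, that coordinate is kept fixed and only the other one is
    # updated (used when the caller pins the x or y positions of the nodes).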
try:
nnodes, _ = A.shape
except AttributeError:
raise RuntimeError(
"fruchterman_reingold() takes an adjacency matrix as input")
A = np.asarray(A) # make sure we have an array instead of a matrix
if pos is None:
# random initial positions
pos = np.asarray(np.random.random((nnodes, dim)), dtype=A.dtype)
else:
# make sure positions are of same type as matrix
pos = pos.astype(A.dtype)
# optimal distance between nodes
if k is None:
k = np.sqrt(1.0 / nnodes)
# the initial "temperature" is about .1 of domain area (=1x1)
# this is the largest step allowed in the dynamics.
t = 0.1
# simple cooling scheme.
# linearly step down by dt on each iteration so last iteration is size dt.
dt = t / float(iterations + 1)
delta = np.zeros((pos.shape[0], pos.shape[0], pos.shape[1]), dtype=A.dtype)
# the inscrutable (but fast) version
# this is still O(V^2)
# could use multilevel methods to speed this up significantly<|fim▁hole|> # matrix of difference between points
for i in range(pos.shape[1]):
delta[:, :, i] = pos[:, i, None] - pos[:, i]
# distance between points
distance = np.sqrt((delta**2).sum(axis=-1))
# enforce minimum distance of 0.01
distance = np.where(distance < 0.01, 0.01, distance)
# displacement "force"
displacement = np.transpose(np.transpose(delta) *
(k * k / distance**2 - A * distance / k))\
.sum(axis=1)
# update positions
length = np.sqrt((displacement**2).sum(axis=1))
length = np.where(length < 0.01, 0.1, length)
delta_pos = np.transpose(np.transpose(displacement) * t / length)
if fixed is not None:
# don't change positions of fixed nodes
delta_pos[fixed] = 0.0
# only update y component
if hold_dim == 0:
pos[:, 1] += delta_pos[:, 1]
# only update x component
elif hold_dim == 1:
pos[:, 0] += delta_pos[:, 0]
else:
pos += delta_pos
# cool temperature
t -= dt
pos = _rescale_layout(pos)
return pos
def _rescale_layout(pos, scale=1):
# rescale to (0,pscale) in all axes
# shift origin to (0,0)
lim = 0 # max coordinate for all axes
for i in range(pos.shape[1]):
pos[:, i] -= pos[:, i].min()
lim = max(pos[:, i].max(), lim)
# rescale to (0,scale) in all directions, preserves aspect
for i in range(pos.shape[1]):
pos[:, i] *= scale / lim
return pos
class NetworkPlot(object):
def __init__(self, A, pos=None, xpos=None, ypos=None):
r"""
Parameters
----------
A : ndarray(n,n)
weight matrix or adjacency matrix of the network to visualize
pos : ndarray(n,2)
user-defined positions
xpos : ndarray(n,)
user-defined x-positions
ypos : ndarray(n,)
user-defined y-positions
Examples
--------
        We first define a reactive flux by taking the following transition
matrix and computing TPT from state 2 to 3.
>>> import numpy as np
>>> P = np.array([[0.8, 0.15, 0.05, 0.0, 0.0],
... [0.1, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.1, 0.8, 0.0, 0.05],
... [0.0, 0.2, 0.0, 0.8, 0.0],
... [0.0, 0.02, 0.02, 0.0, 0.96]])
>>> from pyemma import msm
>>> F = msm.tpt(msm.markov_model(P), [2], [3])
now plot the gross flux
>>> NetworkPlot(F.gross_flux).plot_network() # doctest:+ELLIPSIS
<matplotlib.figure.Figure...
"""
if A.shape[0] >= 50:
import warnings
warnings.warn("The layout optimization method will take a long"
" time for large networks! It is recommended to"
" coarse grain your model first!")
self.A = A
self.pos = pos
self.xpos = xpos
self.ypos = ypos
def _draw_arrow(self, x1, y1, x2, y2, Dx, Dy, label="", width=1.0,
arrow_curvature=1.0, color="grey",
patchA=None, patchB=None, shrinkA=0, shrinkB=0):
"""
Draws a slightly curved arrow from (x1,y1) to (x2,y2).
        Will allow the given patches at start and end.
"""
# set arrow properties
dist = math.sqrt(
((x2 - x1) / float(Dx))**2 + ((y2 - y1) / float(Dy))**2)
arrow_curvature *= 0.075 # standard scale
rad = arrow_curvature / (dist)
tail_width = width
head_width = max(0.5, 2 * width)
head_length = head_width
plt.annotate("",
xy=(x2, y2),
xycoords='data',
xytext=(x1, y1),
textcoords='data',
arrowprops=dict(arrowstyle='simple,head_length=%f,head_width=%f,tail_width=%f'
% (head_length, head_width, tail_width),
color=color, shrinkA=shrinkA, shrinkB=shrinkB,
patchA=patchA, patchB=patchB,
connectionstyle="arc3,rad=%f" % -rad),
zorder=0)
# weighted center position
center = np.array([0.55 * x1 + 0.45 * x2, 0.55 * y1 + 0.45 * y2])
v = np.array([x2 - x1, y2 - y1]) # 1->2 vector
vabs = np.abs(v)
vnorm = np.array([v[1], -v[0]]) # orthogonal vector
vnorm /= math.sqrt(np.dot(vnorm, vnorm)) # normalize
# cross product to determine the direction into which vnorm points
z = np.cross(v, vnorm)
if z < 0:
vnorm *= -1
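        # Place the label beside the arc: offset from the weighted midpoint along the
        # normal direction, scaled by the arrow curvature and the figure extents Dx, Dy.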
offset = 0.5 * arrow_curvature * \
((vabs[0] / (vabs[0] + vabs[1]))
* Dx + (vabs[1] / (vabs[0] + vabs[1])) * Dy)
ptext = center + offset * vnorm
plt.text(ptext[0], ptext[1], label, size=14,
horizontalalignment='center', verticalalignment='center', zorder=1)
def plot_network(self,
state_sizes=None, state_scale=1.0, state_colors='#ff5500',
arrow_scale=1.0, arrow_curvature=1.0, arrow_labels='weights',
arrow_label_format='%10.2f', max_width=12, max_height=12,
figpadding=0.2, xticks=False, yticks=False):
"""
Draws a network using discs and curved arrows.
The thicknesses and labels of the arrows are taken from the off-diagonal matrix elements in A.
"""
if self.pos is None:
self.layout_automatic()
# number of nodes
n = len(self.pos)
# get bounds and pad figure
xmin = np.min(self.pos[:, 0])
xmax = np.max(self.pos[:, 0])
Dx = xmax - xmin
xmin -= Dx * figpadding
xmax += Dx * figpadding
Dx *= 1 + figpadding
ymin = np.min(self.pos[:, 1])
ymax = np.max(self.pos[:, 1])
Dy = ymax - ymin
ymin -= Dy * figpadding
ymax += Dy * figpadding
Dy *= 1 + figpadding
# sizes of nodes
if state_sizes is None:
state_sizes = 0.5 * state_scale * \
min(Dx, Dy)**2 * np.ones(n) / float(n)
else:
state_sizes = 0.5 * state_scale * \
min(Dx, Dy)**2 * state_sizes / (np.max(state_sizes) * float(n))
# automatic arrow rescaling
arrow_scale *= 1.0 / \
(np.max(self.A - np.diag(np.diag(self.A))) * math.sqrt(n))
# size figure
if (Dx / max_width > Dy / max_height):
figsize = (max_width, Dy * (max_width / Dx))
else:
figsize = (Dx / Dy * max_height, max_height)
fig = plt.gcf()
fig.set_size_inches(figsize, forward=True)
# font sizes
old_fontsize = rcParams['font.size']
rcParams['font.size'] = 20
# remove axis labels
frame = plt.gca()
if not xticks:
frame.axes.get_xaxis().set_ticks([])
if not yticks:
frame.axes.get_yaxis().set_ticks([])
# set node colors
if state_colors is None:
state_colors = '#ff5500' # None is not acceptable
if isinstance(state_colors, str):
state_colors = [state_colors] * n
else:
            # transform from [0,1] to 255-scale
state_colors = [
plt.cm.binary(int(256.0 * state_colors[i])) for i in range(n)]
# set arrow labels
if isinstance(arrow_labels, np.ndarray):
L = arrow_labels
else:
L = np.empty(np.shape(self.A), dtype=object)
if arrow_labels is None:
L[:, :] = ''
elif arrow_labels.lower() == 'weights':
for i in range(n):
for j in range(n):
L[i, j] = arrow_label_format % self.A[i, j]
else:
rcParams['font.size'] = old_fontsize
raise ValueError('invalid arrow label format')
# draw circles
circles = []
for i in range(n):
fig = plt.gcf()
# choose color
c = plt.Circle(self.pos[i], radius=math.sqrt(
0.5 * state_sizes[i]) / 2.0, color=state_colors[i], zorder=2)
circles.append(c)
fig.gca().add_artist(c)
# add annotation
plt.text(self.pos[i][0], self.pos[i][1], str(i), size=14,
horizontalalignment='center', verticalalignment='center',
color='black', zorder=3)
assert len(circles) == n, "%i != %i" % (len(circles), n)
# draw arrows
for i in range(n):
for j in range(i + 1, n):
if (abs(self.A[i, j]) > 0):
self._draw_arrow(self.pos[i, 0], self.pos[i, 1],
self.pos[j, 0], self.pos[j, 1], Dx, Dy,
label=str(L[i, j]),
width=arrow_scale * self.A[i, j],
arrow_curvature=arrow_curvature,
patchA=circles[i], patchB=circles[j],
shrinkA=3, shrinkB=0)
if (abs(self.A[j, i]) > 0):
self._draw_arrow(self.pos[j, 0], self.pos[j, 1],
self.pos[i, 0], self.pos[i, 1], Dx, Dy,
label=str(L[j, i]),
width=arrow_scale * self.A[j, i],
arrow_curvature=arrow_curvature,
patchA=circles[j], patchB=circles[i],
shrinkA=3, shrinkB=0)
# plot
plt.xlim(xmin, xmax)
plt.ylim(ymin, ymax)
rcParams['font.size'] = old_fontsize
return fig
def _find_best_positions(self, G):
"""Finds best positions for the given graph (given as adjacency matrix)
nodes by minimizing a network potential.
"""
initpos = None
holddim = None
        # nothing to do if the user fixed both coordinates
        if self.xpos is not None and self.ypos is not None:
            return np.vstack((self.xpos, self.ypos)).T
        if self.xpos is not None:
            y = np.random.random(len(self.xpos))
            initpos = np.vstack((self.xpos, y)).T
            holddim = 0
        elif self.ypos is not None:
            x = np.zeros_like(self.ypos)
            initpos = np.vstack((x, self.ypos)).T
            holddim = 1
best_pos = _fruchterman_reingold(G, pos=initpos, dim=2, hold_dim=holddim)
# rescale fixed to user settings and balance the other coordinate
if self.xpos is not None:
# rescale x to fixed value
best_pos[:, 0] *= (np.max(self.xpos) - np.min(self.xpos)
) / (np.max(best_pos[:, 0]) - np.min(best_pos[:, 0]))
best_pos[:, 0] += np.min(self.xpos) - np.min(best_pos[:, 0])
# rescale y to balance
if np.max(best_pos[:, 1]) - np.min(best_pos[:, 1]) > 0.01:
best_pos[:, 1] *= (np.max(self.xpos) - np.min(self.xpos)
) / (np.max(best_pos[:, 1]) - np.min(best_pos[:, 1]))
if self.ypos is not None:
best_pos[:, 1] *= (np.max(self.ypos) - np.min(self.ypos)
) / (np.max(best_pos[:, 1]) - np.min(best_pos[:, 1]))
best_pos[:, 1] += np.min(self.ypos) - np.min(best_pos[:, 1])
# rescale x to balance
if np.max(best_pos[:, 0]) - np.min(best_pos[:, 0]) > 0.01:
best_pos[:, 0] *= (np.max(self.ypos) - np.min(self.ypos)
) / (np.max(best_pos[:, 0]) - np.min(best_pos[:, 0]))
return best_pos
def layout_automatic(self):
n = len(self.A)
I, J = np.where(self.A > 0.0)
        # note: against intuition this has to be of type float
A = np.zeros((n, n))
A[I, J] = 1
self.pos = self._find_best_positions(A)
def plot_markov_model(P, pos=None, state_sizes=None, state_scale=1.0,
state_colors='#ff5500', minflux=1e-6,
arrow_scale=1.0, arrow_curvature=1.0,
arrow_labels='weights', arrow_label_format='%2.e',
max_width=12, max_height=12, figpadding=0.2):
r"""Plots a network representation of a Markov model transition matrix
This visualization is not optimized for large matrices. It is meant to be
used for the visualization of small models with up to 10-20 states, e.g.
    obtained by an HMM coarse-graining. If used with a large network, the automatic
node positioning will be very slow and may still look ugly.
Parameters
----------
P : ndarray(n,n) or MSM object with attribute 'transition matrix'
Transition matrix or MSM object
pos : ndarray(n,2), optional, default=None
User-defined positions to draw the states on. If not given, will try
to place them automatically.
state_sizes : ndarray(n), optional, default=None
User-defined areas of the discs drawn for each state. If not given,
the stationary probability of P will be used.
state_colors : string or ndarray(n), optional, default='#ff5500' (orange)
Either a string with a Hex code for a single color used for all states,
or an array of values in [0,1] which will result in a grayscale plot
minflux : float, optional, default=1e-6
The minimal flux (p_i * p_ij) for a transition to be drawn
arrow_scale : float, optional, default=1.0
Relative arrow scale. Set to a value different from 1 to increase
or decrease the arrow width.
arrow_curvature : float, optional, default=1.0
Relative arrow curvature. Set to a value different from 1 to make
arrows more or less curved.
arrow_labels : 'weights', None or a ndarray(n,n) with label strings. Optional, default='weights'
Strings to be placed upon arrows. If None, no labels will be used.
If 'weights', the elements of P will be used. If a matrix of strings is
given by the user these will be used.
arrow_label_format : str, optional, default='%10.2f'
The numeric format to print the arrow labels
max_width = 12
The maximum figure width
max_height = 12
The maximum figure height
figpadding = 0.2
The relative figure size used for the padding
Returns
-------
fig, pos : matplotlib.Figure, ndarray(n,2)
a Figure object containing the plot and the positions of states.
Can be used later to plot a different network representation (e.g. the flux)
Examples
--------
>>> P = np.array([[0.8, 0.15, 0.05, 0.0, 0.0],
... [0.1, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.1, 0.8, 0.0, 0.05],
... [0.0, 0.2, 0.0, 0.8, 0.0],
... [0.0, 0.02, 0.02, 0.0, 0.96]])
>>> plot_markov_model(P) # doctest:+ELLIPSIS
(<matplotlib.figure.Figure..., array...)
"""
from pyemma.msm import analysis as msmana
if isinstance(P, np.ndarray):
P = P.copy()
else:
# MSM object? then get transition matrix first
P = P.transition_matrix.copy()
if state_sizes is None:
state_sizes = msmana.stationary_distribution(P)
if minflux > 0:
F = np.dot(np.diag(msmana.stationary_distribution(P)), P)
I, J = np.where(F < minflux)
P[I, J] = 0.0
plot = NetworkPlot(P, pos=pos)
ax = plot.plot_network(state_sizes=state_sizes, state_scale=state_scale,
state_colors=state_colors,
arrow_scale=arrow_scale, arrow_curvature=arrow_curvature,
arrow_labels=arrow_labels,
arrow_label_format=arrow_label_format,
max_width=max_width, max_height=max_height,
figpadding=figpadding, xticks=False, yticks=False)
return ax, plot.pos
def plot_flux(flux, pos=None, state_sizes=None, state_scale=1.0,
state_colors='#ff5500', minflux=1e-9,
arrow_scale=1.0, arrow_curvature=1.0, arrow_labels='weights',
arrow_label_format='%2.e', max_width=12, max_height=12,
figpadding=0.2, attribute_to_plot='net_flux'):
r"""Plots a network representation of the reactive flux
This visualization is not optimized for large fluxes. It is meant to be used
for the visualization of small models with up to 10-20 states, e.g. obtained
by a PCCA-based coarse-graining of the full flux. If used with large
network, the automatic node positioning will be very slow and may still look
ugly.
Parameters
----------
flux : :class:`ReactiveFlux <pyemma.msm.flux.ReactiveFlux>`
reactive flux object
pos : ndarray(n,2), optional, default=None
User-defined positions to draw the states on. If not given, will set the
x coordinates equal to the committor probability and try to place the y
coordinates automatically
state_sizes : ndarray(n), optional, default=None
User-defined areas of the discs drawn for each state. If not given, the
stationary probability of P will be used
state_colors : string or ndarray(n), optional, default='#ff5500' (orange)
Either a string with a Hex code for a single color used for all states,
or an array of values in [0,1] which will result in a grayscale plot
minflux : float, optional, default=1e-9
The minimal flux for a transition to be drawn
arrow_scale : float, optional, default=1.0
Relative arrow scale. Set to a value different from 1 to increase or
decrease the arrow width.
arrow_curvature : float, optional, default=1.0
Relative arrow curvature. Set to a value different from 1 to make arrows
more or less curved.
arrow_labels : 'weights', None or a ndarray(n,n) with label strings. Optional, default='weights'
Strings to be placed upon arrows. If None, no labels will be used. If
'weights', the elements of P will be used. If a matrix of strings is
given by the user these will be used.
arrow_label_format : str, optional, default='%10.2f'
The numeric format to print the arrow labels
max_width : int (default = 12)
The maximum figure width
max_height: int (default = 12)
The maximum figure height
figpadding: float (default = 0.2)
The relative figure size used for the padding
Returns
-------
    (fig, pos) : matplotlib.Figure instance, ndarray
Axes instances containing the plot. Use pyplot.show() to display it.
The positions of states. Can be used later to plot a different network
representation (e.g. the flux).
Examples
--------
    We first define a reactive flux by taking the following transition
matrix and computing TPT from state 2 to 3
>>> import numpy as np
>>> P = np.array([[0.8, 0.15, 0.05, 0.0, 0.0],
... [0.1, 0.75, 0.05, 0.05, 0.05],
... [0.05, 0.1, 0.8, 0.0, 0.05],
... [0.0, 0.2, 0.0, 0.8, 0.0],
... [0.0, 0.02, 0.02, 0.0, 0.96]])
>>> from pyemma import msm
>>> F = msm.tpt(msm.markov_model(P), [2], [3])
>>> F.flux[:] *= 100
Scale the flux by 100 is basically a change of units to get numbers close
to 1 (avoid printing many zeros). Now we visualize the flux:
>>> plot_flux(F) # doctest:+ELLIPSIS
(<matplotlib.figure.Figure..., array...)
"""
F = getattr(flux, attribute_to_plot)
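    # zero out fluxes below the minflux threshold so that negligible arrows are not drawn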
if minflux > 0:
I, J = np.where(F < minflux)
F[I, J] = 0.0
c = flux.committor
if state_sizes is None:
state_sizes = flux.stationary_distribution
plot = NetworkPlot(F, pos=pos, xpos=c)
ax = plot.plot_network(state_sizes=state_sizes, state_scale=state_scale,
state_colors=state_colors,
arrow_scale=arrow_scale, arrow_curvature=arrow_curvature,
arrow_labels=arrow_labels,
arrow_label_format=arrow_label_format,
max_width=max_width, max_height=max_height,
figpadding=figpadding, xticks=True, yticks=False)
plt.xlabel('Committor probability')
return ax, plot.pos<|fim▁end|> | for _ in range(iterations): |
<|file_name|>l-calc_ms.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="ms_MY">
<context>
<name>mainUI</name>
<message>
<location filename="../mainUI.ui" line="14"/>
<location filename="../mainUI.cpp" line="53"/>
<source>Calculator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.ui" line="657"/>
<source>Advanced Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="83"/>
<source>Percentage %1</source>
<translation type="unfinished"></translation>
</message>
<message><|fim▁hole|> <location filename="../mainUI.cpp" line="85"/>
<source>Power %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="87"/>
<source>Base-10 Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="89"/>
<source>Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="91"/>
<source>Constant Pi %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="94"/>
<source>Square Root %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="96"/>
<source>Logarithm %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="98"/>
<source>Natural Log %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="101"/>
<source>Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="103"/>
<source>Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="105"/>
<source>Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="108"/>
<source>Arc Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="110"/>
<source>Arc Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="112"/>
<source>Arc Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="115"/>
<source>Hyperbolic Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="117"/>
<source>Hyperbolic Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="119"/>
<source>Hyperbolic Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="182"/>
<source>Save Calculator History</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//!Ingame related
pub mod data;
pub mod event;
pub mod request;
pub mod state;
pub use self::data::Data;<|fim▁hole|>pub use self::request::Request;
pub use self::state::State;<|fim▁end|> | pub use self::event::Event; |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/* eslint-disable new-cap */
/* eslint-disable no-console */
const express = require('express');
const morgan = require('morgan');
const bodyParser = require('body-parser');
const mongoose = require('mongoose');
const routes = require('./server/routes');
const config = require('./server/config');
const app = express();
const apiRouter = express.Router();
const env = process.env.NODE_ENV;
if (env === 'test') {
config.database = config.test_database;
config.port = 8080;
}
app.use(morgan('dev'));
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use('/api', routes(apiRouter));
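// Minimal CORS middleware: allow any origin plus the methods and headers the API uses.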
app.use((req, res, next) => {
  res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE');
res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With, content-type');
next();
});
mongoose.connect(config.database, (err) => {
if (err) {
console.log('connection error', err);
} else {
console.log('connection successful');
}
});
<|fim▁hole|> if (err) {
console.log('Connection error', err);
} else {
console.log('Listening on port:', config.port);
}
});
module.exports = app;<|fim▁end|> | app.listen(config.port, (err) => { |
<|file_name|>acceptance.py<|end_file_name|><|fim▁begin|>"""
This config file extends the test environment configuration
so that we can run the lettuce acceptance tests.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
from .test import *
from .sauce import *
# You need to start the server in debug mode,
# otherwise the browser will not render the pages correctly
DEBUG = True
SITE_NAME = 'localhost:{}'.format(LETTUCE_SERVER_PORT)
# Output Django logs to a file
import logging
logging.basicConfig(filename=TEST_ROOT / "log" / "lms_acceptance.log", level=logging.ERROR)
# set root logger level
logging.getLogger().setLevel(logging.ERROR)
import os
from random import choice
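# Use the parent process id so that each acceptance run gets its own Mongo
# modulestore/contentstore database and collection names.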
def seed():
return os.getppid()
# Silence noisy logs
LOG_OVERRIDES = [
('track.middleware', logging.CRITICAL),
('codejail.safe_exec', logging.ERROR),
('edx.courseware', logging.ERROR),
('audit', logging.ERROR),
('instructor_task.api_helper', logging.ERROR),
]
for log_name, log_level in LOG_OVERRIDES:
logging.getLogger(log_name).setLevel(log_level)
update_module_store_settings(
MODULESTORE,
doc_store_settings={
'db': 'acceptance_xmodule',
'collection': 'acceptance_modulestore_%s' % seed(),
},
module_store_options={
'fs_root': TEST_ROOT / "data",
},
default_store=os.environ.get('DEFAULT_STORE', 'draft'),
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': 'localhost',
'db': 'acceptance_xcontent_%s' % seed(),
}
}
# Set this up so that 'paver lms --settings=acceptance' and running the
# harvest command both use the same (test) database
# which they can flush without messing up your dev db
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / "db" / "test_edx.db",
'TEST_NAME': TEST_ROOT / "db" / "test_edx.db",
'OPTIONS': {<|fim▁hole|> }
}
TRACKING_BACKENDS.update({
'mongo': {
'ENGINE': 'track.backends.mongodb.MongoBackend'
}
})
EVENT_TRACKING_BACKENDS['tracking_logs']['OPTIONS']['backends'].update({
'mongo': {
'ENGINE': 'eventtracking.backends.mongodb.MongoBackend',
'OPTIONS': {
'database': 'track'
}
}
})
BULK_EMAIL_DEFAULT_FROM_EMAIL = "[email protected]"
# Forums are disabled in test.py to speed up unit tests, but we do not have
# per-test control for lettuce acceptance tests.
# If you are writing an acceptance test that needs the discussion service enabled,
# do not write it in lettuce, but instead write it using bok-choy.
# DO NOT CHANGE THIS SETTING HERE.
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
# Use the auto_auth workflow for creating users and logging them in
FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] = True
# Enable third-party authentication
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
THIRD_PARTY_AUTH = {
"Google": {
"SOCIAL_AUTH_GOOGLE_OAUTH2_KEY": "test",
"SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET": "test"
},
"Facebook": {
"SOCIAL_AUTH_FACEBOOK_KEY": "test",
"SOCIAL_AUTH_FACEBOOK_SECRET": "test"
}
}
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Enable email on the instructor dash
FEATURES['ENABLE_INSTRUCTOR_EMAIL'] = True
FEATURES['REQUIRE_COURSE_EMAIL_AUTH'] = False
FEATURES['ENABLE_SPECIAL_EXAMS'] = True
# Don't actually send any requests to Software Secure for student identity
# verification.
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
# HACK
# Setting this flag to false causes imports to not load correctly in the lettuce python files
# We do not yet understand why this occurs. Setting this to true is a stopgap measure
USE_I18N = True
FEATURES['ENABLE_FEEDBACK_SUBMISSION'] = False
# Include the lettuce app for acceptance testing, including the 'harvest' django-admin command
INSTALLED_APPS += ('lettuce.django',)
LETTUCE_APPS = ('courseware', 'instructor')
# Lettuce appears to have a bug that causes it to search
# `instructor_task` when we specify the `instructor` app.
# This causes some pretty cryptic errors as lettuce tries
# to parse files in `instructor_task` as features.
# As a quick workaround, explicitly exclude the `instructor_task` app.
LETTUCE_AVOID_APPS = ('instructor_task',)
LETTUCE_BROWSER = os.environ.get('LETTUCE_BROWSER', 'chrome')
# Where to run: local, saucelabs, or grid
LETTUCE_SELENIUM_CLIENT = os.environ.get('LETTUCE_SELENIUM_CLIENT', 'local')
SELENIUM_GRID = {
'URL': 'http://127.0.0.1:4444/wd/hub',
'BROWSER': LETTUCE_BROWSER,
}
#####################################################################
# See if the developer has any local overrides.
try:
from .private import * # pylint: disable=import-error
except ImportError:
pass
# Because an override for where to run will affect which ports to use,
# set these up after the local overrides.
# Configure XQueue interface to use our stub XQueue server
XQUEUE_INTERFACE = {
"url": "http://127.0.0.1:{0:d}".format(XQUEUE_PORT),
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
# Point the URL used to test YouTube availability to our stub YouTube server
YOUTUBE['API'] = "http://127.0.0.1:{0}/get_youtube_api/".format(YOUTUBE_PORT)
YOUTUBE['METADATA_URL'] = "http://127.0.0.1:{0}/test_youtube/".format(YOUTUBE_PORT)
YOUTUBE['TEXT_API']['url'] = "127.0.0.1:{0}/test_transcripts_youtube/".format(YOUTUBE_PORT)
if FEATURES.get('ENABLE_COURSEWARE_SEARCH') or \
FEATURES.get('ENABLE_DASHBOARD_SEARCH') or \
FEATURES.get('ENABLE_COURSE_DISCOVERY'):
# Use MockSearchEngine as the search engine for test scenario
SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine"
# Generate a random UUID so that different runs of acceptance tests don't break each other
import uuid
SECRET_KEY = uuid.uuid4().hex
ANONYMOUS_ID_SECRET_KEY = SECRET_KEY
USERNAME_CIPHER_SECRET_KEY = SECRET_KEY
############################### PIPELINE #######################################
PIPELINE_ENABLED = False
# We want to make sure that any new migrations are run
# see https://groups.google.com/forum/#!msg/django-developers/PWPj3etj3-U/kCl6pMsQYYoJ
MIGRATION_MODULES = {}<|fim▁end|> | 'timeout': 30,
},
'ATOMIC_REQUESTS': True, |
<|file_name|>font_cache_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use font_template::{FontTemplate, FontTemplateDescriptor};
use fontsan;
use ipc_channel::ipc::{self, IpcReceiver, IpcSender};
use net_traits::{CoreResourceThread, FetchResponseMsg, fetch_async};
use net_traits::request::{Destination, RequestInit};
use platform::font_context::FontContextHandle;
use platform::font_list::SANS_SERIF_FONT_FAMILY;
use platform::font_list::for_each_available_family;
use platform::font_list::for_each_variation;
use platform::font_list::last_resort_font_families;
use platform::font_list::system_default_family;
use platform::font_template::FontTemplateData;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::fmt;
use std::mem;
use std::ops::Deref;
use std::sync::{Arc, Mutex};
use std::thread;
use std::u32;
use style::font_face::{EffectiveSources, Source};
use style::properties::longhands::font_family::computed_value::{FontFamily, FamilyName};
use webrender_api;
/// A list of font templates that make up a given font family.
struct FontTemplates {
templates: Vec<FontTemplate>,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct FontTemplateInfo {
pub font_template: Arc<FontTemplateData>,
pub font_key: webrender_api::FontKey,
}
impl FontTemplates {
fn new() -> FontTemplates {
FontTemplates {
templates: vec!(),
}
}
/// Find a font in this family that matches a given descriptor.
fn find_font_for_style(&mut self, desc: &FontTemplateDescriptor, fctx: &FontContextHandle)
-> Option<Arc<FontTemplateData>> {
// TODO(Issue #189): optimize lookup for
// regular/bold/italic/bolditalic with fixed offsets and a
// static decision table for fallback between these values.
for template in &mut self.templates {
let maybe_template = template.data_for_descriptor(fctx, desc);
if maybe_template.is_some() {
return maybe_template;
}
}
// We didn't find an exact match. Do more expensive fuzzy matching.
// TODO(#190): Do a better job.
let (mut best_template_data, mut best_distance) = (None, u32::MAX);
for template in &mut self.templates {
if let Some((template_data, distance)) =
template.data_for_approximate_descriptor(fctx, desc) {
if distance < best_distance {
best_template_data = Some(template_data);
best_distance = distance
}
}
}
if best_template_data.is_some() {
return best_template_data
}
// If a request is made for a font family that exists,
// pick the first valid font in the family if we failed
// to find an exact match for the descriptor.
for template in &mut self.templates {
let maybe_template = template.get();
if maybe_template.is_some() {
return maybe_template;
}
}
None
}
fn add_template(&mut self, identifier: Atom, maybe_data: Option<Vec<u8>>) {
for template in &self.templates {
if *template.identifier() == identifier {
return;
}
}
if let Ok(template) = FontTemplate::new(identifier, maybe_data) {
self.templates.push(template);
}
}
}
/// Commands that the FontContext sends to the font cache thread.
#[derive(Debug, Deserialize, Serialize)]
pub enum Command {
GetFontTemplate(FontFamily, FontTemplateDescriptor, IpcSender<Reply>),
GetLastResortFontTemplate(FontTemplateDescriptor, IpcSender<Reply>),
GetFontInstance(webrender_api::FontKey, Au, IpcSender<webrender_api::FontInstanceKey>),
AddWebFont(LowercaseString, EffectiveSources, IpcSender<()>),
AddDownloadedWebFont(LowercaseString, ServoUrl, Vec<u8>, IpcSender<()>),
Exit(IpcSender<()>),
}
/// Reply messages sent from the font cache thread to the FontContext caller.
#[derive(Debug, Deserialize, Serialize)]
pub enum Reply {
GetFontTemplateReply(Option<FontTemplateInfo>),
}
/// The font cache thread itself. It maintains a list of reference counted
/// font templates that are currently in use.
struct FontCache {
port: IpcReceiver<Command>,
channel_to_self: IpcSender<Command>,
generic_fonts: HashMap<FontFamily, LowercaseString>,
local_families: HashMap<LowercaseString, FontTemplates>,
web_families: HashMap<LowercaseString, FontTemplates>,
font_context: FontContextHandle,
core_resource_thread: CoreResourceThread,
webrender_api: webrender_api::RenderApi,
webrender_fonts: HashMap<Atom, webrender_api::FontKey>,
font_instances: HashMap<(webrender_api::FontKey, Au), webrender_api::FontInstanceKey>,
}
fn populate_generic_fonts() -> HashMap<FontFamily, LowercaseString> {
let mut generic_fonts = HashMap::with_capacity(5);
append_map(&mut generic_fonts, FontFamily::Generic(atom!("serif")), "Times New Roman");
append_map(&mut generic_fonts, FontFamily::Generic(atom!("sans-serif")), SANS_SERIF_FONT_FAMILY);
append_map(&mut generic_fonts, FontFamily::Generic(atom!("cursive")), "Apple Chancery");
append_map(&mut generic_fonts, FontFamily::Generic(atom!("fantasy")), "Papyrus");
append_map(&mut generic_fonts, FontFamily::Generic(atom!("monospace")), "Menlo");
fn append_map(generic_fonts: &mut HashMap<FontFamily, LowercaseString>,
font_family: FontFamily,
mapped_name: &str) {
let family_name = {
let opt_system_default = system_default_family(font_family.name());
match opt_system_default {
Some(system_default) => LowercaseString::new(&system_default),
None => LowercaseString::new(mapped_name)
}
};
generic_fonts.insert(font_family, family_name);
}
generic_fonts
}
impl FontCache {
fn run(&mut self) {
loop {
let msg = self.port.recv().unwrap();
match msg {
Command::GetFontTemplate(family, descriptor, result) => {
let maybe_font_template = self.find_font_template(&family, &descriptor);
let _ = result.send(Reply::GetFontTemplateReply(maybe_font_template));
}
Command::GetLastResortFontTemplate(descriptor, result) => {
let font_template = self.last_resort_font_template(&descriptor);
let _ = result.send(Reply::GetFontTemplateReply(Some(font_template)));
}
Command::GetFontInstance(font_key, size, result) => {
let webrender_api = &self.webrender_api;
let instance_key = *self.font_instances
.entry((font_key, size))
.or_insert_with(|| {
let key = webrender_api.generate_font_instance_key();
let mut updates = webrender_api::ResourceUpdates::new();
updates.add_font_instance(key,
font_key,
size,
None,
None,
Vec::new());
webrender_api.update_resources(updates);
key
});
let _ = result.send(instance_key);
}
Command::AddWebFont(family_name, sources, result) => {
self.handle_add_web_font(family_name, sources, result);
}
Command::AddDownloadedWebFont(family_name, url, bytes, result) => {
let templates = &mut self.web_families.get_mut(&family_name).unwrap();
templates.add_template(Atom::from(url.to_string()), Some(bytes));
drop(result.send(()));
}
Command::Exit(result) => {
let _ = result.send(());
break;
}
}
}
}
fn handle_add_web_font(&mut self,
family_name: LowercaseString,
mut sources: EffectiveSources,
sender: IpcSender<()>) {
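        // Take the next @font-face source; when a fetch later fails, an AddWebFont
        // command carrying the remaining sources is re-sent to this thread, so
        // fallback sources are tried lazily, one at a time.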
let src = if let Some(src) = sources.next() {
src
} else {
sender.send(()).unwrap();
return;
};
if !self.web_families.contains_key(&family_name) {
let templates = FontTemplates::new();
self.web_families.insert(family_name.clone(), templates);
}
match src {
Source::Url(url_source) => {
// https://drafts.csswg.org/css-fonts/#font-fetching-requirements
let url = match url_source.url.url() {
Some(url) => url.clone(),
None => return,
};
let request = RequestInit {
url: url.clone(),
destination: Destination::Font,
// TODO: Add a proper origin - Can't import GlobalScope from gfx
// We can leave origin to be set by default
.. RequestInit::default()
};
let channel_to_self = self.channel_to_self.clone();
let bytes = Mutex::new(Vec::new());
let response_valid = Mutex::new(false);
debug!("Loading @font-face {} from {}", family_name, url);
fetch_async(request, &self.core_resource_thread, move |response| {
match response {
FetchResponseMsg::ProcessRequestBody |
FetchResponseMsg::ProcessRequestEOF => (),
FetchResponseMsg::ProcessResponse(meta_result) => {
trace!("@font-face {} metadata ok={:?}", family_name, meta_result.is_ok());
*response_valid.lock().unwrap() = meta_result.is_ok();
}
FetchResponseMsg::ProcessResponseChunk(new_bytes) => {
trace!("@font-face {} chunk={:?}", family_name, new_bytes);
if *response_valid.lock().unwrap() {
bytes.lock().unwrap().extend(new_bytes.into_iter())
}
}
FetchResponseMsg::ProcessResponseEOF(response) => {
trace!("@font-face {} EOF={:?}", family_name, response);
if response.is_err() || !*response_valid.lock().unwrap() {
let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone());
channel_to_self.send(msg).unwrap();
return;
}
let bytes = mem::replace(&mut *bytes.lock().unwrap(), vec![]);
trace!("@font-face {} data={:?}", family_name, bytes);
let bytes = match fontsan::process(&bytes) {
Ok(san) => san,
Err(_) => {
// FIXME(servo/fontsan#1): get an error message
debug!("Sanitiser rejected web font: \
family={} url={:?}", family_name, url);
let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone());
channel_to_self.send(msg).unwrap();
return;
},
};
let command =
Command::AddDownloadedWebFont(family_name.clone(),
url.clone(),
bytes,
sender.clone());
channel_to_self.send(command).unwrap();
}
}
});
}
Source::Local(ref font) => {
let font_face_name = LowercaseString::new(&font.name);
let templates = &mut self.web_families.get_mut(&family_name).unwrap();
let mut found = false;
for_each_variation(&font_face_name, |path| {
found = true;
templates.add_template(Atom::from(&*path), None);
});
if found {
sender.send(()).unwrap();
} else {
let msg = Command::AddWebFont(family_name, sources, sender);
self.channel_to_self.send(msg).unwrap();
}
}
}
}
fn refresh_local_families(&mut self) {
self.local_families.clear();
for_each_available_family(|family_name| {
let family_name = LowercaseString::new(&family_name);
if !self.local_families.contains_key(&family_name) {
let templates = FontTemplates::new();
self.local_families.insert(family_name, templates);
}
});
}
fn transform_family(&self, family: &FontFamily) -> LowercaseString {
match self.generic_fonts.get(family) {
None => LowercaseString::new(family.name()),
Some(mapped_family) => (*mapped_family).clone()
}
}
fn find_font_in_local_family(&mut self, family_name: &LowercaseString, desc: &FontTemplateDescriptor)
-> Option<Arc<FontTemplateData>> {
// TODO(Issue #188): look up localized font family names if canonical name not found
// look up canonical name
if self.local_families.contains_key(family_name) {
debug!("FontList: Found font family with name={}", &**family_name);
let s = self.local_families.get_mut(family_name).unwrap();
if s.templates.is_empty() {
for_each_variation(family_name, |path| {
s.add_template(Atom::from(&*path), None);
});
}
            // TODO(Issue #192): handle generic font families, like 'serif' and 'sans-serif'.
// if such family exists, try to match style to a font
s.find_font_for_style(desc, &self.font_context)
} else {
debug!("FontList: Couldn't find font family with name={}", &**family_name);
None
}
}
fn find_font_in_web_family(&mut self, family: &FontFamily, desc: &FontTemplateDescriptor)
-> Option<Arc<FontTemplateData>> {
let family_name = LowercaseString::new(family.name());
if self.web_families.contains_key(&family_name) {
let templates = self.web_families.get_mut(&family_name).unwrap();
templates.find_font_for_style(desc, &self.font_context)
} else {
None
}
}
fn get_font_template_info(&mut self, template: Arc<FontTemplateData>) -> FontTemplateInfo {
let webrender_api = &self.webrender_api;
let webrender_fonts = &mut self.webrender_fonts;
let font_key = *webrender_fonts.entry(template.identifier.clone()).or_insert_with(|| {
let font_key = webrender_api.generate_font_key();
let mut updates = webrender_api::ResourceUpdates::new();
match (template.bytes_if_in_memory(), template.native_font()) {
(Some(bytes), _) => updates.add_raw_font(font_key, bytes, 0),
(None, Some(native_font)) => updates.add_native_font(font_key, native_font),
(None, None) => updates.add_raw_font(font_key, template.bytes().clone(), 0),
}
webrender_api.update_resources(updates);
font_key
});
FontTemplateInfo {
font_template: template,
font_key: font_key,
}
}
fn find_font_template(&mut self, family: &FontFamily, desc: &FontTemplateDescriptor)
-> Option<FontTemplateInfo> {
let template = self.find_font_in_web_family(family, desc)
.or_else(|| {
let transformed_family = self.transform_family(family);
self.find_font_in_local_family(&transformed_family, desc)
});
template.map(|template| {
self.get_font_template_info(template)
})
}
fn last_resort_font_template(&mut self, desc: &FontTemplateDescriptor)
-> FontTemplateInfo {
let last_resort = last_resort_font_families();
for family in &last_resort {
let family = LowercaseString::new(family);
let maybe_font_in_family = self.find_font_in_local_family(&family, desc);
if let Some(family) = maybe_font_in_family {
return self.get_font_template_info(family)
}
}
panic!("Unable to find any fonts that match (do you have fallback fonts installed?)");
}
}
/// The public interface to the font cache thread, used exclusively by
/// the per-thread FontContext structures.
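///
/// A rough usage sketch (assumes a `CoreResourceThread` and a WebRender
/// `RenderApi` handle are already available; the variable names below are
/// illustrative only):
///
/// ```ignore
/// let font_cache_thread = FontCacheThread::new(core_resource_thread, webrender_api);
/// let template_info = font_cache_thread.find_font_template(family, descriptor);
/// ```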
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct FontCacheThread {
chan: IpcSender<Command>,
}
impl FontCacheThread {
pub fn new(core_resource_thread: CoreResourceThread,
webrender_api: webrender_api::RenderApi) -> FontCacheThread {
let (chan, port) = ipc::channel().unwrap();
let channel_to_self = chan.clone();
thread::Builder::new().name("FontCacheThread".to_owned()).spawn(move || {
// TODO: Allow users to specify these.
let generic_fonts = populate_generic_fonts();
let mut cache = FontCache {
port: port,
channel_to_self,
generic_fonts,
local_families: HashMap::new(),
web_families: HashMap::new(),
font_context: FontContextHandle::new(),
core_resource_thread,
webrender_api,
webrender_fonts: HashMap::new(),
font_instances: HashMap::new(),
};
cache.refresh_local_families();
cache.run();
}).expect("Thread spawning failed");
FontCacheThread {
chan: chan,
}
}
pub fn find_font_template(&self, family: FontFamily, desc: FontTemplateDescriptor)
-> Option<FontTemplateInfo> {
let (response_chan, response_port) =
ipc::channel().expect("failed to create IPC channel");
self.chan.send(Command::GetFontTemplate(family, desc, response_chan))
.expect("failed to send message to font cache thread");
let reply = response_port.recv()
.expect("failed to receive response to font request");
match reply {
Reply::GetFontTemplateReply(data) => {
data
}
}
}
pub fn last_resort_font_template(&self, desc: FontTemplateDescriptor)
-> FontTemplateInfo {
let (response_chan, response_port) =
ipc::channel().expect("failed to create IPC channel");
self.chan.send(Command::GetLastResortFontTemplate(desc, response_chan))
.expect("failed to send message to font cache thread");
let reply = response_port.recv()
.expect("failed to receive response to font request");
match reply {
Reply::GetFontTemplateReply(data) => {
data.unwrap()
}
}
}
pub fn add_web_font(&self, family: FamilyName, sources: EffectiveSources, sender: IpcSender<()>) {
self.chan.send(Command::AddWebFont(LowercaseString::new(&family.name), sources, sender)).unwrap();
}
pub fn get_font_instance(&self, key: webrender_api::FontKey, size: Au) -> webrender_api::FontInstanceKey {
let (response_chan, response_port) =
ipc::channel().expect("failed to create IPC channel");
self.chan.send(Command::GetFontInstance(key, size, response_chan))
.expect("failed to send message to font cache thread");
let instance_key = response_port.recv()
.expect("failed to receive response to font request");
instance_key
}
pub fn exit(&self) {
let (response_chan, response_port) = ipc::channel().unwrap();
self.chan.send(Command::Exit(response_chan)).expect("Couldn't send FontCacheThread exit message");
response_port.recv().expect("Couldn't receive FontCacheThread reply");
}
}
<|fim▁hole|>pub struct LowercaseString {
inner: String,
}
impl LowercaseString {
pub fn new(s: &str) -> LowercaseString {
LowercaseString {
inner: s.to_lowercase(),
}
}
}
impl Deref for LowercaseString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&*self.inner
}
}
impl fmt::Display for LowercaseString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}<|fim▁end|> | #[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] |
<|file_name|>qt_compat.py<|end_file_name|><|fim▁begin|>"""
Provide a common way to import Qt classes used by pytest-qt in a unique manner,
abstracting API differences between PyQt5 and PySide2/6.
.. note:: This module is not part of pytest-qt public API, hence its interface
may change between releases and users should not rely on it.
Based on from https://github.com/epage/PythonUtils.
"""
from collections import namedtuple
import os
import pytest
VersionTuple = namedtuple("VersionTuple", "qt_api, qt_api_version, runtime, compiled")
def _import(name):
"""Think call so we can mock it during testing"""
return __import__(name)
class _QtApi:
"""
Interface to the underlying Qt API currently configured for pytest-qt.
This object lazily loads all class references and other objects when the ``set_qt_api`` method
gets called, providing a uniform way to access the Qt classes.
"""
def __init__(self):
self._import_errors = {}
def _get_qt_api_from_env(self):
api = os.environ.get("PYTEST_QT_API")
supported_apis = [
"pyside6",
"pyside2",
"pyqt6",
"pyqt5",
]
if api is not None:
api = api.lower()
if api not in supported_apis: # pragma: no cover
msg = f"Invalid value for $PYTEST_QT_API: {api}, expected one of {supported_apis}"
raise pytest.UsageError(msg)
return api
def _guess_qt_api(self): # pragma: no cover
def _can_import(name):
try:
_import(name)
return True
except ModuleNotFoundError as e:
self._import_errors[name] = str(e)
return False
# Note, not importing only the root namespace because when uninstalling from conda,
# the namespace can still be there.
if _can_import("PySide6.QtCore"):
return "pyside6"
elif _can_import("PySide2.QtCore"):
return "pyside2"
elif _can_import("PyQt6.QtCore"):
return "pyqt6"
elif _can_import("PyQt5.QtCore"):
return "pyqt5"
return None
def set_qt_api(self, api):
self.pytest_qt_api = self._get_qt_api_from_env() or api or self._guess_qt_api()
self.is_pyside = self.pytest_qt_api in ["pyside2", "pyside6"]
self.is_pyqt = self.pytest_qt_api in ["pyqt5", "pyqt6"]
if not self.pytest_qt_api: # pragma: no cover
errors = "\n".join(
f" {module}: {reason}"
for module, reason in sorted(self._import_errors.items())
)
msg = (
"pytest-qt requires either PySide2, PySide6, PyQt5 or PyQt6 installed.\n"
+ errors
)
raise pytest.UsageError(msg)
_root_modules = {
"pyside6": "PySide6",
"pyside2": "PySide2",
"pyqt6": "PyQt6",
"pyqt5": "PyQt5",
}
_root_module = _root_modules[self.pytest_qt_api]
def _import_module(module_name):
m = __import__(_root_module, globals(), locals(), [module_name], 0)
return getattr(m, module_name)
self.QtCore = QtCore = _import_module("QtCore")
self.QtGui = _import_module("QtGui")
self.QtTest = _import_module("QtTest")
self.QtWidgets = _import_module("QtWidgets")
self._check_qt_api_version()
# qInfo is not exposed in PySide2/6 (#232)
if hasattr(QtCore, "QMessageLogger"):
self.qInfo = lambda msg: QtCore.QMessageLogger().info(msg)
elif hasattr(QtCore, "qInfo"):
self.qInfo = QtCore.qInfo
else:
self.qInfo = None
self.qDebug = QtCore.qDebug
self.qWarning = QtCore.qWarning
self.qCritical = QtCore.qCritical
self.qFatal = QtCore.qFatal
if self.is_pyside:
self.Signal = QtCore.Signal
self.Slot = QtCore.Slot
self.Property = QtCore.Property
elif self.is_pyqt:
self.Signal = QtCore.pyqtSignal
self.Slot = QtCore.pyqtSlot
self.Property = QtCore.pyqtProperty
else:
assert False, "Expected either is_pyqt or is_pyside"
def _check_qt_api_version(self):
if not self.is_pyqt:
# We support all PySide versions
return
if self.QtCore.PYQT_VERSION == 0x060000: # 6.0.0
raise pytest.UsageError(
"PyQt 6.0 is not supported by pytest-qt, use 6.1+ instead."
)
elif self.QtCore.PYQT_VERSION < 0x050B00: # 5.11.0
raise pytest.UsageError(
"PyQt < 5.11 is not supported by pytest-qt, use 5.11+ instead."
)
def exec(self, obj, *args, **kwargs):
# exec was a keyword in Python 2, so PySide2 (and also PySide6 6.0)
# name the corresponding method "exec_" instead.
#
# The old _exec() alias is removed in PyQt6 and also deprecated as of
# PySide 6.1:
# https://codereview.qt-project.org/c/pyside/pyside-setup/+/342095
if hasattr(obj, "exec"):
return obj.exec(*args, **kwargs)
return obj.exec_(*args, **kwargs)
def get_versions(self):
if self.pytest_qt_api == "pyside6":
import PySide6
version = PySide6.__version__
return VersionTuple(
"PySide6", version, self.QtCore.qVersion(), self.QtCore.__version__
)
elif self.pytest_qt_api == "pyside2":
import PySide2
version = PySide2.__version__
return VersionTuple(
"PySide2", version, self.QtCore.qVersion(), self.QtCore.__version__
)
elif self.pytest_qt_api == "pyqt6":
return VersionTuple(
"PyQt6",
self.QtCore.PYQT_VERSION_STR,
self.QtCore.qVersion(),
self.QtCore.QT_VERSION_STR,
)
elif self.pytest_qt_api == "pyqt5":
return VersionTuple(
"PyQt5",
self.QtCore.PYQT_VERSION_STR,<|fim▁hole|>
assert False, f"Internal error, unknown pytest_qt_api: {self.pytest_qt_api}"
qt_api = _QtApi()<|fim▁end|> | self.QtCore.qVersion(),
self.QtCore.QT_VERSION_STR,
) |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from crm.models import Person
from geocodable.models import LocationAlias
import uuid
class Event(models.Model):
name = models.CharField(max_length=200)
timestamp = models.DateTimeField()
end_timestamp = models.DateTimeField()
attendees = models.ManyToManyField(Person, related_name='events', blank=True)
uid = models.CharField(max_length=200, blank=True)
location = models.ForeignKey(LocationAlias, default=None, blank=True,
null=True)
instance_id = models.CharField(max_length=200, blank=True)
@property
def geo(self):
return {'lat': self.lat, 'lng': self.lng}
@property
def lat(self):
if self.location is not None:
return self.location.lat
else:
return None
@property
def lng(self):
if self.location is not None:
return self.location.lng
else:
return None
def __unicode__(self):<|fim▁hole|> return "%s (%s)"%(self.name, self.timestamp)<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from wagtail.documents.views import serve
urlpatterns = [
url(r'^(\d+)/(.*)$', serve.serve, name='wagtaildocs_serve'),
url(r'^authenticate_with_password/(\d+)/$', serve.authenticate_with_password,
name='wagtaildocs_authenticate_with_password'),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>main.ts<|end_file_name|><|fim▁begin|>import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';<|fim▁hole|><|fim▁end|> |
import { AppModule } from './app/app.module';
platformBrowserDynamic().bootstrapModule(AppModule); |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
# Register your models here.
from rcps.models import *
class IngredientToRecipeInline(admin.TabularInline):
model = Ingredient.recipes.through
verbose_name = 'Ингредиент'
verbose_name_plural = 'Ингредиенты'
class EquipmentInline(admin.TabularInline):
model = Equipment.equipment_recipes.through
verbose_name = 'Инструмент'
verbose_name_plural = 'Инструменты'
class TagInline(admin.TabularInline):
model = Tag.tag_recipes.through
verbose_name = 'Тег'
verbose_name_plural = 'Теги'
class RecipeAdmin(admin.ModelAdmin):
model = Recipe
fields = ['recipe_name', 'recipe_link']
inlines = (
IngredientToRecipeInline,
EquipmentInline,
TagInline,
)
class IngredientComponentInAlternativeInline(admin.TabularInline):
model = IngredientAlternative.ingredients.through
verbose_name = 'Ингредиент'
verbose_name_plural = 'Ингредиенты'
<|fim▁hole|>class IngredientAlternativeAdmin(admin.ModelAdmin):
model = IngredientAlternative
inlines = (
IngredientComponentInAlternativeInline,
)
admin.site.register(Recipe, RecipeAdmin)
admin.site.register(Ingredient)
admin.site.register(IngredientAlternative, IngredientAlternativeAdmin)
admin.site.register(IngredientCategory)
admin.site.register(Equipment)
admin.site.register(EquipmentCategory)
admin.site.register(IngredientReplacement)
admin.site.register(Tag)<|fim▁end|> | |
<|file_name|>dns-cache.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.DNSCache = exports.DNS_DEFAULT_EXPIRE = void 0;
var _dns = _interopRequireDefault(require("dns"));
var _net = _interopRequireDefault(require("net"));
var _logger = require("./logger");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
async function lookup(hostname) {
return new Promise((resolve, reject) => {
_dns.default.lookup(hostname, function (err, address) {
if (err) {
reject(err);
} else {
resolve(address);
}
});
});
}
function now() {
return Date.now();
}
const DNS_DEFAULT_EXPIRE = 3600000;
exports.DNS_DEFAULT_EXPIRE = DNS_DEFAULT_EXPIRE;
class DNSCache {
static init(expire) {
if (typeof expire === 'number' && expire >= 0) {
DNSCache.expire = expire;
}
DNSCache.pool = {};
}
static async get(hostname) {
if (_net.default.isIP(hostname)) {
return hostname;
}
let address = null;
if (!DNSCache.pool[hostname]) {
address = await lookup(hostname);
DNSCache._put(hostname, address);
} else {
const [addr, expire] = DNSCache.pool[hostname];
const _now = now();
if (_now >= expire) {
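        // the cached entry has expired: evict it so the next call performs a fresh
        // lookup; the (possibly stale) address below is still returned for this call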
delete DNSCache.pool[hostname];
}
_logger.logger.verbose(`[dns-cache] hit: hostname=${hostname} resolved=${addr} ttl=${expire - _now}ms`);
address = addr;
}
return address;
}
static clear() {
DNSCache.pool = {};
}
static _put(hostname, address) {
if (DNSCache.expire > 0) {
const expire = now() + DNSCache.expire;
DNSCache.pool[hostname] = [address, expire];
}
}<|fim▁hole|>exports.DNSCache = DNSCache;
_defineProperty(DNSCache, "pool", {});
_defineProperty(DNSCache, "expire", DNS_DEFAULT_EXPIRE);<|fim▁end|> |
}
|
<|file_name|>include.py<|end_file_name|><|fim▁begin|>import util<|fim▁hole|>grid_margin_w = util.input.cfg_w / 6.0
grid_margin_h = util.input.cfg_h / 6.0
cell_w = util.input.cfg_w * 2.0 / 9.0
cell_h = util.input.cfg_h * 2.0 / 9.0
mark_none = []
mark_x = []
mark_o = []<|fim▁end|> | from util.include import *
|
<|file_name|>profile-controller.js<|end_file_name|><|fim▁begin|>angular.module('senseItWeb', null, null).controller('ProfileCtrl', function ($scope, OpenIdService, $state, fileReader) {
'use strict';
var _ = $scope._
, password_min = 6;
$scope.noyes = [
{value: '0', label: 'no'},
{value: '1', label: 'yes'}
];
if (!$scope.status.logged && $state.params.goBack) {
OpenIdService.registerWatcher($scope, function () {
if ($scope.status.logged) {
$state.go($state.previous);
}
});
}
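    // SiwFormManager is invoked throughout this controller as
    // (modelGetter, editableFields, onSave[, onCancel]); that reading is inferred
    // from the call sites below rather than from any documented contract.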
$scope.form = new SiwFormManager(function () {
if ($scope.status.profile.metadata === null) {
$scope.status.profile.metadata = {};
}
if ($scope.status.profile.visibility === null) {
$scope.status.profile.visibility = {};
}
return $scope.status.profile;
}, ['username', 'email', 'notify1', 'notify2', 'notify3', 'notify4', 'notify5', 'metadata', 'visibility'], function () {
$scope.status.newUser = false;
$scope.openIdService.saveProfile().then(function (data) {
$scope.formError = data.responses.username || null;
if ($scope.formError) {
$scope.form.open('username');
}
});
}, function () {
$scope.formError = null;
}
);
$scope.visibilityDisplay = function () {
var options = [
['metadata', 'Profile information'],
['projectsJoined', 'Joined projects'],
['projectsCreated', 'Projects created by me']
].filter(function (option) {
return $scope.status.profile.visibility &&
$scope.status.profile.visibility[option[0]];
});
if (options.length > 0) {
return options.map(function (option) {
return '<b>' + option[1] + '</b>';
}).join(', ');
} else {
return 'none';
}
};
$scope.imageForm = new SiwFormManager(function () {
return $scope.status.profile;
}, [], function () {
$scope.openIdService.saveProfileImage($scope.imageForm.files);
});
$scope.filelistener = {
previewFile: null,
set: function (key, file) {
$scope.imageForm.setFile(key, file);
this.updatePreview();
},
clear: function (key) {
$scope.imageForm.clearFile(key);
this.updatePreview();
},
deleteFile: function (key) {
$scope.imageForm.deleteFile(key);
this.updatePreview();
},
updatePreview: function () {
if ($scope.imageForm.files['image']) {
fileReader.readAsDataUrl($scope.imageForm.files['image'], $scope).then(function (result) {
$scope.filelistener.previewFile = result;
});
} else {
$scope.filelistener.previewFile = null;
}
}
};
$scope.filelistener.updatePreview();
$scope.logout = function () {
$scope.openIdService.logout();
};
$scope.providerLogin = function (provider, action) {
OpenIdService.providerLogin(provider, action);
};
$scope.deleteConnection = function (providerId) {
$scope.openIdService.deleteConnection(providerId);
};
$scope.formError = null;
$scope.formErrorText = function () {
switch ($scope.formError) {
case 'username_empty':
return 'Username cannot be empty';
case 'username_not_available':
return 'Username not available (already taken)';
default:
return '';
}
};
$scope.loginMode = {
mode: 'login',
set: function (mode) {
this.mode = mode;
},
is: function (mode) {
return this.mode === mode;
}
};
$scope.login = {
editing: {username: '', password: ''},
error: {
username: false,
password: false
},
clearPassword: function () {
var p = this.editing.password;
this.editing.password = "";
return p;
},
submit: function () {
var ok = true;
this.editing.username = this.editing.username.trim();
if (this.editing.username.length === 0) {
this.error.username = _('Username cannot be empty.');
ok = false;
}
if (this.editing.password.length === 0) {
this.error.password = _('Password cannot be empty.');
ok = false;
}
if (ok) {
var error = this.error;
error.username = null;
OpenIdService.login(this.editing.username, this.clearPassword(), function (data) {
error.password = data === 'false' ? _('Username & password do not match.') : null;
});
}
}
};
$scope.register = {
recaptcha: {siteKey: $scope.cfg.recaptcha.siteKey},
editing: {username: '', password: '', repeatPassword: '', email: ''},
error: {username: false, password: false, repeatPassword: false, email: false, recaptcha: false},
clearPassword: function () {
var p = this.editing.password;
this.editing.password = this.editing.repeatPassword = "";
return p;
},
reset: function () {
this.editing = {username: '', password: '', repeatPassword: '', email: ''};
this.error = {username: false, password: false, repeatPassword: false, email: false, recaptcha: false};
},
submit: function () {
var ok = true;
this.editing.username = this.editing.username.trim();
this.editing.email = this.editing.email.trim();
this.editing.recaptcha = angular.element("#g-recaptcha-response").val();
this.error = {username: false, password: false, repeatPassword: false, email: false, recaptcha: false};
if (this.editing.username.length === 0) {
this.error.username = _('Username cannot be empty.');
ok = false;
}
if (this.editing.email.length === 0) {
this.error.email = _('Email cannot be empty.');
ok = false;
}
if (this.editing.password.length < password_min) {
this.error.password = _('Password must have at least {{n}} characters.', { 'n': password_min });
ok = false;
}
if (this.editing.password !== this.editing.repeatPassword) {
this.error.repeatPassword = _('Passwords do not match.');
ok = false;
}
if (this.editing.recaptcha === '') {
this.error.recaptcha = _('Are you a human being or a robot?');
ok = false;
}
if (ok) {
var error = this.error = {username: false, password: false, repeatPassword: false, email: false};
OpenIdService.register(this.editing.username, this.clearPassword(), this.editing.email, this.editing.recaptcha).then(function (data) {
switch (data.responses.registration) {
case 'username_exists':
error.username = _('Username not available.');
break;
case 'email_exists':
error.email = _('Email already associated with a different account.');
break;
case 'bad_recaptcha':
error.recaptcha = _('Captcha failed. Try again.');
grecaptcha.reset();
break;
}
});
}
}
};
$scope.reminder = {
recaptcha: {siteKey: $scope.cfg.recaptcha.siteKey},
editing: {email: ''},
error: {email: false},
reset: function () {
this.editing = {email: ''};
this.error = {email: false};
},
submit: function () {
var ok = true;
this.editing.email = this.editing.email.trim();
this.editing.recaptcha = angular.element("#g-recaptcha-response").val();
if (this.editing.email.length === 0) {
this.error.email = _('Email cannot be empty.');
ok = false;
}
if (this.editing.recaptcha === '') {
this.error.recaptcha = _('Are you a human being or a robot?');
ok = false;
}
if (ok) {
var error = this.error = {email: false};
OpenIdService.reminder(this.editing.email, this.editing.recaptcha).then(function (data) {
grecaptcha.reset();
switch (data.responses.reminder) {
case 'email_not_exists':
error.email = _('No account found with that email or username.');
break;
case 'bad_recaptcha':
error.recaptcha = _('Captcha failed. Try again.');
break;
case 'reminder_sent':
error.email = _('A password reminder has been sent.');
break;
}
});
}
}
};
$scope.password = {
set: function () {
return $scope.status.profile.passwordSet;
},
editing: false,
error: {
oldPassword: false,
newPassword: false,
repeatPassword: false
},
edit: function () {
this.editing = {oldPassword: '', newPassword: '', repeatPassword: ''};
},
close: function () {
this.editing = false;
},
cancel: function () {
this.close();
},
save: function () {
var ok = true;
if (this.editing.newPassword.length < password_min) {
                this.error.newPassword = _('Password must have at least {{n}} characters.', { 'n': password_min });
ok = false;
}
if (this.editing.newPassword !== this.editing.repeatPassword) {
this.error.repeatPassword = _('Passwords do not match.');
ok = false;<|fim▁hole|> }
if (ok) {
var self = this;
var error = self.error;
error.repeatPassword = false;
OpenIdService.setPassword(this.editing.oldPassword, this.editing.newPassword).then(function (data) {
switch (data.responses.oldpassword) {
case 'bad_password':
error.oldPassword = 'Old password is not valid.';
break;
default:
error.oldPassword = false;
break;
}
switch (data.responses.newpassword) {
case 'too_short':
error.newPassword = 'New password is too short.';
break;
case 'same_as_username':
error.newPassword = 'New password cannot be equal to your username.';
break;
default:
error.newPassword = false;
break;
}
if (!error.repeatPassword && !error.newPassword && !error.oldPassword) {
self.close();
}
});
}
}
};
});<|fim▁end|> | |
<|file_name|>StaticContentWarTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.integration.staticcontent.war;
<|fim▁hole|>import org.jboss.arquillian.drone.api.annotation.Drone;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
import org.wildfly.swarm.arquillian.adapter.ArtifactDependencies;
import org.wildfly.swarm.integration.base.TestConstants;
import org.wildfly.swarm.integration.staticcontent.StaticContentCommonTests;
import org.wildfly.swarm.undertow.WARArchive;
import java.util.Arrays;
import java.util.List;
import static org.fest.assertions.Assertions.assertThat;
/**
* @author Bob McWhirter
*/
@RunWith(Arquillian.class)
public class StaticContentWarTest implements StaticContentCommonTests {
@Drone
WebDriver browser;
@Deployment
public static Archive createDeployment() throws Exception {
WARArchive deployment = ShrinkWrap.create(WARArchive.class);
deployment.staticContent();
return deployment;
}
@ArtifactDependencies
public static List<String> appDependencies() {
return Arrays.asList(
"org.wildfly.swarm:wildfly-swarm-undertow"
);
}
@RunAsClient
@Test
public void testStaticContent() throws Exception {
assertBasicStaticContentWorks("");
}
@Override
public void assertContains(String path, String content) throws Exception {
browser.navigate().to(TestConstants.DEFAULT_URL + path);
assertThat(browser.getPageSource()).contains(content);
}
@Override
public void assertNotFound(String path) throws Exception {
        browser.navigate().to(TestConstants.DEFAULT_URL + path);
        assertThat(browser.getPageSource()).contains("Not Found");
}
}<|fim▁end|> | import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient; |
<|file_name|>newrankdlg.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from PyQt4 import QtCore, QtGui
import widgets
import api.character.rankadv
class NextRankDlg(QtGui.QDialog):
def __init__(self, pc, parent=None):
super(NextRankDlg, self).__init__(parent)
self.pc = pc
self.build_ui()
self.connect_signals()
# self.setWindowFlags(QtCore.Qt.Tool)
self.setWindowTitle(self.tr("L5R: CM - Advance Rank"))
def build_ui(self):
vbox = QtGui.QVBoxLayout(self)
vbox.addWidget(QtGui.QLabel(self.tr("""\
You can now advance your Rank,
what would you like to do?
""")))
self.bt_go_on = QtGui.QPushButton(
self.tr("Advance in my current school")
)
self.bt_new_school = QtGui.QPushButton(
self.tr("Join a new school"))
for bt in [self.bt_go_on, self.bt_new_school]:
bt.setMinimumSize(QtCore.QSize(0, 38))
vbox.addWidget(self.bt_go_on)
vbox.addWidget(self.bt_new_school)
vbox.setSpacing(12)
is_path = api.data.schools.is_path(
api.character.schools.get_current()
)
# check if the PC is following an alternate path<|fim▁hole|> if is_path:
            # offer to go back
self.bt_go_on.setText(self.tr("Go back to your old school"))
def connect_signals(self):
self.bt_go_on.clicked.connect(self.simply_go_on)
self.bt_new_school.clicked.connect(self.join_new_school)
def join_new_school(self):
dlg = widgets.SchoolChooserDialog(self)
if dlg.exec_() == QtGui.QDialog.Rejected:
return
self.accept()
def simply_go_on(self):
is_path = api.data.schools.is_path(
api.character.schools.get_current()
)
# check if the PC is following an alternate path
if is_path:
# the PC want to go back to the old school.
# find the first school that is not a path
api.character.rankadv.leave_path()
else:
api.character.rankadv.advance_rank()
self.accept()
def test():
import sys
app = QtGui.QApplication(sys.argv)
dlg = NextRankDlg(None, None)
dlg.show()
sys.exit(app.exec_())
if __name__ == '__main__':
test()<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | """ nenga.address.migrations module """ |
<|file_name|>editorGroupModel.test.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { EditorGroupModel, ISerializedEditorGroupModel, EditorCloseEvent } from 'vs/workbench/common/editor/editorGroupModel';
import { EditorExtensions, IEditorInputFactoryRegistry, EditorInput, IFileEditorInput, IEditorInputSerializer, CloseDirection, EditorsOrder } from 'vs/workbench/common/editor';
import { URI } from 'vs/base/common/uri';
import { TestLifecycleService, workbenchInstantiationService } from 'vs/workbench/test/browser/workbenchTestServices';
import { TestConfigurationService } from 'vs/platform/configuration/test/common/testConfigurationService';
import { TestInstantiationService } from 'vs/platform/instantiation/test/common/instantiationServiceMock';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { ILifecycleService } from 'vs/workbench/services/lifecycle/common/lifecycle';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { Registry } from 'vs/platform/registry/common/platform';
import { IEditorModel } from 'vs/platform/editor/common/editor';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { NullTelemetryService } from 'vs/platform/telemetry/common/telemetryUtils';
import { DiffEditorInput } from 'vs/workbench/common/editor/diffEditorInput';
import { IStorageService } from 'vs/platform/storage/common/storage';
import { DisposableStore } from 'vs/base/common/lifecycle';
import { TestContextService, TestStorageService } from 'vs/workbench/test/common/workbenchTestServices';
suite('Workbench editor group model', () => {
function inst(): IInstantiationService {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(ILifecycleService, new TestLifecycleService());
inst.stub(IWorkspaceContextService, new TestContextService());
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right', focusRecentEditorAfterClose: true } });
inst.stub(IConfigurationService, config);
return inst;
}
function createEditorGroupModel(serialized?: ISerializedEditorGroupModel): EditorGroupModel {
return inst().createInstance(EditorGroupModel, serialized);
}
function closeAllEditors(group: EditorGroupModel): void {
for (const editor of group.getEditors(EditorsOrder.SEQUENTIAL)) {
group.closeEditor(editor, false);
}
}
function closeEditors(group: EditorGroupModel, except: EditorInput, direction?: CloseDirection): void {
const index = group.indexOf(except);
if (index === -1) {
return; // not found
}
<|fim▁hole|> for (let i = index - 1; i >= 0; i--) {
group.closeEditor(group.getEditorByIndex(i)!);
}
}
// Close to the right
else if (direction === CloseDirection.RIGHT) {
for (let i = group.getEditors(EditorsOrder.SEQUENTIAL).length - 1; i > index; i--) {
group.closeEditor(group.getEditorByIndex(i)!);
}
}
// Both directions
else {
group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).filter(editor => !editor.matches(except)).forEach(editor => group.closeEditor(editor));
}
}
interface GroupEvents {
opened: EditorInput[];
activated: EditorInput[];
closed: EditorCloseEvent[];
pinned: EditorInput[];
unpinned: EditorInput[];
sticky: EditorInput[];
unsticky: EditorInput[];
moved: EditorInput[];
disposed: EditorInput[];
}
function groupListener(group: EditorGroupModel): GroupEvents {
const groupEvents: GroupEvents = {
opened: [],
closed: [],
activated: [],
pinned: [],
unpinned: [],
sticky: [],
unsticky: [],
moved: [],
disposed: []
};
group.onDidOpenEditor(e => groupEvents.opened.push(e));
group.onDidCloseEditor(e => groupEvents.closed.push(e));
group.onDidActivateEditor(e => groupEvents.activated.push(e));
group.onDidChangeEditorPinned(e => group.isPinned(e) ? groupEvents.pinned.push(e) : groupEvents.unpinned.push(e));
group.onDidChangeEditorSticky(e => group.isSticky(e) ? groupEvents.sticky.push(e) : groupEvents.unsticky.push(e));
group.onDidMoveEditor(e => groupEvents.moved.push(e));
group.onWillDisposeEditor(e => groupEvents.disposed.push(e));
return groupEvents;
}
let index = 0;
class TestEditorInput extends EditorInput {
readonly resource = undefined;
constructor(public id: string) {
super();
}
override get typeId() { return 'testEditorInputForGroups'; }
override async resolve(): Promise<IEditorModel> { return null!; }
override matches(other: TestEditorInput): boolean {
return other && this.id === other.id && other instanceof TestEditorInput;
}
setDirty(): void {
this._onDidChangeDirty.fire();
}
setLabel(): void {
this._onDidChangeLabel.fire();
}
}
class NonSerializableTestEditorInput extends EditorInput {
readonly resource = undefined;
constructor(public id: string) {
super();
}
override get typeId() { return 'testEditorInputForGroups-nonSerializable'; }
override async resolve(): Promise<IEditorModel | null> { return null; }
override matches(other: NonSerializableTestEditorInput): boolean {
return other && this.id === other.id && other instanceof NonSerializableTestEditorInput;
}
}
class TestFileEditorInput extends EditorInput implements IFileEditorInput {
readonly preferredResource = this.resource;
constructor(public id: string, public resource: URI) {
super();
}
override get typeId() { return 'testFileEditorInputForGroups'; }
override async resolve(): Promise<IEditorModel | null> { return null; }
setPreferredName(name: string): void { }
setPreferredDescription(description: string): void { }
setPreferredResource(resource: URI): void { }
async setEncoding(encoding: string) { }
getEncoding() { return undefined; }
setPreferredEncoding(encoding: string) { }
setForceOpenAsBinary(): void { }
setMode(mode: string) { }
setPreferredMode(mode: string) { }
isResolved(): boolean { return false; }
override matches(other: TestFileEditorInput): boolean {
return other && this.id === other.id && other instanceof TestFileEditorInput;
}
}
function input(id = String(index++), nonSerializable?: boolean, resource?: URI): EditorInput {
if (resource) {
return new TestFileEditorInput(id, resource);
}
return nonSerializable ? new NonSerializableTestEditorInput(id) : new TestEditorInput(id);
}
interface ISerializedTestInput {
id: string;
}
class TestEditorInputSerializer implements IEditorInputSerializer {
static disableSerialize = false;
static disableDeserialize = false;
canSerialize(editorInput: EditorInput): boolean {
return true;
}
serialize(editorInput: EditorInput): string | undefined {
if (TestEditorInputSerializer.disableSerialize) {
return undefined;
}
let testEditorInput = <TestEditorInput>editorInput;
let testInput: ISerializedTestInput = {
id: testEditorInput.id
};
return JSON.stringify(testInput);
}
deserialize(instantiationService: IInstantiationService, serializedEditorInput: string): EditorInput | undefined {
if (TestEditorInputSerializer.disableDeserialize) {
return undefined;
}
let testInput: ISerializedTestInput = JSON.parse(serializedEditorInput);
return new TestEditorInput(testInput.id);
}
}
const disposables = new DisposableStore();
setup(() => {
TestEditorInputSerializer.disableSerialize = false;
TestEditorInputSerializer.disableDeserialize = false;
disposables.add(Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).registerEditorInputSerializer('testEditorInputForGroups', TestEditorInputSerializer));
});
teardown(() => {
disposables.clear();
index = 1;
});
test('Clone Group', function () {
const group = createEditorGroupModel();
const input1 = input() as TestEditorInput;
const input2 = input();
const input3 = input();
// Pinned and Active
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
// Sticky
group.stick(input2);
assert.ok(group.isSticky(input2));
const clone = group.clone();
assert.notStrictEqual(group.id, clone.id);
assert.strictEqual(clone.count, 3);
let didEditorLabelChange = false;
const toDispose = clone.onDidEditorLabelChange(() => didEditorLabelChange = true);
input1.setLabel();
assert.ok(didEditorLabelChange);
assert.strictEqual(clone.isPinned(input1), true);
assert.strictEqual(clone.isActive(input1), false);
assert.strictEqual(clone.isSticky(input1), false);
assert.strictEqual(clone.isPinned(input2), true);
assert.strictEqual(clone.isActive(input2), false);
assert.strictEqual(clone.isSticky(input2), true);
assert.strictEqual(clone.isPinned(input3), false);
assert.strictEqual(clone.isActive(input3), true);
assert.strictEqual(clone.isSticky(input3), false);
toDispose.dispose();
});
test('contains()', function () {
const group = createEditorGroupModel();
const instantiationService = workbenchInstantiationService();
const input1 = input();
const input2 = input();
const diffInput1 = instantiationService.createInstance(DiffEditorInput, 'name', 'description', input1, input2, undefined);
const diffInput2 = instantiationService.createInstance(DiffEditorInput, 'name', 'description', input2, input1, undefined);
group.openEditor(input1, { pinned: true, active: true });
assert.strictEqual(group.contains(input1), true);
assert.strictEqual(group.contains(input1, { strictEquals: true }), true);
assert.strictEqual(group.contains(input1, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(input2), false);
assert.strictEqual(group.contains(input2, { strictEquals: true }), false);
assert.strictEqual(group.contains(input2, { supportSideBySide: true }), false);
assert.strictEqual(group.contains(diffInput1), false);
assert.strictEqual(group.contains(diffInput2), false);
group.openEditor(input2, { pinned: true, active: true });
assert.strictEqual(group.contains(input1), true);
assert.strictEqual(group.contains(input2), true);
assert.strictEqual(group.contains(diffInput1), false);
assert.strictEqual(group.contains(diffInput2), false);
group.openEditor(diffInput1, { pinned: true, active: true });
assert.strictEqual(group.contains(input1), true);
assert.strictEqual(group.contains(input2), true);
assert.strictEqual(group.contains(diffInput1), true);
assert.strictEqual(group.contains(diffInput2), false);
group.openEditor(diffInput2, { pinned: true, active: true });
assert.strictEqual(group.contains(input1), true);
assert.strictEqual(group.contains(input2), true);
assert.strictEqual(group.contains(diffInput1), true);
assert.strictEqual(group.contains(diffInput2), true);
group.closeEditor(input1);
assert.strictEqual(group.contains(input1), false);
assert.strictEqual(group.contains(input1, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(input2), true);
assert.strictEqual(group.contains(diffInput1), true);
assert.strictEqual(group.contains(diffInput2), true);
group.closeEditor(input2);
assert.strictEqual(group.contains(input1), false);
assert.strictEqual(group.contains(input1, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(input2), false);
assert.strictEqual(group.contains(input2, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(diffInput1), true);
assert.strictEqual(group.contains(diffInput2), true);
group.closeEditor(diffInput1);
assert.strictEqual(group.contains(input1), false);
assert.strictEqual(group.contains(input1, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(input2), false);
assert.strictEqual(group.contains(input2, { supportSideBySide: true }), true);
assert.strictEqual(group.contains(diffInput1), false);
assert.strictEqual(group.contains(diffInput2), true);
group.closeEditor(diffInput2);
assert.strictEqual(group.contains(input1), false);
assert.strictEqual(group.contains(input1, { supportSideBySide: true }), false);
assert.strictEqual(group.contains(input2), false);
assert.strictEqual(group.contains(input2, { supportSideBySide: true }), false);
assert.strictEqual(group.contains(diffInput1), false);
assert.strictEqual(group.contains(diffInput2), false);
const input3 = input(undefined, true, URI.parse('foo://bar'));
const input4 = input(undefined, true, URI.parse('foo://barsomething'));
group.openEditor(input3, { pinned: true, active: true });
assert.strictEqual(group.contains(input4), false);
assert.strictEqual(group.contains(input3), true);
group.closeEditor(input3);
assert.strictEqual(group.contains(input3), false);
});
test('group serialization', function () {
inst().invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
// Case 1: inputs can be serialized and deserialized
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
let deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 3);
assert.strictEqual(deserialized.getEditors(EditorsOrder.SEQUENTIAL).length, 3);
assert.strictEqual(deserialized.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 3);
assert.strictEqual(deserialized.isPinned(input1), true);
assert.strictEqual(deserialized.isPinned(input2), true);
assert.strictEqual(deserialized.isPinned(input3), false);
assert.strictEqual(deserialized.isActive(input3), true);
// Case 2: inputs cannot be serialized
TestEditorInputSerializer.disableSerialize = true;
deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.SEQUENTIAL).length, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
// Case 3: inputs cannot be deserialized
TestEditorInputSerializer.disableSerialize = false;
TestEditorInputSerializer.disableDeserialize = true;
deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.SEQUENTIAL).length, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
});
test('group serialization (sticky editor)', function () {
inst().invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
// Case 1: inputs can be serialized and deserialized
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
group.stick(input2);
assert.ok(group.isSticky(input2));
let deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 3);
assert.strictEqual(deserialized.isPinned(input1), true);
assert.strictEqual(deserialized.isActive(input1), false);
assert.strictEqual(deserialized.isSticky(input1), false);
assert.strictEqual(deserialized.isPinned(input2), true);
assert.strictEqual(deserialized.isActive(input2), false);
assert.strictEqual(deserialized.isSticky(input2), true);
assert.strictEqual(deserialized.isPinned(input3), false);
assert.strictEqual(deserialized.isActive(input3), true);
assert.strictEqual(deserialized.isSticky(input3), false);
// Case 2: inputs cannot be serialized
TestEditorInputSerializer.disableSerialize = true;
deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 0);
assert.strictEqual(deserialized.stickyCount, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.SEQUENTIAL).length, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
// Case 3: inputs cannot be deserialized
TestEditorInputSerializer.disableSerialize = false;
TestEditorInputSerializer.disableDeserialize = true;
deserialized = createEditorGroupModel(group.serialize());
assert.strictEqual(group.id, deserialized.id);
assert.strictEqual(deserialized.count, 0);
assert.strictEqual(deserialized.stickyCount, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.SEQUENTIAL).length, 0);
assert.strictEqual(deserialized.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
});
test('One Editor', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
// Active && Pinned
const input1 = input();
const { editor: openedEditor, isNew } = group.openEditor(input1, { active: true, pinned: true });
assert.strictEqual(openedEditor, input1);
assert.strictEqual(isNew, true);
assert.strictEqual(group.count, 1);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 1);
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.isActive(input1), true);
assert.strictEqual(group.isPinned(input1), true);
assert.strictEqual(group.isPinned(0), true);
assert.strictEqual(events.opened[0], input1);
assert.strictEqual(events.activated[0], input1);
let editor = group.closeEditor(input1);
assert.strictEqual(editor, input1);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[0].editor, input1);
assert.strictEqual(events.closed[0].index, 0);
assert.strictEqual(events.closed[0].replaced, false);
// Active && Preview
const input2 = input();
group.openEditor(input2, { active: true, pinned: false });
assert.strictEqual(group.count, 1);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 1);
assert.strictEqual(group.activeEditor, input2);
assert.strictEqual(group.isActive(input2), true);
assert.strictEqual(group.isPinned(input2), false);
assert.strictEqual(group.isPinned(0), false);
assert.strictEqual(events.opened[1], input2);
assert.strictEqual(events.activated[1], input2);
group.closeEditor(input2);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[1].editor, input2);
assert.strictEqual(events.closed[1].index, 0);
assert.strictEqual(events.closed[1].replaced, false);
editor = group.closeEditor(input2);
assert.ok(!editor);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[1].editor, input2);
		// Nonactive && Pinned => gets active because it's the first editor
const input3 = input();
group.openEditor(input3, { active: false, pinned: true });
assert.strictEqual(group.count, 1);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 1);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.isActive(input3), true);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(group.isPinned(0), true);
assert.strictEqual(events.opened[2], input3);
assert.strictEqual(events.activated[2], input3);
group.closeEditor(input3);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[2].editor, input3);
assert.strictEqual(events.opened[2], input3);
assert.strictEqual(events.activated[2], input3);
group.closeEditor(input3);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[2].editor, input3);
		// Nonactive && Preview => gets active because it's the first editor
const input4 = input();
group.openEditor(input4);
assert.strictEqual(group.count, 1);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 1);
assert.strictEqual(group.activeEditor, input4);
assert.strictEqual(group.isActive(input4), true);
assert.strictEqual(group.isPinned(input4), false);
assert.strictEqual(group.isPinned(0), false);
assert.strictEqual(events.opened[3], input4);
assert.strictEqual(events.activated[3], input4);
group.closeEditor(input4);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(events.closed[3].editor, input4);
});
test('Multiple Editors - Pinned and Active', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input('1');
const input1Copy = input('1');
const input2 = input('2');
const input3 = input('3');
// Pinned and Active
let openedEditorResult = group.openEditor(input1, { pinned: true, active: true });
assert.strictEqual(openedEditorResult.editor, input1);
assert.strictEqual(openedEditorResult.isNew, true);
openedEditorResult = group.openEditor(input1Copy, { pinned: true, active: true }); // opening copy of editor should still return existing one
assert.strictEqual(openedEditorResult.editor, input1);
assert.strictEqual(openedEditorResult.isNew, false);
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: true, active: true });
assert.strictEqual(group.count, 3);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 3);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.isActive(input1), false);
assert.strictEqual(group.isPinned(input1), true);
assert.strictEqual(group.isActive(input2), false);
assert.strictEqual(group.isPinned(input2), true);
assert.strictEqual(group.isActive(input3), true);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(events.opened[0], input1);
assert.strictEqual(events.opened[1], input2);
assert.strictEqual(events.opened[2], input3);
assert.strictEqual(events.activated[0], input1);
assert.strictEqual(events.activated[1], input2);
assert.strictEqual(events.activated[2], input3);
const mru = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mru[0], input3);
assert.strictEqual(mru[1], input2);
assert.strictEqual(mru[2], input1);
// Add some tests where a matching input is used
// and verify that events carry the original input
const sameInput1 = input('1');
group.openEditor(sameInput1, { pinned: true, active: true });
assert.strictEqual(events.activated[3], input1);
group.unpin(sameInput1);
assert.strictEqual(events.unpinned[0], input1);
group.pin(sameInput1);
assert.strictEqual(events.pinned[0], input1);
group.stick(sameInput1);
assert.strictEqual(events.sticky[0], input1);
group.unstick(sameInput1);
assert.strictEqual(events.unsticky[0], input1);
group.moveEditor(sameInput1, 1);
assert.strictEqual(events.moved[0], input1);
group.closeEditor(sameInput1);
assert.strictEqual(events.closed[0].editor, input1);
closeAllEditors(group);
assert.strictEqual(events.closed.length, 3);
assert.strictEqual(group.count, 0);
});
test('Multiple Editors - Preview editor moves to the side of the active one', function () {
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
group.openEditor(input1, { pinned: false, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: true, active: true });
assert.strictEqual(input3, group.getEditors(EditorsOrder.SEQUENTIAL)[2]);
const input4 = input();
group.openEditor(input4, { pinned: false, active: true }); // this should cause the preview editor to move after input3
assert.strictEqual(input4, group.getEditors(EditorsOrder.SEQUENTIAL)[2]);
});
test('Multiple Editors - Pinned and Active (DEFAULT_OPEN_EDITOR_DIRECTION = Direction.LEFT)', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(ILifecycleService, new TestLifecycleService());
inst.stub(IWorkspaceContextService, new TestContextService());
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
inst.stub(IConfigurationService, config);
config.setUserConfiguration('workbench', { editor: { openPositioning: 'left' } });
const group: EditorGroupModel = inst.createInstance(EditorGroupModel, undefined);
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
// Pinned and Active
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: true, active: true });
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input1);
closeAllEditors(group);
assert.strictEqual(events.closed.length, 3);
assert.strictEqual(group.count, 0);
});
test('Multiple Editors - Pinned and Not Active', function () {
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
// Pinned and Active
group.openEditor(input1, { pinned: true });
group.openEditor(input2, { pinned: true });
group.openEditor(input3, { pinned: true });
assert.strictEqual(group.count, 3);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 3);
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.isActive(input1), true);
assert.strictEqual(group.isPinned(input1), true);
assert.strictEqual(group.isPinned(0), true);
assert.strictEqual(group.isActive(input2), false);
assert.strictEqual(group.isPinned(input2), true);
assert.strictEqual(group.isPinned(1), true);
assert.strictEqual(group.isActive(input3), false);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(group.isPinned(2), true);
assert.strictEqual(group.isPinned(input3), true);
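// Note: MOST_RECENTLY_ACTIVE (MRU) order reflects activation history, most recently
// active first, and is independent of the SEQUENTIAL (tab) order asserted above.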
const mru = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mru[0], input1);
assert.strictEqual(mru[1], input3);
assert.strictEqual(mru[2], input2);
});
test('Multiple Editors - Preview gets overwritten', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
// Non active, preview
group.openEditor(input1); // becomes active, preview
group.openEditor(input2); // overwrites preview
group.openEditor(input3); // overwrites preview
assert.strictEqual(group.count, 1);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 1);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.isActive(input3), true);
assert.strictEqual(group.isPinned(input3), false);
assert.strictEqual(!group.isPinned(input3), true);
assert.strictEqual(events.opened[0], input1);
assert.strictEqual(events.opened[1], input2);
assert.strictEqual(events.opened[2], input3);
assert.strictEqual(events.closed[0].editor, input1);
assert.strictEqual(events.closed[1].editor, input2);
assert.strictEqual(events.closed[0].replaced, true);
assert.strictEqual(events.closed[1].replaced, true);
const mru = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mru[0], input3);
assert.strictEqual(mru.length, 1);
});
test('Multiple Editors - set active', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
assert.strictEqual(group.activeEditor, input3);
let mru = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mru[0], input3);
assert.strictEqual(mru[1], input2);
assert.strictEqual(mru[2], input1);
group.setActive(input3);
assert.strictEqual(events.activated.length, 3);
group.setActive(input1);
assert.strictEqual(events.activated[3], input1);
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.isActive(input1), true);
assert.strictEqual(group.isActive(input2), false);
assert.strictEqual(group.isActive(input3), false);
mru = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mru[0], input1);
assert.strictEqual(mru[1], input3);
assert.strictEqual(mru[2], input2);
});
test('Multiple Editors - pin and unpin', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 3);
group.pin(input3);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(group.isActive(input3), true);
assert.strictEqual(events.pinned[0], input3);
assert.strictEqual(group.count, 3);
group.unpin(input1);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.isPinned(input1), false);
assert.strictEqual(group.isActive(input1), false);
assert.strictEqual(events.unpinned[0], input1);
assert.strictEqual(group.count, 3);
group.unpin(input2);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 2); // 2 previews got merged into one
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input3);
assert.strictEqual(events.closed[0].editor, input1);
assert.strictEqual(group.count, 2);
group.unpin(input3);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 1); // pinning replaced the preview
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input3);
assert.strictEqual(events.closed[1].editor, input2);
assert.strictEqual(group.count, 1);
});
test('Multiple Editors - closing picks next from MRU list', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
const input4 = input();
const input5 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: true, active: true });
group.openEditor(input4, { pinned: true, active: true });
group.openEditor(input5, { pinned: true, active: true });
assert.strictEqual(group.activeEditor, input5);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0], input5);
assert.strictEqual(group.count, 5);
group.closeEditor(input5);
assert.strictEqual(group.activeEditor, input4);
assert.strictEqual(events.activated[5], input4);
assert.strictEqual(group.count, 4);
group.setActive(input1);
group.setActive(input4);
group.closeEditor(input4);
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.count, 3);
group.closeEditor(input1);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 2);
group.setActive(input2);
group.closeEditor(input2);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 1);
group.closeEditor(input3);
assert.ok(!group.activeEditor);
assert.strictEqual(group.count, 0);
});
test('Multiple Editors - closing picks next to the right', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(ILifecycleService, new TestLifecycleService());
inst.stub(IWorkspaceContextService, new TestContextService());
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { focusRecentEditorAfterClose: false } });
inst.stub(IConfigurationService, config);
const group = inst.createInstance(EditorGroupModel, undefined);
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
const input4 = input();
const input5 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: true, active: true });
group.openEditor(input4, { pinned: true, active: true });
group.openEditor(input5, { pinned: true, active: true });
assert.strictEqual(group.activeEditor, input5);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0], input5);
assert.strictEqual(group.count, 5);
group.closeEditor(input5);
assert.strictEqual(group.activeEditor, input4);
assert.strictEqual(events.activated[5], input4);
assert.strictEqual(group.count, 4);
group.setActive(input1);
group.closeEditor(input1);
assert.strictEqual(group.activeEditor, input2);
assert.strictEqual(group.count, 3);
group.setActive(input3);
group.closeEditor(input3);
assert.strictEqual(group.activeEditor, input4);
assert.strictEqual(group.count, 2);
group.closeEditor(input4);
assert.strictEqual(group.activeEditor, input2);
assert.strictEqual(group.count, 1);
group.closeEditor(input2);
assert.ok(!group.activeEditor);
assert.strictEqual(group.count, 0);
});
test('Multiple Editors - move editor', function () {
const group = createEditorGroupModel();
const events = groupListener(group);
const input1 = input();
const input2 = input();
const input3 = input();
const input4 = input();
const input5 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.moveEditor(input1, 1);
assert.strictEqual(events.moved[0], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input1);
group.setActive(input1);
group.openEditor(input3, { pinned: true, active: true });
group.openEditor(input4, { pinned: true, active: true });
group.openEditor(input5, { pinned: true, active: true });
group.moveEditor(input4, 0);
assert.strictEqual(events.moved[1], input4);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input4);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[3], input3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[4], input5);
group.moveEditor(input4, 3);
group.moveEditor(input2, 1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[3], input4);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[4], input5);
assert.strictEqual(events.moved.length, 4);
group.moveEditor(input1, 0);
assert.strictEqual(events.moved.length, 4);
group.moveEditor(input1, -1);
assert.strictEqual(events.moved.length, 4);
group.moveEditor(input5, 4);
assert.strictEqual(events.moved.length, 4);
group.moveEditor(input5, 100);
assert.strictEqual(events.moved.length, 4);
group.moveEditor(input5, -1);
assert.strictEqual(events.moved.length, 5);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input5);
group.moveEditor(input1, 100);
assert.strictEqual(events.moved.length, 6);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[4], input1);
});
test('Multiple Editors - move editor across groups', function () {
const group1 = createEditorGroupModel();
const group2 = createEditorGroupModel();
const g1_input1 = input();
const g1_input2 = input();
const g2_input1 = input();
group1.openEditor(g1_input1, { active: true, pinned: true });
group1.openEditor(g1_input2, { active: true, pinned: true });
group2.openEditor(g2_input1, { active: true, pinned: true });
// A move across groups is a close in the one group and an open in the other group at a specific index
group2.closeEditor(g2_input1);
group1.openEditor(g2_input1, { active: true, pinned: true, index: 1 });
assert.strictEqual(group1.count, 3);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[0], g1_input1);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[1], g2_input1);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[2], g1_input2);
});
test('Multiple Editors - move editor across groups (input already exists in group 1)', function () {
const group1 = createEditorGroupModel();
const group2 = createEditorGroupModel();
const g1_input1 = input();
const g1_input2 = input();
const g1_input3 = input();
const g2_input1 = g1_input2;
group1.openEditor(g1_input1, { active: true, pinned: true });
group1.openEditor(g1_input2, { active: true, pinned: true });
group1.openEditor(g1_input3, { active: true, pinned: true });
group2.openEditor(g2_input1, { active: true, pinned: true });
// A move across groups is a close in the one group and an open in the other group at a specific index
group2.closeEditor(g2_input1);
group1.openEditor(g2_input1, { active: true, pinned: true, index: 0 });
assert.strictEqual(group1.count, 3);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[0], g1_input2);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[1], g1_input1);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[2], g1_input3);
});
test('Multiple Editors - Pinned & Non Active', function () {
const group = createEditorGroupModel();
const input1 = input();
group.openEditor(input1);
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.previewEditor, input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input1);
assert.strictEqual(group.count, 1);
const input2 = input();
group.openEditor(input2, { pinned: true, active: false });
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.previewEditor, input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input2);
assert.strictEqual(group.count, 2);
const input3 = input();
group.openEditor(input3, { pinned: true, active: false });
assert.strictEqual(group.activeEditor, input1);
assert.strictEqual(group.previewEditor, input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input2);
assert.strictEqual(group.isPinned(input1), false);
assert.strictEqual(group.isPinned(input2), true);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(group.count, 3);
});
test('Multiple Editors - Close Others, Close Left, Close Right', function () {
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
const input4 = input();
const input5 = input();
group.openEditor(input1, { active: true, pinned: true });
group.openEditor(input2, { active: true, pinned: true });
group.openEditor(input3, { active: true, pinned: true });
group.openEditor(input4, { active: true, pinned: true });
group.openEditor(input5, { active: true, pinned: true });
// Close Others
closeEditors(group, group.activeEditor!);
assert.strictEqual(group.activeEditor, input5);
assert.strictEqual(group.count, 1);
closeAllEditors(group);
group.openEditor(input1, { active: true, pinned: true });
group.openEditor(input2, { active: true, pinned: true });
group.openEditor(input3, { active: true, pinned: true });
group.openEditor(input4, { active: true, pinned: true });
group.openEditor(input5, { active: true, pinned: true });
group.setActive(input3);
// Close Left
assert.strictEqual(group.activeEditor, input3);
closeEditors(group, group.activeEditor!, CloseDirection.LEFT);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input4);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input5);
closeAllEditors(group);
group.openEditor(input1, { active: true, pinned: true });
group.openEditor(input2, { active: true, pinned: true });
group.openEditor(input3, { active: true, pinned: true });
group.openEditor(input4, { active: true, pinned: true });
group.openEditor(input5, { active: true, pinned: true });
group.setActive(input3);
// Close Right
assert.strictEqual(group.activeEditor, input3);
closeEditors(group, group.activeEditor!, CloseDirection.RIGHT);
assert.strictEqual(group.activeEditor, input3);
assert.strictEqual(group.count, 3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], input1);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], input2);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[2], input3);
});
test('Multiple Editors - real user example', function () {
const group = createEditorGroupModel();
// [] -> /index.html/
const indexHtml = input('index.html');
let openedEditor = group.openEditor(indexHtml).editor;
assert.strictEqual(openedEditor, indexHtml);
assert.strictEqual(group.activeEditor, indexHtml);
assert.strictEqual(group.previewEditor, indexHtml);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], indexHtml);
assert.strictEqual(group.count, 1);
// /index.html/ -> /index.html/
const sameIndexHtml = input('index.html');
openedEditor = group.openEditor(sameIndexHtml).editor;
assert.strictEqual(openedEditor, indexHtml);
assert.strictEqual(group.activeEditor, indexHtml);
assert.strictEqual(group.previewEditor, indexHtml);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], indexHtml);
assert.strictEqual(group.count, 1);
// /index.html/ -> /style.css/
const styleCss = input('style.css');
openedEditor = group.openEditor(styleCss).editor;
assert.strictEqual(openedEditor, styleCss);
assert.strictEqual(group.activeEditor, styleCss);
assert.strictEqual(group.previewEditor, styleCss);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], styleCss);
assert.strictEqual(group.count, 1);
// /style.css/ -> [/style.css/, test.js]
const testJs = input('test.js');
openedEditor = group.openEditor(testJs, { active: true, pinned: true }).editor;
assert.strictEqual(openedEditor, testJs);
assert.strictEqual(group.previewEditor, styleCss);
assert.strictEqual(group.activeEditor, testJs);
assert.strictEqual(group.isPinned(styleCss), false);
assert.strictEqual(group.isPinned(testJs), true);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], styleCss);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], testJs);
assert.strictEqual(group.count, 2);
// [/style.css/, test.js] -> [test.js, /index.html/]
const indexHtml2 = input('index.html');
group.openEditor(indexHtml2, { active: true });
assert.strictEqual(group.activeEditor, indexHtml2);
assert.strictEqual(group.previewEditor, indexHtml2);
assert.strictEqual(group.isPinned(indexHtml2), false);
assert.strictEqual(group.isPinned(testJs), true);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[0], testJs);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], indexHtml2);
assert.strictEqual(group.count, 2);
// make test.js active
const testJs2 = input('test.js');
group.setActive(testJs2);
assert.strictEqual(group.activeEditor, testJs);
assert.strictEqual(group.isActive(testJs2), true);
assert.strictEqual(group.count, 2);
// [test.js, /indexHtml/] -> [test.js, index.html]
const indexHtml3 = input('index.html');
group.pin(indexHtml3);
assert.strictEqual(group.isPinned(indexHtml3), true);
assert.strictEqual(group.activeEditor, testJs);
// [test.js, index.html] -> [test.js, file.ts, index.html]
const fileTs = input('file.ts');
group.openEditor(fileTs, { active: true, pinned: true });
assert.strictEqual(group.isPinned(fileTs), true);
assert.strictEqual(group.count, 3);
assert.strictEqual(group.activeEditor, fileTs);
// [test.js, index.html, file.ts] -> [test.js, /file.ts/, index.html]
group.unpin(fileTs);
assert.strictEqual(group.count, 3);
assert.strictEqual(group.isPinned(fileTs), false);
assert.strictEqual(group.activeEditor, fileTs);
// [test.js, /file.ts/, index.html] -> [test.js, /other.ts/, index.html]
const otherTs = input('other.ts');
group.openEditor(otherTs, { active: true });
assert.strictEqual(group.count, 3);
assert.strictEqual(group.activeEditor, otherTs);
assert.ok(group.getEditors(EditorsOrder.SEQUENTIAL)[0].matches(testJs));
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], otherTs);
assert.ok(group.getEditors(EditorsOrder.SEQUENTIAL)[2].matches(indexHtml));
// make index.html active
const indexHtml4 = input('index.html');
group.setActive(indexHtml4);
assert.strictEqual(group.activeEditor, indexHtml2);
// [test.js, /other.ts/, index.html] -> [test.js, /other.ts/]
group.closeEditor(indexHtml);
assert.strictEqual(group.count, 2);
assert.strictEqual(group.activeEditor, otherTs);
assert.ok(group.getEditors(EditorsOrder.SEQUENTIAL)[0].matches(testJs));
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL)[1], otherTs);
// [test.js, /other.ts/] -> [test.js]
group.closeEditor(otherTs);
assert.strictEqual(group.count, 1);
assert.strictEqual(group.activeEditor, testJs);
assert.ok(group.getEditors(EditorsOrder.SEQUENTIAL)[0].matches(testJs));
// [test.js] -> /test.js/
group.unpin(testJs);
assert.strictEqual(group.count, 1);
assert.strictEqual(group.activeEditor, testJs);
assert.ok(group.getEditors(EditorsOrder.SEQUENTIAL)[0].matches(testJs));
assert.strictEqual(group.isPinned(testJs), false);
// /test.js/ -> []
group.closeEditor(testJs);
assert.strictEqual(group.count, 0);
assert.strictEqual(group.activeEditor, null);
assert.strictEqual(group.previewEditor, null);
});
test('Single Group, Single Editor - persist', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(IWorkspaceContextService, new TestContextService());
const lifecycle = new TestLifecycleService();
inst.stub(ILifecycleService, lifecycle);
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right' } });
inst.stub(IConfigurationService, config);
inst.invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
let group = createEditorGroupModel();
const input1 = input();
group.openEditor(input1);
assert.strictEqual(group.count, 1);
assert.strictEqual(group.activeEditor!.matches(input1), true);
assert.strictEqual(group.previewEditor!.matches(input1), true);
assert.strictEqual(group.isActive(input1), true);
// Create model again - should load from storage
group = inst.createInstance(EditorGroupModel, group.serialize());
assert.strictEqual(group.count, 1);
assert.strictEqual(group.activeEditor!.matches(input1), true);
assert.strictEqual(group.previewEditor!.matches(input1), true);
assert.strictEqual(group.isActive(input1), true);
});
test('Multiple Groups, Multiple editors - persist', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(IWorkspaceContextService, new TestContextService());
const lifecycle = new TestLifecycleService();
inst.stub(ILifecycleService, lifecycle);
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right' } });
inst.stub(IConfigurationService, config);
inst.invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
let group1 = createEditorGroupModel();
const g1_input1 = input();
const g1_input2 = input();
const g1_input3 = input();
group1.openEditor(g1_input1, { active: true, pinned: true });
group1.openEditor(g1_input2, { active: true, pinned: false });
group1.openEditor(g1_input3, { active: false, pinned: true });
let group2 = createEditorGroupModel();
const g2_input1 = input();
const g2_input2 = input();
const g2_input3 = input();
group2.openEditor(g2_input1, { active: true, pinned: true });
group2.openEditor(g2_input2, { active: false, pinned: false });
group2.openEditor(g2_input3, { active: false, pinned: true });
assert.strictEqual(group1.count, 3);
assert.strictEqual(group2.count, 3);
assert.strictEqual(group1.activeEditor!.matches(g1_input2), true);
assert.strictEqual(group2.activeEditor!.matches(g2_input1), true);
assert.strictEqual(group1.previewEditor!.matches(g1_input2), true);
assert.strictEqual(group2.previewEditor!.matches(g2_input2), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(g1_input2), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(g1_input3), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[2].matches(g1_input1), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(g2_input1), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(g2_input3), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[2].matches(g2_input2), true);
// Create model again - should load from storage
group1 = inst.createInstance(EditorGroupModel, group1.serialize());
group2 = inst.createInstance(EditorGroupModel, group2.serialize());
assert.strictEqual(group1.count, 3);
assert.strictEqual(group2.count, 3);
assert.strictEqual(group1.activeEditor!.matches(g1_input2), true);
assert.strictEqual(group2.activeEditor!.matches(g2_input1), true);
assert.strictEqual(group1.previewEditor!.matches(g1_input2), true);
assert.strictEqual(group2.previewEditor!.matches(g2_input2), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(g1_input2), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(g1_input3), true);
assert.strictEqual(group1.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[2].matches(g1_input1), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(g2_input1), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(g2_input3), true);
assert.strictEqual(group2.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[2].matches(g2_input2), true);
});
test('Single group, multiple editors - persist (some not persistable)', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(IWorkspaceContextService, new TestContextService());
const lifecycle = new TestLifecycleService();
inst.stub(ILifecycleService, lifecycle);
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right' } });
inst.stub(IConfigurationService, config);
inst.invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
let group = createEditorGroupModel();
const serializableInput1 = input();
const nonSerializableInput2 = input('3', true);
const serializableInput2 = input();
group.openEditor(serializableInput1, { active: true, pinned: true });
group.openEditor(nonSerializableInput2, { active: true, pinned: false });
group.openEditor(serializableInput2, { active: false, pinned: true });
assert.strictEqual(group.count, 3);
assert.strictEqual(group.activeEditor!.matches(nonSerializableInput2), true);
assert.strictEqual(group.previewEditor!.matches(nonSerializableInput2), true);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(nonSerializableInput2), true);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(serializableInput2), true);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[2].matches(serializableInput1), true);
// Create model again - should load from storage
group = inst.createInstance(EditorGroupModel, group.serialize());
assert.strictEqual(group.count, 2);
assert.strictEqual(group.activeEditor!.matches(serializableInput2), true);
assert.strictEqual(group.previewEditor, null);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[0].matches(serializableInput2), true);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE)[1].matches(serializableInput1), true);
});
test('Single group, multiple editors - persist (some not persistable, sticky editors)', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(IWorkspaceContextService, new TestContextService());
const lifecycle = new TestLifecycleService();
inst.stub(ILifecycleService, lifecycle);
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right' } });
inst.stub(IConfigurationService, config);
inst.invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
let group = createEditorGroupModel();
const serializableInput1 = input();
const nonSerializableInput2 = input('3', true);
const serializableInput2 = input();
group.openEditor(serializableInput1, { active: true, pinned: true });
group.openEditor(nonSerializableInput2, { active: true, pinned: true, sticky: true });
group.openEditor(serializableInput2, { active: false, pinned: true });
assert.strictEqual(group.count, 3);
assert.strictEqual(group.stickyCount, 1);
// Create model again - should load from storage
group = inst.createInstance(EditorGroupModel, group.serialize());
assert.strictEqual(group.count, 2);
assert.strictEqual(group.stickyCount, 0);
});
test('Multiple groups, multiple editors - persist (some not persistable, causes empty group)', function () {
let inst = new TestInstantiationService();
inst.stub(IStorageService, new TestStorageService());
inst.stub(IWorkspaceContextService, new TestContextService());
const lifecycle = new TestLifecycleService();
inst.stub(ILifecycleService, lifecycle);
inst.stub(ITelemetryService, NullTelemetryService);
const config = new TestConfigurationService();
config.setUserConfiguration('workbench', { editor: { openPositioning: 'right' } });
inst.stub(IConfigurationService, config);
inst.invokeFunction(accessor => Registry.as<IEditorInputFactoryRegistry>(EditorExtensions.EditorInputFactories).start(accessor));
let group1 = createEditorGroupModel();
let group2 = createEditorGroupModel();
const serializableInput1 = input();
const serializableInput2 = input();
const nonSerializableInput = input('2', true);
group1.openEditor(serializableInput1, { pinned: true });
group1.openEditor(serializableInput2);
group2.openEditor(nonSerializableInput);
// Create model again - should load from storage
group1 = inst.createInstance(EditorGroupModel, group1.serialize());
group2 = inst.createInstance(EditorGroupModel, group2.serialize());
assert.strictEqual(group1.count, 2);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[0].matches(serializableInput1), true);
assert.strictEqual(group1.getEditors(EditorsOrder.SEQUENTIAL)[1].matches(serializableInput2), true);
});
test('Multiple Editors - Editor Dispose', function () {
const group1 = createEditorGroupModel();
const group2 = createEditorGroupModel();
const group1Listener = groupListener(group1);
const group2Listener = groupListener(group2);
const input1 = input();
const input2 = input();
const input3 = input();
group1.openEditor(input1, { pinned: true, active: true });
group1.openEditor(input2, { pinned: true, active: true });
group1.openEditor(input3, { pinned: true, active: true });
group2.openEditor(input1, { pinned: true, active: true });
group2.openEditor(input2, { pinned: true, active: true });
input1.dispose();
assert.strictEqual(group1Listener.disposed.length, 1);
assert.strictEqual(group2Listener.disposed.length, 1);
assert.ok(group1Listener.disposed[0].matches(input1));
assert.ok(group2Listener.disposed[0].matches(input1));
input3.dispose();
assert.strictEqual(group1Listener.disposed.length, 2);
assert.strictEqual(group2Listener.disposed.length, 1);
assert.ok(group1Listener.disposed[1].matches(input3));
});
test('Preview tab does not have a stable position (https://github.com/microsoft/vscode/issues/8245)', function () {
const group1 = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
group1.openEditor(input1, { pinned: true, active: true });
group1.openEditor(input2, { active: true });
group1.setActive(input1);
group1.openEditor(input3, { active: true });
assert.strictEqual(group1.indexOf(input3), 1);
});
test('Multiple Editors - Editor Emits Dirty and Label Changed', function () {
const group1 = createEditorGroupModel();
const group2 = createEditorGroupModel();
const input1 = input();
const input2 = input();
group1.openEditor(input1, { pinned: true, active: true });
group2.openEditor(input2, { pinned: true, active: true });
let dirty1Counter = 0;
group1.onDidChangeEditorDirty(() => {
dirty1Counter++;
});
let dirty2Counter = 0;
group2.onDidChangeEditorDirty(() => {
dirty2Counter++;
});
let label1ChangeCounter = 0;
group1.onDidEditorLabelChange(() => {
label1ChangeCounter++;
});
let label2ChangeCounter = 0;
group2.onDidEditorLabelChange(() => {
label2ChangeCounter++;
});
(<TestEditorInput>input1).setDirty();
(<TestEditorInput>input1).setLabel();
assert.strictEqual(dirty1Counter, 1);
assert.strictEqual(label1ChangeCounter, 1);
(<TestEditorInput>input2).setDirty();
(<TestEditorInput>input2).setLabel();
assert.strictEqual(dirty2Counter, 1);
assert.strictEqual(label2ChangeCounter, 1);
closeAllEditors(group2);
(<TestEditorInput>input2).setDirty();
(<TestEditorInput>input2).setLabel();
assert.strictEqual(dirty2Counter, 1);
assert.strictEqual(label2ChangeCounter, 1);
assert.strictEqual(dirty1Counter, 1);
assert.strictEqual(label1ChangeCounter, 1);
});
test('Sticky Editors', function () {
const group = createEditorGroupModel();
const input1 = input();
const input2 = input();
const input3 = input();
const input4 = input();
group.openEditor(input1, { pinned: true, active: true });
group.openEditor(input2, { pinned: true, active: true });
group.openEditor(input3, { pinned: false, active: true });
assert.strictEqual(group.stickyCount, 0);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL).length, 3);
assert.strictEqual(group.getEditors(EditorsOrder.SEQUENTIAL, { excludeSticky: true }).length, 3);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE).length, 3);
assert.strictEqual(group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE, { excludeSticky: true }).length, 3);
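// Note: a "sticky" editor is kept at the start of the tab order, is always pinned,
// and can be excluded from getEditors() results via { excludeSticky: true }.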
// Sticking the last editor should move it to the front and pin it
group.stick(input3);
assert.strictEqual(group.stickyCount, 1);
assert.strictEqual(group.isSticky(input1), false);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), true);
assert.strictEqual(group.isPinned(input3), true);
assert.strictEqual(group.indexOf(input1), 1);
assert.strictEqual(group.indexOf(input2), 2);
assert.strictEqual(group.indexOf(input3), 0);
let sequentialAllEditors = group.getEditors(EditorsOrder.SEQUENTIAL);
assert.strictEqual(sequentialAllEditors.length, 3);
let sequentialEditorsExcludingSticky = group.getEditors(EditorsOrder.SEQUENTIAL, { excludeSticky: true });
assert.strictEqual(sequentialEditorsExcludingSticky.length, 2);
assert.ok(sequentialEditorsExcludingSticky.indexOf(input1) >= 0);
assert.ok(sequentialEditorsExcludingSticky.indexOf(input2) >= 0);
let mruAllEditors = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mruAllEditors.length, 3);
let mruEditorsExcludingSticky = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE, { excludeSticky: true });
assert.strictEqual(mruEditorsExcludingSticky.length, 2);
assert.ok(mruEditorsExcludingSticky.indexOf(input1) >= 0);
assert.ok(mruEditorsExcludingSticky.indexOf(input2) >= 0);
// Sticking same editor again is a no-op
group.stick(input3);
assert.strictEqual(group.isSticky(input3), true);
// Sticking last editor now should move it after sticky one
group.stick(input2);
assert.strictEqual(group.stickyCount, 2);
assert.strictEqual(group.isSticky(input1), false);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), true);
assert.strictEqual(group.indexOf(input1), 2);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 0);
sequentialAllEditors = group.getEditors(EditorsOrder.SEQUENTIAL);
assert.strictEqual(sequentialAllEditors.length, 3);
sequentialEditorsExcludingSticky = group.getEditors(EditorsOrder.SEQUENTIAL, { excludeSticky: true });
assert.strictEqual(sequentialEditorsExcludingSticky.length, 1);
assert.ok(sequentialEditorsExcludingSticky.indexOf(input1) >= 0);
mruAllEditors = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mruAllEditors.length, 3);
mruEditorsExcludingSticky = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE, { excludeSticky: true });
assert.strictEqual(mruEditorsExcludingSticky.length, 1);
assert.ok(mruEditorsExcludingSticky.indexOf(input1) >= 0);
// Sticking remaining editor also works
group.stick(input1);
assert.strictEqual(group.stickyCount, 3);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), true);
assert.strictEqual(group.indexOf(input1), 2);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 0);
sequentialAllEditors = group.getEditors(EditorsOrder.SEQUENTIAL);
assert.strictEqual(sequentialAllEditors.length, 3);
sequentialEditorsExcludingSticky = group.getEditors(EditorsOrder.SEQUENTIAL, { excludeSticky: true });
assert.strictEqual(sequentialEditorsExcludingSticky.length, 0);
mruAllEditors = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE);
assert.strictEqual(mruAllEditors.length, 3);
mruEditorsExcludingSticky = group.getEditors(EditorsOrder.MOST_RECENTLY_ACTIVE, { excludeSticky: true });
assert.strictEqual(mruEditorsExcludingSticky.length, 0);
// Unsticking moves editor after sticky ones
group.unstick(input3);
assert.strictEqual(group.stickyCount, 2);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 1);
assert.strictEqual(group.indexOf(input2), 0);
assert.strictEqual(group.indexOf(input3), 2);
// Unsticking all works
group.unstick(input1);
group.unstick(input2);
assert.strictEqual(group.stickyCount, 0);
assert.strictEqual(group.isSticky(input1), false);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), false);
group.moveEditor(input1, 0);
group.moveEditor(input2, 1);
group.moveEditor(input3, 2);
// Opening a new editor always opens after sticky editors
group.stick(input1);
group.stick(input2);
group.setActive(input1);
const events = groupListener(group);
group.openEditor(input4, { pinned: true, active: true });
assert.strictEqual(group.indexOf(input4), 2);
group.closeEditor(input4);
assert.strictEqual(events.closed[0].sticky, false);
group.setActive(input2);
group.openEditor(input4, { pinned: true, active: true });
assert.strictEqual(group.indexOf(input4), 2);
group.closeEditor(input4);
assert.strictEqual(events.closed[1].sticky, false);
// Reset
assert.strictEqual(group.stickyCount, 2);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 2);
// Moving a sticky editor works
group.moveEditor(input1, 1); // still moved within sticky range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 1);
assert.strictEqual(group.indexOf(input2), 0);
assert.strictEqual(group.indexOf(input3), 2);
group.moveEditor(input1, 0); // still moved within sticky range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 2);
group.moveEditor(input1, 2); // moved out of sticky range
assert.strictEqual(group.isSticky(input1), false);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 2);
assert.strictEqual(group.indexOf(input2), 0);
assert.strictEqual(group.indexOf(input3), 1);
group.moveEditor(input2, 2); // moved out of sticky range
assert.strictEqual(group.isSticky(input1), false);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 1);
assert.strictEqual(group.indexOf(input2), 2);
assert.strictEqual(group.indexOf(input3), 0);
// Reset
group.moveEditor(input1, 0);
group.moveEditor(input2, 1);
group.moveEditor(input3, 2);
group.stick(input1);
group.unstick(input2);
assert.strictEqual(group.stickyCount, 1);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 2);
// Moving an unsticky editor works
group.moveEditor(input3, 1); // still moved within unsticked range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 2);
assert.strictEqual(group.indexOf(input3), 1);
group.moveEditor(input3, 2); // still moved within unsticked range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 2);
group.moveEditor(input3, 0); // moved into sticky range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), false);
assert.strictEqual(group.isSticky(input3), true);
assert.strictEqual(group.indexOf(input1), 1);
assert.strictEqual(group.indexOf(input2), 2);
assert.strictEqual(group.indexOf(input3), 0);
group.moveEditor(input2, 0); // moved into sticky range
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), true);
assert.strictEqual(group.indexOf(input1), 2);
assert.strictEqual(group.indexOf(input2), 0);
assert.strictEqual(group.indexOf(input3), 1);
// Closing a sticky editor updates state properly
group.stick(input1);
group.stick(input2);
group.unstick(input3);
assert.strictEqual(group.stickyCount, 2);
group.closeEditor(input1);
assert.strictEqual(events.closed[2].sticky, true);
assert.strictEqual(group.stickyCount, 1);
group.closeEditor(input2);
assert.strictEqual(events.closed[3].sticky, true);
assert.strictEqual(group.stickyCount, 0);
closeAllEditors(group);
assert.strictEqual(group.stickyCount, 0);
// Open sticky
group.openEditor(input1, { sticky: true });
assert.strictEqual(group.stickyCount, 1);
assert.strictEqual(group.isSticky(input1), true);
group.openEditor(input2, { pinned: true, active: true });
assert.strictEqual(group.stickyCount, 1);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), false);
group.openEditor(input2, { sticky: true });
assert.strictEqual(group.stickyCount, 2);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
group.openEditor(input3, { pinned: true, active: true });
group.openEditor(input4, { pinned: false, active: true, sticky: true });
assert.strictEqual(group.stickyCount, 3);
assert.strictEqual(group.isSticky(input1), true);
assert.strictEqual(group.isSticky(input2), true);
assert.strictEqual(group.isSticky(input3), false);
assert.strictEqual(group.isSticky(input4), true);
assert.strictEqual(group.isPinned(input4), true);
assert.strictEqual(group.indexOf(input1), 0);
assert.strictEqual(group.indexOf(input2), 1);
assert.strictEqual(group.indexOf(input3), 3);
assert.strictEqual(group.indexOf(input4), 2);
});
});<|fim▁end|> | // Close to the left
if (direction === CloseDirection.LEFT) { |
<|file_name|>socket.rs<|end_file_name|><|fim▁begin|>use chan::{self, Sender, Receiver};
use json;
use serde::ser::Serialize;
use std::io::{BufReader, Read, Write};
use std::net::Shutdown;
use std::{fs, thread};
use unix_socket::{UnixListener, UnixStream};
use datatype::{Command, DownloadFailed, Error, Event};
use gateway::Gateway;
use interpreter::CommandExec;
/// The `Socket` gateway is used for communication via Unix Domain Sockets.
pub struct Socket {
pub cmd_sock: String,
pub ev_sock: String,
}
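// Illustrative sketch only (mirrors the round trip exercised in the test module
// below; the socket path and command are placeholders): a client writes a textual
// `Command`, closes the write half, then reads a single JSON-encoded `Event` reply.
//
//     let mut stream = UnixStream::connect("/tmp/sota-commands.socket").expect("connect");
//     stream.write_all(b"StartInstall 00000000-0000-0000-0000-000000000001").expect("write");
//     stream.shutdown(Shutdown::Write).expect("shutdown");
//     let event: Event = json::from_reader(&stream).expect("read event");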
impl Gateway for Socket {
fn start(&mut self, ctx: Sender<CommandExec>, erx: Receiver<Event>) {
info!("Listening for commands at socket {}", self.cmd_sock);
info!("Sending events to socket {}", self.ev_sock);
let _ = fs::remove_file(&self.cmd_sock);
let cmd_sock = UnixListener::bind(&self.cmd_sock).expect("command socket");
let ev_sock = self.ev_sock.clone();
thread::spawn(move || loop {
handle_event(&ev_sock, erx.recv().expect("socket events"))
});
for conn in cmd_sock.incoming() {
let ctx = ctx.clone();
conn.map(|stream| thread::spawn(move || handle_stream(stream, &ctx)))
.map(|_handle| ())
.unwrap_or_else(|err| error!("couldn't open socket connection: {}", err));
}
}
}
fn handle_stream(mut stream: UnixStream, ctx: &Sender<CommandExec>) {
info!("New socket connection.");
let resp = parse_command(&mut stream, ctx)
.map(|ev| json::to_vec(&ev).expect("couldn't encode Event"))
.unwrap_or_else(|err| format!("{}", err).into_bytes());
stream.write_all(&resp).unwrap_or_else(|err| error!("couldn't write to commands socket: {}", err));
stream.shutdown(Shutdown::Write).unwrap_or_else(|err| error!("couldn't close commands socket: {}", err));
}
fn parse_command(stream: &mut UnixStream, ctx: &Sender<CommandExec>) -> Result<Event, Error> {
let mut reader = BufReader::new(stream);
let mut input = String::new();
reader.read_to_string(&mut input)?;
debug!("socket input: {}", input);
let cmd = input.parse::<Command>()?;
let (etx, erx) = chan::async::<Event>();
ctx.send(CommandExec { cmd: cmd, etx: Some(etx) });
erx.recv().ok_or_else(|| Error::Socket("internal receiver error".to_string()))
}
fn handle_event(ev_sock: &str, event: Event) {
let reply = match event {
Event::DownloadComplete(dl) => {
EventWrapper::new("DownloadComplete", dl).to_json()
}
Event::DownloadFailed(id, reason) => {
EventWrapper::new("DownloadFailed", DownloadFailed { update_id: id, reason: reason }).to_json()
}
_ => return
};
let _ = UnixStream::connect(ev_sock)
.map_err(|err| debug!("skipping event socket broadcast: {}", err))
.map(|mut stream| {
stream.write_all(&reply).unwrap_or_else(|err| error!("couldn't write to events socket: {}", err));
stream.shutdown(Shutdown::Write).unwrap_or_else(|err| error!("couldn't close events socket: {}", err));
});
}
// FIXME(PRO-1322): create a proper JSON api
#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)]
struct EventWrapper<S: Serialize> {
pub version: String,
pub event: String,
pub data: S
}
impl<S: Serialize> EventWrapper<S> {
fn new(event: &str, data: S) -> Self {
EventWrapper { version: "0.1".into(), event: event.into(), data: data }
}
fn to_json(&self) -> Vec<u8> {
json::to_vec(self).expect("encode EventWrapper")
}
}
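// For illustration only (field names taken from the DownloadComplete value used in
// the tests below; the exact JSON depends on that type's Serialize impl), a
// broadcast event looks roughly like:
//
//     {"version":"0.1","event":"DownloadComplete","data":{"update_id":"...","update_image":"/foo","signature":"sig"}}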
#[cfg(test)]<|fim▁hole|> use uuid::Uuid;
use datatype::{Command, DownloadComplete, Event};
const CMD_SOCK: &'static str = "/tmp/sota-commands.socket";
const EV_SOCK: &'static str = "/tmp/sota-events.socket";
#[test]
fn socket_commands_and_events() {
let (ctx, crx) = chan::sync::<CommandExec>(0);
let (etx, erx) = chan::sync::<Event>(0);
let mut socket = Socket { cmd_sock: CMD_SOCK.into(), ev_sock: EV_SOCK.into() };
thread::spawn(move || socket.start(ctx, erx));
let _ = fs::remove_file(EV_SOCK);
let serv = UnixListener::bind(EV_SOCK).expect("open events socket");
let send = DownloadComplete { update_id: Uuid::default(), update_image: "/foo".into(), signature: "sig".into() };
etx.send(Event::DownloadComplete(send.clone()));
let (stream, _) = serv.accept().expect("read events socket");
let recv: EventWrapper<DownloadComplete> = json::from_reader(&stream).expect("recv event");
assert_eq!(recv.version, "0.1".to_string());
assert_eq!(recv.event, "DownloadComplete".to_string());
assert_eq!(recv.data, send);
thread::spawn(move || {
let _ = etx; // move into this scope
loop {
match crx.recv() {
Some(CommandExec { cmd: Command::StartInstall(id), etx: Some(etx) }) => {
etx.send(Event::InstallingUpdate(id));
}
Some(_) => panic!("expected StartInstall"),
None => break
}
}
});
crossbeam::scope(|scope| {
for n in 0..10 {
scope.spawn(move || {
let id = format!("00000000-0000-0000-0000-00000000000{}", n).parse::<Uuid>().unwrap();
let mut stream = UnixStream::connect(CMD_SOCK).expect("open command socket");
let _ = stream.write_all(&format!("StartInstall {}", id).into_bytes()).expect("write to stream");
stream.shutdown(Shutdown::Write).expect("shut down writing");
assert_eq!(Event::InstallingUpdate(id), json::from_reader(&stream).expect("read event"));
});
}
});
}
}<|fim▁end|> | mod tests {
use super::*;
use crossbeam; |
<|file_name|>snrscript.py<|end_file_name|><|fim▁begin|>"""
Script for the paper
"""
import time
import numpy as np
import snr_of_images
import urllib
import zlib
from io import BytesIO
import cv2
import h5py
import os
import matplotlib.pyplot as plt
import json
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Uncomment the following line if requests raises an InsecureRequestWarning
#requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
sbem_skip = False
atum_skip = False
fbem_skip = False
temca_skip = False
OCP_server = 'http://cloud.neurodata.io/ocp/ca/'
n_comp = 100
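# Summary (added for clarity; behaviour inferred from __init__ below): OCP_data
# downloads `num` random 2000x2000 single-slice cutouts of the named OCP/ndstore
# dataset via the /npz/ endpoint, retries failed or corrupt downloads, records
# permanently failing URLs in self.bad, skips tiles that are mostly zero padding,
# and stacks the surviving cutouts into self.data.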
class OCP_data:
def __init__(self,key,num):
info = requests.get(OCP_server+key+'/info/',verify=False).json()['dataset']
res = info['resolutions'][0]
x_size,y_size,z_size = info['imagesize'][str(res)]
x_off,y_off,z_off = info['neariso_offset'][str(res)]
ocp_x_rand = np.random.randint(x_off,x_size-2000,num+400)
ocp_y_rand = np.random.randint(y_off,y_size-2000,num+400)
ocp_z_rand = np.random.randint(z_off,z_size,num+400)
count = 0
self.bad = []
self.bad_state = []
for i in range(num+400):
print(key,': ',count)
try_count = 0
while try_count < 10:
try_count2 = 0
try:
f = requests.get("http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/",timeout=60,verify=False).content
except Exception as e:
print(e)
print(key,', type 1: ',count, "http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
try_count2 +=1
if try_count2 == 5:
raise IOError('Maximum tries to download exceeded')
continue
try:
zdata = f#.read()
datastr = zlib.decompress ( zdata[:] )
datafobj = BytesIO ( datastr )
temp_data = np.load (datafobj)
except:
try_count +=1
print(key,', type 2: ',count, "http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
continue
if len(temp_data) == 0: #data failed to download correctly
try_count +=1
else:
break
if try_count == 10:
self.bad.append("http://cloud.neurodata.io/ocp/ca/"+key+"/npz/"+str(res)+"/"+str(ocp_x_rand[i])+","+str(ocp_x_rand[i]+2000)+"/"+str(ocp_y_rand[i])+","+str(ocp_y_rand[i]+2000)+"/"+str(ocp_z_rand[i])+","+str(ocp_z_rand[i]+1)+"/")
self.bad_state.append(np.random.get_state())
continue
if np.sum(temp_data[0]==0) > 0.5*len(temp_data[0].flatten()):
continue
if count == 0:
data = temp_data
else:
data = np.append(data, temp_data, axis=1)
count += 1
if count == num:
break
self.data = data[0]
np.random.seed(20170127)
#sudo mount -t davfs https://segem.rzg.mpg.de/webdav /image/sbem
sbem_snr = np.zeros(n_comp)
count = 0
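# Note (added for clarity): each pass below stitches a 5x5 mosaic of 128x128
# Knossos-style raw cubes (copied to /tmp first) into one 640x640 image at a
# random (x, y, z) location and scores it with snr_of_images.SNR.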
while count < n_comp:
x = np.random.permutation(25)[0]+1 #Plus one to avoid some of the edges
y = np.random.permutation(35)[0]+1
z = np.random.permutation(42)[0]+1
if sbem_skip is True:
count+=1
if count == n_comp:
break
else:
continue
im = np.zeros([128*5,128*5])
for k in range(5):
for l in range(5):
#Construct large images by copying over to /tmp and reading the raw
os.system('cp /image/sbem/datasets/ek0563/raw/color/1/x'+str(x+k).zfill(4)+'/y'+str(y+l).zfill(4)+'/z'+str(z).zfill(4)+'/100527_k0563_mag1_x'+str(x+k).zfill(4)+'_y'+str(y+l).zfill(4)+'_z'+str(z).zfill(4)+'.raw /tmp/tmpim.raw')
im[l*128:(l+1)*128,k*128:(k+1)*128] = np.memmap('/tmp/tmpim.raw',dtype=np.uint8,shape=(128,128))
sbem_snr[count] = snr_of_images.SNR(im.astype(np.uint8),mode='im_array',conv=35,hess=200)
count += 1
if count == n_comp:
break
#sbem_snr.sort()
sbem_snr = sbem_snr[sbem_snr < np.inf]
<|fim▁hole|>for i in range(n_comp+10):
try:
atum_snr[count] = snr_of_images.SNR(atum.data[i,:,:],mode='im_array',conv=55,hess=800)
count += 1
if count == n_comp:
break
except:
continue
atum_snr = atum_snr[atum_snr < np.inf]
fibsem_snr = np.zeros(n_comp)
fib_random = np.random.permutation(range(1376,7651))[:300]
#fib_sem images have to be downloaded in advance
count = 0
for i,j in enumerate(fib_random):
if fbem_skip is True:
break
try:
fibsem_snr[count] = snr_of_images.SNR('fib_sem_images/grayscale-xy-'+str(j)+'.png',conv=35,hess=3200)
im = cv2.imread('fib_sem_images/grayscale-xy-'+str(j)+'.png',cv2.IMREAD_GRAYSCALE)
count += 1
if count == n_comp:
break
except:
continue
fibsem_snr = fibsem_snr[fibsem_snr < np.inf]
#aplus, bplus and cplus are the cremi.org files
crop_im = h5py.File('aplus.h5','r')
temca = crop_im['volumes']['raw']
crop_im = h5py.File('bplus.h5','r')
temca = np.append(temca,crop_im['volumes']['raw'],axis=0)
crop_im = h5py.File('cplus.h5','r')
temca = np.append(temca,crop_im['volumes']['raw'],axis=0)
temca_random = np.random.permutation(len(temca))[:150]
temcas = np.zeros(n_comp)
count = 0
for i,j in enumerate(temca_random):
if temca_skip is True:
break
try:
temcas[count] = snr_of_images.SNR(temca[j,:,:],mode='im_array',conv=55,hess=200)
cv2.imwrite('/image/Used/temca_'+str(count).zfill(3)+'.tif',temca[j,:,:])
count +=1
if count == n_comp:
break
except:
continue
temcas = temcas[temcas < np.inf]
bock11 = OCP_data('bock11',n_comp+10)
bock_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
try:
bock_snr[count] = snr_of_images.SNR(bock11.data[i,:,:],mode='im_array',conv=55,hess=800)
cv2.imwrite('/image/Used/bock11_'+str(count).zfill(3)+'.tif',bock11.data[i,:,:])
count += 1
if count == n_comp:
break
except:
continue
bock_snr = bock_snr[bock_snr < np.inf]
acardona11 = OCP_data('acardona_0111_8',n_comp+10)
acardona_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
try:
acardona_snr[count] = snr_of_images.SNR(acardona11.data[i,:,:],mode='im_array',conv=55,hess=800)
cv2.imwrite('/image/Used/acardona11_'+str(count).zfill(3)+'.tif',acardona11.data[i,:,:])
count += 1
if count == n_comp:
break
except:
continue
acardona_snr = acardona_snr[acardona_snr < np.inf]
takemura13 = OCP_data('takemura13',n_comp+10)
takemura_snr = np.zeros(n_comp)
count = 0
for i in range(n_comp+10):
try:
takemura_snr[count] = snr_of_images.SNR(takemura13.data[i,:,:],mode='im_array',conv=55,hess=800)
cv2.imwrite('/image/Used/takemura13_'+str(count).zfill(3)+'.tif',takemura13.data[i,:,:])
count += 1
if count == n_comp:
break
except:
continue
takemura_snr = takemura_snr[takemura_snr < np.inf]
times = np.array([35,1.2e-2,0.14,0.59])
means = np.array([np.nanmean(temcas),np.nanmean(fibsem_snr),np.nanmean(atum_snr),np.nanmean(sbem_snr),np.nanmean(bock_snr),np.nanmean(acardona_snr),np.nanmean(takemura_snr)])
yerr = np.array([np.nanstd(temcas),np.nanstd(fibsem_snr),np.nanstd(atum_snr),np.nanstd(sbem_snr),np.nanstd(bock_snr),np.nanstd(acardona_snr),np.nanstd(takemura_snr)])
np.savetxt('means_feature.txt',means)
np.savetxt('std_feature.txt',yerr)
b = np.argmin(np.abs(atum_snr-np.mean(atum_snr)))
#np.savetxt('atum_loc.txt',b)
means2 = np.array([ 15.7, 11.1, 9.9, 5.2])
yerr2 = np.array([ 2. , 2.6, 1.5, 0.8])
f = plt.figure(figsize=(10,4))
ax0 = f.add_subplot(121)
symbol = ['o','x','s','d','>','8','h','+']
colors = ['r','g','b','k','y','c','m','brown']
[ax0.loglog(times[i],means[i],'.',mfc=colors[i],marker=symbol[i],mec=colors[i]) for i in range(len(means))]
[ax0.errorbar(times[i],means[i],yerr=yerr[i],lw=1,fmt='none',ecolor=colors[i]) for i in range(len(means))]
ax0.set_title(r'${\rm a)~Feature~based~S/N}$')
ax0.set_xlim(1e-2,50)
ax0.set_ylim(1,30)
ax0.set_xlabel(r'${\rm Acquisition~Rate}~[\mu{}m^3~s^{-1}]$')
ax0.set_ylabel(r'${\rm S/N}$')
ax1 = f.add_subplot(122)
symbol = ['o','x','s','d']
colors = ['r','g','b','k']
[ax1.loglog(times[i],means2[i],'.',mfc=colors[i],marker=symbol[i],mec=colors[i]) for i in range(4)]
[ax1.errorbar(times[i],means2[i],yerr=yerr2[i],lw=1,fmt='none',ecolor=colors[i]) for i in range(len(means2))]
ax1.set_title(r'${\rm b)~Cell-edge~S/N}$')
ax1.set_xlim(1e-2,50)
ax1.set_ylim(1,30)
ax1.set_xlabel(r'${\rm Acquisition~Rate}~[\mu{}m^3~s^{-1}]$')
ax1.set_ylabel(r'${\rm S/N}$')<|fim▁end|> | atum_snr = np.zeros(n_comp)
atum= OCP_data('kasthuri11',n_comp+10)
count = 0 |
<|file_name|>numerov.py<|end_file_name|><|fim▁begin|>import numpy
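# Explanatory note (added; not in the original source): the loop below is the
# standard Numerov three-term recurrence, which with these coefficients integrates
# a second-order ODE of the form f''(x) = a(x) * f(x) on a uniform grid of spacing
# `step` (the sign convention is inferred from the 2 + 5*h**2*a/6 and
# 1 - h**2*a/12 factors):
#
#     f[i] = ( f[i-1]*(2 + 5*h**2*a[i-1]/6) - f[i-2]*(1 - h**2*a[i-2]/12) )
#            / (1 - h**2*a[i]/12)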
def numerov_integration(domain, a, f0, f1):
<|fim▁hole|> f = numpy.zeros(len(domain), dtype=numpy.complex)
f[0] = f0
f[1] = f1
step = domain.step
for i in range(2, len(domain)):
phi_i1 = f[i-1] * (2.0 + 5.0 * step**2 * a[i-1] / 6.0)
phi_i2 = f[i-2] * (1.0 - step**2 * a[i-2] / 12.0)
f[i] = (phi_i1 - phi_i2) / (1.0 - step**2 * a[i] / 12.0)
return f<|fim▁end|> | |
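# Usage sketch (assumptions: `domain` is a uniform grid exposing `.step` and supporting len(),
# and `a` is an array of matching length):
#   f = numerov_integration(domain, a, f0=0.0, f1=1e-6)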
<|file_name|>celery.py<|end_file_name|><|fim▁begin|>import os
from celery import Celery<|fim▁hole|>from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'songaday_searcher.settings')
app = Celery('songaday_searcher')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)<|fim▁end|> | |
<|file_name|>Condition.ts<|end_file_name|><|fim▁begin|>import { flatArgs } from './Query';
import type { Entity } from '../binding';
import type { Filter } from './Filter';
import { JsonMap } from '../util';
import type { GeoPoint } from '../GeoPoint';
/**
* The Condition interface defines all existing query filters
*/
export interface Condition<T extends Entity> {
/**
* An object that contains filter rules which will be merged with the current filters of this query
*
* @param conditions - Additional filters for this query
* @return The resulting Query
*/
where(conditions: JsonMap): Filter<T>;
/**
   * Adds an equal filter to the field. All other filters on the field will be discarded
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*/
equal(field: string, value: any): Filter<T>
/**
* Adds a not equal filter to the field
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/ne/
*/
notEqual(field: string, value: any): Filter<T>
/**
* Adds a greater than filter to the field
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/gt/
*/
greaterThan(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a greater than or equal to filter to the field
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/gte/
*/
greaterThanOrEqualTo(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a less than filter to the field
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/lt/
*/
lessThan(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a less than or equal to filter to the field
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/lte/
*/
lessThanOrEqualTo(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a between filter to the field. This is a shorthand for an less than and greater than filter.
* @param field The field to filter
* @param greaterValue The field value must be greater than this value
* @param lessValue The field value must be less than this value
* @return The resulting Query
*/
between(
field: string,
greaterValue: number | string | Date | Entity,
lessValue: number | string | Date | Entity
): Filter<T>
/**
   * Adds an “in” filter to the field
*
* The field value must be equal to one of the given values.
*
* @param field The field to filter
* @param args The field value or values to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/in/
*/
in(field: string, ...args: any[]): Filter<T>
/**
* Adds an “in” filter to the field
*
* The field value must be equal to one of the given values.
*
* @param field The field to filter
* @param args The field value or values to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/in/
*/
in(field: string, ...args: any[]): Filter<T>
/**
* Adds a “not in” filter to the field
*
* The field value must not be equal to any of the given values.
*
* @param field The field to filter
* @param args The field value or values to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/nin/
*/
notIn(field: string, ...args: any[]): Filter<T>
/**
* Adds a “is null” filter to the field
*
* The field value must be null.
*
* @param field The field to filter
* @return The resulting Query
*/
isNull(field: string): Filter<T>
/**
* Adds a “is not null” filter to the field
*
* The field value must not be null.
*
* @param field The field to filter
* @return The resulting Query
*/
isNotNull(field: string): Filter<T>
/**
* Adds a contains all filter to the collection field
*
* The collection must contain all the given values.
*
* @param field The field to filter
* @param args The field value or values to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/all/
*/<|fim▁hole|> *
* The field value divided by divisor must be equal to the remainder.
*
* @param field The field to filter
* @param divisor The divisor of the modulo filter
* @param remainder The remainder of the modulo filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/mod/
*/
mod(field: string, divisor: number, remainder: number): Filter<T>
/**
* Adds a regular expression filter to the field
*
* The field value must matches the regular expression.
* <p>Note: Only anchored expressions (Expressions that starts with an ^) and the multiline flag are supported.</p>
*
* @param field The field to filter
* @param regExp The regular expression of the filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/regex/
*/
matches(field: string, regExp: string | RegExp): Filter<T>
/**
* Adds a size filter to the collection field
*
* The collection must have exactly size members.
*
* @param field The field to filter
* @param size The collections size to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/size/
*/
size(field: string, size: number): Filter<T>
/**
* Adds a geopoint based near filter to the GeoPoint field
*
* The GeoPoint must be within the maximum distance
* to the given GeoPoint. Returns from nearest to farthest.
*
* @param field The field to filter
* @param geoPoint The GeoPoint to filter
* @param maxDistance Tha maximum distance to filter in meters
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/nearSphere/
*/
near(field: string, geoPoint: GeoPoint, maxDistance: number): Filter<T>
/**
* Adds a GeoPoint based polygon filter to the GeoPoint field
*
* The GeoPoint must be contained within the given polygon.
*
* @param field The field to filter
* @param geoPoints The geoPoints that describes the polygon of the filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/geoWithin/
*/
withinPolygon(field: string, ...geoPoints: GeoPoint[] | GeoPoint[][]): Filter<T>
/**
   * Adds an equal filter to the field
   *
   * All other filters on the field will be discarded.
*
* @method
* @param field The field to filter
* @param value The value used to filter
*/
eq(field: string, value: any): Filter<T>
/**
* Adds a not equal filter to the field
*
* @method
* @param field The field to filter
* @param value The value used to filter
*
* @see http://docs.mongodb.org/manual/reference/operator/query/ne/
*/
ne(field: string, value: any): Filter<T>
/**
* Adds a less than filter to the field
*
* Shorthand for {@link Condition#lessThan}.
*
* @method
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/lt/
*/
lt(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a less than or equal to filter to the field
*
* Shorthand for {@link Condition#lessThanOrEqualTo}.
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/lte/
*/
le(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a greater than filter to the field
*
* Shorthand for {@link Condition#greaterThan}.
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/gt/
*/
gt(field: string, value: number | string | Date | Entity): Filter<T>
/**
* Adds a greater than or equal to filter to the field
*
* Shorthand for {@link Condition#greaterThanOrEqualTo}.
*
* @param field The field to filter
* @param value The value used to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/gte/
*/
ge(field: string, value: number | string | Date | Entity): Filter<T>
/**
   * The collection must contain one of the given values
*
* Adds a contains any filter to the collection field.
* Alias for {@link Condition#in}.
*
* @param field The field to filter
* @param args The field value or values to filter
* @return The resulting Query
*
* @see http://docs.mongodb.org/manual/reference/operator/query/in/
*/
containsAny(field: string, ...args: any[]): Filter<T>
/**
* Adds a filter to this query
*
* @param field
* @param filter
* @param value
* @return The resulting Query
*/
addFilter(field: string | null, filter: string | null, value: any): Filter<T>
}
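// Usage sketch (illustrative only; the field names below are hypothetical and `query` is assumed
// to be some object implementing Condition<T>):
//   query.between('priority', 1, 5)
//   query.matches('title', /^Todo/)   // anchored RegExp, without the i or g flags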
// eslint-disable-next-line @typescript-eslint/no-redeclare
export const Condition: Partial<Condition<any>> = {
where(this: Condition<any>, conditions) {
return this.addFilter(null, null, conditions);
},
equal(this: Condition<any>, field, value) {
return this.addFilter(field, null, value);
},
notEqual(this: Condition<any>, field, value) {
return this.addFilter(field, '$ne', value);
},
greaterThan(this: Condition<any>, field, value) {
return this.addFilter(field, '$gt', value);
},
greaterThanOrEqualTo(this: Condition<any>, field, value) {
return this.addFilter(field, '$gte', value);
},
lessThan(this: Condition<any>, field, value) {
return this.addFilter(field, '$lt', value);
},
lessThanOrEqualTo(this: Condition<any>, field, value) {
return this.addFilter(field, '$lte', value);
},
between(this: Condition<any>, field, greaterValue, lessValue) {
return this
.addFilter(field, '$gt', greaterValue)
.addFilter(field, '$lt', lessValue);
},
in(this: Condition<any>, field: string, ...args: any[]) {
return this.addFilter(field, '$in', flatArgs(args));
},
notIn(this: Condition<any>, field, ...args: any[]) {
return this.addFilter(field, '$nin', flatArgs(args));
},
isNull(this: Condition<any>, field) {
return this.equal(field, null);
},
isNotNull(this: Condition<any>, field) {
return this.addFilter(field, '$exists', true)
.addFilter(field, '$ne', null);
},
containsAll(this: Condition<any>, field, ...args: any[]) {
return this.addFilter(field, '$all', flatArgs(args));
},
mod(this: Condition<any>, field, divisor, remainder) {
return this.addFilter(field, '$mod', [divisor, remainder]);
},
matches(this: Condition<any>, field, regExp) {
const reg = regExp instanceof RegExp ? regExp : new RegExp(regExp);
if (reg.ignoreCase) {
throw new Error('RegExp.ignoreCase flag is not supported.');
}
if (reg.global) {
throw new Error('RegExp.global flag is not supported.');
}
if (reg.source.indexOf('^') !== 0) {
      throw new Error('regExp must be an anchored expression, i.e. it must start with a ^.');
}
const result = this.addFilter(field, '$regex', reg.source);
if (reg.multiline) {
result.addFilter(field, '$options', 'm');
}
return result;
},
size(this: Condition<any>, field, size) {
return this.addFilter(field, '$size', size);
},
near(this: Condition<any>, field, geoPoint, maxDistance) {
return this.addFilter(field, '$nearSphere', {
$geometry: {
type: 'Point',
coordinates: [geoPoint.longitude, geoPoint.latitude],
},
$maxDistance: maxDistance,
});
},
withinPolygon(this: Condition<any>, field, ...args: any[]) {
const geoPoints = flatArgs(args);
return this.addFilter(field, '$geoWithin', {
$geometry: {
type: 'Polygon',
coordinates: [geoPoints.map((geoPoint) => [geoPoint.longitude, geoPoint.latitude])],
},
});
},
};
// aliases
Object.assign(Condition, {
eq: Condition.equal,
ne: Condition.notEqual,
lt: Condition.lessThan,
le: Condition.lessThanOrEqualTo,
gt: Condition.greaterThan,
ge: Condition.greaterThanOrEqualTo,
containsAny: Condition.in,
});<|fim▁end|> | containsAll(field: string, ...args: any[]): Filter<T>
/**
* Adds a modulo filter to the field |
<|file_name|>version.ts<|end_file_name|><|fim▁begin|>import test from 'ava';
import * as proxyquire from 'proxyquire';
import * as sinon from 'sinon';
import { CLIOptions } from '../../../src/lib/types';
const actions = ({ version: true } as CLIOptions);
const logger = {
error() { },
log() { }
};
proxyquire('../../../src/lib/cli/version', { '../utils/logging': logger });
import { printVersion } from '../../../src/lib/cli/version';
test.beforeEach((t) => {<|fim▁hole|>
t.context.logger = logger;
});
test.afterEach.always((t) => {
t.context.logger.log.restore();
t.context.logger.error.restore();
});
test.serial('If version option is defined, it should print the current version and return true', async (t) => {
const result = await printVersion(actions);
t.true(result);
t.true(t.context.logger.log.calledOnce);
t.true(t.context.logger.log.args[0][0].startsWith('v'));
});
test.serial('If version is not an option, it should return false', async (t) => {
const result = await printVersion(({}) as CLIOptions);
t.false(result);
});<|fim▁end|> | sinon.spy(logger, 'log');
sinon.spy(logger, 'error'); |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djangorest.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:<|fim▁hole|> except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)<|fim▁end|> | import django |
<|file_name|>RAD4SNPs_Main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
########################################################################## RAD4SNPs:##############################################################################
# A set of Python scripts to select and validate independent SNPs markers from a list of read files #
##################################################################################################################################################################
# MAIN PROGRAM
# Authors: G.LASSALLE ([email protected]) & C.DELORD ([email protected])
# Last update: AUGUST 2017
#################### PRE-CONDITIONS
#- [-i] Working directory where to store results of the pipeline for the focal species X
#- [-d] exact name of MySQL database where denovo_map.pl Stacks data are available for the focal species X
#- [-i1] single-end reads (reads 1) for focal species X duplicate 1
#- [-i2] single-end reads (reads 1) for focal species X duplicate 2
#- [-i3] paired-end reads (reads 2) for focal species X duplicate 1
#- [-i4] paired-end reads (reads 2) for focal species X duplicate 2
#- BWA and SAMtools available
#- Connection to the Stacks MySQL database available: databases of Stacks 'denovo_map output' for each species.
###############################################################################
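# Example invocation (illustrative paths and names only):
#   ./RAD4SNPs_Main.py -i /path/to/workdir -d stacks_db_spX -c SPX \
#       -i1 spX_rep1.R1.fq.gz -i2 spX_rep2.R1.fq.gz -i3 spX_rep1.R2.fq.gz -i4 spX_rep2.R2.fq.gz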
import argparse
import os
import sys
import MySQLdb
###############################################################################
parser = argparse.ArgumentParser()
parser.add_argument('-i', action='store', dest='InputDir', help='Working Directory')
parser.add_argument('-d', action='store', dest='database', help='Stacks database')
parser.add_argument('-c', action='store', dest='CodeSp', help='ID of the species')
parser.add_argument('-i1', action='store', dest='R11', help='First R1 file')
parser.add_argument('-i2', action='store', dest='R12', help='Second R1 file')
parser.add_argument('-i3', action='store', dest='R21', help='First R2 file')
parser.add_argument('-i4', action='store', dest='R22', help='Second R2 file')
parser.add_argument('--version', action='version', version='%(prog)s 0.1')
results = parser.parse_args()
print 'input directory =', results.InputDir
##############################################################################
# Arguments testing
##############################################################################
if results.InputDir:
if os.path.isdir(results.InputDir):
print "Working directory is valid."
else :
print "Caution: working directory is invalid, please ckeck [-i]."
sys.exit()
else :
print "Please insert path for working directory [-i]. End of program."
sys.exit()
##############################################################################
if results.database:
db = MySQLdb.connect(host="", # your host, usually localhost
user="", # your username
passwd="", # your password
db=results.database) # name of the database
cur1= db.cursor() # connexion
print "Currently working on MySQL database: "+str(results.database)
else:
print "Incorrect ID for database: database not found, please check [-d]"
sys.exit()
###############################################################################
#
if results.R11:
if os.path.isfile(results.R11):
print "First file of single-end reads: found."
else :
print "Path to single-end reads data is not a file: please check out [-i1]."
sys.exit()
else :
print "Please insert path to single-end read files [-i1]. End of program."
sys.exit()
#
if results.R12:
if os.path.isfile(results.R12):
print "Second file of single-end reads: found."
else :
print "Path to single-end reads data is not a file: please check out [-i2]."
sys.exit()
else :
print "Please insert path to single-end read files [-2]. End of program."
sys.exit()
#
if results.R21:
if os.path.isfile(results.R21):
print "First file of paired-end reads: found."
else :
print "Path to paired-end reads data is not a file: please check out [-i3]."
sys.exit()
else :
print "Please insert path to paired-end read files [-i3]. End of program."
sys.exit()
#
if results.R22:
if os.path.isfile(results.R22):
print "Second file of paired-end reads: found."
else :
print "Path to paired-end reads data is not a file: please check out [-i4]."
sys.exit()
else :
print "Please insert path to paired-end read files [-i4]. End of program."
sys.exit()
###############################################################################
if results.CodeSp:
CodeEspece=str(results.CodeSp)
if CodeEspece[:1]!="_":
CodeEspece=str(results.CodeSp)+str("_")
else:
CodeEspece="std_"
###############################################################################
WorkDir=os.path.abspath(results.InputDir) # Current working directory
FastaCatalog=str(WorkDir)+"/"+str(results.CodeSp)+"Catalog.fasta" # Formatting name of candidates fasta file -output of MySQL filtering
###############################################################################
# Main program
###############################################################################
if os.path.isfile("/usr/bin/bwa"):
print "BWA program is found."
else :
print "Cannot find BWA: please check out pipeline requirements."
sys.exit()
###samtools
if os.path.isfile("/usr/bin/samtools"):
print "SAMtools program is found."
else :
print "Cannot find SAMtools: please check out pipeline requirements."
sys.exit()
#####################################################
# Working directory writable
filepath = results.InputDir+'/file.txt'
try:
filehandle = open( filepath, 'w' )
except IOError:
sys.exit( 'Working directory is not accessible' + filepath )
###############################################################################
# Pipeline commands:
###############################################################################
#################################### FIRST FILTERING ##########################
print os.getcwd()
commandeExtractFasta="./RAD4SNPs_SQL2Fasta.py -o "+str(FastaCatalog)+" -d "+str(results.database)+" -c "+str(CodeEspece)
print "Extraction du fichier fasta"
print commandeExtractFasta
os.system(commandeExtractFasta)
############################## Fusion of single-end reads #####################
if results.R11:
if results.R12:
commandFusionR1="cat "+str(results.R11)+" "+str(results.R12)+" > "+str(WorkDir)+"/allR1.fq.gz"
else :
commandFusionR1="cp "+str(results.R11)+" "+str(WorkDir)+"/allR1.fq.gz"
############################# end of merge
############################## Fusion of paired-end reads #####################
if results.R21:
if results.R22:
commandFusionR2="cat "+str(results.R21)+" "+str(results.R22)+" > "+str(WorkDir)+"/allR2.fq.gz"
else :
commandFusionR2="cp "+str(results.R21)+" "+str(WorkDir)+"/allR2.fq.gz"
#################################### SECOND FILTERING (1) #####################
command1="bwa index "+str(FastaCatalog) # Indexing
command2="bwa mem -a -M "+str(FastaCatalog)+" "+str(WorkDir)+"/allR1.fq.gz > "+str(WorkDir)+"/PremierAlign.sam" # SE reads alignment
command3="samtools view -Sb "+str(WorkDir)+"/PremierAlign.sam | samtools sort - "+str(WorkDir)+"/PremierAlign1Sorted" # Conversion to bam file
command4="samtools view -F4 "+str(WorkDir)+"/PremierAlign1Sorted.bam > "+str(WorkDir)+"/PremierAlign1Sorted-F4.sam" # Elimination of unmapped SE reads
print "SE reads merging: "+str(commandFusionR1)
os.system(commandFusionR1)
print "PE reads merging: "+str(commandFusionR2)
os.system(commandFusionR2)
print "BWA indexing: "+str(command1)<|fim▁hole|>os.system(command1)
print "Alignment: "+str(command2)
os.system(command2)
print "Conversion to bam file: "+str(command3)
os.system(command3)
print "Elimination of unmapped SE reads: "+str(command4)
os.system(command4)
print " ************************************************************************"
print " Second filtering (1) with default parameters "
print " ************************************************************************"
print os.getcwd()
commande5="./RAD4SNPs_SamFilter.py -i "+str(WorkDir)+"/PremierAlign1Sorted-F4.sam"
os.system(commande5)
Candidatfasta1=str(WorkDir)+"/PremierAlign1Sorted-F4R1Filtered.fa" # Obtention of incomplete SE-validated fasta file
if os.path.isfile(Candidatfasta1):
print "SE-validated fasta file about to be completed. Re-aligning to complete second filtering."
else :
sys.exit( '****ERROR**** A problem occurred. Please check out alignment outputs.')
#################################### SECOND FILTERING (2) #####################
command21="bwa index "+str(Candidatfasta1)
command22="bwa mem -a -M "+str(Candidatfasta1)+" "+str(WorkDir)+"/allR1.fq.gz > "+str(WorkDir)+"/SecondAlign.sam"
command23="samtools view -Sb "+str(WorkDir)+"/SecondAlign.sam | samtools sort - "+str(WorkDir)+"/SecondAlign1Sorted"
command25="samtools index "+str(WorkDir)+"/SecondAlign1Sorted.bam"
command25bis="samtools faidx "+str(Candidatfasta1)
command26="samtools mpileup -d 1000 -O --ff 4 -f "+str(Candidatfasta1) +" "+ str(WorkDir)+"/SecondAlign1Sorted.bam"+" > "+str(WorkDir)+"/CandidatsR1.pileup"
print "BWA indexing: "+str(command21)
os.system(command21)
print "Alignment: "+str(command22)
os.system(command22)
print "Conversion to bam file: "+str(command23)
os.system(command23)
print "Indexing of bam file: "+str(command25)
os.system(command25)
print "Indexing for pileup file: "+str(command25bis)
os.system(command25bis)
print "Construction of SE pileup file: "+str(command26)
os.system(command26)
print " ************************************************************************"
print " Second filtering (2) with default parameters "
print " ************************************************************************"
print os.getcwd()
command27="./RAD4SNPs_PileupFilter.py -i "+str(WorkDir)+"/CandidatsR1.pileup"
print "End of second filtering: elimination of flanking variants: "+str(command27)
os.system(command27)
command28="./RAD4SNPs_FinalSQLExtract.py -i"+str(WorkDir)+"/CandidatsR1NoMulti.txt -d "+str(results.database)+" -c "+str(CodeEspece)+" > "+str(WorkDir)+"/CandidatFin.fasta"
print "Complete SE-validated fasta file: "+str(command28)
os.system(command28)
command28bis="sed -i '1d' "+str(WorkDir)+"/CandidatFin.fasta"
os.system(command28bis)
#################################### THIRD FILTERING ##########################
CandidatFin=str(WorkDir)+"/CandidatFin.fasta"
if os.path.isfile(CandidatFin):
print "SE-validated fasta file is completed. Re-aligning to perform third filtering."
else :
sys.exit( '****ERROR**** A problem occurred. Please check out alignment and/or pileup outputs.')
command29="bwa index "+str(CandidatFin)
command30="bwa mem -a -M "+str(CandidatFin)+" "+str(WorkDir)+"/allR2.fq.gz > "+str(WorkDir)+"/ThirdAlign.sam"
command31="samtools view -Sb "+str(WorkDir)+"/ThirdAlign.sam | samtools sort - "+str(WorkDir)+"/ThirdAlign2Sorted"
command32="samtools index "+str(WorkDir)+"/ThirdAlign2Sorted.bam"
command32bis="samtools faidx "+str(CandidatFin)
command33="samtools mpileup -d 1000 -O --ff 4 -f "+str(CandidatFin)+" "+str(WorkDir)+"/ThirdAlign2Sorted.bam"+" > "+str(WorkDir)+"/Candidats3.pileup"
print "BWA indexing: "+str(command29)
os.system(command29)
print "Alignment: "+str(command30)
os.system(command30)
print "Conversion to bam file: "+str(command31)
os.system(command31)
print "Indexing of bam file: "+str(command32)
os.system(command32)
print "Indexing for pileup file: "+str(command32bis)
os.system(command32bis)
print "Construction of PE pileup file: "+str(command33)
os.system(command33)
print " ************************************************************************"
print " Third filtering with default parameters "
print " ************************************************************************"
print os.getcwd()
command34="./RAD4SNPs_PileupFilter.py -i "+str(WorkDir)+"/Candidats3.pileup"
print "End of third filtering: elimination of flanking variants: "+str(command34)
os.system(command34)
command35="./RAD4SNPs_FinalSQLExtract.py -i"+str(WorkDir)+"/CandidatsR2NoMulti.txt -d "+str(results.database)+" -c "+str(CodeEspece)+" > "+str(WorkDir)+"/SNPs_out.fasta"
print "Complete PE-validated fasta file: "+str(command35)
os.system(command35)
# End.<|fim▁end|> | |
<|file_name|>paging.rs<|end_file_name|><|fim▁begin|>//! Description of the data-structures for IA-32e paging mode.
use core::fmt;
/// Represent a virtual (linear) memory address
#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub struct VAddr(usize);
impl VAddr {
/// Convert to `usize`
pub const fn as_usize(&self) -> usize {
self.0
}
/// Convert from `usize`
pub const fn from_usize(v: usize) -> Self {
VAddr(v)
}
}
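// Example: `VAddr::from_usize(0xdead_beef).as_usize()` round-trips the raw address value.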
impl fmt::Binary for VAddr {<|fim▁hole|> fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Display for VAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::LowerHex for VAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Octal for VAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::UpperHex for VAddr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import os<|fim▁hole|>
from graftm.sequence_search_results import SequenceSearchResult
from graftm.graftm_output_paths import GraftMFiles
from graftm.search_table import SearchTableWriter
from graftm.sequence_searcher import SequenceSearcher
from graftm.hmmsearcher import NoInputSequencesException
from graftm.housekeeping import HouseKeeping
from graftm.summarise import Stats_And_Summary
from graftm.pplacer import Pplacer
from graftm.create import Create
from graftm.update import Update
from graftm.unpack_sequences import UnpackRawReads
from graftm.graftm_package import GraftMPackage
from graftm.expand_searcher import ExpandSearcher
from graftm.diamond import Diamond
from graftm.getaxnseq import Getaxnseq
from graftm.sequence_io import SequenceIO
from graftm.timeit import Timer
from graftm.clusterer import Clusterer
from graftm.decorator import Decorator
from graftm.external_program_suite import ExternalProgramSuite
from graftm.archive import Archive
from graftm.decoy_filter import DecoyFilter
from biom.util import biom_open
T=Timer()
class UnrecognisedSuffixError(Exception):
pass
class Run:
PIPELINE_AA = "P"
PIPELINE_NT = "D"
_MIN_VERBOSITY_FOR_ART = 3 # with 2 then, only errors are printed
PPLACER_TAXONOMIC_ASSIGNMENT = 'pplacer'
DIAMOND_TAXONOMIC_ASSIGNMENT = 'diamond'
MIN_ALIGNED_FILTER_FOR_NUCLEOTIDE_PACKAGES = 95
MIN_ALIGNED_FILTER_FOR_AMINO_ACID_PACKAGES = 30
DEFAULT_MAX_SAMPLES_FOR_KRONA = 100
NO_ORFS_EXITSTATUS = 128
def __init__(self, args):
self.args = args
self.setattributes(self.args)
def setattributes(self, args):
self.hk = HouseKeeping()
self.s = Stats_And_Summary()
if args.subparser_name == 'graft':
commands = ExternalProgramSuite(['orfm', 'nhmmer', 'hmmsearch',
'mfqe', 'pplacer',
'ktImportText', 'diamond'])
self.hk.set_attributes(self.args)
self.hk.set_euk_hmm(self.args)
if args.euk_check:self.args.search_hmm_files.append(self.args.euk_hmm_file)
self.ss = SequenceSearcher(self.args.search_hmm_files,
(None if self.args.search_only else self.args.aln_hmm_file))
self.sequence_pair_list = self.hk.parameter_checks(args)
if hasattr(args, 'reference_package'):
self.p = Pplacer(self.args.reference_package)
elif self.args.subparser_name == "create":
commands = ExternalProgramSuite(['taxit', 'FastTreeMP',
'hmmalign', 'mafft'])
self.create = Create(commands)
def summarise(self, base_list, trusted_placements, reverse_pipe, times,
hit_read_count_list, max_samples_for_krona):
'''
summarise - write summary information to file, including otu table, biom
file, krona plot, and timing information
Parameters
----------
base_list : array
            list of each of the files processed by graftm, with the path
            and suffix removed
trusted_placements : dict
dictionary of placements with entry as the key, a taxonomy string
as the value
reverse_pipe : bool
True = run reverse pipe, False = run normal pipeline
times : array
list of the recorded times for each step in the pipeline in the
format: [search_step_time, alignment_step_time, placement_step_time]
hit_read_count_list : array
list containing sublists, one for each file run through the GraftM
pipeline, each two entries, the first being the number of putative
eukaryotic reads (when searching 16S), the second being the number
of hits aligned and placed in the tree.
max_samples_for_krona: int
If the number of files processed is greater than this number, then
do not generate a krona diagram.
Returns
-------
'''
# Summary steps.
placements_list = []
for base in base_list:
# First assign the hash that contains all of the trusted placements
# to a variable to it can be passed to otu_builder, to be written
# to a file. :)
placements = trusted_placements[base]
self.s.readTax(placements, GraftMFiles(base, self.args.output_directory, False).read_tax_output_path(base))
placements_list.append(placements)
#Generate coverage table
#logging.info('Building coverage table for %s' % base)
#self.s.coverage_of_hmm(self.args.aln_hmm_file,
# self.gmf.summary_table_output_path(base),
# self.gmf.coverage_table_path(base),
# summary_dict[base]['read_length'])
logging.info('Writing summary table')
with open(self.gmf.combined_summary_table_output_path(), 'w') as f:
self.s.write_tabular_otu_table(base_list, placements_list, f)
logging.info('Writing biom file')
with biom_open(self.gmf.combined_biom_output_path(), 'w') as f:
biom_successful = self.s.write_biom(base_list, placements_list, f)
if not biom_successful:
os.remove(self.gmf.combined_biom_output_path())
logging.info('Building summary krona plot')
if len(base_list) > max_samples_for_krona:
logging.warn("Skipping creation of Krona diagram since there are too many input files. The maximum can be overridden using --max_samples_for_krona")
else:
self.s.write_krona_plot(base_list, placements_list, self.gmf.krona_output_path())
# Basic statistics
placed_reads=[len(trusted_placements[base]) for base in base_list]
self.s.build_basic_statistics(times, hit_read_count_list, placed_reads, \
base_list, self.gmf.basic_stats_path())
# Delete unnecessary files
logging.info('Cleaning up')
for base in base_list:
directions = ['forward', 'reverse']
if reverse_pipe:
for i in range(0,2):
self.gmf = GraftMFiles(base, self.args.output_directory, directions[i])
self.hk.delete([self.gmf.for_aln_path(base),
self.gmf.rev_aln_path(base),
self.gmf.conv_output_rev_path(base),
self.gmf.conv_output_for_path(base),
self.gmf.euk_free_path(base),
self.gmf.euk_contam_path(base),
self.gmf.readnames_output_path(base),
self.gmf.sto_output_path(base),
self.gmf.orf_titles_output_path(base),
self.gmf.orf_output_path(base),
self.gmf.output_for_path(base),
self.gmf.output_rev_path(base)])
else:
self.gmf = GraftMFiles(base, self.args.output_directory, False)
self.hk.delete([self.gmf.for_aln_path(base),
self.gmf.rev_aln_path(base),
self.gmf.conv_output_rev_path(base),
self.gmf.conv_output_for_path(base),
self.gmf.euk_free_path(base),
self.gmf.euk_contam_path(base),
self.gmf.readnames_output_path(base),
self.gmf.sto_output_path(base),
self.gmf.orf_titles_output_path(base),
self.gmf.orf_output_path(base),
self.gmf.output_for_path(base),
self.gmf.output_rev_path(base)])
logging.info('Done, thanks for using graftM!\n')
def graft(self):
# The Graft pipeline:
# Searches for reads using hmmer, and places them in phylogenetic
# trees to derive a community structure.
if self.args.graftm_package:
gpkg = GraftMPackage.acquire(self.args.graftm_package)
else:
gpkg = None
REVERSE_PIPE = (True if self.args.reverse else False)
INTERLEAVED = (True if self.args.interleaved else False)
base_list = []
seqs_list = []
search_results = []
hit_read_count_list = []
db_search_results = []
if gpkg:
maximum_range = gpkg.maximum_range()
if self.args.search_diamond_file:
self.args.search_method = self.hk.DIAMOND_SEARCH_METHOD
diamond_db = self.args.search_diamond_file[0]
else:
diamond_db = gpkg.diamond_database_path()
if self.args.search_method == self.hk.DIAMOND_SEARCH_METHOD:
if not diamond_db:
logging.error("%s search method selected, but no diamond database specified. \
Please either provide a gpkg to the --graftm_package flag, or a diamond \
database to the --search_diamond_file flag." % self.args.search_method)
raise Exception()
else:
# Get the maximum range, if none exists, make one from the HMM profile
if self.args.maximum_range:
maximum_range = self.args.maximum_range
else:
if self.args.search_method==self.hk.HMMSEARCH_SEARCH_METHOD:
if not self.args.search_only:
maximum_range = self.hk.get_maximum_range(self.args.aln_hmm_file)
else:
logging.debug("Running search only pipeline. maximum_range not configured.")
maximum_range = None
else:
logging.warning('Cannot determine maximum range when using %s pipeline and with no GraftM package specified' % self.args.search_method)
logging.warning('Setting maximum_range to None (linked hits will not be detected)')
maximum_range = None
if self.args.search_diamond_file:
diamond_db = self.args.search_diamond_file
else:
if self.args.search_method == self.hk.HMMSEARCH_SEARCH_METHOD:
diamond_db = None
else:
logging.error("%s search method selected, but no gpkg or diamond database selected" % self.args.search_method)
if self.args.assignment_method == Run.DIAMOND_TAXONOMIC_ASSIGNMENT:
if self.args.reverse:
logging.warn("--reverse reads specified with --assignment_method diamond. Reverse reads will be ignored.")
self.args.reverse = None
# If merge reads is specified, check that there are reverse reads to merge with
if self.args.merge_reads and not hasattr(self.args, 'reverse'):
raise Exception("Programming error")
# Set the output directory if not specified and create that directory
logging.debug('Creating working directory: %s' % self.args.output_directory)
self.hk.make_working_directory(self.args.output_directory,
self.args.force)
# Set pipeline and evalue by checking HMM format
if self.args.search_only:
if self.args.search_method == self.hk.HMMSEARCH_SEARCH_METHOD:
hmm_type, hmm_tc = self.hk.setpipe(self.args.search_hmm_files[0])
logging.debug("HMM type: %s Trusted Cutoff: %s" % (hmm_type, hmm_tc))
else:
hmm_type, hmm_tc = self.hk.setpipe(self.args.aln_hmm_file)
logging.debug("HMM type: %s Trusted Cutoff: %s" % (hmm_type, hmm_tc))
if self.args.search_method == self.hk.HMMSEARCH_SEARCH_METHOD:
setattr(self.args, 'type', hmm_type)
if hmm_tc:
setattr(self.args, 'evalue', '--cut_tc')
else:
setattr(self.args, 'type', self.PIPELINE_AA)
if self.args.filter_minimum is not None:
filter_minimum = self.args.filter_minimum
else:
if self.args.type == self.PIPELINE_NT:
filter_minimum = Run.MIN_ALIGNED_FILTER_FOR_NUCLEOTIDE_PACKAGES
else:
filter_minimum = Run.MIN_ALIGNED_FILTER_FOR_AMINO_ACID_PACKAGES
# Generate expand_search database if required
if self.args.expand_search_contigs:
if self.args.graftm_package:
pkg = GraftMPackage.acquire(self.args.graftm_package)
else:
pkg = None
boots = ExpandSearcher(
search_hmm_files = self.args.search_hmm_files,
maximum_range = self.args.maximum_range,
threads = self.args.threads,
evalue = self.args.evalue,
min_orf_length = self.args.min_orf_length,
graftm_package = pkg)
# this is a hack, it should really use GraftMFiles but that class isn't currently flexible enough
new_database = (os.path.join(self.args.output_directory, "expand_search.hmm") \
if self.args.search_method == self.hk.HMMSEARCH_SEARCH_METHOD \
else os.path.join(self.args.output_directory, "expand_search")
)
if boots.generate_expand_search_database_from_contigs(
self.args.expand_search_contigs,
new_database,
self.args.search_method):
if self.args.search_method == self.hk.HMMSEARCH_SEARCH_METHOD:
self.ss.search_hmm.append(new_database)
else:
diamond_db = new_database
first_search_method = self.args.search_method
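        # Optionally set up decoy filtering: either against an explicit decoy database, or (for the
        # hmmsearch+diamond method) by re-checking hmmsearch hits against the package's diamond DB.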
if self.args.decoy_database:
decoy_filter = DecoyFilter(Diamond(diamond_db, threads=self.args.threads),
Diamond(self.args.decoy_database,
threads=self.args.threads))
doing_decoy_search = True
elif self.args.search_method == self.hk.HMMSEARCH_AND_DIAMOND_SEARCH_METHOD:
decoy_filter = DecoyFilter(Diamond(diamond_db, threads=self.args.threads))
doing_decoy_search = True
first_search_method = self.hk.HMMSEARCH_SEARCH_METHOD
else:
doing_decoy_search = False
# For each pair (or single file passed to GraftM)
logging.debug('Working with %i file(s)' % len(self.sequence_pair_list))
for pair in self.sequence_pair_list:
# Guess the sequence file type, if not already specified to GraftM
unpack = UnpackRawReads(pair[0],
self.args.input_sequence_type,
INTERLEAVED)
# Set the basename, and make an entry to the summary table.
base = unpack.basename()
pair_direction = ['forward', 'reverse']
logging.info("Working on %s" % base)
# Make the working base subdirectory
self.hk.make_working_directory(os.path.join(self.args.output_directory,
base),
self.args.force)
# for each of the paired end read files
for read_file in pair:
unpack = UnpackRawReads(read_file,
self.args.input_sequence_type,
INTERLEAVED)
if read_file is None:
# placeholder for interleaved (second file is None)
continue
if not os.path.isfile(read_file): # Check file exists
logging.info('%s does not exist! Skipping this file..' % read_file)
continue
# Set the output file_name
if len(pair) == 2:
direction = 'interleaved' if pair[1] is None \
else pair_direction.pop(0)
logging.info("Working on %s reads" % direction)
self.gmf = GraftMFiles(base,
self.args.output_directory,
direction)
self.hk.make_working_directory(os.path.join(self.args.output_directory,
base,
direction),
self.args.force)
else:
direction = False
self.gmf = GraftMFiles(base,
self.args.output_directory,
direction)
if self.args.type == self.PIPELINE_AA:
logging.debug("Running protein pipeline")
try:
search_time, (result, complement_information) = self.ss.aa_db_search(
self.gmf,
base,
unpack,
first_search_method,
maximum_range,
self.args.threads,
self.args.evalue,
self.args.min_orf_length,
self.args.restrict_read_length,
diamond_db
)
except NoInputSequencesException as e:
logging.error("No sufficiently long open reading frames were found, indicating"
" either the input sequences are too short or the min orf length"
" cutoff is too high. Cannot continue sorry. Alternatively, there"
" is something amiss with the installation of OrfM. The specific"
" command that failed was: %s" % e.command)
exit(Run.NO_ORFS_EXITSTATUS)
# Or the DNA pipeline
elif self.args.type == self.PIPELINE_NT:
logging.debug("Running nucleotide pipeline")
search_time, (result, complement_information) = self.ss.nt_db_search(
self.gmf,
base,
unpack,
self.args.euk_check,
self.args.search_method,
maximum_range,
self.args.threads,
self.args.evalue
)
reads_detected = True
if not result.hit_fasta() or os.path.getsize(result.hit_fasta()) == 0:
logging.info('No reads found in %s' % base)
reads_detected = False
if self.args.search_only:
db_search_results.append(result)
base_list.append(base)
continue
# Filter out decoys if specified
if reads_detected and doing_decoy_search:
with tempfile.NamedTemporaryFile(prefix="graftm_decoy", suffix='.fa') as f:
tmpname = f.name
any_remaining = decoy_filter.filter(result.hit_fasta(),
tmpname)
if any_remaining:
shutil.move(tmpname, result.hit_fasta())
else:
# No hits remain after decoy filtering.
os.remove(result.hit_fasta())
continue
if self.args.assignment_method == Run.PPLACER_TAXONOMIC_ASSIGNMENT:
logging.info('aligning reads to reference package database')
hit_aligned_reads = self.gmf.aligned_fasta_output_path(base)
if reads_detected:
aln_time, aln_result = self.ss.align(
result.hit_fasta(),
hit_aligned_reads,
complement_information,
self.args.type,
filter_minimum
)
else:
aln_time = 'n/a'
if not os.path.exists(hit_aligned_reads): # If all were filtered out, or there just was none..
with open(hit_aligned_reads,'w') as f:
pass # just touch the file, nothing else
seqs_list.append(hit_aligned_reads)
db_search_results.append(result)
base_list.append(base)
search_results.append(result.search_result)
hit_read_count_list.append(result.hit_count)
# Write summary table
srchtw = SearchTableWriter()
srchtw.build_search_otu_table([x.search_objects for x in db_search_results],
base_list,
self.gmf.search_otu_table())
if self.args.search_only:
logging.info('Stopping before alignment and taxonomic assignment phase\n')
exit(0)
if self.args.merge_reads: # not run when diamond is the assignment mode- enforced by argparse grokking
logging.debug("Running merge reads output")
if self.args.interleaved:
fwd_seqs = seqs_list
rev_seqs = []
else:
base_list=base_list[0::2]
fwd_seqs = seqs_list[0::2]
rev_seqs = seqs_list[1::2]
merged_output=[GraftMFiles(base, self.args.output_directory, False).aligned_fasta_output_path(base) \
for base in base_list]
logging.debug("merged reads to %s", merged_output)
self.ss.merge_forev_aln(fwd_seqs, rev_seqs, merged_output)
seqs_list=merged_output
REVERSE_PIPE = False
elif REVERSE_PIPE:
base_list=base_list[0::2]
# Leave the pipeline if search only was specified
if self.args.search_and_align_only:
logging.info('Stopping before taxonomic assignment phase\n')
exit(0)
elif not any(base_list):
logging.error('No hits in any of the provided files. Cannot continue with no reads to assign taxonomy to.\n')
exit(0)
self.gmf = GraftMFiles('',
self.args.output_directory,
False)
if self.args.assignment_method == Run.PPLACER_TAXONOMIC_ASSIGNMENT:
clusterer=Clusterer()
# Classification steps
seqs_list=clusterer.cluster(seqs_list, REVERSE_PIPE)
logging.info("Placing reads into phylogenetic tree")
taxonomic_assignment_time, assignments=self.p.place(REVERSE_PIPE,
seqs_list,
self.args.resolve_placements,
self.gmf,
self.args,
result.slash_endings,
gpkg.taxtastic_taxonomy_path(),
clusterer)
assignments = clusterer.uncluster_annotations(assignments, REVERSE_PIPE)
elif self.args.assignment_method == Run.DIAMOND_TAXONOMIC_ASSIGNMENT:
logging.info("Assigning taxonomy with diamond")
taxonomic_assignment_time, assignments = self._assign_taxonomy_with_diamond(\
base_list,
db_search_results,
gpkg,
self.gmf)
aln_time = 'n/a'
else: raise Exception("Unexpected assignment method encountered: %s" % self.args.placement_method)
self.summarise(base_list, assignments, REVERSE_PIPE,
[search_time, aln_time, taxonomic_assignment_time],
hit_read_count_list, self.args.max_samples_for_krona)
@T.timeit
def _assign_taxonomy_with_diamond(self, base_list, db_search_results,
graftm_package, graftm_files):
'''Run diamond to assign taxonomy
Parameters
----------
base_list: list of str
list of sequence block names
db_search_results: list of DBSearchResult
the result of running hmmsearches
graftm_package: GraftMPackage object
Diamond is run against this database
graftm_files: GraftMFiles object
Result files are written here
Returns
-------
list of
1. time taken for assignment
2. assignments i.e. dict of base_list entry to dict of read names to
to taxonomies, or None if there was no hit detected.
'''
runner = Diamond(graftm_package.diamond_database_path(),
self.args.threads,
self.args.evalue)
taxonomy_definition = Getaxnseq().read_taxtastic_taxonomy_and_seqinfo\
(open(graftm_package.taxtastic_taxonomy_path()),
open(graftm_package.taxtastic_seqinfo_path()))
results = {}
# For each of the search results,
for i, search_result in enumerate(db_search_results):
if search_result.hit_fasta() is None:
sequence_id_to_taxonomy = {}
else:
sequence_id_to_hit = {}
# Run diamond
logging.debug("Running diamond on %s" % search_result.hit_fasta())
diamond_result = runner.run(search_result.hit_fasta(),
UnpackRawReads.PROTEIN_SEQUENCE_TYPE,
daa_file_basename=graftm_files.diamond_assignment_output_basename(base_list[i]))
for res in diamond_result.each([SequenceSearchResult.QUERY_ID_FIELD,
SequenceSearchResult.HIT_ID_FIELD]):
if res[0] in sequence_id_to_hit:
# do not accept duplicates
if sequence_id_to_hit[res[0]] != res[1]:
raise Exception("Diamond unexpectedly gave two hits for a single query sequence for %s" % res[0])
else:
sequence_id_to_hit[res[0]] = res[1]
# Extract taxonomy of the best hit, and add in the no hits
sequence_id_to_taxonomy = {}
for seqio in SequenceIO().read_fasta_file(search_result.hit_fasta()):
name = seqio.name
if name in sequence_id_to_hit:
# Add Root; to be in line with pplacer assignment method
sequence_id_to_taxonomy[name] = ['Root']+taxonomy_definition[sequence_id_to_hit[name]]
else:
# picked up in the initial search (by hmmsearch, say), but diamond misses it
sequence_id_to_taxonomy[name] = ['Root']
results[base_list[i]] = sequence_id_to_taxonomy
return results
def main(self):
if self.args.subparser_name == 'graft':
if self.args.verbosity >= self._MIN_VERBOSITY_FOR_ART: print('''
GRAFT
Joel Boyd, Ben Woodcroft
__/__
______|
_- - _ ________| |_____/
- - - | |____/_
- _ >>>> - >>>> ____|
- _- - - | ______
- _ |_____|
- |______
''')
self.graft()
elif self.args.subparser_name == 'create':
if self.args.verbosity >= self._MIN_VERBOSITY_FOR_ART: print('''
CREATE
Joel Boyd, Ben Woodcroft
/
>a /
------------- /
>b | |
-------- >>> | GPKG |
>c |________|
----------
''')
if self.args.dereplication_level < 0:
logging.error("Invalid dereplication level selected! please enter a positive integer")
exit(1)
else:
if not self.args.sequences:
if not self.args.alignment and not self.args.rerooted_annotated_tree \
and not self.args.rerooted_tree:
logging.error("Some sort of sequence data must be provided to run graftM create")
exit(1)
if self.args.taxonomy:
if self.args.rerooted_annotated_tree:
logging.error("--taxonomy is incompatible with --rerooted_annotated_tree")
exit(1)
if self.args.taxtastic_taxonomy or self.args.taxtastic_seqinfo:
logging.error("--taxtastic_taxonomy and --taxtastic_seqinfo are incompatible with --taxonomy")
exit(1)
elif self.args.rerooted_annotated_tree:
if self.args.taxtastic_taxonomy or self.args.taxtastic_seqinfo:
logging.error("--taxtastic_taxonomy and --taxtastic_seqinfo are incompatible with --rerooted_annotated_tree")
exit(1)
else:
if not self.args.taxtastic_taxonomy or not self.args.taxtastic_seqinfo:
logging.error("--taxonomy, --rerooted_annotated_tree or --taxtastic_taxonomy/--taxtastic_seqinfo is required")
exit(1)
if bool(self.args.taxtastic_taxonomy) ^ bool(self.args.taxtastic_seqinfo):
logging.error("Both or neither of --taxtastic_taxonomy and --taxtastic_seqinfo must be defined")
exit(1)
if self.args.alignment and self.args.hmm:
logging.warn("Using both --alignment and --hmm is rarely useful, but proceding on the assumption you understand.")
if len([_f for _f in [self.args.rerooted_tree,
self.args.rerooted_annotated_tree,
self.args.tree] if _f]) > 1:
logging.error("Only 1 input tree can be specified")
exit(1)
self.create.main(
dereplication_level = self.args.dereplication_level,
sequences = self.args.sequences,
alignment = self.args.alignment,
taxonomy = self.args.taxonomy,
rerooted_tree = self.args.rerooted_tree,
unrooted_tree = self.args.tree,
tree_log = self.args.tree_log,
prefix = self.args.output,
rerooted_annotated_tree = self.args.rerooted_annotated_tree,
min_aligned_percent = float(self.args.min_aligned_percent)/100,
taxtastic_taxonomy = self.args.taxtastic_taxonomy,
taxtastic_seqinfo = self.args.taxtastic_seqinfo,
hmm = self.args.hmm,
search_hmm_files = self.args.search_hmm_files,
force = self.args.force,
threads = self.args.threads
)
elif self.args.subparser_name == 'update':
logging.info("GraftM package %s specified to update with sequences in %s" % (self.args.graftm_package, self.args.sequences))
if self.args.regenerate_diamond_db:
gpkg = GraftMPackage.acquire(self.args.graftm_package)
logging.info("Regenerating diamond DB..")
gpkg.create_diamond_db()
logging.info("Diamond database regenerated.")
return
elif not self.args.sequences:
logging.error("--sequences is required unless regenerating the diamond DB")
exit(1)
if not self.args.output:
if self.args.graftm_package.endswith(".gpkg"):
self.args.output = self.args.graftm_package.replace(".gpkg", "-updated.gpkg")
else:
self.args.output = self.args.graftm_package + '-update.gpkg'
Update(ExternalProgramSuite(
['taxit', 'FastTreeMP', 'hmmalign', 'mafft'])).update(
input_sequence_path=self.args.sequences,
input_taxonomy_path=self.args.taxonomy,
input_graftm_package_path=self.args.graftm_package,
output_graftm_package_path=self.args.output)
elif self.args.subparser_name == 'expand_search':
args = self.args
if not args.graftm_package and not args.search_hmm_files:
logging.error("expand_search mode requires either --graftm_package or --search_hmm_files")
exit(1)
if args.graftm_package:
pkg = GraftMPackage.acquire(args.graftm_package)
else:
pkg = None
expandsearcher = ExpandSearcher(search_hmm_files = args.search_hmm_files,
maximum_range = args.maximum_range,
threads = args.threads,
evalue = args.evalue,
min_orf_length = args.min_orf_length,
graftm_package = pkg)
expandsearcher.generate_expand_search_database_from_contigs(args.contigs,
args.output_hmm,
search_method=ExpandSearcher.HMM_SEARCH_METHOD)
elif self.args.subparser_name == 'tree':
if self.args.graftm_package:
# shim in the paths from the graftm package, not overwriting
# any of the provided paths.
gpkg = GraftMPackage.acquire(self.args.graftm_package)
if not self.args.rooted_tree: self.args.rooted_tree = gpkg.reference_package_tree_path()
if not self.args.input_greengenes_taxonomy:
if not self.args.input_taxtastic_seqinfo:
self.args.input_taxtastic_seqinfo = gpkg.taxtastic_seqinfo_path()
if not self.args.input_taxtastic_taxonomy:
self.args.input_taxtastic_taxonomy = gpkg.taxtastic_taxonomy_path()
if self.args.rooted_tree:
if self.args.unrooted_tree:
logging.error("Both a rooted tree and an un-rooted tree were provided, so it's unclear what you are asking GraftM to do. \
If you're unsure see graftM tree -h")
exit(1)
elif self.args.reference_tree:
logging.error("Both a rooted tree and reference tree were provided, so it's unclear what you are asking GraftM to do. \
If you're unsure see graftM tree -h")
exit(1)
if not self.args.decorate:
logging.error("It seems a rooted tree has been provided, but --decorate has not been specified so it is unclear what you are asking graftM to do.")
exit(1)
dec = Decorator(tree_path = self.args.rooted_tree)
elif self.args.unrooted_tree and self.args.reference_tree:
logging.debug("Using provided reference tree %s to reroot %s" % (self.args.reference_tree,
self.args.unrooted_tree))
dec = Decorator(reference_tree_path = self.args.reference_tree,
tree_path = self.args.unrooted_tree)
else:
logging.error("Some tree(s) must be provided, either a rooted tree or both an unrooted tree and a reference tree")
exit(1)
if self.args.output_taxonomy is None and self.args.output_tree is None:
logging.error("Either an output tree or taxonomy must be provided")
exit(1)
if self.args.input_greengenes_taxonomy:
if self.args.input_taxtastic_seqinfo or self.args.input_taxtastic_taxonomy:
logging.error("Both taxtastic and greengenes taxonomy were provided, so its unclear what taxonomy you want graftM to decorate with")
exit(1)
logging.debug("Using input GreenGenes style taxonomy file")
dec.main(self.args.input_greengenes_taxonomy,
self.args.output_tree, self.args.output_taxonomy,
self.args.no_unique_tax, self.args.decorate, None)
elif self.args.input_taxtastic_seqinfo and self.args.input_taxtastic_taxonomy:
logging.debug("Using input taxtastic style taxonomy/seqinfo")
dec.main(self.args.input_taxtastic_taxonomy, self.args.output_tree,
self.args.output_taxonomy, self.args.no_unique_tax,
self.args.decorate, self.args.input_taxtastic_seqinfo)
else:
logging.error("Either a taxtastic taxonomy or seqinfo file was provided. GraftM cannot continue without both.")
exit(1)
elif self.args.subparser_name == 'archive':
# Back slashes in the ASCII art are escaped.
if self.args.verbosity >= self._MIN_VERBOSITY_FOR_ART: print("""
ARCHIVE
Joel Boyd, Ben Woodcroft
____.----.
____.----' \\
\\ \\
\\ \\
\\ \\
\\ ____.----'`--.__
\\___.----' | `--.____
/`-._ | __.-' \\
/ `-._ ___.---' \\
/ `-.____.---' \\ +------+
/ / | \\ \\ |`. |`.
/ / | \\ _.--' <===> | `+--+---+
`-. / | \\ __.--' | | | |
`-._ / | \\ __.--' | | | | |
| `-./ | \\_.-' | +---+--+ |
| | | `. | `. |
| | | `+------+
| | |
| | |
| | |
| | |
| | |
`-. | _.-'
`-. | __..--'
`-. | __.-'
`-|__.--'
""")
if self.args.create:
if self.args.extract:
logging.error("Please specify whether to either create or export a GraftM package")
exit(1)
if not self.args.graftm_package:
logging.error("Creating a GraftM package archive requires an package to be specified")
exit(1)
if not self.args.archive:
logging.error("Creating a GraftM package archive requires an output archive path to be specified")
exit(1)
archive = Archive()
archive.create(self.args.graftm_package, self.args.archive,
force=self.args.force)
elif self.args.extract:
archive = Archive()
archive.extract(self.args.archive, self.args.graftm_package,
force=self.args.force)
else:
logging.error("Please specify whether to either create or export a GraftM package")
exit(1)
else:
raise Exception("Unexpected subparser name %s" % self.args.subparser_name)<|fim▁end|> | import logging
import tempfile
import shutil |
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package client
import (
"bytes"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
gosignal "os/signal"
"runtime"
"strconv"
"strings"
"time"
"github.com/Sirupsen/logrus"
"github.com/docker/docker/api"
"github.com/docker/docker/api/types"
"github.com/docker/docker/autogen/dockerversion"
"github.com/docker/docker/cliconfig"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/pkg/signal"
"github.com/docker/docker/pkg/stdcopy"
"github.com/docker/docker/pkg/term"
"github.com/docker/docker/registry"
"github.com/docker/docker/utils"
)
var (
errConnectionFailed = errors.New("Cannot connect to the Docker daemon. Is the docker daemon running on this host?")
)
type serverResponse struct {
body io.ReadCloser
header http.Header
statusCode int
}
// HTTPClient creates a new HTTP client with the cli's client transport instance.
func (cli *DockerCli) HTTPClient() *http.Client {
return &http.Client{Transport: cli.transport}
}
func (cli *DockerCli) encodeData(data interface{}) (*bytes.Buffer, error) {
params := bytes.NewBuffer(nil)
if data != nil {
if err := json.NewEncoder(params).Encode(data); err != nil {
return nil, err
}
}
return params, nil
}
func (cli *DockerCli) clientRequest(method, path string, in io.Reader, headers map[string][]string) (*serverResponse, error) {
serverResp := &serverResponse{
body: nil,
statusCode: -1,
}
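	// POST and PUT requests are always sent with a body, even if it is empty.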
expectedPayload := (method == "POST" || method == "PUT")
if expectedPayload && in == nil {
in = bytes.NewReader([]byte{})
}
req, err := http.NewRequest(method, fmt.Sprintf("%s/v%s%s", cli.basePath, api.Version, path), in)
if err != nil {
return serverResp, err
}
// Add CLI Config's HTTP Headers BEFORE we set the Docker headers
// so that the user can't change OUR headers
for k, v := range cli.configFile.HTTPHeaders {
req.Header.Set(k, v)
}
req.Header.Set("User-Agent", "Docker-Client/"+dockerversion.VERSION+" ("+runtime.GOOS+")")
req.URL.Host = cli.addr
req.URL.Scheme = cli.scheme
if headers != nil {
for k, v := range headers {
req.Header[k] = v
}
}
if expectedPayload && req.Header.Get("Content-Type") == "" {
req.Header.Set("Content-Type", "text/plain")
}
resp, err := cli.HTTPClient().Do(req)
if resp != nil {
serverResp.statusCode = resp.StatusCode
}
if err != nil {
if utils.IsTimeout(err) || strings.Contains(err.Error(), "connection refused") || strings.Contains(err.Error(), "dial unix") {
return serverResp, errConnectionFailed
}
if cli.tlsConfig == nil && strings.Contains(err.Error(), "malformed HTTP response") {
return serverResp, fmt.Errorf("%v.\n* Are you trying to connect to a TLS-enabled daemon without TLS?", err)
}
if cli.tlsConfig != nil && strings.Contains(err.Error(), "remote error: bad certificate") {
return serverResp, fmt.Errorf("The server probably has client authentication (--tlsverify) enabled. Please check your TLS client certification settings: %v", err)
}
return serverResp, fmt.Errorf("An error occurred trying to connect: %v", err)
}
if serverResp.statusCode < 200 || serverResp.statusCode >= 400 {
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return serverResp, err
}
if len(body) == 0 {
return serverResp, fmt.Errorf("Error: request returned %s for API route and version %s, check if the server supports the requested API version", http.StatusText(serverResp.statusCode), req.URL)
}
return serverResp, fmt.Errorf("Error response from daemon: %s", bytes.TrimSpace(body))
}
serverResp.body = resp.Body
serverResp.header = resp.Header
return serverResp, nil
}
// cmdAttempt builds the corresponding registry Auth Header from the given
// authConfig. It returns the server's response body, status code and error, if any.
func (cli *DockerCli) cmdAttempt(authConfig cliconfig.AuthConfig, method, path string, in io.Reader, out io.Writer) (io.ReadCloser, int, error) {
buf, err := json.Marshal(authConfig)
if err != nil {
return nil, -1, err
}
registryAuthHeader := []string{
base64.URLEncoding.EncodeToString(buf),
}
// begin the request
serverResp, err := cli.clientRequest(method, path, in, map[string][]string{
"X-Registry-Auth": registryAuthHeader,
})
if err == nil && out != nil {
// If we are streaming output, complete the stream since
// errors may not appear until later.
err = cli.streamBody(serverResp.body, serverResp.header.Get("Content-Type"), true, out, nil)
}
if err != nil {
// Since errors in a stream appear after status 200 has been written,
// we may need to change the status code.
if strings.Contains(err.Error(), "Authentication is required") ||
strings.Contains(err.Error(), "Status 401") ||
strings.Contains(err.Error(), "401 Unauthorized") ||
strings.Contains(err.Error(), "status code 401") {
serverResp.statusCode = http.StatusUnauthorized
}
}
return serverResp.body, serverResp.statusCode, err
}
func (cli *DockerCli) clientRequestAttemptLogin(method, path string, in io.Reader, out io.Writer, index *registry.IndexInfo, cmdName string) (io.ReadCloser, int, error) {
// Resolve the Auth config relevant for this server
authConfig := registry.ResolveAuthConfig(cli.configFile, index)
body, statusCode, err := cli.cmdAttempt(authConfig, method, path, in, out)
if statusCode == http.StatusUnauthorized {
fmt.Fprintf(cli.out, "\nPlease login prior to %s:\n", cmdName)
if err = cli.CmdLogin(index.GetAuthConfigKey()); err != nil {
return nil, -1, err
}
authConfig = registry.ResolveAuthConfig(cli.configFile, index)
return cli.cmdAttempt(authConfig, method, path, in, out)
}
return body, statusCode, err
}
func (cli *DockerCli) callWrapper(method, path string, data interface{}, headers map[string][]string) (io.ReadCloser, http.Header, int, error) {
sr, err := cli.call(method, path, data, headers)
return sr.body, sr.header, sr.statusCode, err
}
func (cli *DockerCli) call(method, path string, data interface{}, headers map[string][]string) (*serverResponse, error) {
params, err := cli.encodeData(data)
if err != nil {
sr := &serverResponse{
body: nil,
header: nil,
statusCode: -1,
}
return sr, nil
}
if data != nil {
if headers == nil {
headers = make(map[string][]string)
}
headers["Content-Type"] = []string{"application/json"}
}
serverResp, err := cli.clientRequest(method, path, params, headers)
return serverResp, err
}
type streamOpts struct {
rawTerminal bool
in io.Reader
out io.Writer
err io.Writer
headers map[string][]string
}
func (cli *DockerCli) stream(method, path string, opts *streamOpts) (*serverResponse, error) {
serverResp, err := cli.clientRequest(method, path, opts.in, opts.headers)
if err != nil {
return serverResp, err
}
return serverResp, cli.streamBody(serverResp.body, serverResp.header.Get("Content-Type"), opts.rawTerminal, opts.out, opts.err)
}
func (cli *DockerCli) streamBody(body io.ReadCloser, contentType string, rawTerminal bool, stdout, stderr io.Writer) error {
defer body.Close()
if api.MatchesContentType(contentType, "application/json") {
return jsonmessage.DisplayJSONMessagesStream(body, stdout, cli.outFd, cli.isTerminalOut)
}
if stdout != nil || stderr != nil {
// When TTY is ON, use regular copy
var err error
if rawTerminal {
_, err = io.Copy(stdout, body)
} else {
_, err = stdcopy.StdCopy(stdout, stderr, body)
}
logrus.Debugf("[stream] End of stdout")
return err
}
return nil
}
func (cli *DockerCli) resizeTty(id string, isExec bool) {
height, width := cli.getTtySize()
if height == 0 && width == 0 {
return
}
v := url.Values{}
v.Set("h", strconv.Itoa(height))
v.Set("w", strconv.Itoa(width))
path := ""
if !isExec {
path = "/containers/" + id + "/resize?"
} else {
path = "/exec/" + id + "/resize?"
}
if _, _, err := readBody(cli.call("POST", path+v.Encode(), nil, nil)); err != nil {
logrus.Debugf("Error resize: %s", err)
}<|fim▁hole|> if err != nil {
return -1, err
}
defer serverResp.body.Close()
var res types.ContainerWaitResponse
if err := json.NewDecoder(serverResp.body).Decode(&res); err != nil {
return -1, err
}
return res.StatusCode, nil
}
// getExitCode performs an inspect on the container. It returns
// the running state and the exit code.
func getExitCode(cli *DockerCli, containerID string) (bool, int, error) {
serverResp, err := cli.call("GET", "/containers/"+containerID+"/json", nil, nil)
if err != nil {
// If we can't connect, then the daemon probably died.
if err != errConnectionFailed {
return false, -1, err
}
return false, -1, nil
}
defer serverResp.body.Close()
var c types.ContainerJSON
if err := json.NewDecoder(serverResp.body).Decode(&c); err != nil {
return false, -1, err
}
return c.State.Running, c.State.ExitCode, nil
}
// getExecExitCode performs an inspect on the exec command. It returns
// the running state and the exit code.
func getExecExitCode(cli *DockerCli, execID string) (bool, int, error) {
serverResp, err := cli.call("GET", "/exec/"+execID+"/json", nil, nil)
if err != nil {
// If we can't connect, then the daemon probably died.
if err != errConnectionFailed {
return false, -1, err
}
return false, -1, nil
}
defer serverResp.body.Close()
//TODO: Should we reconsider having a type in api/types?
// This is a response to exec/id/json, not container.
var c struct {
Running bool
ExitCode int
}
if err := json.NewDecoder(serverResp.body).Decode(&c); err != nil {
return false, -1, err
}
return c.Running, c.ExitCode, nil
}
func (cli *DockerCli) monitorTtySize(id string, isExec bool) error {
cli.resizeTty(id, isExec)
if runtime.GOOS == "windows" {
go func() {
prevH, prevW := cli.getTtySize()
for {
time.Sleep(time.Millisecond * 250)
h, w := cli.getTtySize()
if prevW != w || prevH != h {
cli.resizeTty(id, isExec)
}
prevH = h
prevW = w
}
}()
} else {
sigchan := make(chan os.Signal, 1)
gosignal.Notify(sigchan, signal.SIGWINCH)
go func() {
for range sigchan {
cli.resizeTty(id, isExec)
}
}()
}
return nil
}
func (cli *DockerCli) getTtySize() (int, int) {
if !cli.isTerminalOut {
return 0, 0
}
ws, err := term.GetWinsize(cli.outFd)
if err != nil {
logrus.Debugf("Error getting size: %s", err)
if ws == nil {
return 0, 0
}
}
return int(ws.Height), int(ws.Width)
}
func readBody(serverResp *serverResponse, err error) ([]byte, int, error) {
if serverResp.body != nil {
defer serverResp.body.Close()
}
if err != nil {
return nil, serverResp.statusCode, err
}
body, err := ioutil.ReadAll(serverResp.body)
if err != nil {
return nil, -1, err
}
return body, serverResp.statusCode, nil
}<|fim▁end|> | }
func waitForExit(cli *DockerCli, containerID string) (int, error) {
serverResp, err := cli.call("POST", "/containers/"+containerID+"/wait", nil, nil) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># jsb socket related plugins
#<|fim▁hole|>""" this package contains all the socket related plugins. """
import os
(f, tail) = os.path.split(__file__)
__all__ = []
for i in os.listdir(f):
if i.endswith('.py'):
__all__.append(i[:-3])
elif os.path.isdir(f + os.sep + i) and not i.startswith('.'):
__all__.append(i)
try:
__all__.remove('__init__')
except:
pass
__plugs__ = __all__<|fim▁end|> | #
|
<|file_name|>Improvement.js<|end_file_name|><|fim▁begin|>function Improvement(options) {
this.name = options.name;
this.image = options.image;
this.depth = options.depth || 0;
this.parent = options.parent;
this.children = options.children || [];
this.siblings = options.siblings || [];
this.toDisplay = options.toDisplay || [];
this.toBuild = options.toBuild || [];
this.cost = options.cost;
this.materials = options.materials;
this.buildEvent = 'event:'+options.buildEvent;
}
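/*
 * Illustrative construction only -- the option values below are hypothetical and
 * not taken from the game's data files; they simply show the expected shape of
 * the options object consumed by this constructor:
 *
 *   var well = new Improvement({
 *     name: 'Well', image: 'well.png', cost: 50,
 *     materials: { stone: 10, wood: 5 },
 *     buildEvent: 'build_well'
 *   });
 *   if (well.canBuild() && well.hasMaterials()) { well.build(); }
 */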
Improvement.prototype = {
/*
* The build function will build the improvement, adding it to the list of
* improvements built in the GameState and removing the needed materials
* from the character's inventory.
*/
build: function() {
GameState.buildImprovement(this.code);
var character = GameState.getCharacter();
character.adjustCurrency(-this.cost);
$.each(this.materials, function(key, value) {
character.removeItem({ item:key, count:value });
});
},
/*
* An improvement is active when it has been built and none of its children
* have been built.
*/
isActive: function() {
if (!GameState.isImprovementBuilt(this.code)) {
return false;
}
for (var i=0; i<this.children.length; i++) {
if (GameState.isImprovementBuilt(this.children[i])) {
return false;
}
}
return true;
},
/*
* Function used to check to see if the improvement can be built regardless
* of whether the character has the materials or not. If this improvement
* has a parent location that hasn't been built then this improvement cannot
* be built. If this improvement has any sibling improvements that have been
* built then this improvement cannot be built.
*/
canBuild: function() {
if (this.canDisplay() == false) { return false; }
for (var i=0; i<this.siblings.length; i++) {
if (GameState.isImprovementBuilt(this.siblings[i])) {
return false;
}
}
return Resolver.meetsRequirements(this.toBuild);
},
/*
* Function used to check to see if an improvement should be displayed in
* the list of improvements. Improvements with parent improvements will not
* be displayed until their parent improvements have been built.
*/
canDisplay: function() {
if (this.parent && !GameState.isImprovementBuilt(this.parent)) {
return false;
}
return Resolver.meetsRequirements(this.toDisplay);<|fim▁hole|> * Function used to check to see if the character has both the currency and
* materials needed to build the improvement.
*/
hasMaterials: function() {
var character = GameState.getCharacter();
if (character.getCurrency() < this.cost) {
return false;
}
var result = true;
$.each(this.materials, function(key, value) {
if (character.getItemQuantity(key) < value) {
result = false;
}
});
return result;
},
/* Get the description of the improvement from the interface data. */
getDescription: function() {
return Data.getInterface('improvement_'+this.getCode());
},
getBuildEvent: function() { return this.buildEvent; },
getCode: function() { return this.code; },
getCost: function() { return this.cost; },
getDepth: function() { return this.depth; },
getImage: function() { return this.image; },
getMaterials: function() { return this.materials; },
getName: function() { return this.name; },
};<|fim▁end|> | },
/* |
<|file_name|>receiver_unittest.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string.h>
#include <list>
#include <memory>
#include <queue>
#include <vector>
#include "modules/video_coding/encoded_frame.h"
#include "modules/video_coding/packet.h"
#include "modules/video_coding/receiver.h"
#include "modules/video_coding/test/stream_generator.h"
#include "modules/video_coding/test/test_util.h"
#include "modules/video_coding/timing.h"
#include BOSS_WEBRTC_U_rtc_base__checks_h //original-code:"rtc_base/checks.h"
#include BOSS_WEBRTC_U_system_wrappers__include__clock_h //original-code:"system_wrappers/include/clock.h"
#include BOSS_WEBRTC_U_test__gtest_h //original-code:"test/gtest.h"
namespace webrtc {
class TestVCMReceiver : public ::testing::Test {
protected:
TestVCMReceiver()
: clock_(new SimulatedClock(0)),
timing_(clock_.get()),
receiver_(&timing_, clock_.get(), &event_factory_) {
stream_generator_.reset(
new StreamGenerator(0, clock_->TimeInMilliseconds()));
}
virtual void SetUp() { receiver_.Reset(); }
int32_t InsertPacket(int index) {
VCMPacket packet;
bool packet_available = stream_generator_->GetPacket(&packet, index);
EXPECT_TRUE(packet_available);
if (!packet_available)
return kGeneralError; // Return here to avoid crashes below.
return receiver_.InsertPacket(packet);
}
int32_t InsertPacketAndPop(int index) {
VCMPacket packet;
bool packet_available = stream_generator_->PopPacket(&packet, index);
EXPECT_TRUE(packet_available);
if (!packet_available)
return kGeneralError; // Return here to avoid crashes below.
return receiver_.InsertPacket(packet);
}
int32_t InsertFrame(FrameType frame_type, bool complete) {
int num_of_packets = complete ? 1 : 2;
stream_generator_->GenerateFrame(
frame_type, (frame_type != kEmptyFrame) ? num_of_packets : 0,
(frame_type == kEmptyFrame) ? 1 : 0, clock_->TimeInMilliseconds());
int32_t ret = InsertPacketAndPop(0);
if (!complete) {
// Drop the second packet.
VCMPacket packet;
stream_generator_->PopPacket(&packet, 0);
}
clock_->AdvanceTimeMilliseconds(kDefaultFramePeriodMs);
return ret;
}
bool DecodeNextFrame() {
VCMEncodedFrame* frame = receiver_.FrameForDecoding(0, false);
if (!frame)
return false;
receiver_.ReleaseFrame(frame);
return true;
}
std::unique_ptr<SimulatedClock> clock_;
VCMTiming timing_;
NullEventFactory event_factory_;
VCMReceiver receiver_;
std::unique_ptr<StreamGenerator> stream_generator_;
};
TEST_F(TestVCMReceiver, NonDecodableDuration_Empty) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Advance time until it's time to decode the key frame.
clock_->AdvanceTimeMilliseconds(kMinDelayMs);
EXPECT_TRUE(DecodeNextFrame());
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_FALSE(request_key_frame);
}
TEST_F(TestVCMReceiver, NonDecodableDuration_NoKeyFrame) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
const int kNumFrames = kDefaultFrameRate * kMaxNonDecodableDuration / 1000;
for (int i = 0; i < kNumFrames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_TRUE(request_key_frame);
}
TEST_F(TestVCMReceiver, NonDecodableDuration_OneIncomplete) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
const int kMaxNonDecodableDurationFrames =
(kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
receiver_.SetMinReceiverDelay(kMinDelayMs);
int64_t key_frame_inserted = clock_->TimeInMilliseconds();
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
// Insert enough frames to have too long non-decodable sequence.
for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
// Advance time until it's time to decode the key frame.
clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
key_frame_inserted);
EXPECT_TRUE(DecodeNextFrame());
// Make sure we get a key frame request.
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_TRUE(request_key_frame);
}
TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
const int kMaxNonDecodableDurationFrames =
(kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
receiver_.SetMinReceiverDelay(kMinDelayMs);
int64_t key_frame_inserted = clock_->TimeInMilliseconds();
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
// Insert all but one frame to not trigger a key frame request due to
// too long duration of non-decodable frames.
for (int i = 0; i < kMaxNonDecodableDurationFrames - 1; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
// Advance time until it's time to decode the key frame.
clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
key_frame_inserted);
EXPECT_TRUE(DecodeNextFrame());
// Make sure we don't get a key frame request since we haven't generated
// enough frames.
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_FALSE(request_key_frame);
}
TEST_F(TestVCMReceiver, NonDecodableDuration_NoTrigger2) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
const int kMaxNonDecodableDurationFrames =
(kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
receiver_.SetMinReceiverDelay(kMinDelayMs);
int64_t key_frame_inserted = clock_->TimeInMilliseconds();
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Insert enough frames to have too long non-decodable sequence, except that
// we don't have any losses.
for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
// Insert an incomplete frame.
EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
// Advance time until it's time to decode the key frame.
clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
key_frame_inserted);
EXPECT_TRUE(DecodeNextFrame());
// Make sure we don't get a key frame request since the non-decodable duration
// is only one frame.
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_FALSE(request_key_frame);
}
TEST_F(TestVCMReceiver, NonDecodableDuration_KeyFrameAfterIncompleteFrames) {
// Enable NACK and with no RTT thresholds for disabling retransmission delay.
receiver_.SetNackMode(kNack, -1, -1);
const size_t kMaxNackListSize = 1000;
const int kMaxPacketAgeToNack = 1000;
const int kMaxNonDecodableDuration = 500;
const int kMaxNonDecodableDurationFrames =
(kDefaultFrameRate * kMaxNonDecodableDuration + 500) / 1000;
const int kMinDelayMs = 500;
receiver_.SetNackSettings(kMaxNackListSize, kMaxPacketAgeToNack,
kMaxNonDecodableDuration);
receiver_.SetMinReceiverDelay(kMinDelayMs);
int64_t key_frame_inserted = clock_->TimeInMilliseconds();
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Insert an incomplete frame.
EXPECT_GE(InsertFrame(kVideoFrameDelta, false), kNoError);
// Insert enough frames to have too long non-decodable sequence.
for (int i = 0; i < kMaxNonDecodableDurationFrames; ++i) {
EXPECT_GE(InsertFrame(kVideoFrameDelta, true), kNoError);
}
EXPECT_GE(InsertFrame(kVideoFrameKey, true), kNoError);
// Advance time until it's time to decode the key frame.
clock_->AdvanceTimeMilliseconds(kMinDelayMs - clock_->TimeInMilliseconds() -
key_frame_inserted);
EXPECT_TRUE(DecodeNextFrame());
// Make sure we don't get a key frame request since we have a key frame
// in the list.
bool request_key_frame = false;
std::vector<uint16_t> nack_list = receiver_.NackList(&request_key_frame);
EXPECT_FALSE(request_key_frame);
}
// A simulated clock, when time elapses, will insert frames into the jitter
// buffer, based on initial settings.
class SimulatedClockWithFrames : public SimulatedClock {
public:
SimulatedClockWithFrames(StreamGenerator* stream_generator,
VCMReceiver* receiver)
: SimulatedClock(0),
stream_generator_(stream_generator),
receiver_(receiver) {}
virtual ~SimulatedClockWithFrames() {}
// If |stop_on_frame| is true and next frame arrives between now and
// now+|milliseconds|, the clock will be advanced to the arrival time of next
// frame.
// Otherwise, the clock will be advanced by |milliseconds|.
//
// For both cases, a frame will be inserted into the jitter buffer at the
// instant when the clock time is timestamps_.front().arrive_time.
//
// Return true if some frame arrives between now and now+|milliseconds|.
bool AdvanceTimeMilliseconds(int64_t milliseconds, bool stop_on_frame) {
return AdvanceTimeMicroseconds(milliseconds * 1000, stop_on_frame);
}
bool AdvanceTimeMicroseconds(int64_t microseconds, bool stop_on_frame) {
int64_t start_time = TimeInMicroseconds();
int64_t end_time = start_time + microseconds;
bool frame_injected = false;
while (!timestamps_.empty() &&
timestamps_.front().arrive_time <= end_time) {
RTC_DCHECK(timestamps_.front().arrive_time >= start_time);
SimulatedClock::AdvanceTimeMicroseconds(timestamps_.front().arrive_time -
TimeInMicroseconds());
GenerateAndInsertFrame((timestamps_.front().render_time + 500) / 1000);
timestamps_.pop();
frame_injected = true;
if (stop_on_frame)
return frame_injected;
}
if (TimeInMicroseconds() < end_time) {
SimulatedClock::AdvanceTimeMicroseconds(end_time - TimeInMicroseconds());
}
return frame_injected;
}
// Input timestamps are in unit Milliseconds.
// And |arrive_timestamps| must be positive and in increasing order.
// |arrive_timestamps| determine when we are going to insert frames into the
// jitter buffer.
// |render_timestamps| are the timestamps on the frame.
void SetFrames(const int64_t* arrive_timestamps,
const int64_t* render_timestamps,
size_t size) {
int64_t previous_arrive_timestamp = 0;
for (size_t i = 0; i < size; i++) {
RTC_CHECK(arrive_timestamps[i] >= previous_arrive_timestamp);
timestamps_.push(TimestampPair(arrive_timestamps[i] * 1000,
render_timestamps[i] * 1000));
previous_arrive_timestamp = arrive_timestamps[i];
}
}
private:
struct TimestampPair {
TimestampPair(int64_t arrive_timestamp, int64_t render_timestamp)
: arrive_time(arrive_timestamp), render_time(render_timestamp) {}
int64_t arrive_time;
int64_t render_time;
};
void GenerateAndInsertFrame(int64_t render_timestamp_ms) {
VCMPacket packet;
stream_generator_->GenerateFrame(FrameType::kVideoFrameKey,
1, // media packets
0, // empty packets
render_timestamp_ms);
bool packet_available = stream_generator_->PopPacket(&packet, 0);
EXPECT_TRUE(packet_available);
if (!packet_available)
return; // Return here to avoid crashes below.
receiver_->InsertPacket(packet);
}
std::queue<TimestampPair> timestamps_;
StreamGenerator* stream_generator_;
VCMReceiver* receiver_;
};
// Use a SimulatedClockWithFrames
// Wait call will do either of these:
// 1. If |stop_on_frame| is true, the clock will be turned to the exact instant
// that the first frame comes and the frame will be inserted into the jitter
// buffer, or the clock will be turned to now + |max_time| if no frame comes in
// the window.
// 2. If |stop_on_frame| is false, the clock will be turned to now + |max_time|,
// and all the frames arriving between now and now + |max_time| will be
// inserted into the jitter buffer.
//
// This is used to simulate the JitterBuffer getting packets from the internet as
// time elapses.
class FrameInjectEvent : public EventWrapper {
public:
FrameInjectEvent(SimulatedClockWithFrames* clock, bool stop_on_frame)
: clock_(clock), stop_on_frame_(stop_on_frame) {}
bool Set() override { return true; }
EventTypeWrapper Wait(unsigned long max_time) override { // NOLINT
if (clock_->AdvanceTimeMilliseconds(max_time, stop_on_frame_) &&
stop_on_frame_) {
return EventTypeWrapper::kEventSignaled;
} else {
return EventTypeWrapper::kEventTimeout;
}
}
private:
SimulatedClockWithFrames* clock_;
bool stop_on_frame_;
};
class VCMReceiverTimingTest : public ::testing::Test {
protected:
VCMReceiverTimingTest()
: clock_(&stream_generator_, &receiver_),
stream_generator_(0, clock_.TimeInMilliseconds()),
timing_(&clock_),
receiver_(
&timing_,
&clock_,
std::unique_ptr<EventWrapper>(new FrameInjectEvent(&clock_, false)),
std::unique_ptr<EventWrapper>(
new FrameInjectEvent(&clock_, true))) {}
virtual void SetUp() { receiver_.Reset(); }
SimulatedClockWithFrames clock_;
StreamGenerator stream_generator_;
VCMTiming timing_;
VCMReceiver receiver_;
};
// Test whether VCMReceiver::FrameForDecoding handles parameter
// |max_wait_time_ms| correctly:
// 1. The function execution should never take more than |max_wait_time_ms|.
// 2. If the function exits before now + |max_wait_time_ms|, a frame must be
// returned.
TEST_F(VCMReceiverTimingTest, FrameForDecoding) {
const size_t kNumFrames = 100;
const int kFramePeriod = 40;
int64_t arrive_timestamps[kNumFrames];
int64_t render_timestamps[kNumFrames];
// Construct test samples.
// render_timestamps are the timestamps stored in the Frame;
// arrive_timestamps controls when the Frame packet got received.
for (size_t i = 0; i < kNumFrames; i++) {
// Preset frame rate to 25Hz.
// But we add a reasonable deviation to arrive_timestamps to mimic Internet
// fluctuation.<|fim▁hole|> }
clock_.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
// Record how many frames we finally get out of the receiver.
size_t num_frames_return = 0;
const int64_t kMaxWaitTime = 30;
// Ideally, we should get all frames that we input in InitializeFrames.
// In the case that FrameForDecoding kills frames by error, we rely on the
// build bot to kill the test.
while (num_frames_return < kNumFrames) {
int64_t start_time = clock_.TimeInMilliseconds();
VCMEncodedFrame* frame = receiver_.FrameForDecoding(kMaxWaitTime, false);
int64_t end_time = clock_.TimeInMilliseconds();
// In any case the FrameForDecoding should not wait longer than
// max_wait_time.
// In the case that we did not get a frame, it should have been waiting for
// exactly max_wait_time. (By the testing samples we constructed above, we
// are sure there is no timing error, so the only case it returns with NULL
// is that it runs out of time.)
if (frame) {
receiver_.ReleaseFrame(frame);
++num_frames_return;
EXPECT_GE(kMaxWaitTime, end_time - start_time);
} else {
EXPECT_EQ(kMaxWaitTime, end_time - start_time);
}
}
}
// Test whether VCMReceiver::FrameForDecoding handles parameter
// |prefer_late_decoding| and |max_wait_time_ms| correctly:
// 1. The function execution should never take more than |max_wait_time_ms|.
// 2. If the function exits before now + |max_wait_time_ms|, a frame must be
// returned and the end time must be equal to the render timestamp - delay
// for decoding and rendering.
TEST_F(VCMReceiverTimingTest, FrameForDecodingPreferLateDecoding) {
const size_t kNumFrames = 100;
const int kFramePeriod = 40;
int64_t arrive_timestamps[kNumFrames];
int64_t render_timestamps[kNumFrames];
int render_delay_ms;
int max_decode_ms;
int dummy;
timing_.GetTimings(&dummy, &max_decode_ms, &dummy, &dummy, &dummy, &dummy,
&render_delay_ms);
// Construct test samples.
// render_timestamps are the timestamps stored in the Frame;
// arrive_timestamps controls when the Frame packet got received.
for (size_t i = 0; i < kNumFrames; i++) {
// Preset frame rate to 25Hz.
// But we add a reasonable deviation to arrive_timestamps to mimic Internet
// fluctuation.
arrive_timestamps[i] =
(i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
render_timestamps[i] = (i + 1) * kFramePeriod;
}
clock_.SetFrames(arrive_timestamps, render_timestamps, kNumFrames);
// Record how many frames we finally get out of the receiver.
size_t num_frames_return = 0;
const int64_t kMaxWaitTime = 30;
bool prefer_late_decoding = true;
while (num_frames_return < kNumFrames) {
int64_t start_time = clock_.TimeInMilliseconds();
VCMEncodedFrame* frame =
receiver_.FrameForDecoding(kMaxWaitTime, prefer_late_decoding);
int64_t end_time = clock_.TimeInMilliseconds();
if (frame) {
EXPECT_EQ(frame->RenderTimeMs() - max_decode_ms - render_delay_ms,
end_time);
receiver_.ReleaseFrame(frame);
++num_frames_return;
} else {
EXPECT_EQ(kMaxWaitTime, end_time - start_time);
}
}
}
} // namespace webrtc<|fim▁end|> | arrive_timestamps[i] =
(i + 1) * kFramePeriod + (i % 10) * ((i % 2) ? 1 : -1);
render_timestamps[i] = (i + 1) * kFramePeriod; |
<|file_name|>model_orchestra.py<|end_file_name|><|fim▁begin|># coding: utf-8
import re
import os
import ast
import luigi
import psycopg2
import boto3
import random
import sqlalchemy
import tempfile
import glob
import datetime
import subprocess
import pandas as pn
from luigi import six
from os.path import join, dirname
from luigi import configuration
from luigi.s3 import S3Target, S3Client
from dotenv import load_dotenv,find_dotenv
from luigi.contrib import postgres
from compranet.pipelines.pipelines.utils.pg_compranet import parse_cfg_string, download_dir
from compranet.pipelines.pipelines.etl.elt_orchestra import CreateSemanticDB
# Environment variables
load_dotenv(find_dotenv())
# Load Postgres Schemas
#temp = open('./common/pg_clean_schemas.txt').read()
#schemas = ast.literal_eval(temp)
# AWS
aws_access_key_id = os.environ.get('AWS_ACCESS_KEY_ID')
aws_secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
class Model(luigi.Task):
"""
Intermediate class that triggers the modeling scripts
"""
year_month = luigi.Parameter()
def requires(self):
return CreateSemanticDB(self.year_month)
def run(self):
yield MissingClassifier(self.year_month)
yield CentralityClassifier(self.year_month)
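# Illustrative command-line invocation of this pipeline (the --module path is an
# assumption; adjust it to wherever this file lives in the installed package):
#   luigi --module compranet.pipelines.pipelines.models.model_orchestra Model --year-month 2016-01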
class CentralityClassifier(luigi.Task):
"""
Class that runs the centrality measures implemented by
neo4j
"""
year_month = luigi.Parameter()
script = luigi.Parameter('DEFAULT')
type_script = luigi.Parameter()
def run(self):
# First upload data into neo4j
cmd = '''
cycli ./models/neo4j_scripts/upload.neo4j
'''
subprocess.call(cmd, shell=True)
# Run centrality measures
cmd = '''<|fim▁hole|> return subprocess.call(cmd, shell=True)
class MissingClassifier(luigi.Task):
"""
Class that runs the classification index based on missing values
"""
year_month = luigi.Parameter()
script = luigi.Parameter('DEFAULT')
def run(self):
cmd = '''
python {}/missing-classifier.py
'''.format(self.script)
return subprocess.call(cmd, shell=True)<|fim▁end|> | cycli ./models/neo4j_scripts/centrality.neo4j
''' |
<|file_name|>helloworld.py<|end_file_name|><|fim▁begin|># helloworld.py
#
# familiar test program, demonstrating py2js conversion
def helloworld(suffix):
print "hello world"+suffix
<|fim▁hole|>helloworld("!")<|fim▁end|> | |
<|file_name|>Compression.java<|end_file_name|><|fim▁begin|>package org.jrenner.fps.utils;
import com.badlogic.gdx.utils.Array;
import org.jrenner.fps.Log;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class Compression {
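    // Illustrative round trip using the two methods below (the sample string is arbitrary):
    //   byte[] gz = Compression.writeCompressedString("hello world");
    //   String back = Compression.decompressToString(gz);   // -> "hello world"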
public static byte[] writeCompressedString(String s) {
GZIPOutputStream gzout = null;
try {
ByteArrayOutputStream bout = new ByteArrayOutputStream();
gzout = new GZIPOutputStream(bout);
gzout.write(s.getBytes());
gzout.flush();
gzout.close();
return bout.toByteArray();<|fim▁hole|> } finally {
if (gzout != null) {
try {
gzout.flush();
gzout.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return null;
}
public static String decompressToString(byte[] bytes) {
GZIPInputStream gzin = null;
try {
gzin = new GZIPInputStream(new ByteArrayInputStream(bytes));
byte[] buf = new byte[8192];
byte[] storage = new byte[65536];
int n = 0;
int total = 0;
while (true) {
n = gzin.read(buf);
if (n == -1) break;
// expand to meet needs
if (total + n >= storage.length) {
byte[] expanded = new byte[storage.length * 2];
System.arraycopy(storage, 0, expanded, 0, storage.length);
storage = expanded;
}
System.out.printf("blen: %d, storlen: %d, total: %d, n: %d\n", buf.length, storage.length, total, n);
System.arraycopy(buf, 0, storage, total, n);
total += n;
}
Log.debug("read " + total + " bytes from compressed files");
byte[] result = new byte[total];
System.arraycopy(storage, 0, result, 0, total);
return new String(result);
} catch (Exception e) {
e.printStackTrace();
} finally {
if (gzin != null) {
try {
gzin.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return null;
}
}<|fim▁end|> | } catch (IOException e) {
e.printStackTrace(); |
<|file_name|>mixins.py<|end_file_name|><|fim▁begin|>"""Form mixins for the ``multilingual_tags`` app."""
from django import forms
from django.forms.utils import ErrorList
from django.contrib.contenttypes.models import ContentType
from django.utils.text import slugify
from django.utils.translation import get_language, ugettext_lazy as _
from .. import models
class TaggingFormMixin(object):
"""Mixin for ModelForms to add multilingual tags to a model."""
tag_field = {
'name': 'tags',
'label': _('Tags'),
'help_text': _('Add tags separated by comma.'),
'required': True,
'max_tags': 0,
}
def __init__(self, *args, **kwargs):
super(TaggingFormMixin, self).__init__(*args, **kwargs)
self._taggeditems = []
self._instance_ctype = None
self.fields[self._get_tag_field_name()] = forms.CharField(
label=self._get_tag_field_label(),
help_text=self._get_tag_field_help_text(),
initial=self._get_tag_field_initial(),
required=self._get_tag_field_required(),
)
self.fields[self._get_tag_field_name()].widget.attrs.update({
'data-class': 'multilingual-tags-field',
'data-max-tags': self._get_tag_field_max_tags()})
setattr(self, 'clean_{0}'.format(self._get_tag_field_name()),
self._get_tag_field_clean())
def add_error(self, fieldname, message):<|fim▁hole|> self._errors[fieldname] = ErrorList()
self._errors[fieldname].append(message)
def _get_tag_field_clean(self):
def clean_field():
self._tags_added = []
self._taggeditems = []
language = get_language()
max_tags = self._get_tag_field_max_tags()
data = self.data.get(self._get_tag_field_name())
if not data:
return []
tag_data = [t.strip() for t in data.split(',')]
self._instance_ctype = ContentType.objects.get_for_model(
self.instance)
for tag_string in tag_data:
if len(tag_string) > 64:
self.add_error(
self._get_tag_field_name(),
_('Tags cannot be longer than 64 characters:'
' "{0}"'.format(tag_string))
)
continue
try:
tag = models.Tag.objects.get(
slug=slugify(tag_string))
except models.Tag.DoesNotExist:
# TODO tags should not be stored directly
tag = models.Tag.objects.create(
slug=slugify(tag_string),
name=tag_string,
language_code=language)
# prevent duplicate tags
if tag not in self._tags_added:
self._tags_added.append(tag)
if self.instance.id:
taggeditem, created = (
models.TaggedItem.objects.get_or_create(
tag=tag,
content_type=self._instance_ctype,
object_id=self.instance.id,
)
)
else:
taggeditem = models.TaggedItem(
tag=tag,
content_type=self._instance_ctype)
self._taggeditems.append(taggeditem)
if max_tags and len(self._tags_added) > max_tags:
self.add_error(
self._get_tag_field_name(),
_('You cannot add more than {0} tags.'.format(
self._get_tag_field_max_tags()
))
)
return self._taggeditems
return clean_field
def _get_tag_field_help_text(self):
return self.tag_field.get('help_text', '')
def _get_tag_field_initial(self):
tag_model_field = getattr(self.instance, self._get_tag_field_name())
return ','.join([ti.tag.name for ti in tag_model_field.all()])
def _get_tag_field_label(self):
return self.tag_field.get('label', 'Tags')
def _get_tag_field_max_tags(self):
return int(self.tag_field.get('max_tags', 0))
def _get_tag_field_name(self):
return self.tag_field.get('name', 'tags')
def _get_tag_field_required(self):
return self.tag_field.get('required', True)
def save(self, commit=True):
instance = super(TaggingFormMixin, self).save(commit)
for item in self._taggeditems:
if hasattr(instance, 'get_user'):
item.user = instance.get_user()
item.object_id = instance.id
item.save()
models.TaggedItem.objects.filter(
content_type=self._instance_ctype,
object_id=instance.id).exclude(
pk__in=[ti.pk for ti in self._taggeditems]).delete()
return instance<|fim▁end|> | if fieldname in self._errors:
self._errors[fieldname].append(message)
else: |
<|file_name|>deleteGlobalLoadBalancerRule.py<|end_file_name|><|fim▁begin|>"""Deletes a global load balancer rule."""
from baseCmd import *
from baseResponse import *
class deleteGlobalLoadBalancerRuleCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "true"
"""the ID of the global load balancer rule"""
"""Required"""
self.id = None
self.typeInfo['id'] = 'uuid'
self.required = ["id", ]
class deleteGlobalLoadBalancerRuleResponse (baseResponse):<|fim▁hole|> self.displaytext = None
self.typeInfo['displaytext'] = 'string'
"""true if operation is executed successfully"""
self.success = None
self.typeInfo['success'] = 'boolean'<|fim▁end|> | typeInfo = {}
def __init__(self):
"""any text associated with the success or failure""" |
<|file_name|>hazc_device.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3
from zeroconf import Zeroconf, ServiceInfo
import socket
import configparser
from . import hazc_cmd
# import pdb
class hazc_device:
#Forward constants
NO_PARAM = hazc_cmd.NO_PARAM
BOOL = hazc_cmd.BOOL
FLOAT = hazc_cmd.FLOAT
STRING = hazc_cmd.STRING
INT = hazc_cmd.INT
global running
running = False
def __init__(self, ipaddr):
self.version = "0.1"
self.config = configparser.ConfigParser()
self.config.read('config.ini')
self.MSGLEN = 1024
self.END_OF_MSG = '*'
self.ip = ipaddr
self.buffer = 20
# self.commands = {'version?':self.version_cmd,'commands?':self.commands_cmd,'status?':self.status_cmd}
hcvc = hazc_cmd.hazc_cmd('version?', self.version_cmd, self.NO_PARAM)
hccc = hazc_cmd.hazc_cmd('commands?', self.commands_cmd, self.NO_PARAM)
hcsc = hazc_cmd.hazc_cmd('status?', self.status_cmd, self.STRING)
self.commands = {'version': hcvc, 'commands': hccc, 'status': hcsc}
# probably want to add a debug log status
self.status = {'exec_status': self.exec_status}
#Adds a function - not as preferred as addControl
#Does NOT auto add status
def addFunction(self, name, handler, paramtype):
# pdb.settrace()
#log("This is not the preferred way to add controls, see addControl")
if not('?' in name or '!' in name):
# log("Function name requires a '?' or '!', assuming '!'")
name += '!'
self.commands[name] = hazc_cmd.hazc_cmd(name, handler, paramtype)
#Adds a control vector
#controlname should just be a name like 'temp' or 'position' - it'll be the same for the status
def addControl(self, controlname, handler, statushandler, paramtype=NO_PARAM):
cmd_name = 'set-'+controlname
self.commands[cmd_name] = hazc_cmd.hazc_cmd(cmd_name+'?', handler, paramtype)
self.addStatus(controlname, statushandler)
#adds a unique status not already included in control vector. name is just the name, as in 'temp'
def addStatus(self, name, handler):
self.status[name] = handler
def advertise(self):
postfix = self.config['global']['service_prefix']
self.port = int(self.config['global']['port'])
#print(self.config['device']['hostname']+postfix)
info = ServiceInfo(postfix, self.config['device']['hostname']+"."+postfix,
socket.inet_aton(self.ip), self.port, 0, 0,
{'info': self.config['device']['description']}, "hazc.local.")
self.bindConnection()
zeroconf = Zeroconf()
zeroconf.register_service(info)
try:
while True:
# try:
print("Ready")
self.conn, self.addr = self.webcontrol.accept()
self.listen()
self.conn.close()
except KeyboardInterrupt:
pass
finally:
print()
print("Unregistering...")
zeroconf.unregister_service(info)
zeroconf.close()
try:
print("Shutting down socket")
self.webcontrol.shutdown(socket.SHUT_RDWR)
except Exception as e:
print(e)
def listen(self):
data = bytes()
rbytes = 0
while rbytes < self.MSGLEN:
d = self.conn.recv(self.buffer)
if not d: break
data += d
rbytes += len(d)
# print data.decode('utf-8')
self.handledata(data)
def handledata(self, data):
command, param = self.cleanandstringdata(data)
print('->' + command + ';' + param)
# replystr = "ERROR"
try:
replystr = self.commands[command].execute(param)
except KeyError:
if(command==''):
command = "(empty string)"
print("ERROR! Unknown command: " + command)
replystr = ""
# replystr = self.commands['version'].execute('')
if(replystr == None):
print("WARNING! " + command + " should return a string to send to the master. Sending 'NO_REPLY'")
replystr = 'NO_REPLY'
print(replystr)
self.reply(replystr)
def reply(self, msg):
longmsg = msg
while len(longmsg) < self.MSGLEN:
longmsg += self.END_OF_MSG
# print(longmsg)
self.conn.send(longmsg.encode('utf-8'))
def cleanandstringdata(self, data):
dstr = data.decode('utf-8')
full = dstr.strip(self.END_OF_MSG)
if '?' in full:
li = full.split('?')
param = li[-1]
cmd = li[0]
elif '!' in full:
li = full.split('!')
param = li[-1]
cmd = li[0]
else:
param = ''
cmd = full
return (cmd, param)
def bindConnection(self):
try:
self.webcontrol = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.webcontrol.bind((self.ip, self.port))
self.webcontrol.listen(1)
except OSError as e:
print(e)
quit()
def exec_status(self):
return "Running"
def version_cmd(self):
return self.version
def paramtype_tostring(self, paramnum):
if paramnum == self.BOOL:
return 'BOOL'
elif paramnum == self.FLOAT:
return 'FLOAT'
elif paramnum == self.STRING:
return 'STRING'
elif paramnum == self.INT:
return 'INT'
else:
return 'PARAM_ERROR'
def commands_cmd(self):
rstr = ""
for key in self.commands:
rstr += key
if self.commands[key].paramtype is not self.NO_PARAM:
# pdb.set_trace()<|fim▁hole|>
def status_cmd(self, specific_status=''):
str = ''
if len(specific_status) > 0:
str = self.status[specific_status]
else:
for st in self.status:
str += st + ',' + self.status[st]() + ';'
return str[:self.MSGLEN-1]
# Some debugging methods
def debug_cmds(self):
print("Commands: " + str(self.commands))
print("Statuses: " + str(self.status))<|fim▁end|> | rstr += ':' + self.paramtype_tostring(self.commands[key].paramtype)
rstr += ";"
return rstr |