gclient_scm_test.py

#!/usr/bin/env vpython3
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for gclient_scm.py."""
# pylint: disable=E1103
from __future__ import unicode_literals
from subprocess import Popen, PIPE, STDOUT
import json
import logging
import os
import re
import sys
import tempfile
import unittest
if sys.version_info.major == 2:
from cStringIO import StringIO
else:
from io import StringIO
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from third_party import mock
from testing_support import fake_repos
from testing_support import test_case_utils
import gclient_scm
import gclient_utils
import git_cache
import subprocess2
GIT = 'git' if sys.platform != 'win32' else 'git.bat'
# Disable global git cache
git_cache.Mirror.SetCachePath(None)
# Shortcut since this function is used often
join = gclient_scm.os.path.join
TIMESTAMP_RE = re.compile(r'\[[0-9]{1,2}:[0-9]{2}:[0-9]{2}\] (.*)', re.DOTALL)
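# gclient prefixes progress output with [HH:MM:SS] timestamps; strip them so
# tests can compare stdout deterministically.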
def strip_timestamps(value):
lines = value.splitlines(True)
for i in range(len(lines)):
m = TIMESTAMP_RE.match(lines[i])
if m:
lines[i] = m.group(1)
return ''.join(lines)
class BasicTests(unittest.TestCase):
@mock.patch('gclient_scm.scm.GIT.Capture')
def testGetFirstRemoteUrl(self, mockCapture):
REMOTE_STRINGS = [('remote.origin.url E:\\foo\\bar', 'E:\\foo\\bar'),
('remote.origin.url /b/foo/bar', '/b/foo/bar'),
('remote.origin.url https://foo/bar', 'https://foo/bar'),
('remote.origin.url E:\\Fo Bar\\bax', 'E:\\Fo Bar\\bax'),
('remote.origin.url git://what/"do', 'git://what/"do')]
FAKE_PATH = '/fake/path'
mockCapture.side_effect = [question for question, _ in REMOTE_STRINGS]
for _, answer in REMOTE_STRINGS:
self.assertEqual(
gclient_scm.SCMWrapper._get_first_remote_url(FAKE_PATH), answer)
expected_calls = [
mock.call(['config', '--local', '--get-regexp', r'remote.*.url'],
cwd=FAKE_PATH)
for _ in REMOTE_STRINGS
]
self.assertEqual(mockCapture.mock_calls, expected_calls)
class BaseGitWrapperTestCase(unittest.TestCase, test_case_utils.TestCaseUtils):
"""This class doesn't use pymox."""
class OptionsObject(object):
def __init__(self, verbose=False, revision=None):
self.auto_rebase = False
self.verbose = verbose
self.revision = revision
self.deps_os = None
self.force = False
self.reset = False
self.nohooks = False
self.no_history = False
self.upstream = False
self.cache_dir = None
self.merge = False
self.jobs = 1
self.break_repo_locks = False
self.delete_unversioned_trees = False
self.patch_ref = None
self.patch_repo = None
self.rebase_patch_ref = True
self.reset_patch_ref = True
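  # A `git fast-import` stream defining the fixture repo: master with files
  # 'a' and 'b', an 'origin' branch that personalizes both files, and a
  # 'feature' branch that adds 'c'.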
sample_git_import = """blob
mark :1
data 6
Hello
blob
mark :2
data 4
Bye
reset refs/heads/master
commit refs/heads/master
mark :3
author Bob <[email protected]> 1253744361 -0700
committer Bob <[email protected]> 1253744361 -0700
data 8
A and B
M 100644 :1 a
M 100644 :2 b
blob
mark :4
data 10
Hello
You
blob
mark :5
data 8
Bye
You
commit refs/heads/origin
mark :6
author Alice <[email protected]> 1253744424 -0700
committer Alice <[email protected]> 1253744424 -0700
data 13
Personalized
from :3
M 100644 :4 a
M 100644 :5 b
blob
mark :7
data 5
Mooh
commit refs/heads/feature
mark :8
author Bob <[email protected]> 1390311986 -0000
committer Bob <[email protected]> 1390311986 -0000
data 6
Add C
from :3
M 100644 :7 c
reset refs/heads/master
from :3
"""
def Options(self, *args, **kwargs):
return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected):
value = sys.stdout.getvalue()
sys.stdout.close()
# Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value))
@staticmethod
def CreateGitRepo(git_import, path):
"""Do it for real."""
try:
Popen([GIT, 'init', '-q'], stdout=PIPE, stderr=STDOUT,
cwd=path).communicate()
except OSError:
# git is not available, skip this test.
return False
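    # Feed the fast-import stream above into the fresh repo, then wire the repo
    # up as its own 'origin' remote so fetch/update code paths can run on it.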
Popen([GIT, 'fast-import', '--quiet'], stdin=PIPE, stdout=PIPE,
stderr=STDOUT, cwd=path).communicate(input=git_import.encode())
Popen([GIT, 'checkout', '-q'], stdout=PIPE, stderr=STDOUT,
cwd=path).communicate()
Popen([GIT, 'remote', 'add', '-f', 'origin', '.'], stdout=PIPE,
stderr=STDOUT, cwd=path).communicate()
Popen([GIT, 'checkout', '-b', 'new', 'origin/master', '-q'], stdout=PIPE,
stderr=STDOUT, cwd=path).communicate()
Popen([GIT, 'push', 'origin', 'origin/origin:origin/master', '-q'],
stdout=PIPE, stderr=STDOUT, cwd=path).communicate()
Popen([GIT, 'config', '--unset', 'remote.origin.fetch'], stdout=PIPE,
stderr=STDOUT, cwd=path).communicate()
Popen([GIT, 'config', 'user.email', '[email protected]'], stdout=PIPE,
stderr=STDOUT, cwd=path).communicate()
Popen([GIT, 'config', 'user.name', 'Some User'], stdout=PIPE,
stderr=STDOUT, cwd=path).communicate()
return True
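  # Builds a stand-in for gclient's interactive prompt: it asserts the exact
  # prompt text and replies with the canned answer, simulating a keypress.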
def _GetAskForDataCallback(self, expected_prompt, return_value):
def AskForData(prompt, options):
self.assertEqual(prompt, expected_prompt)
return return_value
return AskForData
def setUp(self):
unittest.TestCase.setUp(self)
test_case_utils.TestCaseUtils.setUp(self)
self.url = 'git://foo'
# The .git suffix allows gclient_scm to recognize the dir as a git repo
# when cloning it locally
self.root_dir = tempfile.mkdtemp('.git')
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
self.enabled = self.CreateGitRepo(self.sample_git_import, self.base_path)
self._original_GitBinaryExists = gclient_scm.GitWrapper.BinaryExists
mock.patch('gclient_scm.GitWrapper.BinaryExists',
staticmethod(lambda : True)).start()
mock.patch('sys.stdout', StringIO()).start()
self.addCleanup(mock.patch.stopall)
self.addCleanup(gclient_utils.rmtree, self.root_dir)
class ManagedGitWrapperTestCase(BaseGitWrapperTestCase):
def testRevertMissing(self):
if not self.enabled:
return
options = self.Options()
file_path = join(self.base_path, 'a')
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, None, file_list)
gclient_scm.os.remove(file_path)
file_list = []
scm.revert(options, self.args, file_list)
self.assertEqual(file_list, [file_path])
file_list = []
scm.diff(options, self.args, file_list)
self.assertEqual(file_list, [])
sys.stdout.close()
def testRevertNone(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, None, file_list)
file_list = []
scm.revert(options, self.args, file_list)
self.assertEqual(file_list, [])
self.assertEqual(scm.revinfo(options, self.args, None),
'a7142dc9f0009350b96a11f372b6ea658592aa95')
sys.stdout.close()
def testRevertModified(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, None, file_list)
file_path = join(self.base_path, 'a')
with open(file_path, 'a') as f:
f.writelines('touched\n')
file_list = []
scm.revert(options, self.args, file_list)
self.assertEqual(file_list, [file_path])
file_list = []
scm.diff(options, self.args, file_list)
self.assertEqual(file_list, [])
self.assertEqual(scm.revinfo(options, self.args, None),
'a7142dc9f0009350b96a11f372b6ea658592aa95')
sys.stdout.close()
def testRevertNew(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, None, file_list)
file_path = join(self.base_path, 'c')
with open(file_path, 'w') as f:
f.writelines('new\n')
Popen([GIT, 'add', 'c'], stdout=PIPE,
stderr=STDOUT, cwd=self.base_path).communicate()
file_list = []
scm.revert(options, self.args, file_list)
self.assertEqual(file_list, [file_path])
file_list = []
scm.diff(options, self.args, file_list)
self.assertEqual(file_list, [])
self.assertEqual(scm.revinfo(options, self.args, None),
'a7142dc9f0009350b96a11f372b6ea658592aa95')
sys.stdout.close()
def testStatusNew(self):
if not self.enabled:
return
options = self.Options()
file_path = join(self.base_path, 'a')
with open(file_path, 'a') as f:
f.writelines('touched\n')
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.status(options, self.args, file_list)
self.assertEqual(file_list, [file_path])
self.checkstdout(
('\n________ running \'git -c core.quotePath=false diff --name-status '
'069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\n') %
join(self.root_dir, '.'))
def testStatus2New(self):
if not self.enabled:
return
options = self.Options()
expected_file_list = []
for f in ['a', 'b']:
file_path = join(self.base_path, f)
with open(file_path, 'a') as f:
f.writelines('touched\n')
expected_file_list.extend([file_path])
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.status(options, self.args, file_list)
expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
self.assertEqual(sorted(file_list), expected_file_list)
self.checkstdout(
('\n________ running \'git -c core.quotePath=false diff --name-status '
'069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\nM\tb\n')
% join(self.root_dir, '.'))
def testUpdateUpdate(self):
if not self.enabled:
return
options = self.Options()
expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'a7142dc9f0009350b96a11f372b6ea658592aa95')
sys.stdout.close()
def testUpdateMerge(self):
if not self.enabled:
return
options = self.Options()
options.merge = True
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
scm._Run(['checkout', '-q', 'feature'], options)
rev = scm.revinfo(options, (), None)
file_list = []
scm.update(options, (), file_list)
self.assertEqual(file_list, [join(self.base_path, x)
for x in ['a', 'b', 'c']])
# The actual commit that is created is unstable, so we verify its tree and
# parents instead.
self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
'd2e35c10ac24d6c621e14a1fcadceb533155627d')
parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
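    # '^' is cmd.exe's escape character, so it has to be doubled when git runs
    # via git.bat on Windows.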
self.assertEqual(scm._Capture(['rev-parse', parent + '1']), rev)
self.assertEqual(scm._Capture(['rev-parse', parent + '2']),
scm._Capture(['rev-parse', 'origin/master']))
sys.stdout.close()
def testUpdateRebase(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
scm._Run(['checkout', '-q', 'feature'], options)
file_list = []
# Fake a 'y' key press.
scm._AskForData = self._GetAskForDataCallback(
'Cannot fast-forward merge, attempt to rebase? '
'(y)es / (q)uit / (s)kip : ', 'y')
scm.update(options, (), file_list)
self.assertEqual(file_list, [join(self.base_path, x)
for x in ['a', 'b', 'c']])
# The actual commit that is created is unstable, so we verify its tree and
# parent instead.
self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
'd2e35c10ac24d6c621e14a1fcadceb533155627d')
parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
self.assertEqual(scm._Capture(['rev-parse', parent + '1']),
scm._Capture(['rev-parse', 'origin/master']))
sys.stdout.close()
def testUpdateReset(self):
if not self.enabled:
return
options = self.Options()
options.reset = True
dir_path = join(self.base_path, 'c')
os.mkdir(dir_path)
with open(join(dir_path, 'nested'), 'w') as f:
f.writelines('new\n')
file_path = join(self.base_path, 'file')
with open(file_path, 'w') as f:
f.writelines('new\n')
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, (), file_list)
self.assert_(gclient_scm.os.path.isdir(dir_path))
self.assert_(gclient_scm.os.path.isfile(file_path))
sys.stdout.close()
def testUpdateResetUnsetsFetchConfig(self):
if not self.enabled:
return
options = self.Options()
options.reset = True
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
scm._Run(['config', 'remote.origin.fetch',
'+refs/heads/bad/ref:refs/remotes/origin/bad/ref'], options)
file_list = []
scm.update(options, (), file_list)
self.assertEqual(scm.revinfo(options, (), None),
'069c602044c5388d2d15c3f875b057c852003458')
sys.stdout.close()
def testUpdateResetDeleteUnversionedTrees(self):
if not self.enabled:
return
options = self.Options()
options.reset = True
options.delete_unversioned_trees = True
dir_path = join(self.base_path, 'dir')
os.mkdir(dir_path)
with open(join(dir_path, 'nested'), 'w') as f:
f.writelines('new\n')
file_path = join(self.base_path, 'file')
with open(file_path, 'w') as f:
f.writelines('new\n')
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
scm.update(options, (), file_list)
self.assert_(not gclient_scm.os.path.isdir(dir_path))
self.assert_(gclient_scm.os.path.isfile(file_path))
sys.stdout.close()
def testUpdateUnstagedConflict(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_path = join(self.base_path, 'b')
with open(file_path, 'w') as f:
f.writelines('conflict\n')
try:
scm.update(options, (), [])
self.fail()
except (gclient_scm.gclient_utils.Error, subprocess2.CalledProcessError):
# The exact exception text varies across git versions so it's not worth
# verifying it. It's fine as long as it throws.
pass
    # Manually flush stdout since we can't verify its content accurately across
    # git versions.
sys.stdout.getvalue()
sys.stdout.close()
@unittest.skip('Skipping until crbug.com/670884 is resolved.')
def testUpdateLocked(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_path = join(self.base_path, '.git', 'index.lock')
with open(file_path, 'w'):
pass
with self.assertRaises(subprocess2.CalledProcessError):
scm.update(options, (), [])
sys.stdout.close()
def testUpdateLockedBreak(self):
if not self.enabled:
return
options = self.Options()
options.break_repo_locks = True
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_path = join(self.base_path, '.git', 'index.lock')
with open(file_path, 'w'):
pass
scm.update(options, (), [])
self.assertRegexpMatches(sys.stdout.getvalue(),
r'breaking lock.*\.git[/|\\]index\.lock')
self.assertFalse(os.path.exists(file_path))
sys.stdout.close()
def testUpdateConflict(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_path = join(self.base_path, 'b')
with open(file_path, 'w') as f:
f.writelines('conflict\n')
scm._Run(['commit', '-am', 'test'], options)
scm._AskForData = self._GetAskForDataCallback(
'Cannot fast-forward merge, attempt to rebase? '
'(y)es / (q)uit / (s)kip : ', 'y')
with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
scm.update(options, (), [])
self.assertEqual(
e.exception.args[0],
'Conflict while rebasing this branch.\n'
'Fix the conflict and run gclient again.\n'
'See \'man git-rebase\' for details.\n')
with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
scm.update(options, (), [])
self.assertEqual(
e.exception.args[0],
'\n____ . at refs/remotes/origin/master\n'
'\tYou have unstaged changes.\n'
'\tPlease commit, stash, or reset.\n')
sys.stdout.close()
def testRevinfo(self):
if not self.enabled:
return
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
rev_info = scm.revinfo(options, (), None)
self.assertEqual(rev_info, '069c602044c5388d2d15c3f875b057c852003458')
class ManagedGitWrapperTestCaseMock(unittest.TestCase):
class OptionsObject(object):
def __init__(self, verbose=False, revision=None, force=False):
self.verbose = verbose
self.revision = revision
self.deps_os = None
self.force = force
self.reset = False
self.nohooks = False
self.break_repo_locks = False
# TODO(maruel): Test --jobs > 1.
self.jobs = 1
self.patch_ref = None
self.patch_repo = None
self.rebase_patch_ref = True
def Options(self, *args, **kwargs):
return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected):
value = sys.stdout.getvalue()
sys.stdout.close()
# Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value))
def setUp(self):
self.fake_hash_1 = 't0ta11yf4k3'
self.fake_hash_2 = '3v3nf4k3r'
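    # These are deliberately malformed placeholders, not well-formed sha1s;
    # revision validation is mocked out in the tests below.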
self.url = 'git://foo'
self.root_dir = '/tmp' if sys.platform != 'win32' else 't:\\tmp'
self.relpath = 'fake'
self.base_path = os.path.join(self.root_dir, self.relpath)
self.backup_base_path = os.path.join(self.root_dir,
'old_%s.git' % self.relpath)
mock.patch('gclient_scm.scm.GIT.ApplyEnvVars').start()
mock.patch('gclient_scm.GitWrapper._CheckMinVersion').start()
mock.patch('gclient_scm.GitWrapper._Fetch').start()
mock.patch('gclient_scm.GitWrapper._DeleteOrMove').start()
mock.patch('sys.stdout', StringIO()).start()
self.addCleanup(mock.patch.stopall)
@mock.patch('scm.GIT.IsValidRevision')
@mock.patch('os.path.isdir', lambda _: True)
def testGetUsableRevGit(self, mockIsValidRevision):
# pylint: disable=no-member
options = self.Options(verbose=True)
mockIsValidRevision.side_effect = lambda cwd, rev: rev != '1'
git_scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
    # A [fake] git sha1 with a git repo should work (this covers the case where
    # the LKGR gets flipped to git sha1s some day).
self.assertEqual(git_scm.GetUsableRev(self.fake_hash_1, options),
self.fake_hash_1)
# An SVN rev with an existing purely git repo should raise an exception.
self.assertRaises(gclient_scm.gclient_utils.Error,
git_scm.GetUsableRev, '1', options)
@mock.patch('gclient_scm.GitWrapper._Clone')
@mock.patch('os.path.isdir')
@mock.patch('os.path.exists')
@mock.patch('subprocess2.check_output')
def testUpdateNoDotGit(
self, mockCheckOutput, mockExists, mockIsdir, mockClone):
mockIsdir.side_effect = lambda path: path == self.base_path
mockExists.side_effect = lambda path: path == self.base_path
mockCheckOutput.return_value = b''
options = self.Options()
scm = gclient_scm.GitWrapper(
self.url, self.root_dir, self.relpath)
scm.update(options, None, [])
env = gclient_scm.scm.GIT.ApplyEnvVars({})
self.assertEqual(
mockCheckOutput.mock_calls,
[
mock.call(
['git', '-c', 'core.quotePath=false', 'ls-files'],
cwd=self.base_path, env=env, stderr=-1),
mock.call(
['git', 'rev-parse', '--verify', 'HEAD'],
cwd=self.base_path, env=env, stderr=-1),
])
mockClone.assert_called_with(
'refs/remotes/origin/master', self.url, options)
self.checkstdout('\n')
@mock.patch('gclient_scm.GitWrapper._Clone')
@mock.patch('os.path.isdir')
@mock.patch('os.path.exists')
@mock.patch('subprocess2.check_output')
def testUpdateConflict(
self, mockCheckOutput, mockExists, mockIsdir, mockClone):
mockIsdir.side_effect = lambda path: path == self.base_path
mockExists.side_effect = lambda path: path == self.base_path
mockCheckOutput.return_value = b''
mockClone.side_effect = [
gclient_scm.subprocess2.CalledProcessError(
None, None, None, None, None),
None,
]
options = self.Options()
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
scm.update(options, None, [])
env = gclient_scm.scm.GIT.ApplyEnvVars({})
self.assertEqual(
mockCheckOutput.mock_calls,
[
mock.call(
['git', '-c', 'core.quotePath=false', 'ls-files'],
cwd=self.base_path, env=env, stderr=-1),
mock.call(
['git', 'rev-parse', '--verify', 'HEAD'],
cwd=self.base_path, env=env, stderr=-1),
])
mockClone.assert_called_with(
'refs/remotes/origin/master', self.url, options)
self.checkstdout('\n')
class UnmanagedGitWrapperTestCase(BaseGitWrapperTestCase):
def checkInStdout(self, expected):
value = sys.stdout.getvalue()
sys.stdout.close()
# pylint: disable=no-member
self.assertIn(expected, value)
def checkNotInStdout(self, expected):
value = sys.stdout.getvalue()
sys.stdout.close()
# pylint: disable=no-member
self.assertNotIn(expected, value)
def getCurrentBranch(self):
    # Returns the name of the current branch, or None for a detached HEAD.
branch = gclient_scm.scm.GIT.Capture(['rev-parse', '--abbrev-ref', 'HEAD'],
cwd=self.base_path)
if branch == 'HEAD':
return None
return branch
def testUpdateClone(self):
if not self.enabled:
return
options = self.Options()
origin_root_dir = self.root_dir
self.addCleanup(gclient_utils.rmtree, origin_root_dir)
self.root_dir = tempfile.mkdtemp()
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
scm = gclient_scm.GitWrapper(origin_root_dir,
self.root_dir,
self.relpath)
expected_file_list = [join(self.base_path, "a"),
join(self.base_path, "b")]
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'069c602044c5388d2d15c3f875b057c852003458')
# indicates detached HEAD
self.assertEqual(self.getCurrentBranch(), None)
self.checkInStdout(
'Checked out refs/remotes/origin/master to a detached HEAD')
def testUpdateCloneOnCommit(self):
if not self.enabled:
return
options = self.Options()
origin_root_dir = self.root_dir
self.addCleanup(gclient_utils.rmtree, origin_root_dir)
self.root_dir = tempfile.mkdtemp()
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
url_with_commit_ref = origin_root_dir +\
'@a7142dc9f0009350b96a11f372b6ea658592aa95'
scm = gclient_scm.GitWrapper(url_with_commit_ref,
self.root_dir,
self.relpath)
expected_file_list = [join(self.base_path, "a"),
join(self.base_path, "b")]
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'a7142dc9f0009350b96a11f372b6ea658592aa95')
# indicates detached HEAD
self.assertEqual(self.getCurrentBranch(), None)
self.checkInStdout(
'Checked out a7142dc9f0009350b96a11f372b6ea658592aa95 to a detached HEAD')
def testUpdateCloneOnBranch(self):
if not self.enabled:
return
options = self.Options()
origin_root_dir = self.root_dir
self.addCleanup(gclient_utils.rmtree, origin_root_dir)
self.root_dir = tempfile.mkdtemp()
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
url_with_branch_ref = origin_root_dir + '@feature'
scm = gclient_scm.GitWrapper(url_with_branch_ref,
self.root_dir,
self.relpath)
expected_file_list = [join(self.base_path, "a"),
join(self.base_path, "b"),
join(self.base_path, "c")]
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'9a51244740b25fa2ded5252ca00a3178d3f665a9')
# indicates detached HEAD
self.assertEqual(self.getCurrentBranch(), None)
self.checkInStdout(
'Checked out 9a51244740b25fa2ded5252ca00a3178d3f665a9 '
'to a detached HEAD')
def testUpdateCloneOnFetchedRemoteBranch(self):
if not self.enabled:
return
options = self.Options()
origin_root_dir = self.root_dir
self.addCleanup(gclient_utils.rmtree, origin_root_dir)
self.root_dir = tempfile.mkdtemp()
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
url_with_branch_ref = origin_root_dir + '@refs/remotes/origin/feature'
scm = gclient_scm.GitWrapper(url_with_branch_ref,
self.root_dir,
self.relpath)
expected_file_list = [join(self.base_path, "a"),
join(self.base_path, "b"),
join(self.base_path, "c")]
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'9a51244740b25fa2ded5252ca00a3178d3f665a9')
# indicates detached HEAD
self.assertEqual(self.getCurrentBranch(), None)
self.checkInStdout(
'Checked out refs/remotes/origin/feature to a detached HEAD')
def testUpdateCloneOnTrueRemoteBranch(self):
if not self.enabled:
return
options = self.Options()
origin_root_dir = self.root_dir
self.addCleanup(gclient_utils.rmtree, origin_root_dir)
self.root_dir = tempfile.mkdtemp()
self.relpath = '.'
self.base_path = join(self.root_dir, self.relpath)
url_with_branch_ref = origin_root_dir + '@refs/heads/feature'
scm = gclient_scm.GitWrapper(url_with_branch_ref,
self.root_dir,
self.relpath)
expected_file_list = [join(self.base_path, "a"),
join(self.base_path, "b"),
join(self.base_path, "c")]
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'9a51244740b25fa2ded5252ca00a3178d3f665a9')
# @refs/heads/feature is AKA @refs/remotes/origin/feature in the clone, so
# should be treated as such by gclient.
# TODO(mmoss): Though really, we should only allow DEPS to specify branches
# as they are known in the upstream repo, since the mapping into the local
# repo can be modified by users (or we might even want to change the gclient
# defaults at some point). But that will take more work to stop using
# refs/remotes/ everywhere that we do (and to stop assuming a DEPS ref will
# always resolve locally, like when passing them to show-ref or rev-list).
self.assertEqual(self.getCurrentBranch(), None)
self.checkInStdout(
'Checked out refs/remotes/origin/feature to a detached HEAD')
def testUpdateUpdate(self):
if not self.enabled:
return
options = self.Options()
expected_file_list = []
scm = gclient_scm.GitWrapper(self.url, self.root_dir,
self.relpath)
file_list = []
options.revision = 'unmanaged'
scm.update(options, (), file_list)
self.assertEqual(file_list, expected_file_list)
self.assertEqual(scm.revinfo(options, (), None),
'069c602044c5388d2d15c3f875b057c852003458')
self.checkstdout('________ unmanaged solution; skipping .\n')
class CipdWrapperTestCase(unittest.TestCase):
def setUp(self):
# Create this before setting up mocks.
self._cipd_root_dir = tempfile.mkdtemp()
self._workdir = tempfile.mkdtemp()
self._cipd_instance_url = 'https://chrome-infra-packages.appspot.com'
self._cipd_root = gclient_scm.CipdRoot(
self._cipd_root_dir,
self._cipd_instance_url)
self._cipd_packages = [
self._cipd_root.add_package('f', 'foo_package', 'foo_version'),
self._cipd_root.add_package('b', 'bar_package', 'bar_version'),
self._cipd_root.add_package('b', 'baz_package', 'baz_version'),
]
mock.patch('tempfile.mkdtemp', lambda: self._workdir).start()
mock.patch('gclient_scm.CipdRoot.add_package').start()
mock.patch('gclient_scm.CipdRoot.clobber').start()
mock.patch('gclient_scm.CipdRoot.ensure').start()
self.addCleanup(mock.patch.stopall)
self.addCleanup(gclient_utils.rmtree, self._cipd_root_dir)
self.addCleanup(gclient_utils.rmtree, self._workdir)
def createScmWithPackageThatSatisfies(self, condition):
return gclient_scm.CipdWrapper(
url=self._cipd_instance_url,
root_dir=self._cipd_root_dir,
relpath='fake_relpath',
root=self._cipd_root,
package=self.getPackageThatSatisfies(condition))
def getPackageThatSatisfies(self, condition):
for p in self._cipd_packages:
if condition(p):
return p
self.fail('Unable to find a satisfactory package.')
def testRevert(self):
"""Checks that revert does nothing."""
scm = self.createScmWithPackageThatSatisfies(lambda _: True)
scm.revert(None, (), [])
@mock.patch('gclient_scm.gclient_utils.CheckCallAndFilter')
@mock.patch('gclient_scm.gclient_utils.rmtree')
def testRevinfo(self, mockRmtree, mockCheckCallAndFilter):
"""Checks that revinfo uses the JSON from cipd describe."""
scm = self.createScmWithPackageThatSatisfies(lambda _: True)
expected_revinfo = '0123456789abcdef0123456789abcdef01234567'
json_contents = {
'result': {
'pin': {
'instance_id': expected_revinfo,
}
}
}
describe_json_path = join(self._workdir, 'describe.json')
with open(describe_json_path, 'w') as describe_json:
json.dump(json_contents, describe_json)
revinfo = scm.revinfo(None, (), [])
self.assertEqual(revinfo, expected_revinfo)
mockRmtree.assert_called_with(self._workdir)
mockCheckCallAndFilter.assert_called_with([
'cipd', 'describe', 'foo_package',
'-log-level', 'error',
'-version', 'foo_version',
'-json-output', describe_json_path,
])
def testUpdate(self):
"""Checks that update does nothing."""
scm = self.createScmWithPackageThatSatisfies(lambda _: True)
scm.update(None, (), [])
class GerritChangesFakeRepo(fake_repos.FakeReposBase):
def populateGit(self):
# Creates a tree that looks like this:
#
# 6 refs/changes/35/1235/1
# |
# 5 refs/changes/34/1234/1
# |
# 1--2--3--4 refs/heads/master
# | |
# | 11(5)--12 refs/heads/master-with-5
# |
# 7--8--9 refs/heads/feature
# |
# 10 refs/changes/36/1236/1
#
self._commit_git('repo_1', {'commit 1': 'touched'})
self._commit_git('repo_1', {'commit 2': 'touched'})
self._commit_git('repo_1', {'commit 3': 'touched'})
self._commit_git('repo_1', {'commit 4': 'touched'})
self._create_ref('repo_1', 'refs/heads/master', 4)
# Create a change on top of commit 3 that consists of two commits.
self._commit_git('repo_1',
{'commit 5': 'touched',
'change': '1234'},
base=3)
self._create_ref('repo_1', 'refs/changes/34/1234/1', 5)
self._commit_git('repo_1',
{'commit 6': 'touched',
'change': '1235'})
self._create_ref('repo_1', 'refs/changes/35/1235/1', 6)
# Create a refs/heads/feature branch on top of commit 2, consisting of three
# commits.
self._commit_git('repo_1', {'commit 7': 'touched'}, base=2)
self._commit_git('repo_1', {'commit 8': 'touched'})
self._commit_git('repo_1', {'commit 9': 'touched'})
self._create_ref('repo_1', 'refs/heads/feature', 9)
    # Create a change on top of commit 8.
self._commit_git('repo_1',
{'commit 10': 'touched',
'change': '1236'},
base=8)
self._create_ref('repo_1', 'refs/changes/36/1236/1', 10)
    # Create a refs/heads/master-with-5 branch on top of commit 3, where
    # refs/changes/34/1234/1 (commit 5) has already landed as commit 11.
self._commit_git('repo_1',
# This is really commit 11, but has the changes of commit 5
{'commit 5': 'touched',
'change': '1234'},
base=3)
self._commit_git('repo_1', {'commit 12': 'touched'})
self._create_ref('repo_1', 'refs/heads/master-with-5', 12)
class GerritChangesTest(fake_repos.FakeReposTestBase):
FAKE_REPOS_CLASS = GerritChangesFakeRepo
def setUp(self):
super(GerritChangesTest, self).setUp()
self.enabled = self.FAKE_REPOS.set_up_git()
self.options = BaseGitWrapperTestCase.OptionsObject()
self.url = self.git_base + 'repo_1'
self.mirror = None
mock.patch('sys.stdout').start()
self.addCleanup(mock.patch.stopall)
def setUpMirror(self):
self.mirror = tempfile.mkdtemp()
git_cache.Mirror.SetCachePath(self.mirror)
self.addCleanup(gclient_utils.rmtree, self.mirror)
self.addCleanup(git_cache.Mirror.SetCachePath, None)
def assertCommits(self, commits):
"""Check that all, and only |commits| are present in the current checkout.
"""
for i in commits:
name = os.path.join(self.root_dir, 'commit ' + str(i))
self.assertTrue(os.path.exists(name), 'Commit not found: %s' % name)
all_commits = set(range(1, len(self.FAKE_REPOS.git_hashes['repo_1'])))
for i in all_commits - set(commits):
name = os.path.join(self.root_dir, 'commit ' + str(i))
self.assertFalse(os.path.exists(name), 'Unexpected commit: %s' % name)
def testCanCloneGerritChange(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.revision = 'refs/changes/35/1235/1'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 6), self.gitrevparse(self.root_dir))
def testCanSyncToGerritChange(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.revision = self.githash('repo_1', 1)
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
self.options.revision = 'refs/changes/35/1235/1'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 6), self.gitrevparse(self.root_dir))
def testCanCloneGerritChangeMirror(self):
self.setUpMirror()
self.testCanCloneGerritChange()
def testCanSyncToGerritChangeMirror(self):
self.setUpMirror()
self.testCanSyncToGerritChange()
def testMirrorPushUrl(self):
self.setUpMirror()
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.assertIsNotNone(scm._GetMirror(self.url, self.options))
scm.update(self.options, None, file_list)
fetch_url = scm._Capture(['remote', 'get-url', 'origin'])
self.assertTrue(
fetch_url.startswith(self.mirror),
msg='\n'.join([
'Repository fetch url should be in the git cache mirror directory.',
' fetch_url: %s' % fetch_url,
' mirror: %s' % self.mirror]))
push_url = scm._Capture(['remote', 'get-url', '--push', 'origin'])
self.assertEqual(push_url, self.url)
def testAppliesPatchOnTopOfMasterByDefault(self):
"""Test the default case, where we apply a patch on top of master."""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Make sure we don't specify a revision.
self.options.revision = None
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 4, 5, 6])
self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
def testCheckoutOlderThanPatchBase(self):
"""Test applying a patch on an old checkout.
We first checkout commit 1, and try to patch refs/changes/35/1235/1, which
contains commits 5 and 6, and is based on top of commit 3.
The final result should contain commits 1, 5 and 6, but not commits 2 or 3.
"""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Sync to commit 1
self.options.revision = self.githash('repo_1', 1)
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
# Apply the change on top of that.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 5, 6])
self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
def testCheckoutOriginFeature(self):
"""Tests that we can apply a patch on a branch other than master."""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Sync to remote's refs/heads/feature
self.options.revision = 'refs/heads/feature'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
# Apply the change on top of that.
scm.apply_patch_ref(
self.url, 'refs/changes/36/1236/1', 'refs/heads/feature', self.options,
file_list)
self.assertCommits([1, 2, 7, 8, 9, 10])
self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
def testCheckoutOriginFeatureOnOldRevision(self):
"""Tests that we can apply a patch on an old checkout, on a branch other
than master."""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Sync to remote's refs/heads/feature on an old revision
self.options.revision = self.githash('repo_1', 7)
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 7), self.gitrevparse(self.root_dir))
# Apply the change on top of that.
scm.apply_patch_ref(
self.url, 'refs/changes/36/1236/1', 'refs/heads/feature', self.options,
file_list)
# We shouldn't have rebased on top of 2 (which is the merge base between
# remote's master branch and the change) but on top of 7 (which is the
# merge base between remote's feature branch and the change).
self.assertCommits([1, 2, 7, 10])
self.assertEqual(self.githash('repo_1', 7), self.gitrevparse(self.root_dir))
def testCheckoutOriginFeaturePatchBranch(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Sync to the hash instead of remote's refs/heads/feature.
self.options.revision = self.githash('repo_1', 9)
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
    # Apply refs/changes/35/1235/1, created for remote's master branch, on top
    # of remote's feature branch.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
# Commits 5 and 6 are part of the patch, and commits 1, 2, 7, 8 and 9 are
# part of remote's feature branch.
self.assertCommits([1, 2, 5, 6, 7, 8, 9])
self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
def testDoesntRebasePatchMaster(self):
"""Tests that we can apply a patch without rebasing it.
"""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.rebase_patch_ref = False
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
# Apply the change on top of that.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 5, 6])
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
def testDoesntRebasePatchOldCheckout(self):
"""Tests that we can apply a patch without rebasing it on an old checkout.
"""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
# Sync to commit 1
self.options.revision = self.githash('repo_1', 1)
self.options.rebase_patch_ref = False
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
# Apply the change on top of that.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 5, 6])
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
def testDoesntSoftResetIfNotAskedTo(self):
"""Test that we can apply a patch without doing a soft reset."""
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.reset_patch_ref = False
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 4, 5, 6])
# The commit hash after cherry-picking is not known, but it must be
# different from what the repo was synced at before patching.
self.assertNotEqual(self.githash('repo_1', 4),
self.gitrevparse(self.root_dir))
def testRecoversAfterPatchFailure(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.revision = 'refs/changes/34/1234/1'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
    # Checking out 'refs/changes/34/1234/1' modifies the 'change' file, so
    # trying to patch 'refs/changes/36/1236/1' creates a patch failure.
with self.assertRaises(subprocess2.CalledProcessError) as cm:
scm.apply_patch_ref(
self.url, 'refs/changes/36/1236/1', 'refs/heads/master', self.options,
file_list)
self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
self.assertIn(b'error: could not apply', cm.exception.stderr)
# Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
# conflict.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 5, 6])
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
def testIgnoresAlreadyMergedCommits(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.revision = 'refs/heads/master-with-5'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 12),
self.gitrevparse(self.root_dir))
# When we try 'refs/changes/35/1235/1' on top of 'refs/heads/feature',
# 'refs/changes/34/1234/1' will be an empty commit, since the changes were
# already present in the tree as commit 11.
# Make sure we deal with this gracefully.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/feature', self.options,
file_list)
self.assertCommits([1, 2, 3, 5, 6, 12])
self.assertEqual(self.githash('repo_1', 12),
self.gitrevparse(self.root_dir))
def testRecoversFromExistingCherryPick(self):
scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
file_list = []
self.options.revision = 'refs/changes/34/1234/1'
scm.update(self.options, None, file_list)
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
    # Checking out 'refs/changes/34/1234/1' modifies the 'change' file, so
    # trying to cherry-pick 'refs/changes/36/1236/1' raises an error.
scm._Run(['fetch', 'origin', 'refs/changes/36/1236/1'], self.options)
with self.assertRaises(subprocess2.CalledProcessError) as cm:
scm._Run(['cherry-pick', 'FETCH_HEAD'], self.options)
self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
# Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
# conflict.
scm.apply_patch_ref(
self.url, 'refs/changes/35/1235/1', 'refs/heads/master', self.options,
file_list)
self.assertCommits([1, 2, 3, 5, 6])
self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
if __name__ == '__main__':
level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
logging.basicConfig(
level=level,
format='%(asctime).19s %(levelname)s %(filename)s:'
'%(lineno)s %(message)s')
unittest.main()
# vim: ts=2:sw=2:tw=80:et:
api_converter.go

// Copyright (c) 2019 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package handler
import (
"reflect"
"github.com/uber/peloton/.gen/peloton/api/v0/job"
"github.com/uber/peloton/.gen/peloton/api/v0/peloton"
pelotonv0query "github.com/uber/peloton/.gen/peloton/api/v0/query"
pelotonv0respool "github.com/uber/peloton/.gen/peloton/api/v0/respool"
"github.com/uber/peloton/.gen/peloton/api/v0/task"
"github.com/uber/peloton/.gen/peloton/api/v0/update"
"github.com/uber/peloton/.gen/peloton/api/v1alpha/job/stateless"
v1alphapeloton "github.com/uber/peloton/.gen/peloton/api/v1alpha/peloton"
"github.com/uber/peloton/.gen/peloton/api/v1alpha/pod"
"github.com/uber/peloton/.gen/peloton/api/v1alpha/query"
"github.com/uber/peloton/.gen/peloton/private/models"
"github.com/uber/peloton/pkg/common/util"
versionutil "github.com/uber/peloton/pkg/common/util/entityversion"
"go.uber.org/yarpc/yarpcerrors"
)
// ConvertTaskStateToPodState converts v0 task.TaskState to v1alpha pod.PodState
func ConvertTaskStateToPodState(state task.TaskState) pod.PodState {
switch state {
case task.TaskState_UNKNOWN:
return pod.PodState_POD_STATE_INVALID
case task.TaskState_INITIALIZED:
return pod.PodState_POD_STATE_INITIALIZED
case task.TaskState_PENDING:
return pod.PodState_POD_STATE_PENDING
case task.TaskState_READY:
return pod.PodState_POD_STATE_READY
case task.TaskState_PLACING:
return pod.PodState_POD_STATE_PLACING
case task.TaskState_PLACED:
return pod.PodState_POD_STATE_PLACED
case task.TaskState_LAUNCHING:
return pod.PodState_POD_STATE_LAUNCHING
case task.TaskState_LAUNCHED:
return pod.PodState_POD_STATE_LAUNCHED
case task.TaskState_STARTING:
return pod.PodState_POD_STATE_STARTING
case task.TaskState_RUNNING:
return pod.PodState_POD_STATE_RUNNING
case task.TaskState_SUCCEEDED:
return pod.PodState_POD_STATE_SUCCEEDED
case task.TaskState_FAILED:
return pod.PodState_POD_STATE_FAILED
case task.TaskState_LOST:
return pod.PodState_POD_STATE_LOST
case task.TaskState_PREEMPTING:
return pod.PodState_POD_STATE_PREEMPTING
case task.TaskState_KILLING:
return pod.PodState_POD_STATE_KILLING
case task.TaskState_KILLED:
return pod.PodState_POD_STATE_KILLED
case task.TaskState_DELETED:
return pod.PodState_POD_STATE_DELETED
case task.TaskState_RESERVED:
return pod.PodState_POD_STATE_RESERVED
}
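	// Any unmapped v0 state falls through to INVALID.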
return pod.PodState_POD_STATE_INVALID
}
// ConvertPodStateToTaskState converts v1alpha pod.PodState to v0 task.TaskState
func ConvertPodStateToTaskState(state pod.PodState) task.TaskState {
switch state {
case pod.PodState_POD_STATE_INVALID:
return task.TaskState_UNKNOWN
case pod.PodState_POD_STATE_INITIALIZED:
return task.TaskState_INITIALIZED
case pod.PodState_POD_STATE_PENDING:
return task.TaskState_PENDING
case pod.PodState_POD_STATE_READY:
return task.TaskState_READY
case pod.PodState_POD_STATE_PLACING:
return task.TaskState_PLACING
case pod.PodState_POD_STATE_PLACED:
return task.TaskState_PLACED
case pod.PodState_POD_STATE_LAUNCHING:
return task.TaskState_LAUNCHING
case pod.PodState_POD_STATE_LAUNCHED:
return task.TaskState_LAUNCHED
case pod.PodState_POD_STATE_STARTING:
return task.TaskState_STARTING
case pod.PodState_POD_STATE_RUNNING:
return task.TaskState_RUNNING
case pod.PodState_POD_STATE_SUCCEEDED:
return task.TaskState_SUCCEEDED
case pod.PodState_POD_STATE_FAILED:
return task.TaskState_FAILED
case pod.PodState_POD_STATE_LOST:
return task.TaskState_LOST
case pod.PodState_POD_STATE_PREEMPTING:
return task.TaskState_PREEMPTING
case pod.PodState_POD_STATE_KILLING:
return task.TaskState_KILLING
case pod.PodState_POD_STATE_KILLED:
return task.TaskState_KILLED
case pod.PodState_POD_STATE_DELETED:
return task.TaskState_DELETED
case pod.PodState_POD_STATE_RESERVED:
return task.TaskState_RESERVED
}
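	// Any unmapped pod state falls through to UNKNOWN.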
return task.TaskState_UNKNOWN
}
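// A minimal round-trip sanity sketch for the two converters above, assuming
// the protoc-generated TaskState_name map (which enumerates every v0 state):
//
//	for v := range task.TaskState_name {
//		s := task.TaskState(v)
//		if ConvertPodStateToTaskState(ConvertTaskStateToPodState(s)) != s {
//			panic(s) // every defined state should survive the round trip
//		}
//	}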
// ConvertV1InstanceRangeToV0InstanceRange converts from array of
// v1 pod.InstanceIDRange to array of v0 task.InstanceRange
func ConvertV1InstanceRangeToV0InstanceRange(
instanceRange []*pod.InstanceIDRange) []*task.InstanceRange {
var resp []*task.InstanceRange
for _, inst := range instanceRange {
r := &task.InstanceRange{
From: inst.GetFrom(),
To: inst.GetTo(),
}
resp = append(resp, r)
}
return resp
}
// ConvertTaskRuntimeToPodStatus converts
// v0 task.RuntimeInfo to v1alpha pod.PodStatus
func ConvertTaskRuntimeToPodStatus(runtime *task.RuntimeInfo) *pod.PodStatus {
return &pod.PodStatus{
State: ConvertTaskStateToPodState(runtime.GetState()),
PodId: &v1alphapeloton.PodID{Value: runtime.GetMesosTaskId().GetValue()},
StartTime: runtime.GetStartTime(),
CompletionTime: runtime.GetCompletionTime(),
Host: runtime.GetHost(),
ContainersStatus: []*pod.ContainerStatus{
{
Ports: runtime.GetPorts(),
Healthy: &pod.HealthStatus{
State: pod.HealthState(runtime.GetHealthy()),
},
StartTime: runtime.GetStartTime(),
CompletionTime: runtime.GetCompletionTime(),
Message: runtime.GetMessage(),
Reason: runtime.GetReason(),
TerminationStatus: convertTaskTerminationStatusToPodTerminationStatus(
runtime.TerminationStatus),
},
},
DesiredState: ConvertTaskStateToPodState(runtime.GetGoalState()),
Message: runtime.GetMessage(),
Reason: runtime.GetReason(),
FailureCount: runtime.GetFailureCount(),
VolumeId: &v1alphapeloton.VolumeID{Value: runtime.GetVolumeID().GetValue()},
Version: versionutil.GetPodEntityVersion(runtime.GetConfigVersion()),
DesiredVersion: versionutil.GetPodEntityVersion(runtime.GetDesiredConfigVersion()),
AgentId: runtime.GetAgentID(),
Revision: &v1alphapeloton.Revision{
Version: runtime.GetRevision().GetVersion(),
CreatedAt: runtime.GetRevision().GetCreatedAt(),
UpdatedAt: runtime.GetRevision().GetUpdatedAt(),
UpdatedBy: runtime.GetRevision().GetUpdatedBy(),
},
PrevPodId: &v1alphapeloton.PodID{Value: runtime.GetPrevMesosTaskId().GetValue()},
ResourceUsage: runtime.GetResourceUsage(),
DesiredPodId: &v1alphapeloton.PodID{Value: runtime.GetDesiredMesosTaskId().GetValue()},
DesiredHost: runtime.GetDesiredHost(),
}
}
// ConvertTaskConfigToPodSpec converts v0 task.TaskConfig to v1alpha pod.PodSpec
func ConvertTaskConfigToPodSpec(taskConfig *task.TaskConfig, jobID string, instanceID uint32) *pod.PodSpec {
result := &pod.PodSpec{
Controller: taskConfig.GetController(),
KillGracePeriodSeconds: taskConfig.GetKillGracePeriodSeconds(),
Revocable: taskConfig.GetRevocable(),
}
if len(jobID) != 0 {
result.PodName = &v1alphapeloton.PodName{
Value: util.CreatePelotonTaskID(jobID, instanceID),
}
}
if taskConfig.GetConstraint() != nil {
result.Constraint = ConvertTaskConstraintsToPodConstraints([]*task.Constraint{taskConfig.GetConstraint()})[0]
}
if taskConfig.GetVolume() != nil {
result.Volume = &pod.PersistentVolumeSpec{
ContainerPath: taskConfig.GetVolume().GetContainerPath(),
SizeMb: taskConfig.GetVolume().GetSizeMB(),
}
}
if taskConfig.GetLabels() != nil {
result.Labels = ConvertLabels(taskConfig.GetLabels())
}
if taskConfig.GetPreemptionPolicy() != nil {
result.PreemptionPolicy = &pod.PreemptionPolicy{
KillOnPreempt: taskConfig.GetPreemptionPolicy().GetKillOnPreempt(),
}
}
if taskConfig.GetRestartPolicy() != nil {
result.RestartPolicy = &pod.RestartPolicy{
MaxFailures: taskConfig.GetRestartPolicy().GetMaxFailures(),
}
}
container := &pod.ContainerSpec{}
if len(taskConfig.GetName()) != 0 {
container.Name = taskConfig.GetName()
}
if taskConfig.GetResource() != nil {
container.Resource = &pod.ResourceSpec{
CpuLimit: taskConfig.GetResource().GetCpuLimit(),
MemLimitMb: taskConfig.GetResource().GetMemLimitMb(),
DiskLimitMb: taskConfig.GetResource().GetDiskLimitMb(),
FdLimit: taskConfig.GetResource().GetFdLimit(),
GpuLimit: taskConfig.GetResource().GetGpuLimit(),
}
}
if taskConfig.GetContainer() != nil {
container.Container = taskConfig.GetContainer()
}
if taskConfig.GetCommand() != nil {
container.Command = taskConfig.GetCommand()
}
if taskConfig.GetExecutor() != nil {
container.Executor = taskConfig.GetExecutor()
}
if taskConfig.GetPorts() != nil {
container.Ports = ConvertPortConfigsToPortSpecs(taskConfig.GetPorts())
}
if taskConfig.GetHealthCheck() != nil {
container.LivenessCheck = &pod.HealthCheckSpec{
Enabled: taskConfig.GetHealthCheck().GetEnabled(),
InitialIntervalSecs: taskConfig.GetHealthCheck().GetInitialIntervalSecs(),
IntervalSecs: taskConfig.GetHealthCheck().GetIntervalSecs(),
MaxConsecutiveFailures: taskConfig.GetHealthCheck().GetMaxConsecutiveFailures(),
TimeoutSecs: taskConfig.GetHealthCheck().GetTimeoutSecs(),
Type: pod.HealthCheckSpec_HealthCheckType(taskConfig.GetHealthCheck().GetType()),
}
if taskConfig.GetHealthCheck().GetCommandCheck() != nil {
container.LivenessCheck.CommandCheck = &pod.HealthCheckSpec_CommandCheck{
Command: taskConfig.GetHealthCheck().GetCommandCheck().GetCommand(),
UnshareEnvironments: taskConfig.GetHealthCheck().GetCommandCheck().GetUnshareEnvironments(),
}
}
if taskConfig.GetHealthCheck().GetHttpCheck() != nil {
container.LivenessCheck.HttpCheck = &pod.HealthCheckSpec_HTTPCheck{
Scheme: taskConfig.GetHealthCheck().GetHttpCheck().GetScheme(),
Port: taskConfig.GetHealthCheck().GetHttpCheck().GetPort(),
Path: taskConfig.GetHealthCheck().GetHttpCheck().GetPath(),
}
}
}
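	// Only attach the container spec if at least one of its fields was set above.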
if !reflect.DeepEqual(*container, pod.ContainerSpec{}) {
result.Containers = []*pod.ContainerSpec{container}
}
return result
}
// ConvertLabels converts v0 peloton.Label array to
// v1alpha peloton.Label array
func ConvertLabels(labels []*peloton.Label) []*v1alphapeloton.Label {
var podLabels []*v1alphapeloton.Label
for _, l := range labels {
podLabels = append(podLabels, &v1alphapeloton.Label{
Key: l.GetKey(),
Value: l.GetValue(),
})
}
return podLabels
}
// ConvertTaskConstraintsToPodConstraints converts v0 task.Constraint array to
// v1alpha pod.Constraint array
func ConvertTaskConstraintsToPodConstraints(constraints []*task.Constraint) []*pod.Constraint {
var podConstraints []*pod.Constraint
for _, constraint := range constraints {
podConstraint := &pod.Constraint{
Type: pod.Constraint_Type(constraint.GetType()),
}
if constraint.GetLabelConstraint() != nil {
podConstraint.LabelConstraint = &pod.LabelConstraint{
Kind: pod.LabelConstraint_Kind(
constraint.GetLabelConstraint().GetKind(),
),
Condition: pod.LabelConstraint_Condition(
constraint.GetLabelConstraint().GetCondition(),
),
Requirement: constraint.GetLabelConstraint().GetRequirement(),
}
if constraint.GetLabelConstraint().GetLabel() != nil {
podConstraint.LabelConstraint.Label = &v1alphapeloton.Label{
Key: constraint.GetLabelConstraint().GetLabel().GetKey(),
Value: constraint.GetLabelConstraint().GetLabel().GetValue(),
}
}
}
if constraint.GetAndConstraint() != nil {
podConstraint.AndConstraint = &pod.AndConstraint{
Constraints: ConvertTaskConstraintsToPodConstraints(constraint.GetAndConstraint().GetConstraints()),
}
}
if constraint.GetOrConstraint() != nil {
podConstraint.OrConstraint = &pod.OrConstraint{
Constraints: ConvertTaskConstraintsToPodConstraints(constraint.GetOrConstraint().GetConstraints()),
}
}
podConstraints = append(podConstraints, podConstraint)
}
return podConstraints
}
// ConvertPortConfigsToPortSpecs converts v0 task.PortConfig array to
// v1alpha pod.PortSpec array
func ConvertPortConfigsToPortSpecs(ports []*task.PortConfig) []*pod.PortSpec {
var containerPorts []*pod.PortSpec
for _, p := range ports {
containerPorts = append(
containerPorts,
&pod.PortSpec{
Name: p.GetName(),
Value: p.GetValue(),
EnvName: p.GetEnvName(),
},
)
}
return containerPorts
}
// ConvertV0SecretsToV1Secrets converts v0 peloton.Secret to v1alpha peloton.Secret
func ConvertV0SecretsToV1Secrets(secrets []*peloton.Secret) []*v1alphapeloton.Secret {
var v1secrets []*v1alphapeloton.Secret
for _, secret := range secrets {
v1secret := &v1alphapeloton.Secret{
SecretId: &v1alphapeloton.SecretID{
Value: secret.GetId().GetValue(),
},
Path: secret.GetPath(),
Value: &v1alphapeloton.Secret_Value{
Data: secret.GetValue().GetData(),
},
}
v1secrets = append(v1secrets, v1secret)
}
return v1secrets
}
// ConvertV1SecretsToV0Secrets converts v1alpha peloton.Secret to v0 peloton.Secret
func ConvertV1SecretsToV0Secrets(secrets []*v1alphapeloton.Secret) []*peloton.Secret {
var v0secrets []*peloton.Secret
for _, secret := range secrets {
v0secret := &peloton.Secret{
Id: &peloton.SecretID{
Value: secret.GetSecretId().GetValue(),
},
Path: secret.GetPath(),
Value: &peloton.Secret_Value{
Data: secret.GetValue().GetData(),
},
}
v0secrets = append(v0secrets, v0secret)
}
return v0secrets
}
// ConvertJobConfigToJobSpec converts v0 job.JobConfig to v1alpha stateless.JobSpec
func ConvertJobConfigToJobSpec(config *job.JobConfig) *stateless.JobSpec {
instanceSpec := make(map[uint32]*pod.PodSpec)
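	// Convert per-instance overrides; the empty job ID means no PodName is
	// synthesized for these specs.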
for instID, taskConfig := range config.GetInstanceConfig() {
instanceSpec[instID] = ConvertTaskConfigToPodSpec(taskConfig, "", instID)
}
return &stateless.JobSpec{
Revision: &v1alphapeloton.Revision{
Version: config.GetChangeLog().GetVersion(),
CreatedAt: config.GetChangeLog().GetCreatedAt(),
UpdatedAt: config.GetChangeLog().GetUpdatedAt(),
UpdatedBy: config.GetChangeLog().GetUpdatedBy(),
},
Name: config.GetName(),
Owner: config.GetOwner(),
OwningTeam: config.GetOwningTeam(),
LdapGroups: config.GetLdapGroups(),
Description: config.GetDescription(),
Labels: ConvertLabels(config.GetLabels()),
InstanceCount: config.GetInstanceCount(),
Sla: ConvertSLAConfigToSLASpec(config.GetSLA()),
DefaultSpec: ConvertTaskConfigToPodSpec(config.GetDefaultConfig(), "", 0),
InstanceSpec: instanceSpec,
RespoolId: &v1alphapeloton.ResourcePoolID{
Value: config.GetRespoolID().GetValue()},
}
}
// ConvertUpdateModelToWorkflowStatus converts private UpdateModel
// to v1alpha stateless.WorkflowStatus
func ConvertUpdateModelToWorkflowStatus(
runtime *job.RuntimeInfo,
updateInfo *models.UpdateModel,
) *stateless.WorkflowStatus {
if updateInfo == nil {
return nil
}
entityVersion := versionutil.GetJobEntityVersion(
updateInfo.GetJobConfigVersion(),
runtime.GetDesiredStateVersion(),
runtime.GetWorkflowVersion(),
)
prevVersion := versionutil.GetJobEntityVersion(
updateInfo.GetPrevJobConfigVersion(),
runtime.GetDesiredStateVersion(),
runtime.GetWorkflowVersion(),
)
return &stateless.WorkflowStatus{
Type: stateless.WorkflowType(updateInfo.GetType()),
State: stateless.WorkflowState(updateInfo.GetState()),
PrevState: stateless.WorkflowState(updateInfo.GetPrevState()),
NumInstancesCompleted: updateInfo.GetInstancesDone(),
NumInstancesRemaining: updateInfo.GetInstancesTotal() - updateInfo.GetInstancesDone() - updateInfo.GetInstancesFailed(),
NumInstancesFailed: updateInfo.GetInstancesFailed(),
InstancesCurrent: updateInfo.GetInstancesCurrent(),
Version: entityVersion,
PrevVersion: prevVersion,
CreationTime: updateInfo.GetCreationTime(),
UpdateTime: updateInfo.GetUpdateTime(),
CompletionTime: updateInfo.GetCompletionTime(),
}
}
// ConvertRuntimeInfoToJobStatus converts v0 job.RuntimeInfo and private
// UpdateModel to v1alpha stateless.JobStatus
func ConvertRuntimeInfoToJobStatus(
runtime *job.RuntimeInfo,
updateInfo *models.UpdateModel,
) *stateless.JobStatus {
result := &stateless.JobStatus{}
podConfigVersionStats := make(map[string]*stateless.JobStatus_PodStateStats)
result.Revision = &v1alphapeloton.Revision{
Version: runtime.GetRevision().GetVersion(),
CreatedAt: runtime.GetRevision().GetCreatedAt(),
UpdatedAt: runtime.GetRevision().GetUpdatedAt(),
UpdatedBy: runtime.GetRevision().GetUpdatedBy(),
}
result.State = stateless.JobState(runtime.GetState())
result.CreationTime = runtime.GetCreationTime()
result.PodStats = ConvertTaskStatsToPodStats(runtime.TaskStats)
result.DesiredState = stateless.JobState(runtime.GetGoalState())
result.Version = versionutil.GetJobEntityVersion(
runtime.GetConfigurationVersion(),
runtime.GetDesiredStateVersion(),
runtime.GetWorkflowVersion(),
)
result.WorkflowStatus = ConvertUpdateModelToWorkflowStatus(runtime, updateInfo)
for configVersion, taskStats := range runtime.GetTaskStatsByConfigurationVersion() {
entityVersion := versionutil.GetPodEntityVersion(configVersion)
podConfigVersionStats[entityVersion.GetValue()] = &stateless.JobStatus_PodStateStats{
StateStats: ConvertTaskStatsToPodStats(taskStats.GetStateStats()),
}
}
result.PodStatsByConfigurationVersion = podConfigVersionStats
return result
}
// ConvertJobSummary converts v0 job.JobSummary and private
// UpdateModel to v1alpha stateless.JobSummary
func ConvertJobSummary(
summary *job.JobSummary,
updateInfo *models.UpdateModel) *stateless.JobSummary {
return &stateless.JobSummary{
JobId: &v1alphapeloton.JobID{Value: summary.GetId().GetValue()},
Name: summary.GetName(),
OwningTeam: summary.GetOwningTeam(),
Owner: summary.GetOwner(),
Labels: ConvertLabels(summary.GetLabels()),
InstanceCount: summary.GetInstanceCount(),
RespoolId: &v1alphapeloton.ResourcePoolID{
Value: summary.GetRespoolID().GetValue()},
Status: ConvertRuntimeInfoToJobStatus(summary.GetRuntime(), updateInfo),
Sla: ConvertSLAConfigToSLASpec(summary.GetSLA()),
}
}
// ConvertSLAConfigToSLASpec converts job's sla config to sla spec
func ConvertSLAConfigToSLASpec(slaConfig *job.SlaConfig) *stateless.SlaSpec {
return &stateless.SlaSpec{
Priority: slaConfig.GetPriority(),
Preemptible: slaConfig.GetPreemptible(),
Revocable: slaConfig.GetRevocable(),
MaximumUnavailableInstances: slaConfig.GetMaximumUnavailableInstances(),
}
}
// ConvertSLASpecToSLAConfig converts job's sla spec to sla config
func ConvertSLASpecToSLAConfig(slaSpec *stateless.SlaSpec) *job.SlaConfig {
return &job.SlaConfig{
Priority: slaSpec.GetPriority(),
Preemptible: slaSpec.GetPreemptible(),
Revocable: slaSpec.GetRevocable(),
MaximumUnavailableInstances: slaSpec.GetMaximumUnavailableInstances(),
}
}
// ConvertUpdateModelToWorkflowInfo converts private UpdateModel
// to v1alpha stateless.WorkflowInfo
func ConvertUpdateModelToWorkflowInfo(
runtime *job.RuntimeInfo,
updateInfo *models.UpdateModel,
workflowEvents []*stateless.WorkflowEvent,
instanceWorkflowEvents []*stateless.WorkflowInfoInstanceWorkflowEvents,
) *stateless.WorkflowInfo {
result := &stateless.WorkflowInfo{}
result.Status = ConvertUpdateModelToWorkflowStatus(runtime, updateInfo)
if updateInfo.GetType() == models.WorkflowType_UPDATE {
result.InstancesAdded = util.ConvertInstanceIDListToInstanceRange(updateInfo.GetInstancesAdded())
result.InstancesRemoved = util.ConvertInstanceIDListToInstanceRange(updateInfo.GetInstancesRemoved())
result.InstancesUpdated = util.ConvertInstanceIDListToInstanceRange(updateInfo.GetInstancesUpdated())
result.UpdateSpec = &stateless.UpdateSpec{
BatchSize: updateInfo.GetUpdateConfig().GetBatchSize(),
RollbackOnFailure: updateInfo.GetUpdateConfig().GetRollbackOnFailure(),
MaxInstanceRetries: updateInfo.GetUpdateConfig().GetMaxInstanceAttempts(),
MaxTolerableInstanceFailures: updateInfo.GetUpdateConfig().GetMaxFailureInstances(),
StartPaused: updateInfo.GetUpdateConfig().GetStartPaused(),
InPlace: updateInfo.GetUpdateConfig().GetInPlace(),
}
} else if updateInfo.GetType() == models.WorkflowType_RESTART {
result.RestartSpec = &stateless.RestartSpec{
BatchSize: updateInfo.GetUpdateConfig().GetBatchSize(),
Ranges: util.ConvertInstanceIDListToInstanceRange(updateInfo.GetInstancesUpdated()),
InPlace: updateInfo.GetUpdateConfig().GetInPlace(),
}
}
result.OpaqueData = &v1alphapeloton.OpaqueData{
Data: updateInfo.GetOpaqueData().GetData(),
}
result.Events = workflowEvents
result.InstanceEvents = instanceWorkflowEvents
return result
}
// ConvertStatelessQuerySpecToJobQuerySpec converts query spec for stateless svc to
// job query spec
func ConvertStatelessQuerySpecToJobQuerySpec(spec *stateless.QuerySpec) *job.QuerySpec {
var labels []*peloton.Label
var jobStates []job.JobState
var creationTimeRange *peloton.TimeRange
var completionTimeRange *peloton.TimeRange
var respoolPath *pelotonv0respool.ResourcePoolPath
var paginationSpec *pelotonv0query.PaginationSpec
for _, label := range spec.GetLabels() {
labels = append(labels, &peloton.Label{
Key: label.GetKey(),
Value: label.GetValue(),
})
}
for _, jobState := range spec.GetJobStates() {
jobStates = append(jobStates, job.JobState(jobState))
}
if spec.GetCreationTimeRange() != nil {
creationTimeRange = &peloton.TimeRange{
Min: spec.GetCreationTimeRange().GetMin(),
Max: spec.GetCreationTimeRange().GetMax(),
}
}
if spec.GetCompletionTimeRange() != nil {
completionTimeRange = &peloton.TimeRange{
Min: spec.GetCompletionTimeRange().GetMin(),
Max: spec.GetCompletionTimeRange().GetMax(),
}
}
if spec.GetRespool() != nil {
respoolPath = &pelotonv0respool.ResourcePoolPath{
Value: spec.GetRespool().GetValue(),
}
}
if spec.GetPagination() != nil {
paginationSpec = convertV1AlphaPaginationSpecToV0PaginationSpec(
spec.GetPagination(),
)
}
return &job.QuerySpec{
Pagination: paginationSpec,
Labels: labels,
Keywords: spec.GetKeywords(),
JobStates: jobStates,
Respool: respoolPath,
Owner: spec.GetOwner(),
Name: spec.GetName(),
CreationTimeRange: creationTimeRange,
CompletionTimeRange: completionTimeRange,
}
}
// ConvertJobSpecToJobConfig converts stateless job spec to job config
func ConvertJobSpecToJobConfig(spec *stateless.JobSpec) (*job.JobConfig, error) {
result := &job.JobConfig{
Type: job.JobType_SERVICE,
Name: spec.GetName(),
Owner: spec.GetOwner(),
OwningTeam: spec.GetOwningTeam(),
LdapGroups: spec.GetLdapGroups(),
Description: spec.GetDescription(),
InstanceCount: spec.GetInstanceCount(),
}
if spec.GetRevision() != nil {
result.ChangeLog = &peloton.ChangeLog{
Version: spec.GetRevision().GetVersion(),
CreatedAt: spec.GetRevision().GetCreatedAt(),
UpdatedAt: spec.GetRevision().GetUpdatedAt(),
UpdatedBy: spec.GetRevision().GetUpdatedBy(),
}
}
if len(spec.GetLabels()) != 0 {
var labels []*peloton.Label
for _, label := range spec.GetLabels() {
labels = append(labels, &peloton.Label{
Key: label.GetKey(), Value: label.GetValue(),
})
}
result.Labels = labels
}
if spec.GetSla() != nil {
result.SLA = ConvertSLASpecToSLAConfig(spec.GetSla())
}
if spec.GetDefaultSpec() != nil {
defaultConfig, err := ConvertPodSpecToTaskConfig(spec.GetDefaultSpec())
if err != nil {
return nil, err
}
result.DefaultConfig = defaultConfig
}
if spec.GetSla() != nil && spec.GetDefaultSpec() != nil {
result.DefaultConfig.Revocable = spec.GetSla().GetRevocable()
}
if len(spec.GetInstanceSpec()) != 0 {
result.InstanceConfig = make(map[uint32]*task.TaskConfig)
for instanceID, instanceSpec := range spec.GetInstanceSpec() {
instanceConfig, err := ConvertPodSpecToTaskConfig(instanceSpec)
if err != nil {
return nil, err
}
if spec.GetSla() != nil && spec.GetDefaultSpec() != nil {
instanceConfig.Revocable = spec.GetSla().GetRevocable()
}
result.InstanceConfig[instanceID] = instanceConfig
}
}
if spec.GetRespoolId() != nil {
result.RespoolID = &peloton.ResourcePoolID{
Value: spec.GetRespoolId().GetValue(),
}
}
return result, nil
}
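// Editorial sketch (hypothetical input, not from the original source): when
// both Sla and DefaultSpec are set, the job-level revocable flag overwrites
// the pod-level value, as implemented above.
//
//	spec := &stateless.JobSpec{
//		Sla:         &stateless.SlaSpec{Revocable: true},
//		DefaultSpec: &pod.PodSpec{},
//	}
//	config, _ := ConvertJobSpecToJobConfig(spec)
//	// config.DefaultConfig.Revocable == true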
// ConvertPodSpecToTaskConfig converts a pod spec to task config
func ConvertPodSpecToTaskConfig(spec *pod.PodSpec) (*task.TaskConfig, error) {
if len(spec.GetContainers()) > 1 {
return nil,
yarpcerrors.UnimplementedErrorf("configuration of more than one container per pod is not supported")
}
if len(spec.GetInitContainers()) > 0 {
return nil,
yarpcerrors.UnimplementedErrorf("init containers are not supported")
}
result := &task.TaskConfig{
Controller: spec.GetController(),
KillGracePeriodSeconds: spec.GetKillGracePeriodSeconds(),
Revocable: spec.GetRevocable(),
}
var mainContainer *pod.ContainerSpec
if len(spec.GetContainers()) > 0 {
mainContainer = spec.GetContainers()[0]
result.Container = mainContainer.GetContainer()
result.Command = mainContainer.GetCommand()
result.Executor = mainContainer.GetExecutor()
}
result.Name = mainContainer.GetName()
if spec.GetLabels() != nil {
var labels []*peloton.Label
for _, label := range spec.GetLabels() {
labels = append(labels, &peloton.Label{
Key: label.GetKey(), Value: label.GetValue(),
})
}
result.Labels = labels
}
if mainContainer.GetResource() != nil {
result.Resource = &task.ResourceConfig{
CpuLimit: mainContainer.GetResource().GetCpuLimit(),
MemLimitMb: mainContainer.GetResource().GetMemLimitMb(),
DiskLimitMb: mainContainer.GetResource().GetDiskLimitMb(),
FdLimit: mainContainer.GetResource().GetFdLimit(),
GpuLimit: mainContainer.GetResource().GetGpuLimit(),
}
}
if mainContainer.GetLivenessCheck() != nil {
healthCheck := &task.HealthCheckConfig{
Enabled: mainContainer.GetLivenessCheck().GetEnabled(),
InitialIntervalSecs: mainContainer.GetLivenessCheck().GetInitialIntervalSecs(),
IntervalSecs: mainContainer.GetLivenessCheck().GetIntervalSecs(),
MaxConsecutiveFailures: mainContainer.GetLivenessCheck().GetMaxConsecutiveFailures(),
TimeoutSecs: mainContainer.GetLivenessCheck().GetTimeoutSecs(),
Type: task.HealthCheckConfig_Type(mainContainer.GetLivenessCheck().GetType()),
}
if mainContainer.GetLivenessCheck().GetCommandCheck() != nil {
healthCheck.CommandCheck = &task.HealthCheckConfig_CommandCheck{
Command: mainContainer.GetLivenessCheck().GetCommandCheck().GetCommand(),
UnshareEnvironments: mainContainer.GetLivenessCheck().GetCommandCheck().GetUnshareEnvironments(),
}
}
if mainContainer.GetLivenessCheck().GetHttpCheck() != nil {
healthCheck.HttpCheck = &task.HealthCheckConfig_HTTPCheck{
Scheme: mainContainer.GetLivenessCheck().GetHttpCheck().GetScheme(),
Port: mainContainer.GetLivenessCheck().GetHttpCheck().GetPort(),
Path: mainContainer.GetLivenessCheck().GetHttpCheck().GetPath(),
}
}
result.HealthCheck = healthCheck
}
if len(mainContainer.GetPorts()) != 0 {
var portConfigs []*task.PortConfig
for _, port := range mainContainer.GetPorts() {
portConfigs = append(portConfigs, &task.PortConfig{
Name: port.GetName(),
Value: port.GetValue(),
EnvName: port.GetEnvName(),
})
}
result.Ports = portConfigs
}
if spec.GetConstraint() != nil {
result.Constraint = ConvertPodConstraintsToTaskConstraints(
[]*pod.Constraint{spec.GetConstraint()},
)[0]
}
if spec.GetRestartPolicy() != nil {
result.RestartPolicy = &task.RestartPolicy{
MaxFailures: spec.GetRestartPolicy().GetMaxFailures(),
}
}
if spec.GetVolume() != nil {
result.Volume = &task.PersistentVolumeConfig{
ContainerPath: spec.GetVolume().GetContainerPath(),
SizeMB: spec.GetVolume().GetSizeMb(),
}
}
if spec.GetPreemptionPolicy() != nil {
result.PreemptionPolicy = &task.PreemptionPolicy{
KillOnPreempt: spec.GetPreemptionPolicy().GetKillOnPreempt(),
}
if result.GetPreemptionPolicy().GetKillOnPreempt() {
result.PreemptionPolicy.Type = task.PreemptionPolicy_TYPE_PREEMPTIBLE
} else {
result.PreemptionPolicy.Type = task.PreemptionPolicy_TYPE_NON_PREEMPTIBLE
}
}
return result, nil
}
// ConvertPodConstraintsToTaskConstraints converts pod constraints to task constraints
func ConvertPodConstraintsToTaskConstraints(
constraints []*pod.Constraint,
) []*task.Constraint {
var result []*task.Constraint
for _, podConstraint := range constraints {
taskConstraint := &task.Constraint{
Type: task.Constraint_Type(podConstraint.GetType()),
}
if podConstraint.GetLabelConstraint() != nil {
taskConstraint.LabelConstraint = &task.LabelConstraint{
Kind: task.LabelConstraint_Kind(
podConstraint.GetLabelConstraint().GetKind(),
),
Condition: task.LabelConstraint_Condition(
podConstraint.GetLabelConstraint().GetCondition(),
),
Requirement: podConstraint.GetLabelConstraint().GetRequirement(),
}
if podConstraint.GetLabelConstraint().GetLabel() != nil {
taskConstraint.LabelConstraint.Label = &peloton.Label{
Key: podConstraint.GetLabelConstraint().GetLabel().GetKey(),
Value: podConstraint.GetLabelConstraint().GetLabel().GetValue(),
}
}
}
if podConstraint.GetAndConstraint() != nil {
taskConstraint.AndConstraint = &task.AndConstraint{
Constraints: ConvertPodConstraintsToTaskConstraints(
podConstraint.GetAndConstraint().GetConstraints()),
}
}
if podConstraint.GetOrConstraint() != nil {
taskConstraint.OrConstraint = &task.OrConstraint{
Constraints: ConvertPodConstraintsToTaskConstraints(
podConstraint.GetOrConstraint().GetConstraints()),
}
}
result = append(result, taskConstraint)
}
return result
}
// ConvertUpdateSpecToUpdateConfig converts update spec to update config
func ConvertUpdateSpecToUpdateConfig(spec *stateless.UpdateSpec) *update.UpdateConfig {
return &update.UpdateConfig{
BatchSize: spec.GetBatchSize(),
RollbackOnFailure: spec.GetRollbackOnFailure(),
MaxInstanceAttempts: spec.GetMaxInstanceRetries(),
MaxFailureInstances: spec.GetMaxTolerableInstanceFailures(),
StartPaused: spec.GetStartPaused(),
InPlace: spec.GetInPlace(),
StartTasks: spec.GetStartPods(),
}
}
// ConvertCreateSpecToUpdateConfig converts create spec to update config
func ConvertCreateSpecToUpdateConfig(spec *stateless.CreateSpec) *update.UpdateConfig {
return &update.UpdateConfig{
BatchSize: spec.GetBatchSize(),
MaxInstanceAttempts: spec.GetMaxInstanceRetries(),
MaxFailureInstances: spec.GetMaxTolerableInstanceFailures(),
StartPaused: spec.GetStartPaused(),
}
}
// ConvertPodQuerySpecToTaskQuerySpec converts
// v1alpha pod.QuerySpec to v0 task.QuerySpec
func ConvertPodQuerySpecToTaskQuerySpec(spec *pod.QuerySpec) *task.QuerySpec {
var taskStates []task.TaskState
var taskNames []string
if spec.GetPodStates() != nil {
for _, state := range spec.GetPodStates() {
taskStates = append(taskStates, ConvertPodStateToTaskState(state))
}
}
if spec.GetNames() != nil {
for _, podName := range spec.GetNames() {
taskNames = append(taskNames, podName.GetValue())
}
}
return &task.QuerySpec{
Pagination: convertV1AlphaPaginationSpecToV0PaginationSpec(
spec.GetPagination(),
),
TaskStates: taskStates,
Names: taskNames,
Hosts: spec.GetHosts(),
}
}
// ConvertTaskInfosToPodInfos converts a list of
// v0 task info to a list of v1alpha pod info
func ConvertTaskInfosToPodInfos(taskInfos []*task.TaskInfo) []*pod.PodInfo {
var podInfos []*pod.PodInfo
for _, taskInfo := range taskInfos {
podInfo := &pod.PodInfo{
Spec: ConvertTaskConfigToPodSpec(
taskInfo.GetConfig(),
taskInfo.GetJobId().GetValue(),
taskInfo.GetInstanceId(),
),
Status: ConvertTaskRuntimeToPodStatus(taskInfo.GetRuntime()),
}
podInfos = append(podInfos, podInfo)
}
return podInfos
}
// ConvertTaskEventsToPodEvents converts v0 task.PodEvents to v1alpha pod.PodEvents
func ConvertTaskEventsToPodEvents(taskEvents []*task.PodEvent) []*pod.PodEvent {
var result []*pod.PodEvent
for _, e := range taskEvents {
podID := e.GetTaskId().GetValue()
prevPodID := e.GetPrevTaskId().GetValue()
desiredPodID := e.GetDesriedTaskId().GetValue()
entityVersion := versionutil.GetPodEntityVersion(e.GetConfigVersion())
desiredEntityVersion := versionutil.GetPodEntityVersion(e.GetDesiredConfigVersion())
result = append(result, &pod.PodEvent{
PodId: &v1alphapeloton.PodID{
Value: podID,
},
ActualState: ConvertTaskStateToPodState(
task.TaskState(task.TaskState_value[e.GetActualState()]),
).String(),
DesiredState: ConvertTaskStateToPodState(
task.TaskState(task.TaskState_value[e.GetGoalState()]),
).String(),
Timestamp: e.GetTimestamp(),
Version: entityVersion,
DesiredVersion: desiredEntityVersion,
AgentId: e.GetAgentID(),
Hostname: e.GetHostname(),
Message: e.GetMessage(),
Reason: e.GetReason(),
PrevPodId: &v1alphapeloton.PodID{
Value: prevPodID,
},
Healthy: pod.HealthState(task.HealthState_value[e.GetHealthy()]).String(),
DesiredPodId: &v1alphapeloton.PodID{
Value: desiredPodID,
},
})
}
return result
}
// ConvertTaskStatsToPodStats converts v0 task stats to v1alpha pod stats
func ConvertTaskStatsToPodStats(taskStats map[string]uint32) map[string]uint32 {
result := make(map[string]uint32)
for stateStr, num := range taskStats {
taskState := task.TaskState(task.TaskState_value[stateStr])
result[ConvertTaskStateToPodState(taskState).String()] = num
}
return result
}
func convertV1AlphaPaginationSpecToV0PaginationSpec(
pagination *query.PaginationSpec,
) *pelotonv0query.PaginationSpec {
if pagination == nil {
return nil
}
var orderBy []*pelotonv0query.OrderBy
for _, ele := range pagination.GetOrderBy() {
orderBy = append(orderBy, &pelotonv0query.OrderBy{
Order: pelotonv0query.OrderBy_Order(ele.GetOrder()),
Property: &pelotonv0query.PropertyPath{
Value: ele.GetProperty().GetValue(),
},
})
}
return &pelotonv0query.PaginationSpec{
Offset: pagination.GetOffset(),
Limit: pagination.GetLimit(),
OrderBy: orderBy,
MaxLimit: pagination.GetMaxLimit(),
}
}
func convertTaskTerminationStatusToPodTerminationStatus(
termStatus *task.TerminationStatus,
) *pod.TerminationStatus {
if termStatus == nil {
return nil
}
podReason := pod.TerminationStatus_TERMINATION_STATUS_REASON_INVALID
switch termStatus.GetReason() {
case task.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_ON_REQUEST:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_ON_REQUEST
case task.TerminationStatus_TERMINATION_STATUS_REASON_FAILED:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_FAILED
case task.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_HOST_MAINTENANCE:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_HOST_MAINTENANCE
case task.TerminationStatus_TERMINATION_STATUS_REASON_PREEMPTED_RESOURCES:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_PREEMPTED_RESOURCES
case task.TerminationStatus_TERMINATION_STATUS_REASON_DEADLINE_TIMEOUT_EXCEEDED:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_DEADLINE_TIMEOUT_EXCEEDED
case task.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_FOR_UPDATE:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_FOR_UPDATE
case task.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_FOR_RESTART:
podReason = pod.TerminationStatus_TERMINATION_STATUS_REASON_KILLED_FOR_RESTART
}
return &pod.TerminationStatus{
Reason: podReason,
ExitCode: termStatus.GetExitCode(),
Signal: termStatus.GetSignal(),
}
}
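// Editorial sketch: reasons without a case in the switch above fall back to
// TERMINATION_STATUS_REASON_INVALID. For a mapped reason:
//
//	st := convertTaskTerminationStatusToPodTerminationStatus(&task.TerminationStatus{
//		Reason: task.TerminationStatus_TERMINATION_STATUS_REASON_FAILED,
//	})
//	// st.Reason == pod.TerminationStatus_TERMINATION_STATUS_REASON_FAILED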
AdverseEvent.rs

#![allow(unused_imports, non_camel_case_types)]
use crate::models::r5::AdverseEvent_ContributingFactor::AdverseEvent_ContributingFactor;
use crate::models::r5::AdverseEvent_MitigatingAction::AdverseEvent_MitigatingAction;
use crate::models::r5::AdverseEvent_Participant::AdverseEvent_Participant;
use crate::models::r5::AdverseEvent_PreventiveAction::AdverseEvent_PreventiveAction;
use crate::models::r5::AdverseEvent_SupportingInfo::AdverseEvent_SupportingInfo;
use crate::models::r5::AdverseEvent_SuspectEntity::AdverseEvent_SuspectEntity;
use crate::models::r5::CodeableConcept::CodeableConcept;
use crate::models::r5::Element::Element;
use crate::models::r5::Extension::Extension;
use crate::models::r5::Identifier::Identifier;
use crate::models::r5::Meta::Meta;
use crate::models::r5::Narrative::Narrative;
use crate::models::r5::Period::Period;
use crate::models::r5::Reference::Reference;
use crate::models::r5::ResourceList::ResourceList;
use crate::models::r5::Timing::Timing;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// An event (i.e. any change to current patient status) that may be related to
/// unintended effects on a patient or research subject. The unintended effects
/// may require additional monitoring, treatment or hospitalization or may result in
/// death. The AdverseEvent resource also extends to potential or avoided events that
/// could have had such effects.
#[derive(Debug)]
pub struct AdverseEvent<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl AdverseEvent<'_> {
pub fn new(value: &Value) -> AdverseEvent {
AdverseEvent {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// Extensions for actuality
pub fn _actuality(&self) -> Option<Element> {
if let Some(val) = self.value.get("_actuality") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for detected
pub fn _detected(&self) -> Option<Element> {
if let Some(val) = self.value.get("_detected") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for implicitRules
pub fn _implicit_rules(&self) -> Option<Element> {
if let Some(val) = self.value.get("_implicitRules") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for language
pub fn _language(&self) -> Option<Element> {
if let Some(val) = self.value.get("_language") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for occurrenceDateTime
pub fn _occurrence_date_time(&self) -> Option<Element> {
if let Some(val) = self.value.get("_occurrenceDateTime") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for recordedDate
pub fn _recorded_date(&self) -> Option<Element> {
if let Some(val) = self.value.get("_recordedDate") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for status
pub fn _status(&self) -> Option<Element> {
if let Some(val) = self.value.get("_status") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Whether the event actually happened, or just had the potential to. Note that this
/// is independent of whether anyone was affected or harmed or how severely.
pub fn actuality(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("actuality") {
return Some(string);
}
return None;
}
/// The overall type of event, intended for search and filtering purposes.
pub fn category(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("category") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Specific event that occurred or that was averted, such as patient fall, wrong
/// organ removed, or wrong blood transfused.
pub fn code(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("code") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// These resources do not have an independent existence apart from the resource that
/// contains them - they cannot be identified independently, nor can they have their
/// own independent transaction scope.
pub fn contained(&self) -> Option<Vec<ResourceList>> {
if let Some(Value::Array(val)) = self.value.get("contained") {
return Some(
val.into_iter()
.map(|e| ResourceList {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The contributing factors suspected to have increased the probability or severity
/// of the adverse event.
pub fn contributing_factor(&self) -> Option<Vec<AdverseEvent_ContributingFactor>> {
if let Some(Value::Array(val)) = self.value.get("contributingFactor") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_ContributingFactor {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Estimated or actual date the AdverseEvent began, in the opinion of the reporter.
pub fn detected(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("detected") {
return Some(string);
}
return None;
}
/// The Encounter associated with the start of the AdverseEvent.
pub fn encounter(&self) -> Option<Reference> {
if let Some(val) = self.value.get("encounter") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The logical id of the resource, as used in the URL for the resource. Once
/// assigned, this value never changes.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// Business identifiers assigned to this adverse event by the performer or other
/// systems which remain constant as the resource is updated and propagates from
/// server to server.
pub fn identifier(&self) -> Option<Vec<Identifier>> {
if let Some(Value::Array(val)) = self.value.get("identifier") {
return Some(
val.into_iter()
.map(|e| Identifier {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A reference to a set of rules that were followed when the resource was
/// constructed, and which must be understood when processing the content. Often, this
/// is a reference to an implementation guide that defines the special rules along
/// with other profiles etc.
pub fn implicit_rules(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("implicitRules") {
return Some(string);
}
return None;
}
/// The base language in which the resource is written.
pub fn language(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("language") {
return Some(string);
}
return None;
}
/// The information about where the adverse event occurred.
pub fn location(&self) -> Option<Reference> {
if let Some(val) = self.value.get("location") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The metadata about the resource. This is content that is maintained by the
/// infrastructure. Changes to the content might not always be associated with version
/// changes to the resource.
pub fn meta(&self) -> Option<Meta> {
if let Some(val) = self.value.get("meta") {
return Some(Meta {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The ameliorating action taken after the adverse event occurred in order to reduce
/// the extent of harm.
pub fn mitigating_action(&self) -> Option<Vec<AdverseEvent_MitigatingAction>> {
if let Some(Value::Array(val)) = self.value.get("mitigatingAction") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_MitigatingAction {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the resource and that modifies the understanding of the element
/// that contains it and/or the understanding of the containing element's descendants.
/// Usually modifier elements provide negation or qualification. To make the use of
/// extensions safe and manageable, there is a strict set of governance applied to
/// the definition and use of extensions. Though any implementer is allowed to define
/// an extension, there is a set of requirements that SHALL be met as part of the
/// definition of the extension. Applications processing a resource are required to
/// check for modifier extensions. Modifier extensions SHALL NOT change the meaning
/// of any elements on Resource or DomainResource (including cannot change the meaning
/// of modifierExtension itself).
pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The date (and perhaps time) when the adverse event occurred.
pub fn occurrence_date_time(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("occurrenceDateTime") {
return Some(string);
}
return None;
}
/// The date (and perhaps time) when the adverse event occurred.
pub fn occurrence_period(&self) -> Option<Period> {
if let Some(val) = self.value.get("occurrencePeriod") {
return Some(Period {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The date (and perhaps time) when the adverse event occurred.
pub fn occurrence_timing(&self) -> Option<Timing> {
if let Some(val) = self.value.get("occurrenceTiming") {
return Some(Timing {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Describes the type of outcome from the adverse event, such as resolved,
/// recovering, ongoing, resolved-with-sequelae, or fatal.
pub fn outcome(&self) -> Option<Vec<CodeableConcept>> {
if let Some(Value::Array(val)) = self.value.get("outcome") {
return Some(
val.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Indicates who or what participated in the adverse event and how they were
/// involved.
pub fn participant(&self) -> Option<Vec<AdverseEvent_Participant>> {
if let Some(Value::Array(val)) = self.value.get("participant") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_Participant {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Preventive actions that contributed to avoiding the adverse event.
pub fn preventive_action(&self) -> Option<Vec<AdverseEvent_PreventiveAction>> {
if let Some(Value::Array(val)) = self.value.get("preventiveAction") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_PreventiveAction {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The date on which the existence of the AdverseEvent was first recorded.
pub fn recorded_date(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("recordedDate") {
return Some(string);
}
return None;
}
/// Information on who recorded the adverse event. May be the patient or a
/// practitioner.
pub fn recorder(&self) -> Option<Reference> {
if let Some(val) = self.value.get("recorder") {
return Some(Reference {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Information about the condition that occurred as a result of the adverse event,
/// such as hives due to the exposure to a substance (for example, a drug or a
/// chemical) or a broken leg as a result of the fall.
pub fn resulting_condition(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("resultingCondition") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Assessment whether this event, or averted event, was of clinical importance.
pub fn seriousness(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("seriousness") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The current state of the adverse event or potential adverse event.
pub fn status(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("status") {
return Some(string);
}
return None;
}
/// The research study that the subject is enrolled in.
pub fn study(&self) -> Option<Vec<Reference>> {
if let Some(Value::Array(val)) = self.value.get("study") {
return Some(
val.into_iter()
.map(|e| Reference {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// The subject or group impacted by the event.
pub fn subject(&self) -> Reference {
Reference {
value: Cow::Borrowed(&self.value["subject"]),
}
}
/// Supporting information relevant to the event.
pub fn supporting_info(&self) -> Option<Vec<AdverseEvent_SupportingInfo>> {
if let Some(Value::Array(val)) = self.value.get("supportingInfo") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_SupportingInfo {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Describes the entity that is suspected to have caused the adverse event.
pub fn suspect_entity(&self) -> Option<Vec<AdverseEvent_SuspectEntity>> {
if let Some(Value::Array(val)) = self.value.get("suspectEntity") {
return Some(
val.into_iter()
.map(|e| AdverseEvent_SuspectEntity {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// A human-readable narrative that contains a summary of the resource and can be used
/// to represent the content of the resource to a human. The narrative need not encode
/// all the structured data, but is required to contain sufficient detail to make it
/// "clinically safe" for a human to just read the narrative. Resource definitions
/// may define what content should be represented in the narrative to ensure clinical
/// safety.
pub fn text(&self) -> Option<Narrative> {
if let Some(val) = self.value.get("text") {
return Some(Narrative {
value: Cow::Borrowed(val),
});
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self._actuality() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._detected() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._implicit_rules() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._language() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._occurrence_date_time() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._recorded_date() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._status() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.actuality() {}
if let Some(_val) = self.category() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.code() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.contained() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.contributing_factor() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.detected() {}
if let Some(_val) = self.encounter() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.identifier() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.implicit_rules() {}
if let Some(_val) = self.language() {}
if let Some(_val) = self.location() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.meta() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.mitigating_action() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.modifier_extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.occurrence_date_time() {}
if let Some(_val) = self.occurrence_period() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.occurrence_timing() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.outcome() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.participant() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.preventive_action() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.recorded_date() {}
if let Some(_val) = self.recorder() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.resulting_condition() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.seriousness() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.status() {}
if let Some(_val) = self.study() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if !self.subject().validate() {
return false;
}
if let Some(_val) = self.supporting_info() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.suspect_entity() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.text() {
if !_val.validate() {
return false;
}
}
return true;
}
}
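// Editorial sketch (not in the original source): reading an AdverseEvent from
// raw FHIR JSON through the borrowing accessors. The JSON literal below is a
// minimal, hypothetical resource.
//
//     let raw: Value = serde_json::from_str(
//         r#"{"resourceType": "AdverseEvent", "subject": {"reference": "Patient/1"}}"#,
//     ).unwrap();
//     let event = AdverseEvent::new(&raw);
//     assert_eq!(event.actuality(), None);
//     assert!(event.validate());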
#[derive(Debug)]
pub struct AdverseEventBuilder {
pub(crate) value: Value,
}
impl AdverseEventBuilder {
pub fn build(&self) -> AdverseEvent {
AdverseEvent {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: AdverseEvent) -> AdverseEventBuilder {
AdverseEventBuilder {
value: (*existing.value).clone(),
}
}
pub fn new(subject: Reference) -> AdverseEventBuilder {
let mut __value: Value = json!({});
__value["subject"] = json!(subject.value);
return AdverseEventBuilder { value: __value };
}
pub fn _actuality<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_actuality"] = json!(val.value);
return self;
}
pub fn _detected<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_detected"] = json!(val.value);
return self;
}
pub fn _implicit_rules<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_implicitRules"] = json!(val.value);
return self;
}
pub fn _language<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_language"] = json!(val.value);
return self;
}
pub fn _occurrence_date_time<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_occurrenceDateTime"] = json!(val.value);
return self;
}
pub fn _recorded_date<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_recordedDate"] = json!(val.value);
return self;
}
pub fn _status<'a>(&'a mut self, val: Element) -> &'a mut AdverseEventBuilder {
self.value["_status"] = json!(val.value);
return self;
}
pub fn actuality<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["actuality"] = json!(val);
return self;
}
pub fn category<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut AdverseEventBuilder {
self.value["category"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn code<'a>(&'a mut self, val: CodeableConcept) -> &'a mut AdverseEventBuilder {
self.value["code"] = json!(val.value);
return self;
}
pub fn contained<'a>(&'a mut self, val: Vec<ResourceList>) -> &'a mut AdverseEventBuilder {
self.value["contained"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn contributing_factor<'a>(
&'a mut self,
val: Vec<AdverseEvent_ContributingFactor>,
) -> &'a mut AdverseEventBuilder {
self.value["contributingFactor"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn detected<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["detected"] = json!(val);
return self;
}
pub fn encounter<'a>(&'a mut self, val: Reference) -> &'a mut AdverseEventBuilder {
self.value["encounter"] = json!(val.value);
return self;
}
pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut AdverseEventBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn identifier<'a>(&'a mut self, val: Vec<Identifier>) -> &'a mut AdverseEventBuilder {
self.value["identifier"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn implicit_rules<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["implicitRules"] = json!(val);
return self;
}
pub fn language<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["language"] = json!(val);
return self;
}
pub fn location<'a>(&'a mut self, val: Reference) -> &'a mut AdverseEventBuilder {
self.value["location"] = json!(val.value);
return self;
}
pub fn meta<'a>(&'a mut self, val: Meta) -> &'a mut AdverseEventBuilder {
self.value["meta"] = json!(val.value);
return self;
}
pub fn mitigating_action<'a>(
&'a mut self,
val: Vec<AdverseEvent_MitigatingAction>,
) -> &'a mut AdverseEventBuilder {
self.value["mitigatingAction"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn modifier_extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut AdverseEventBuilder {
self.value["modifierExtension"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn occurrence_date_time<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["occurrenceDateTime"] = json!(val);
return self;
}
pub fn occurrence_period<'a>(&'a mut self, val: Period) -> &'a mut AdverseEventBuilder {
self.value["occurrencePeriod"] = json!(val.value);
return self;
}
pub fn occurrence_timing<'a>(&'a mut self, val: Timing) -> &'a mut AdverseEventBuilder {
self.value["occurrenceTiming"] = json!(val.value);
return self;
}
pub fn outcome<'a>(&'a mut self, val: Vec<CodeableConcept>) -> &'a mut AdverseEventBuilder {
self.value["outcome"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn participant<'a>(
&'a mut self,
val: Vec<AdverseEvent_Participant>,
) -> &'a mut AdverseEventBuilder {
self.value["participant"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn preventive_action<'a>(
&'a mut self,
val: Vec<AdverseEvent_PreventiveAction>,
) -> &'a mut AdverseEventBuilder {
self.value["preventiveAction"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn recorded_date<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["recordedDate"] = json!(val);
return self;
}
pub fn recorder<'a>(&'a mut self, val: Reference) -> &'a mut AdverseEventBuilder {
self.value["recorder"] = json!(val.value);
return self;
}
pub fn resulting_condition<'a>(
&'a mut self,
val: Vec<Reference>,
) -> &'a mut AdverseEventBuilder {
self.value["resultingCondition"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn seriousness<'a>(&'a mut self, val: CodeableConcept) -> &'a mut AdverseEventBuilder {
self.value["seriousness"] = json!(val.value);
return self;
}
pub fn status<'a>(&'a mut self, val: &str) -> &'a mut AdverseEventBuilder {
self.value["status"] = json!(val);
return self;
}
pub fn study<'a>(&'a mut self, val: Vec<Reference>) -> &'a mut AdverseEventBuilder {
self.value["study"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn supporting_info<'a>(
&'a mut self,
val: Vec<AdverseEvent_SupportingInfo>,
) -> &'a mut AdverseEventBuilder {
self.value["supportingInfo"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn suspect_entity<'a>(
&'a mut self,
val: Vec<AdverseEvent_SuspectEntity>,
) -> &'a mut AdverseEventBuilder {
self.value["suspectEntity"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn text<'a>(&'a mut self, val: Narrative) -> &'a mut AdverseEventBuilder {
self.value["text"] = json!(val.value);
return self;
}
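// Editorial sketch (not in the original source): typical builder usage.
// `subject_ref` stands for any valid Reference and is hypothetical.
//
//     let mut builder = AdverseEventBuilder::new(subject_ref);
//     let event = builder.actuality("actual").status("in-progress").build();
//     assert!(event.validate());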
}
ints.py

from typing import Any, BinaryIO
from kale.util.struct_stream import StructStream


class int8(StructStream):
    PACK = "!b"


class uint8(StructStream):
    PACK = "!B"


class int16(StructStream):
    PACK = "!h"


class uint16(StructStream):
    PACK = "!H"


class int32(StructStream):
    PACK = "!l"


class uint32(StructStream):
    PACK = "!L"


class int64(StructStream):
    PACK = "!q"


class uint64(StructStream):
    PACK = "!Q"


class uint128(int):
    @classmethod
    def parse(cls, f: BinaryIO) -> Any:
        read_bytes = f.read(16)
        assert len(read_bytes) == 16
        n = int.from_bytes(read_bytes, "big", signed=False)
        assert n <= (2 ** 128) - 1 and n >= 0
        return cls(n)

    def stream(self, f):
        assert self <= (2 ** 128) - 1 and self >= 0
        f.write(self.to_bytes(16, "big", signed=False))


class int512(int):
    # Uses 65 bytes to fit in the sign bit
    @classmethod
    def parse(cls, f: BinaryIO) -> Any:
        read_bytes = f.read(65)
        assert len(read_bytes) == 65
        n = int.from_bytes(read_bytes, "big", signed=True)
        assert n <= (2 ** 512) - 1 and n >= -(2 ** 512)
        return cls(n)

    def stream(self, f):
        assert self <= (2 ** 512) - 1 and self >= -(2 ** 512)
        f.write(self.to_bytes(65, "big", signed=True))
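# Editorial usage sketch (not part of the original module): fixed-width ints
# round-trip through stream()/parse().
if __name__ == "__main__":
    import io

    buf = io.BytesIO()
    uint128(2 ** 100).stream(buf)
    buf.seek(0)
    assert uint128.parse(buf) == 2 ** 100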
utils.rs

use std::{collections::HashMap, hash::Hash};
use serde::de::Error as DeError;
use serde::de::MapAccess;
use serde::ser::{Serialize, SerializeSeq, Serializer};
#[cfg(all(feature = "cache", feature = "model"))]
use super::permissions::Permissions;
use super::prelude::*;
#[cfg(all(feature = "cache", feature = "model"))]
use crate::cache::Cache;
#[cfg(feature = "cache")]
use crate::internal::prelude::*;
pub fn default_true() -> bool {
true
}
pub fn deserialize_emojis<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<EmojiId, Emoji>, D::Error> {
let vec: Vec<Emoji> = Deserialize::deserialize(deserializer)?;
let mut emojis = HashMap::new();
for emoji in vec {
emojis.insert(emoji.id, emoji);
}
Ok(emojis)
}
pub fn serialize_emojis<S: Serializer>(
emojis: &HashMap<EmojiId, Emoji>,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(emojis.len()))?;
for emoji in emojis.values() {
seq.serialize_element(emoji)?;
}
seq.end()
}
pub fn deserialize_guild_channels<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<ChannelId, GuildChannel>, D::Error> {
let vec: Vec<GuildChannel> = Deserialize::deserialize(deserializer)?;
let mut map = HashMap::new();
for channel in vec {
map.insert(channel.id, channel);
}
Ok(map)
}
pub fn deserialize_members<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<UserId, Member>, D::Error> {
let vec: Vec<Member> = Deserialize::deserialize(deserializer)?;
let mut members = HashMap::new();
for member in vec {
let user_id = member.user.id;
members.insert(user_id, member);
}
Ok(members)
}
pub fn deserialize_presences<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<UserId, Presence>, D::Error> {
let vec: Vec<Presence> = Deserialize::deserialize(deserializer)?;
let mut presences = HashMap::new();
for presence in vec {
presences.insert(presence.user_id, presence);
}
Ok(presences)
}
pub fn serialize_presences<S: Serializer>(
presences: &HashMap<UserId, Presence>,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(presences.len()))?;
for presence in presences.values() {
seq.serialize_element(presence)?;
}
seq.end()
}
pub fn deserialize_private_channels<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<ChannelId, Channel>, D::Error> {
let vec: Vec<Channel> = Deserialize::deserialize(deserializer)?;
let mut private_channels = HashMap::new();
for private_channel in vec {
let id = match private_channel {
Channel::Private(ref channel) => channel.id,
Channel::Guild(_) => unreachable!("Guild private channel decode"),
Channel::Category(_) => unreachable!("Channel category private channel decode"),
};
private_channels.insert(id, private_channel);
}
Ok(private_channels)
}
pub fn serialize_private_channels<S: Serializer>(
private_channels: &HashMap<ChannelId, Channel>,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(private_channels.len()))?;
for private_channel in private_channels.values() {
seq.serialize_element(private_channel)?;
}
seq.end()
}
pub fn deserialize_roles<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<RoleId, Role>, D::Error> {
let vec: Vec<Role> = Deserialize::deserialize(deserializer)?;
let mut roles = HashMap::new();
for role in vec {
roles.insert(role.id, role);
}
Ok(roles)
}
pub fn serialize_roles<S: Serializer>(
roles: &HashMap<RoleId, Role>,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(roles.len()))?;
for role in roles.values() {
seq.serialize_element(role)?;
}
seq.end()
}
pub fn deserialize_single_recipient<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<User, D::Error> {
let mut users: Vec<User> = Deserialize::deserialize(deserializer)?;
let user = if users.is_empty() {
return Err(DeError::custom("Expected a single recipient"));
} else {
users.remove(0)
};
Ok(user)
}
pub fn serialize_single_recipient<S: Serializer>(
user: &User,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(1))?;
seq.serialize_element(user)?;
seq.end()
}
pub fn deserialize_u16<'de, D: Deserializer<'de>>(deserializer: D) -> StdResult<u16, D::Error> {
deserializer.deserialize_any(U16Visitor)
}
pub fn deserialize_u64<'de, D: Deserializer<'de>>(deserializer: D) -> StdResult<u64, D::Error> {
deserializer.deserialize_any(U64Visitor)
}
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn serialize_u64<S: Serializer>(data: &u64, ser: S) -> StdResult<S::Ok, S::Error> {
ser.serialize_str(&data.to_string())
}
pub fn deserialize_voice_states<'de, D: Deserializer<'de>>(
deserializer: D,
) -> StdResult<HashMap<UserId, VoiceState>, D::Error> {
let vec: Vec<VoiceState> = Deserialize::deserialize(deserializer)?;
let mut voice_states = HashMap::new();
for voice_state in vec {
voice_states.insert(voice_state.user_id, voice_state);
}
Ok(voice_states)
}
pub fn serialize_gen_map<K: Eq + Hash, S: Serializer, V: Serialize>(
map: &HashMap<K, V>,
serializer: S,
) -> StdResult<S::Ok, S::Error> {
let mut seq = serializer.serialize_seq(Some(map.len()))?;
for value in map.values() {
seq.serialize_element(&value)?;
}
seq.end()
}
/// Tries to find a user's permissions using the cache.
/// Unlike [`user_has_perms`], this check passes (returns `Ok(())`) even when
/// the permissions are not in the cache.
#[cfg(all(feature = "cache", feature = "model"))]
#[inline]
pub async fn user_has_perms_cache(
cache: impl AsRef<Cache>,
channel_id: ChannelId,
guild_id: Option<GuildId>,
permissions: Permissions,
) -> Result<()> {
if match user_has_perms(cache, channel_id, guild_id, permissions).await {
Err(Error::Model(err)) => err.is_cache_err(),
result => result?,
} {
Ok(())
} else {
Err(Error::Model(ModelError::InvalidPermissions(permissions)))
}
}
#[cfg(all(feature = "cache", feature = "model"))]
pub async fn user_has_perms(
cache: impl AsRef<Cache>,
channel_id: ChannelId,
guild_id: Option<GuildId>,
mut permissions: Permissions,
) -> Result<bool> {
let cache = cache.as_ref();
let channel = match cache.channel(channel_id).await {
Some(channel) => channel,
None => return Err(Error::Model(ModelError::ChannelNotFound)),
};
// Both users in DMs, all users in groups, and maybe all channels in categories
// will have the same permissions.
//
// The only exception to this is when the current user is blocked by
// the recipient in a DM channel, preventing the current user
// from sending messages.
//
// Since serenity can't _reasonably_ check and keep track of these,
// just assume that all permissions are granted and return `true`.
let (guild_id, guild_channel) = match channel {
Channel::Guild(channel) => (channel.guild_id, channel),
Channel::Category(_) => return Ok(true),
Channel::Private(_) => match guild_id {
Some(_) => return Err(Error::Model(ModelError::InvalidChannelType)),
None => return Ok(true),
},
};
let guild = match cache.guild(guild_id).await {
Some(guild) => guild,
None => return Err(Error::Model(ModelError::GuildNotFound)),
};
let member = match guild.members.get(&cache.current_user().await.id) {
Some(member) => member,
None => return Err(Error::Model(ModelError::MemberNotFound)),
};
let perms = guild.user_permissions_in(&guild_channel, member)?;
permissions.remove(perms);
Ok(permissions.is_empty())
}
macro_rules! num_visitors {
($($visitor:ident: $type:ty),*) => {
$(
#[derive(Debug)]
pub struct $visitor;
impl<'de> Visitor<'de> for $visitor {
type Value = $type;
fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {
formatter.write_str("identifier")
}
fn visit_str<E: DeError>(self, v: &str) -> StdResult<Self::Value, E> {
v.parse::<$type>().map_err(|_| {
let mut s = String::with_capacity(32);
s.push_str("Unknown ");
s.push_str(stringify!($type));
s.push_str(" value: ");
s.push_str(v);
DeError::custom(s)
})
}
fn visit_i64<E: DeError>(self, v: i64) -> StdResult<Self::Value, E> { Ok(v as $type) }
fn visit_u64<E: DeError>(self, v: u64) -> StdResult<Self::Value, E> { Ok(v as $type) }
// This is called when serde_json's `arbitrary_precision` feature is enabled.
fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> StdResult<Self::Value, A::Error> {
struct Id {
num: $type,
}
struct StrVisitor;
impl<'de> Visitor<'de> for StrVisitor {
type Value = $type;
fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {
formatter.write_str("string")
}
fn visit_str<E: DeError>(self, s: &str) -> StdResult<Self::Value, E> { s.parse().map_err(E::custom) }
fn visit_string<E: DeError>(self, s: String) -> StdResult<Self::Value, E> { s.parse().map_err(E::custom) }
}
impl<'de> Deserialize<'de> for Id {
fn deserialize<D: Deserializer<'de>>(des: D) -> StdResult<Self, D::Error> {
Ok(Id { num: des.deserialize_str(StrVisitor)? })
}
}
map.next_value::<Id>().map(|id| id.num)
}
}
)*
}
}
num_visitors!(U16Visitor: u16, U32Visitor: u32, U64Visitor: u64);
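// Editorial sketch (not in the original file): these (de)serializers are
// intended to be wired up through serde field attributes, e.g.:
//
//     #[derive(Deserialize, Serialize)]
//     struct PartialGuild {
//         #[serde(
//             deserialize_with = "deserialize_roles",
//             serialize_with = "serialize_roles"
//         )]
//         roles: HashMap<RoleId, Role>,
//     }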
init.go

package cmd
import (
"github.com/spf13/cobra"
docker "github.com/hahwul/backbomb/pkg/docker"
)
// initCmd represents the init command
var initCmd = &cobra.Command{
Use: "init",
Short: "Initialize the backbomb docker image",
Run: func(cmd *cobra.Command, args []string) {
docker.Init()
},
}
func init() {
rootCmd.AddCommand(initCmd)
// Here you will define your flags and configuration settings.

// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// initCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// initCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
mod.rs

use json::JsonValue;
pub trait SelectionLens {
fn select<'a>(&self, input: Option<&'a JsonValue>) -> Option<&'a JsonValue>;
}
pub trait SelectionLensParser {
fn try_parse<'a>(
&self,
lens_pattern: Option<&'a str>,
) -> Result<(Box<dyn SelectionLens>, Option<&'a str>), Option<&'a str>>;
}
mod array_member;
mod identity;
mod prop;
mod sequence;
mod value_matchers;
pub fn match_json_slice<'a>(
matchers: &Vec<Box<dyn SelectionLens>>,
json_input: &'a JsonValue,
match_root_only: bool,
) -> Result<&'a JsonValue, ()> {
match matchers.iter().try_fold(json_input, |json_slice, matcher| {
matcher.select(Some(&json_slice))
}) {
Some(matching_slice) => Ok(matching_slice),
None => match (match_root_only, json_input) {
(false, JsonValue::Object(ref object)) => match object
.iter()
.map(|(_, value)| match_json_slice(matchers, value, match_root_only))
.find(|res| res.is_ok())
{
Some(Ok(matching_slice)) => Ok(matching_slice),
_ => Err(()),
},
(false, JsonValue::Array(ref sequence)) => match sequence
.iter()
.map(|value| match_json_slice(matchers, value, match_root_only))
.find(|res| res.is_ok())
{
Some(Ok(matching_slice)) => Ok(matching_slice),
_ => Err(()),
},
(_, _) => Err(()),
},
}
}
pub fn match_filter(filter: &str) -> Result<(Box<dyn SelectionLens>, Option<&str>), &str> {
lazy_static! {
static ref IDENTITY_PARSER: identity::IdentityParser = identity::IdentityParser {};
static ref PROP_PARSER: prop::PropParser = prop::PropParser {};
static ref ARRAY_MEMBER_PARSER: array_member::ArrayMemberParser =
array_member::ArrayMemberParser {};
static ref SEQUENCE_PARSER: sequence::SequenceParser = sequence::SequenceParser {};
}
IDENTITY_PARSER
.try_parse(Some(filter))
.or_else(|unmatched_filter| PROP_PARSER.try_parse(unmatched_filter))
.or_else(|unmatched_filter| ARRAY_MEMBER_PARSER.try_parse(unmatched_filter))
.or_else(|unmatched_filter| SEQUENCE_PARSER.try_parse(unmatched_filter))
.map_err(|_| filter)
}
pub fn try_to_match_filters(filter: &str) -> Result<Vec<Box<dyn SelectionLens>>, &str> {
let mut matchers: Vec<Box<dyn SelectionLens>> = vec![];
let mut unmatched_filter: Result<Option<&str>, &str> = Ok(Some(filter));
while let Ok(Some(filter)) = unmatched_filter {
match match_filter(filter) {
Ok((matcher, remainder)) => {
matchers.push(matcher);
unmatched_filter = Ok(remainder);
}
Err(remaining_filter) => {
unmatched_filter = Err(remaining_filter);
}
};
}
match unmatched_filter {
Ok(None) => Ok(matchers),
Ok(Some(remaining_filter)) => Err(remaining_filter),
Err(remaining_filter) => Err(remaining_filter),
}
}
pub fn match_filters(filter: &str) -> Result<Vec<Box<dyn SelectionLens>>, String> {
try_to_match_filters(filter)
.map_err(|unmatched_filter| format!("Invalid filter: {:?}", unmatched_filter))
}
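// Editorial sketch (not in the original file): parse a filter string into
// lenses and apply them to a JSON document. The ".name" filter syntax is an
// assumption based on the prop/identity parsers registered above.
//
//     let lenses = match_filters(".name").expect("valid filter");
//     let doc = json::parse(r#"{"name": "example"}"#).unwrap();
//     assert!(match_json_slice(&lenses, &doc, true).is_ok());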
LogisticModel.py

from torch import nn
import torch


class LogisticRegression(nn.Module):
    def __init__(self,
                 theta_params: int):
        super(LogisticRegression, self).__init__()
        self.__linear = nn.Linear(theta_params, 1)
        self.__sigmoid_layer = nn.Sigmoid()

    def forward(self,
                x_input: torch.tensor) -> torch.tensor:
        return self.__sigmoid_layer(self.__linear(x_input))
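# Editorial usage sketch (not part of the original module): a forward pass
# over a random batch; 3 input features and batch size 4 are arbitrary.
if __name__ == "__main__":
    model = LogisticRegression(theta_params=3)
    probs = model(torch.randn(4, 3))
    assert probs.shape == (4, 1)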
update_metadata_account_v2.rs

#![cfg(feature = "test-bpf")]
mod utils;
use mpl_token_metadata::{
error::MetadataError,
id, instruction,
state::{Creator, Collection, DataV2, Key, MAX_NAME_LENGTH, MAX_SYMBOL_LENGTH, MAX_URI_LENGTH},
utils::puffed_out_string,
};
use num_traits::FromPrimitive;
use solana_program_test::*;
use solana_sdk::{
instruction::InstructionError,
signature::{Keypair, Signer},
transaction::{Transaction, TransactionError},
transport::TransportError,
};
use utils::*;
mod update_metadata_account_v2 {
use super::*;
#[tokio::test]
async fn success_compatible() {
let mut context = program_test().start_with_context().await;
let test_metadata = Metadata::new();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let puffed_symbol = puffed_out_string(&symbol, MAX_SYMBOL_LENGTH);
let puffed_uri = puffed_out_string(&uri, MAX_URI_LENGTH);
test_metadata
.create(
&mut context,
name,
symbol.clone(),
uri.clone(),
None,
10,
true,
)
.await
.unwrap();
let updated_name = "New Name".to_string();
let puffed_updated_name = puffed_out_string(&updated_name, MAX_NAME_LENGTH);
test_metadata
.update_v2(
&mut context,
updated_name,
symbol,
uri,
None,
10,
false,
Some(Collection {
key: test_metadata.pubkey,
verified: false,
}),
None,
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.data.name, puffed_updated_name);
assert_eq!(metadata.data.symbol, puffed_symbol);
assert_eq!(metadata.data.uri, puffed_uri);
assert_eq!(metadata.data.seller_fee_basis_points, 10);
assert_eq!(metadata.data.creators, None);
assert_eq!(metadata.primary_sale_happened, false);
assert_eq!(metadata.is_mutable, false);
assert_eq!(metadata.mint, test_metadata.mint.pubkey());
assert_eq!(metadata.update_authority, context.payer.pubkey());
assert_eq!(metadata.key, Key::MetadataV1);
assert_eq!(metadata.collection.unwrap().key, test_metadata.pubkey);
}
#[tokio::test]
async fn success() {
let mut context = program_test().start_with_context().await;
let test_metadata = Metadata::new();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
let puffed_symbol = puffed_out_string(&symbol, MAX_SYMBOL_LENGTH);
let puffed_uri = puffed_out_string(&uri, MAX_URI_LENGTH);
test_metadata
.create_v2(
&mut context,
name,
symbol.clone(),
uri.clone(),
None,
10,
true,
None,
None,
None,
)
.await
.unwrap();
let updated_name = "New Name".to_string();
let puffed_updated_name = puffed_out_string(&updated_name, MAX_NAME_LENGTH);
test_metadata | updated_name,
symbol,
uri,
None,
10,
false,
None,
None,
)
.await
.unwrap();
let metadata = test_metadata.get_data(&mut context).await;
assert_eq!(metadata.data.name, puffed_updated_name);
assert_eq!(metadata.data.symbol, puffed_symbol);
assert_eq!(metadata.data.uri, puffed_uri);
assert_eq!(metadata.data.seller_fee_basis_points, 10);
assert_eq!(metadata.data.creators, None);
assert_eq!(metadata.primary_sale_happened, false);
assert_eq!(metadata.is_mutable, false);
assert_eq!(metadata.mint, test_metadata.mint.pubkey());
assert_eq!(metadata.update_authority, context.payer.pubkey());
assert_eq!(metadata.key, Key::MetadataV1);
}
#[tokio::test]
async fn fail_invalid_update_authority() {
let mut context = program_test().start_with_context().await;
let test_metadata = Metadata::new();
let fake_update_authority = Keypair::new();
test_metadata
.create(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
true,
)
.await
.unwrap();
let tx = Transaction::new_signed_with_payer(
&[instruction::update_metadata_accounts_v2(
id(),
test_metadata.pubkey,
fake_update_authority.pubkey(),
None,
None,
None,
None,
)],
Some(&context.payer.pubkey()),
&[&context.payer, &fake_update_authority],
context.last_blockhash,
);
let result = context
.banks_client
.process_transaction(tx)
.await
.unwrap_err();
assert_custom_error!(result, MetadataError::UpdateAuthorityIncorrect);
}
#[tokio::test]
async fn cannot_flip_is_mutable_from_false_to_true() {
let mut context = program_test().start_with_context().await;
let test_metadata = Metadata::new();
let name = "Test".to_string();
let symbol = "TST".to_string();
let uri = "uri".to_string();
// Start with NFT immutable.
let is_mutable = false;
test_metadata
.create(
&mut context,
name,
symbol.clone(),
uri.clone(),
None,
10,
is_mutable,
)
.await
.unwrap();
let tx = Transaction::new_signed_with_payer(
&[instruction::update_metadata_accounts_v2(
id(),
test_metadata.pubkey,
context.payer.pubkey().clone(),
None,
None,
None,
// Try to flip to be mutable.
Some(true),
)],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
let result = context
.banks_client
.process_transaction(tx)
.await
.unwrap_err();
// We should not be able to make an immutable NFT mutable again.
assert_custom_error!(result, MetadataError::IsMutableCanOnlyBeFlippedToFalse);
}
#[tokio::test]
async fn fail_cannot_verify_collection() {
let mut context = program_test().start_with_context().await;
let test_metadata = Metadata::new();
test_metadata
.create_v2(
&mut context,
"Test".to_string(),
"TST".to_string(),
"uri".to_string(),
None,
10,
true,
None,
None,
None,
)
.await
.unwrap();
let test_collection = Metadata::new();
test_collection
.create_v2(
&mut context,
"Test Col".to_string(),
"TSTCOL".to_string(),
"uricol".to_string(),
None,
10,
false,
None,
None,
None,
)
.await
.unwrap();
let collection_master_edition_account = MasterEditionV2::new(&test_collection);
collection_master_edition_account
.create_v3(&mut context, Some(1))
.await
.unwrap();
let tx = Transaction::new_signed_with_payer(
&[instruction::update_metadata_accounts_v2(
id(),
test_metadata.pubkey,
context.payer.pubkey().clone(),
None,
Some(DataV2 {
name: "Test".to_string(),
symbol: "TST".to_string(),
uri: "uri".to_string(),
creators: None,
seller_fee_basis_points: 10,
collection: Some(Collection {
key: test_collection.pubkey,
verified: true,
}),
uses: None,
}),
None,
None,
)],
Some(&context.payer.pubkey()),
&[&context.payer],
context.last_blockhash,
);
let result = context
.banks_client
.process_transaction(tx)
.await
.unwrap_err();
assert_custom_error!(
result,
MetadataError::CollectionCannotBeVerifiedInThisInstruction
);
}
} | .update_v2(
&mut context, |
util.py | from .network import Person, Post, SocialNetwork
from dsa import Array, mergesort, Set
from typing import List
__all__ = [
"people_by_popularity",
"posts_by_popularity",
"read_event_file",
"read_network_file"
]
# Creates a network from a network file.
def read_network_file(file_path: str, **network_args) -> SocialNetwork:
with open(file_path) as file:
lines = file.readlines()
# Note: allowed to use List for readlines().
split_lines: Array[List[str]] = Array(len(lines))
for i in range(len(split_lines)):
line = lines[i].rstrip("\n")
cols = line.split(":")
if not line or len(cols) not in (1, 2):
raise ValueError(f"line {i + 1} has invalid format.")
split_lines[i] = cols
# Pre-scan file to get number of people.
names = Set()
for i, columns in enumerate(split_lines, 1):
if len(columns) == 1:
name = columns[0]
if not name or name.isspace():
raise ValueError(f"line {i}: name cannot be blank or whitespace.")
names.add(name)
network_args["expected_people"] = len(names)
network = SocialNetwork(**network_args)
for name in names:
network.add_person(name)
for i, columns in enumerate(split_lines, 1):
if len(columns) == 2:
try:
person1 = network.find_person(columns[0])
person2 = network.find_person(columns[1])
person2.follow(person1)
except ValueError as e:
raise ValueError(f"line {i}: {e}")
return network
# Reads an event file and applies the events to a network.
def read_event_file(file_path: str, network: SocialNetwork) -> None:
|
# Returns an array of a network's people sorted descending by follower count.
def people_by_popularity(network: SocialNetwork) -> Array[Person]:
people = Array(network.people)
# Mergesort to avoid slowdown of quicksort if many people have similar follower count.
# (Which will happen as simulations progress.)
mergesort(people, reverse=True, key=lambda p: p.follower_count)
return people
# Returns an array of a network's posts sorted descending by like count.
def posts_by_popularity(network: SocialNetwork) -> Array[Post]:
posts = Array(network.posts)
# Mergesort to avoid slowdown of quicksort if many posts have similar like count.
# (Which will happen as simulations progress.)
mergesort(posts, reverse=True, key=lambda p: p.like_count)
return posts
| with open(file_path) as file:
for i, line in enumerate(file, 1):
line = line.rstrip("\n")
cols = line.split(":")
if len(cols) == 2 and cols[0] in ("A", "a"):
name = cols[1]
if not name or name.isspace():
raise ValueError(f"line {i}: name cannot be blank or whitespace.")
try:
network.add_person(name)
except ValueError as e:
raise ValueError(f"line {i}: {e}")
elif len(cols) == 2 and cols[0] in ("R", "r"):
try:
network.delete_person(network.find_person(cols[1]))
except ValueError as e:
raise ValueError(f"line {i}: {e}")
elif len(cols) == 3 and cols[0] in ("F", "f"):
try:
person1 = network.find_person(cols[1])
person2 = network.find_person(cols[2])
person2.follow(person1)
except ValueError as e:
raise ValueError(f"line {i}: {e}")
elif len(cols) == 3 and cols[0] in ("U", "u"):
try:
person1 = network.find_person(cols[1])
person2 = network.find_person(cols[2])
person1.unfollow(person2)
except ValueError as e:
raise ValueError(f"line {i}: {e}")
elif len(cols) == 3 and cols[0] in ("P", "p"):
try:
person = network.find_person(cols[1])
person.make_post(cols[2])
except ValueError as e:
raise ValueError(f"line {i}: {e}")
elif len(cols) == 4 and cols[0] in ("P", "p"):
try:
clickbait_factor = int(cols[3])
except ValueError:
raise ValueError(f"line {i}: invalid clickbait factor.")
else:
try:
person = network.find_person(cols[1])
person.make_post(cols[2], clickbait_factor)
except ValueError as e:
raise ValueError(f"line {i}: {e}")
else:
raise ValueError(f"line {i} has invalid format.") |
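A hypothetical end-to-end driver for the helpers above (the file names are made up, and apart from follower_count the attributes printed on Person are assumptions):
network = read_network_file("network.txt")
read_event_file("events.txt", network)
for person in people_by_popularity(network):
    print(person.name, person.follower_count)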
handlers_test.go | package username
import (
"context"
"testing"
"github.com/iov-one/weave"
"github.com/iov-one/weave/errors"
"github.com/iov-one/weave/migration"
"github.com/iov-one/weave/store"
"github.com/iov-one/weave/weavetest"
"github.com/iov-one/weave/weavetest/assert"
"github.com/iov-one/weave/x"
)
func TestRegisterTokenHandler(t *testing.T) {
var (
aliceCond = weavetest.NewCondition()
bobbyCond = weavetest.NewCondition()
)
cases := map[string]struct {
Tx weave.Tx
Auth x.Authenticator
WantCheckErr *errors.Error
WantDeliverErr *errors.Error
}{
"success": {
Tx: &weavetest.Tx{
Msg: &RegisterTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "bobby*iov",
Targets: []BlockchainAddress{
{BlockchainID: "bc_1", Address: "addr1"},
{BlockchainID: "bc_2", Address: "addr2"},
},
},
},
Auth: &weavetest.Auth{Signer: bobbyCond},
},
"username must be unique": {
Tx: &weavetest.Tx{
Msg: &RegisterTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
Targets: []BlockchainAddress{
{BlockchainID: "bc_1", Address: "addr1"},
},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
WantCheckErr: errors.ErrDuplicate,
WantDeliverErr: errors.ErrDuplicate,
},
"target can be empty": {
Tx: &weavetest.Tx{
Msg: &RegisterTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice2*iov",
Targets: []BlockchainAddress{},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
},
"username must be provided": {
Tx: &weavetest.Tx{
Msg: &RegisterTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "",
Targets: []BlockchainAddress{
{BlockchainID: "bc_1", Address: "addr1"},
},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
WantCheckErr: errors.ErrInput,
WantDeliverErr: errors.ErrInput,
},
}
for testName, tc := range cases {
t.Run(testName, func(t *testing.T) {
db := store.MemStore()
migration.MustInitPkg(db, "username")
b := NewTokenBucket()
_, err := b.Put(db, []byte("alice*iov"), &Token{
Metadata: &weave.Metadata{Schema: 1},
Targets: []BlockchainAddress{
{BlockchainID: "unichain", Address: "some-unichain-address"},
},
Owner: aliceCond.Address(),
})
assert.Nil(t, err)
h := registerTokenHandler{
auth: tc.Auth,
bucket: b,
}
cache := db.CacheWrap()
if _, err := h.Check(context.TODO(), cache, tc.Tx); !tc.WantCheckErr.Is(err) {
t.Fatalf("unexpected check error: %s", err)
}
cache.Discard()
if _, err := h.Deliver(context.TODO(), db, tc.Tx); !tc.WantDeliverErr.Is(err) {
t.Fatalf("unexpected deliver error: %s", err)
}
})
}
}
func TestChangeTokenOwnerHandler(t *testing.T) {
var (
aliceCond = weavetest.NewCondition()
bobbyCond = weavetest.NewCondition()
)
cases := map[string]struct {
Tx weave.Tx
Auth x.Authenticator
WantCheckErr *errors.Error
WantDeliverErr *errors.Error
}{
"success": {
Tx: &weavetest.Tx{
Msg: &TransferTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
NewOwner: bobbyCond.Address(),
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
},
"only the owner can change the token": {
Tx: &weavetest.Tx{
Msg: &TransferTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
NewOwner: bobbyCond.Address(),
},
},
WantCheckErr: errors.ErrUnauthorized,
WantDeliverErr: errors.ErrUnauthorized,
Auth: &weavetest.Auth{Signer: bobbyCond},
},
"token must exist": {
Tx: &weavetest.Tx{
Msg: &TransferTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "does-not-exist*iov",
NewOwner: bobbyCond.Address(),
},
},
WantCheckErr: errors.ErrNotFound,
WantDeliverErr: errors.ErrNotFound,
Auth: &weavetest.Auth{Signer: bobbyCond},
},
"change to the same owner (no change) is allowed": {
Tx: &weavetest.Tx{
Msg: &TransferTokenMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
NewOwner: aliceCond.Address(),
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
},
}
for testName, tc := range cases {
t.Run(testName, func(t *testing.T) {
db := store.MemStore()
migration.MustInitPkg(db, "username")
b := NewTokenBucket()
_, err := b.Put(db, []byte("alice*iov"), &Token{
Metadata: &weave.Metadata{Schema: 1},
Targets: []BlockchainAddress{
{BlockchainID: "unichain", Address: "some-unichain-address"},
},
Owner: aliceCond.Address(),
})
assert.Nil(t, err)
h := transferTokenHandler{
auth: tc.Auth,
bucket: b,
}
cache := db.CacheWrap()
if _, err := h.Check(context.TODO(), cache, tc.Tx); !tc.WantCheckErr.Is(err) {
t.Fatalf("unexpected check error: %s", err)
}
cache.Discard()
if _, err := h.Deliver(context.TODO(), db, tc.Tx); !tc.WantDeliverErr.Is(err) {
t.Fatalf("unexpected deliver error: %s", err)
}
})
}
}
func TestChangeTokenTargetHandler(t *testing.T) {
var (
aliceCond = weavetest.NewCondition()
bobbyCond = weavetest.NewCondition()
)
cases := map[string]struct {
Tx weave.Tx
Auth x.Authenticator
WantCheckErr *errors.Error
WantDeliverErr *errors.Error
}{
"success": {
Tx: &weavetest.Tx{
Msg: &ChangeTokenTargetsMsg{ | Username: "alice*iov",
NewTargets: []BlockchainAddress{
{BlockchainID: "hydracoin", Address: "some-hydra-address"},
{BlockchainID: "pegasuscoin", Address: "some-pegasus-address"},
},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
},
"can change target to the same value (no change)": {
Tx: &weavetest.Tx{
Msg: &ChangeTokenTargetsMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
NewTargets: []BlockchainAddress{
{BlockchainID: "unichain", Address: "some-unicorn-address"},
},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
},
"invalid message": {
Tx: &weavetest.Tx{
Msg: &ChangeTokenTargetsMsg{
Metadata: nil,
Username: "",
NewTargets: []BlockchainAddress{},
},
},
Auth: &weavetest.Auth{Signer: aliceCond},
WantCheckErr: errors.ErrMetadata,
WantDeliverErr: errors.ErrMetadata,
},
"only the owner can change the token": {
Tx: &weavetest.Tx{
Msg: &ChangeTokenTargetsMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "alice*iov",
NewTargets: []BlockchainAddress{
{BlockchainID: "hydracoin", Address: "some-hydra-address"},
},
},
},
WantCheckErr: errors.ErrUnauthorized,
WantDeliverErr: errors.ErrUnauthorized,
Auth: &weavetest.Auth{Signer: bobbyCond},
},
"token must exist": {
Tx: &weavetest.Tx{
Msg: &ChangeTokenTargetsMsg{
Metadata: &weave.Metadata{Schema: 1},
Username: "does-not-exist*iov",
NewTargets: []BlockchainAddress{
{BlockchainID: "hydracoin", Address: "some-hydra-address"},
},
},
},
WantCheckErr: errors.ErrNotFound,
WantDeliverErr: errors.ErrNotFound,
Auth: &weavetest.Auth{Signer: bobbyCond},
},
}
for testName, tc := range cases {
t.Run(testName, func(t *testing.T) {
db := store.MemStore()
migration.MustInitPkg(db, "username")
b := NewTokenBucket()
_, err := b.Put(db, []byte("alice*iov"), &Token{
Metadata: &weave.Metadata{Schema: 1},
Targets: []BlockchainAddress{
{BlockchainID: "unichain", Address: "some-unicorn-address"},
},
Owner: aliceCond.Address(),
})
assert.Nil(t, err)
h := changeTokenTargetsHandler{
auth: tc.Auth,
bucket: b,
}
cache := db.CacheWrap()
if _, err := h.Check(context.TODO(), cache, tc.Tx); !tc.WantCheckErr.Is(err) {
t.Fatalf("unexpected check error: %s", err)
}
cache.Discard()
if _, err := h.Deliver(context.TODO(), db, tc.Tx); !tc.WantDeliverErr.Is(err) {
t.Fatalf("unexpected deliver error: %s", err)
}
})
}
} | Metadata: &weave.Metadata{Schema: 1}, |
behaviours.ts | import { Behaviours, http, idiom as lang, _, ui } from 'entcore';
console.log('directory behaviours loaded');
Behaviours.register('directory', {
rights:{
workflow: {
externalNotifications: "org.entcore.timeline.controllers.TimelineController|mixinConfig",
historyView: "org.entcore.timeline.controllers.TimelineController|historyView",
showMoodMotto: "org.entcore.directory.controllers.UserBookController|userBookMottoMood",
switchTheme: "org.entcore.directory.controllers.UserBookController|userBookSwitchTheme",
generateMergeKey: "org.entcore.directory.controllers.UserController|generateMergeKey",
mergeByKey: "org.entcore.directory.controllers.UserController|mergeByKey",
allowSharebookmarks: "org.entcore.directory.controllers.ShareBookmarkController|allowSharebookmarks",
allowLoginUpdate: "org.entcore.directory.controllers.UserController|allowLoginUpdate",
allowClassAdminAddUsers: "org.entcore.directory.controllers.DirectoryController|allowClassAdminAddUsers",
allowClassAdminResetPassword: "org.entcore.directory.controllers.DirectoryController|allowClassAdminResetPassword",
allowClassAdminBlockUsers: "org.entcore.directory.controllers.DirectoryController|allowClassAdminBlockUsers",
allowClassAdminDeleteUsers: "org.entcore.directory.controllers.DirectoryController|allowClassAdminDeleteUsers",
allowClassAdminUnlinkUsers: "org.entcore.directory.controllers.DirectoryController|allowClassAdminUnlinkUsers"
}
},
sniplets: {
facebook: {
title: 'sniplet.facebook.title',
description: 'sniplet.facebook.desc',
controller: {
initSource: function(){
this.source = {
groups: []
};
this.search = {
text: '',
groups: [],
structures: [],
structure: null
};
http().get('/userbook/structures').done(function(structures){
this.search.structures = structures;
this.$apply('search');
}.bind(this));
},
viewUserInfos: function(userId){
window.open('/userbook/annuaire#/' + userId, '_blank');
},
removeGroup: function(index, group){
this.source.groups.splice(index, 1);
this.search.groups.push(group);
},
addGroup: function(group){
this.source.groups.push(group);
var index = this.search.groups.indexOf(group);
this.search.groups.splice(index, 1);
},
loadGroups: function(){
var that = this;
http().get('/userbook/structure/' + this.search.structure.id).done(function(structure){
this.search.groups = structure.profileGroups.concat(structure.manualGroups);
_.map(this.search.groups, function(group){ group.translatedName = that.groupTranslation(group.name) });
this.$apply('search');
}.bind(this));
},
init: function(){
this.source.groups.forEach(function(group){
http().get('/userbook/visible/users/' + group.id).done(function(users){
group.users = users; | },
applySource: function(){
this.setSnipletSource(this.source);
},
colorFromType: function(type){
return ui.profileColors.match(type);
},
groupTranslation: function(groupName){
var splittedName = groupName.split('-')
return splittedName.length > 1 ?
lang.translate(groupName.substring(0, groupName.lastIndexOf('-'))) + '-' + lang.translate(groupName.split('-')[splittedName.length - 1]) :
groupName
},
getType: function(type){
if(type instanceof Array)
return type[0]
return type
}
}
}
}
}); | this.$apply('source');
}.bind(this));
}.bind(this)) |
Stage.js | import React from 'react';
import classNames from 'classnames';
import Gocd from '../lib/Gocd';
var allStatuses = {
"Passed": "passed",
"Cancelled": "cancelled",
"Failed": "failed",
"Unknown": "unknown",
"Building": "building"
};
export default React.createClass({
getInitialState: function(){
return {jobs: []};
},
componentDidMount: function() {
if (this.props.drillDown) {
Gocd.fetchJobs(this.props.data.details_path, (function(jobs){
this.setState({jobs: jobs});
}).bind(this),
//TODO: error handling
function(){});
} | this.props.drillDown? undefined: allStatuses[this.props.data.status]);
let renderJobs = () => this.state.jobs.map((job, index) => (<div key={index} className={classNames("job", allStatuses[job.result])}>{job.name}</div>));
return (<div className={stageClass}>{this.props.drillDown? renderJobs() :this.props.data.name}</div>);
}
}); | },
render: function() {
let stageClass = classNames("stage", "fill-height-or-more", |
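A hypothetical parent component for the Stage above; the stage prop shape follows the fields the component reads (name, status, details_path) and is otherwise assumed.
import React from 'react';
import Stage from './Stage';

const Pipeline = ({ stages }) => (
  <div>
    {stages.map((stage, i) => (
      // drillDown=true fetches and renders the stage's jobs instead of its name.
      <Stage key={i} data={stage} drillDown={i === 0} />
    ))}
  </div>
);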
makesedonac.py | #! /usr/bin/env python3
#
# makesedonac.py
#
# Compile sedonac.jar
#
# Author: Brian Frank
# Creation: 7 Dec 07
#
from __future__ import print_function
import os
import env
import compilejar
depends = [env.sedonaJar]
srcDir = os.path.join(env.src, "sedonac", "src")
jarFile = env.sedonacJar
packages = [
"sedonac",
"sedonac.analysis",
"sedonac.asm",
"sedonac.ast",
"sedonac.gen",
"sedonac.ir",
"sedonac.namespace",
"sedonac.parser",
"sedonac.platform",
"sedonac.scode",
"sedonac.steps",
"sedonac.test",
"sedonac.translate",
"sedonac.util",
]
# Make
def compile():
try:
compilejar.compile(srcDir, depends, packages, jarFile)
except env.BuildError:
print("**")
print("** FAILED [" + jarFile + "]")
print("**")
return 1
# Main
if __name__ == '__main__':
| compile() |
|
main.rs | extern crate rand;
mod connection;
mod node;
mod lib1;
use rand::Rng;
use node::{Node, NodeType};
use connection::Connection;
fn main() {
let mut node_list : Vec<Node> = sample_node_list();
let mut conn_list : Vec<Connection> = sample_conn_list();
let training_data_list : Vec<(f64, f64)> = sample_training_data();
let test_data_list : Vec<(f64, f64)> = sample_input_data();
for i in 0..10 {
conn_list = lib1::walk_and_get_new_connection_list(&training_data_list, 0.7, 0.3,
&mut conn_list, &mut node_list);
println!("#{} Error [{:.5}]", i, calc_error_rate(&mut node_list, &conn_list, &test_data_list));
}
}
fn calc_error_rate(node_list : &mut Vec<Node>, conn_list : &Vec<Connection>,
input_list : &Vec<(f64, f64)>) -> f64 {
let mut sum_of_error = 0.0;
for &(input_1, input_2) in input_list {
lib1::reset_node_list(node_list);
let output = lib1::walk_for_output(node_list, &conn_list, input_1, input_2);
sum_of_error += lib1::calc_error(input_1, input_2, output);
}
sum_of_error / input_list.len() as f64
}
fn sample_training_data() -> Vec<(f64, f64)> {
vec![(1.0, 0.0), (1.0, 1.0), (0.0, 1.0), (0.0, 0.0)]
}
fn sample_input_data() -> Vec<(f64, f64)> {
let mut data : Vec<(f64, f64)> = Vec::new();
let mut rng = rand::thread_rng();
for _ in 0..1000 {
let x_f = (rng.gen::<u32>() % 2) as f64; |
data.push((x_f, y_f));
}
return data;
}
fn sample_node_list() -> Vec<Node> {
vec![
Node::new("i1", NodeType::Input_1, 0.0),
Node::new("i2", NodeType::Input_2, 0.0),
Node::new("b1", NodeType::Bias, 1.0),
Node::new("h1", NodeType::Hidden, 0.0),
Node::new("h2", NodeType::Hidden, 0.0),
Node::new("b2", NodeType::Bias, 1.0),
Node::new("o1", NodeType::Output, 0.0),
]
}
fn sample_conn_list() -> Vec<Connection> {
vec![
Connection::new("i1".to_string(), "h1".to_string(), -0.07),
Connection::new("i1".to_string(), "h2".to_string(), 0.94),
Connection::new("i2".to_string(), "h1".to_string(), 0.22),
Connection::new("i2".to_string(), "h2".to_string(), 0.46),
Connection::new("b1".to_string(), "h1".to_string(), -0.46),
Connection::new("b1".to_string(), "h2".to_string(), 0.10),
Connection::new("h1".to_string(), "o1".to_string(), -0.22),
Connection::new("h2".to_string(), "o1".to_string(), 0.58),
Connection::new("b2".to_string(), "o1".to_string(), 0.78),
]
} | let y_f = (rng.gen::<u32>() % 2) as f64; |
pixman_region.rs | use std::mem;
use libc::{c_int, c_uint};
use wlroots_sys::{pixman_region32_fini, pixman_region32_init,
pixman_region32_t, pixman_region32_union_rect};
/// A pixman region, used for damage tracking.
#[derive(Debug)]
pub struct PixmanRegion {
pub region: pixman_region32_t
}
impl PixmanRegion {
/// Make a new pixman region.
pub fn new() -> Self {
unsafe {
// NOTE Rationale for uninitialized memory:
// We are automatically filling it in with pixman_region32_init.
let mut region = mem::uninitialized();
pixman_region32_init(&mut region);
PixmanRegion { region }
}
}
pub fn rectangle(&mut self, x: c_int, y: c_int, width: c_uint, height: c_uint) |
}
impl Drop for PixmanRegion {
fn drop(&mut self) {
unsafe { pixman_region32_fini(&mut self.region) }
}
}
| {
unsafe {
let region_ptr = &mut self.region as *mut _;
pixman_region32_union_rect(region_ptr, region_ptr, x, y, width, height);
}
} |
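mem::uninitialized() has been deprecated since Rust 1.39; below is a sketch of the same constructor written with MaybeUninit, assuming the identical wlroots_sys bindings.
use std::mem::MaybeUninit;

impl PixmanRegion {
    /// Like `new`, but without the deprecated uninitialized-memory call.
    pub fn new_initialized() -> Self {
        unsafe {
            let mut region = MaybeUninit::<pixman_region32_t>::uninit();
            // pixman_region32_init fills the struct, so assume_init is sound.
            pixman_region32_init(region.as_mut_ptr());
            PixmanRegion { region: region.assume_init() }
        }
    }
}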
utils.go | package internal
import (
"github.com/go-resty/resty/v2"
"log"
"sync"
"time"
)
func Trace(resp *resty.Response, err error) {
log.Println("Response Info:")
log.Println(" URL :", resp.Request.URL)
log.Println(" Error :", err)
log.Println(" Status Code:", resp.StatusCode())
log.Println(" Status :", resp.Status())
log.Println(" Proto :", resp.Proto())
log.Println(" Time :", resp.Time())
log.Println(" Received At:", resp.ReceivedAt())
//log.Println(" Body :\n", resp)
log.Println()
}
// Parallelize parallelize the function calls
func Parallelize(functions ...func()) {
var waitGroup sync.WaitGroup
waitGroup.Add(len(functions))
defer waitGroup.Wait()
for _, function := range functions {
go func(copy func()) {
defer waitGroup.Done()
copy()
}(function)
}
}
// Contains Check value existence in a slice
func | (s []string, str string) bool {
for _, v := range s {
if v == str {
return true
}
}
return false
}
type poller struct {
ticker *time.Ticker
function func()
}
// RunNewPoller Run a new Poller around a lambda function, that tick every time.Duration
func RunNewPoller(timeInterval time.Duration, function func()) *poller {
ticker := time.NewTicker(timeInterval)
defer ticker.Stop()
poll := &poller{
ticker: ticker,
function: function,
}
poll.Run()
return poll
}
func (p *poller) Run() {
for ; true; <-p.ticker.C {
log.Println("Tick for polling...")
p.function()
}
}
| Contains |
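A hypothetical caller for the helpers above (not part of the package). Note that RunNewPoller never returns, because poller.Run loops forever.
func example() {
	// Parallelize blocks until both closures finish.
	Parallelize(
		func() { log.Println("task A") },
		func() { log.Println("task B") },
	)

	if Contains([]string{"a", "b"}, "b") {
		log.Println("found b")
	}

	// Ticks immediately, then every minute, forever.
	RunNewPoller(time.Minute, func() { log.Println("poll") })
}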
mod.rs | pub mod day01;
#[cfg(feature = "visualization")]
pub mod day01_renderer;
pub mod day02;
pub mod day03;
#[cfg(feature = "visualization")]
pub mod day03_renderer;
pub mod day04;
pub mod day05;
pub mod day06;
pub mod day07;
pub mod day08;
#[cfg(feature = "visualization")] | pub mod day08_renderer;
pub mod day09;
pub mod day10;
pub mod day11;
#[cfg(feature = "visualization")]
pub mod day11_renderer;
pub mod day12;
#[cfg(feature = "visualization")]
pub mod day12_renderer;
pub mod day13;
pub mod day14;
pub mod day15;
pub mod day16;
pub mod day17;
pub mod day18;
pub mod day19;
pub mod day20;
pub mod day21;
pub mod day22;
pub mod day23;
pub mod day24;
pub mod day25; | |
stages.js | import express from "express"
const router = express.Router();
import Stage from '../models/stage.model.js';
router.route('/').get((req, res) => {
Stage.find().sort({ name: 1 })
.then(stages => res.json(stages))
.catch(err => res.status(400).json('Error: ' + err));
});
router.route('/').post((req, res) => {
const name = req.body.name;
const profilePictureURL = req.body.profilePictureURL;
const wikiURL = req.body.wikiURL;
const newStage = new Stage({
"name": name,
"profilePictureURL": profilePictureURL,
"wikiURL": wikiURL
});
newStage.save()
.then(() => res.json(`Stage '${name}' added!`))
.catch(err => res.status(400).json('Error: ' + err));
});
router.route('/:id').get((req, res) => {
Stage.findById(req.params.id)
.then(stage => res.json(stage))
.catch(err => res.status(400).json('Error: ' + err));
});
router.route('/:id').delete((req, res) => {
Stage.findByIdAndDelete(req.params.id)
.then(() => res.json('Stage deleted.'))
.catch(err => res.status(400).json('Error: ' + err));
});
router.route('/:id').put((req, res) => {
Stage.findById(req.params.id)
.then(stage => {
stage.name = req.body.name;
stage.profilePictureURL = req.body.profilePictureURL;
stage.wikiURL = req.body.wikiURL;
stage.save()
.then(() => res.json('Stage updated!'))
.catch(err => res.status(400).json('Error: ' + err));
})
.catch(err => res.status(400).json('Error: ' + err)); | });
export default router; |
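A hypothetical way to mount this router in an Express app (the paths and port are assumptions):
import express from 'express';
import stagesRouter from './routes/stages.js';

const app = express();
app.use(express.json());          // needed so req.body is populated
app.use('/stages', stagesRouter);
app.listen(5000);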
|
Tab.d.ts | import * as React from "react";
import { CompositeItemOptions, CompositeItemHTMLProps } from "../Composite/CompositeItem";
import { TabStateReturn } from "./TabState";
export declare type TabOptions = CompositeItemOptions & Pick<Partial<TabStateReturn>, "manual"> & Pick<TabStateReturn, "panels" | "selectedId" | "select">;
export declare type TabHTMLProps = CompositeItemHTMLProps;
export declare type TabProps = TabOptions & TabHTMLProps;
export declare const useTab: {
(options?: TabOptions | undefined, htmlProps?: import("..").TabbableHTMLProps | undefined, unstable_ignoreUseOptions?: boolean | undefined): import("..").TabbableHTMLProps;
unstable_propsAreEqual: (prev: import("..").BoxOptions & {
disabled?: boolean | undefined;
focusable?: boolean | undefined;
} & {
unstable_clickOnEnter?: boolean | undefined;
unstable_clickOnSpace?: boolean | undefined;
} & Pick<Partial<import("..").unstable_IdStateReturn>, "baseId" | "unstable_idCountRef"> & {
id?: string | undefined;
} & Pick<Partial<import("..").CompositeStateReturn>, "baseId" | "unstable_virtual" | "orientation" | "unstable_moves" | "unstable_hasActiveWidget"> & Pick<import("..").CompositeStateReturn, "down" | "up" | "next" | "first" | "last" | "currentId" | "items" | "registerItem" | "unregisterItem" | "previous" | "setCurrentId"> & Pick<Partial<TabStateReturn>, "manual"> & Pick<TabStateReturn, "select" | "selectedId" | "panels"> & React.HTMLAttributes<any> & React.RefAttributes<any> & {
wrapElement?: ((element: React.ReactNode) => React.ReactNode) | undefined;
} & {
disabled?: boolean | undefined;
}, next: import("..").BoxOptions & { | unstable_clickOnSpace?: boolean | undefined;
} & Pick<Partial<import("..").unstable_IdStateReturn>, "baseId" | "unstable_idCountRef"> & {
id?: string | undefined;
} & Pick<Partial<import("..").CompositeStateReturn>, "baseId" | "unstable_virtual" | "orientation" | "unstable_moves" | "unstable_hasActiveWidget"> & Pick<import("..").CompositeStateReturn, "down" | "up" | "next" | "first" | "last" | "currentId" | "items" | "registerItem" | "unregisterItem" | "previous" | "setCurrentId"> & Pick<Partial<TabStateReturn>, "manual"> & Pick<TabStateReturn, "select" | "selectedId" | "panels"> & React.HTMLAttributes<any> & React.RefAttributes<any> & {
wrapElement?: ((element: React.ReactNode) => React.ReactNode) | undefined;
} & {
disabled?: boolean | undefined;
}) => boolean;
__keys: readonly any[];
__useOptions: (options: TabOptions, htmlProps: import("..").TabbableHTMLProps) => TabOptions;
};
export declare const Tab: import("reakit-system/src/createComponent").Component<"button", TabOptions>; | disabled?: boolean | undefined;
focusable?: boolean | undefined;
} & {
unstable_clickOnEnter?: boolean | undefined; |
modpow.rs | use criterion::{criterion_group, criterion_main, Bencher, Criterion};
use num_bigint::BigUint;
use num_traits::One;
#[cfg(feature = "rug")]
use rug::Integer;
fn num_modpow(b: &mut Bencher) {
let n = "9387019355706217197639129234358945126657617361248696932841794255538327365072557602175160199263073329488914880215590036563068284078359088114486271428098753"; | let n = n.parse().unwrap();
let x: BigUint = x.parse().unwrap();
let mut e = BigUint::one();
e <<= level as usize;
b.iter(|| x.modpow(&e, &n));
}
#[cfg(feature = "rug")]
fn gmp_modpow(b: &mut Bencher) {
let n = "9387019355706217197639129234358945126657617361248696932841794255538327365072557602175160199263073329488914880215590036563068284078359088114486271428098753";
let x = "2148617454765635492758175407769288127281667975788420713054995716016550287184632946544163990319181591625774561067011999700977775946073267145316355582522577";
let level = 10_000;
let n: Integer = n.parse().unwrap();
let x: Integer = x.parse().unwrap();
let mut e = Integer::new();
e.set_bit(level, true);
b.iter(|| x.pow_mod_ref(&e, &n).unwrap());
}
fn bench_modpow(c: &mut Criterion) {
c.bench_function("num bigint", num_modpow);
#[cfg(feature = "rug")]
{
c.bench_function("gmp", gmp_modpow);
}
}
criterion_group!(benches, bench_modpow);
criterion_main!(benches); | let x = "2148617454765635492758175407769288127281667975788420713054995716016550287184632946544163990319181591625774561067011999700977775946073267145316355582522577";
let level = 10_000; |
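Assuming a standard Criterion bench setup in Cargo.toml, these benchmarks would be run with:
cargo bench                  # num-bigint backend only
cargo bench --features rug   # additionally benchmarks the GMP-backed rug path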
serializers.py | from rest_framework import serializers
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email')
class RegisterSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data): | return user
class LoginSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, data):
user = authenticate(**data)
if user and user.is_active:
return user
raise serializers.ValidationError("Incorrect credentials") | user = User.objects.create_user(
validated_data['username'], validated_data['email'], validated_data['password'])
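A hypothetical view-side use of the serializers above (Django REST Framework; the field values are illustrative):
serializer = RegisterSerializer(data={
    "username": "alice",
    "email": "[email protected]",
    "password": "s3cret",
})
serializer.is_valid(raise_exception=True)
user = serializer.save()  # dispatches to create() above
print(UserSerializer(user).data)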
|
home.js | $(document).ready(function () {
startSlider(0);
});
function startSlider(idx) {
$img = $("#slide div img").eq(idx);
$img.fadeIn('slow', function () {
$img.delay(5000).fadeOut('slow', function () { | startSlider(idx + 1);
}
});
});
}
$(document).ready(function () {
$("div.bhoechie-tab-menu>div.list-group>a").click(function (e) {
e.preventDefault();
$(this).siblings('a.active').removeClass("active");
$(this).addClass("active");
var index = $(this).index();
$("div.bhoechie-tab>div.bhoechie-tab-content").removeClass("active");
$("div.bhoechie-tab>div.bhoechie-tab-content").eq(index).addClass("active");
});
}); | if ($("#slide div img").length - 1 == idx) {
startSlider(0);
} else { |
openssl_crypto_ocsp_ocsp_srv.go | // go-libtor - Self-contained Tor from Go | /*
#define OPENSSLDIR "/usr/local/ssl"
#define ENGINESDIR "/usr/local/lib/engines"
#include <crypto/ocsp/ocsp_srv.c>
*/
import "C" | // Copyright (c) 2018 Péter Szilágyi. All rights reserved.
package libtor
|
entities.js | import merge from 'lodash.merge';
import * as types from '../constants/ActionTypes';
const initialState = {
grouplist: [],
users: [],
};
export default function | (state = initialState, action) {
let index = -1;
let group = null;
switch (action.type) {
case types.LOGOUT:
return { ...initialState };
case types.GOT_UNREADMESSAGE:
group = state.grouplist.find(group => group.Id == action.groupId);
if (group) group.unread += 1;
return { ...state };
case types.FETCH_MESSAGE_SUCCESS:
group = state.grouplist.find(group => group.Id == action.groupId);
if (group) group.unread = 0;
return { ...state };
case types.FETCH_GROUPLIST_SUCCESS:
return { ...state, grouplist: action.groups.map(group => ({ ...group, unread: 0 })) };
case types.FETCH_ACCOUNTLIST_SUCCESS:
return { ...state, users: action.accounts };
case types.SET_FAVORITEGROUP_SUCCESS:
index = state.users.indexOf(action.originUInfo);
state.users.splice(index, 1, action.newUInfo);
return { ...state };
case types.ADD_NEWGROUP_SUCCESS:
return { ...state, grouplist: [...state.grouplist, { ...action.group, unread: 0 }] };
case types.SAVEPROFILE_SUCCESS:
index = state.users.findIndex(user => user.Id == action.uInfo.Id);
if (index != -1)
state.users.splice(index, 1, action.uInfo);
return { ...state };
default:
return state;
}
}
| entities |
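A hypothetical sequence of dispatches against the reducer above; the action shapes mirror exactly the fields the reducer reads.
let state = entities(undefined, {
  type: types.FETCH_GROUPLIST_SUCCESS,
  groups: [{ Id: 1, Name: 'general' }],
});
state = entities(state, { type: types.GOT_UNREADMESSAGE, groupId: 1 });
// state.grouplist[0].unread === 1
state = entities(state, { type: types.FETCH_MESSAGE_SUCCESS, groupId: 1 });
// state.grouplist[0].unread === 0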
msg-list.component.ts | import {Component, OnInit} from '@angular/core';
import {Message} from './msg.model';
import {MsgService} from './msg.service';
@Component({
selector: 'my-msg-list', // Used in msgs.component.html
templateUrl: './msg-list.component.html',
})
export class | implements OnInit {
public messages: Message[];
constructor(private MsgService: MsgService) {}
ngOnInit() {
this.MsgService.getMsg()
.subscribe(
(msgs: Message[]) => this.messages = msgs
);
}
}
| MsgListComponent |
argument_alignment_after.rs | fn foo(x: i32,
y: i32,
z: i32) {}
pub fn new<S>(shape: S,
material_idx: usize)
-> Primitive
where S: Shape + 'static {}
fn main() {
fooooo(1,
2,
3)
} |
extern {
fn variadic(a: i32,
b: i32,
c: i32,
...)
-> i32;
} | |
service.rs | // Copyright 2019 The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use super::{
config::LivenessConfig,
error::LivenessError,
message::{PingPong, PingPongMessage},
state::LivenessState,
LivenessRequest,
LivenessResponse,
LOG_TARGET,
};
use crate::{
domain_message::DomainMessage,
services::liveness::{handle::LivenessEventSender, LivenessEvent, PingPongEvent},
tari_message::TariMessageType,
};
use futures::{future::Either, pin_mut, stream::StreamExt, Stream};
use log::*;
use std::{iter, sync::Arc, time::Instant};
use tari_comms::{
connectivity::{ConnectivityRequester, ConnectivitySelection},
peer_manager::NodeId,
types::CommsPublicKey,
};
use tari_comms_dht::{
domain_message::OutboundDomainMessage,
outbound::{DhtOutboundError, OutboundMessageRequester},
};
use tari_service_framework::reply_channel::RequestContext;
use tari_shutdown::ShutdownSignal;
use tokio::time;
/// Service responsible for testing Liveness of Peers.
pub struct LivenessService<THandleStream, TPingStream> {
config: LivenessConfig,
request_rx: Option<THandleStream>,
ping_stream: Option<TPingStream>,
state: LivenessState,
connectivity: ConnectivityRequester,
outbound_messaging: OutboundMessageRequester,
event_publisher: LivenessEventSender,
shutdown_signal: Option<ShutdownSignal>,
}
impl<TRequestStream, TPingStream> LivenessService<TRequestStream, TPingStream>
where
TPingStream: Stream<Item = DomainMessage<PingPongMessage>>,
TRequestStream: Stream<Item = RequestContext<LivenessRequest, Result<LivenessResponse, LivenessError>>>,
{
#[allow(clippy::too_many_arguments)]
pub fn new(
config: LivenessConfig,
request_rx: TRequestStream,
ping_stream: TPingStream,
state: LivenessState,
connectivity: ConnectivityRequester,
outbound_messaging: OutboundMessageRequester,
event_publisher: LivenessEventSender,
shutdown_signal: ShutdownSignal,
) -> Self
{
Self {
request_rx: Some(request_rx),
ping_stream: Some(ping_stream),
state,
connectivity,
outbound_messaging,
event_publisher,
shutdown_signal: Some(shutdown_signal),
config,
}
}
pub async fn run(mut self) {
debug!(target: LOG_TARGET, "Liveness service started");
debug!(target: LOG_TARGET, "Config = {:?}", self.config);
let ping_stream = self.ping_stream.take().expect("ping_stream cannot be None").fuse();
pin_mut!(ping_stream);
let request_stream = self.request_rx.take().expect("ping_stream cannot be None").fuse();
pin_mut!(request_stream);
let mut ping_tick = match self.config.auto_ping_interval {
Some(interval) => Either::Left(time::interval_at((Instant::now() + interval).into(), interval)),
None => Either::Right(futures::stream::iter(iter::empty())),
}
.fuse();
let mut shutdown_signal = self
.shutdown_signal
.take()
.expect("Liveness service initialized without shutdown signal");
loop {
futures::select! {
// Requests from the handle
request_context = request_stream.select_next_some() => {
let (request, reply_tx) = request_context.split();
let _ = reply_tx.send(self.handle_request(request).await);
},
// Tick events
_ = ping_tick.select_next_some() => {
if let Err(err) = self.start_ping_round().await {
warn!(target: LOG_TARGET, "Error when pinging peers: {}", err);
}
},
// Incoming messages from the Comms layer
msg = ping_stream.select_next_some() => {
if let Err(err) = self.handle_incoming_message(msg).await {
warn!(target: LOG_TARGET, "Failed to handle incoming PingPong message: {}", err);
}
},
_ = shutdown_signal => {
info!(target: LOG_TARGET, "Liveness service shutting down because the shutdown signal was received");
break;
}
}
}
}
async fn handle_incoming_message(&mut self, msg: DomainMessage<PingPongMessage>) -> Result<(), LivenessError> {
let DomainMessage::<_> {
source_peer,
inner: ping_pong_msg,
..
} = msg;
let node_id = source_peer.node_id;
let public_key = source_peer.public_key;
match ping_pong_msg.kind().ok_or_else(|| LivenessError::InvalidPingPongType)? {
PingPong::Ping => {
self.state.inc_pings_received();
self.send_pong(ping_pong_msg.nonce, public_key).await.unwrap();
self.state.inc_pongs_sent();
debug!(
target: LOG_TARGET,
"Received ping from peer '{}' with useragent '{}'",
node_id.short_str(),
source_peer.user_agent,
);
let ping_event = PingPongEvent::new(node_id, None, ping_pong_msg.metadata.into());
self.publish_event(LivenessEvent::ReceivedPing(Box::new(ping_event)));
},
PingPong::Pong => {
if !self.state.is_inflight(ping_pong_msg.nonce) {
debug!(
target: LOG_TARGET,
"Received Pong that was not requested from '{}' with useragent {}. Ignoring it.",
node_id.short_str(),
source_peer.user_agent,
);
return Ok(());
}
let maybe_latency = self.state.record_pong(ping_pong_msg.nonce);
debug!(
target: LOG_TARGET,
"Received pong from peer '{}' with useragent '{}'. {}",
node_id.short_str(),
source_peer.user_agent,
maybe_latency.map(|ms| format!("Latency: {}ms", ms)).unwrap_or_default(),
);
let pong_event = PingPongEvent::new(node_id, maybe_latency, ping_pong_msg.metadata.into());
self.publish_event(LivenessEvent::ReceivedPong(Box::new(pong_event)));
},
}
Ok(())
}
async fn send_ping(&mut self, node_id: NodeId) -> Result<(), LivenessError> {
let msg = PingPongMessage::ping_with_metadata(self.state.metadata().clone());
self.state.add_inflight_ping(msg.nonce, &node_id);
debug!(target: LOG_TARGET, "Sending ping to peer '{}'", node_id.short_str(),);
self.outbound_messaging
.send_direct_node_id(node_id, OutboundDomainMessage::new(TariMessageType::PingPong, msg))
.await
.map_err(Into::<DhtOutboundError>::into)?;
Ok(())
}
async fn send_pong(&mut self, nonce: u64, dest: CommsPublicKey) -> Result<(), LivenessError> {
let msg = PingPongMessage::pong_with_metadata(nonce, self.state.metadata().clone());
self.outbound_messaging
.send_direct(dest, OutboundDomainMessage::new(TariMessageType::PingPong, msg))
.await
.map(|_| ())
.map_err(Into::into)
}
async fn handle_request(&mut self, request: LivenessRequest) -> Result<LivenessResponse, LivenessError> {
use LivenessRequest::*;
match request {
SendPing(node_id) => { | GetPingCount => {
let ping_count = self.get_ping_count();
Ok(LivenessResponse::Count(ping_count))
},
GetPongCount => {
let pong_count = self.get_pong_count();
Ok(LivenessResponse::Count(pong_count))
},
GetAvgLatency(node_id) => {
let latency = self.state.get_avg_latency_ms(&node_id);
Ok(LivenessResponse::AvgLatency(latency))
},
SetMetadataEntry(key, value) => {
self.state.set_metadata_entry(key, value);
Ok(LivenessResponse::Ok)
},
}
}
async fn start_ping_round(&mut self) -> Result<(), LivenessError> {
let selected_peers = self
.connectivity
.select_connections(ConnectivitySelection::random_nodes(
self.config.num_peers_per_round,
Default::default(),
))
.await?;
if selected_peers.is_empty() {
warn!(
target: LOG_TARGET,
"Cannot broadcast pings because there are no broadcast peers available"
)
}
let len_peers = selected_peers.len();
debug!(target: LOG_TARGET, "Sending liveness ping to {} peer(s)", len_peers);
for conn in selected_peers {
let msg = PingPongMessage::ping_with_metadata(self.state.metadata().clone());
self.state.add_inflight_ping(msg.nonce, conn.peer_node_id());
self.outbound_messaging
.send_direct_node_id(
conn.peer_node_id().clone(),
OutboundDomainMessage::new(TariMessageType::PingPong, msg),
)
.await?;
}
self.publish_event(LivenessEvent::BroadcastedNeighbourPings(len_peers));
Ok(())
}
fn publish_event(&mut self, event: LivenessEvent) {
let _ = self.event_publisher.send(Arc::new(event)).map_err(|_| {
trace!(
target: LOG_TARGET,
"Could not publish LivenessEvent as there are no subscribers"
)
});
}
fn get_ping_count(&self) -> usize {
self.state.pings_received()
}
fn get_pong_count(&self) -> usize {
self.state.pongs_received()
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::{
proto::liveness::MetadataKey,
services::liveness::{handle::LivenessHandle, state::Metadata},
};
use futures::{
channel::{mpsc, oneshot},
stream,
};
use rand::rngs::OsRng;
use std::time::Duration;
use tari_comms::{
message::MessageTag,
multiaddr::Multiaddr,
peer_manager::{NodeId, Peer, PeerFeatures, PeerFlags},
test_utils::mocks::create_connectivity_mock,
};
use tari_comms_dht::{
envelope::{DhtMessageHeader, DhtMessageType, Network},
outbound::{DhtOutboundRequest, MessageSendState, SendMessageResponse},
};
use tari_crypto::keys::PublicKey;
use tari_service_framework::reply_channel;
use tari_shutdown::Shutdown;
use tokio::{sync::broadcast, task};
#[tokio_macros::test_basic]
async fn get_ping_pong_count() {
let state = LivenessState::new();
state.inc_pings_received();
state.inc_pongs_received();
state.inc_pongs_received();
let (connectivity, mock) = create_connectivity_mock();
mock.spawn();
// Setup a CommsOutbound service handle which is not connected to the actual CommsOutbound service
let (outbound_tx, _) = mpsc::channel(10);
let outbound_messaging = OutboundMessageRequester::new(outbound_tx);
// Setup liveness service
let (sender_service, receiver) = reply_channel::unbounded();
let (publisher, _) = broadcast::channel(200);
let mut liveness_handle = LivenessHandle::new(sender_service, publisher.clone());
let shutdown = Shutdown::new();
let service = LivenessService::new(
Default::default(),
receiver,
stream::empty(),
state,
connectivity,
outbound_messaging,
publisher,
shutdown.to_signal(),
);
// Run the service
task::spawn(service.run());
let res = liveness_handle.get_ping_count().await.unwrap();
assert_eq!(res, 1);
let res = liveness_handle.get_pong_count().await.unwrap();
assert_eq!(res, 2);
}
#[tokio_macros::test]
async fn send_ping() {
let (connectivity, mock) = create_connectivity_mock();
mock.spawn();
// Setup a CommsOutbound service handle which is not connected to the actual CommsOutbound service
let (outbound_tx, mut outbound_rx) = mpsc::channel(10);
let outbound_messaging = OutboundMessageRequester::new(outbound_tx);
// Setup liveness service
let (sender_service, receiver) = reply_channel::unbounded();
let (publisher, _) = broadcast::channel(200);
let mut liveness_handle = LivenessHandle::new(sender_service, publisher.clone());
let shutdown = Shutdown::new();
let service = LivenessService::new(
Default::default(),
receiver,
stream::empty(),
LivenessState::default(),
connectivity,
outbound_messaging,
publisher,
shutdown.to_signal(),
);
// Run the LivenessService
task::spawn(service.run());
let (_, pk) = CommsPublicKey::random_keypair(&mut rand::rngs::OsRng);
let node_id = NodeId::from_key(&pk).unwrap();
// Receive outbound request
task::spawn(async move {
match outbound_rx.select_next_some().await {
DhtOutboundRequest::SendMessage(_, _, reply_tx) => {
let (_, rx) = oneshot::channel();
reply_tx
.send(SendMessageResponse::Queued(
vec![MessageSendState::new(MessageTag::new(), rx)].into(),
))
.unwrap();
},
}
});
let _res = liveness_handle.send_ping(node_id).await.unwrap();
}
fn create_dummy_message<T>(inner: T) -> DomainMessage<T> {
let (_, pk) = CommsPublicKey::random_keypair(&mut OsRng);
let source_peer = Peer::new(
pk.clone(),
NodeId::from_key(&pk).unwrap(),
Vec::<Multiaddr>::new().into(),
PeerFlags::empty(),
PeerFeatures::COMMUNICATION_NODE,
&[],
Default::default(),
);
DomainMessage {
dht_header: DhtMessageHeader {
version: 0,
destination: Default::default(),
origin_mac: Vec::new(),
ephemeral_public_key: None,
message_type: DhtMessageType::None,
network: Network::LocalTest,
flags: Default::default(),
message_tag: MessageTag::new(),
},
authenticated_origin: None,
source_peer,
inner,
}
}
#[tokio_macros::test]
async fn handle_message_ping() {
let state = LivenessState::new();
let (connectivity, mock) = create_connectivity_mock();
mock.spawn();
// Setup a CommsOutbound service handle which is not connected to the actual CommsOutbound service
let (outbound_tx, mut outbound_rx) = mpsc::channel(10);
let outbound_messaging = OutboundMessageRequester::new(outbound_tx);
let metadata = Metadata::new();
let msg = create_dummy_message(PingPongMessage::ping_with_metadata(metadata));
// A stream which emits one message and then closes
let pingpong_stream = stream::iter(std::iter::once(msg));
// Setup liveness service
let (publisher, _) = broadcast::channel(200);
let shutdown = Shutdown::new();
let service = LivenessService::new(
Default::default(),
stream::empty(),
pingpong_stream,
state,
connectivity,
outbound_messaging,
publisher,
shutdown.to_signal(),
);
task::spawn(service.run());
// Test oms got request to send message
unwrap_oms_send_msg!(outbound_rx.select_next_some().await);
}
#[tokio_macros::test_basic]
async fn handle_message_pong() {
let mut state = LivenessState::new();
let (connectivity, mock) = create_connectivity_mock();
mock.spawn();
let (outbound_tx, _) = mpsc::channel(10);
let outbound_messaging = OutboundMessageRequester::new(outbound_tx);
let mut metadata = Metadata::new();
metadata.insert(MetadataKey::ChainMetadata, b"dummy-data".to_vec());
let msg = create_dummy_message(PingPongMessage::pong_with_metadata(123, metadata.clone()));
state.add_inflight_ping(msg.inner.nonce, &msg.source_peer.node_id);
// A stream which emits an inflight pong message and an unexpected one
let malicious_msg = create_dummy_message(PingPongMessage::pong_with_metadata(321, metadata));
let pingpong_stream = stream::iter(vec![msg, malicious_msg]);
// Setup liveness service
let (publisher, _) = broadcast::channel(200);
let mut shutdown = Shutdown::new();
let service = LivenessService::new(
Default::default(),
stream::empty(),
pingpong_stream,
state,
connectivity,
outbound_messaging,
publisher.clone(),
shutdown.to_signal(),
);
task::spawn(service.run());
// Listen for the pong event
let subscriber = publisher.subscribe();
let event = time::timeout(Duration::from_secs(10), subscriber.fuse().select_next_some())
.await
.unwrap()
.unwrap();
match &*event {
LivenessEvent::ReceivedPong(event) => {
assert_eq!(event.metadata.get(MetadataKey::ChainMetadata).unwrap(), b"dummy-data");
},
_ => panic!("Unexpected event"),
}
shutdown.trigger().unwrap();
// No further events (malicious_msg was ignored)
let mut subscriber = publisher.subscribe().fuse();
drop(publisher);
let msg = subscriber.next().await;
assert!(msg.is_none());
}
} | self.send_ping(node_id).await?;
self.state.inc_pings_sent();
Ok(LivenessResponse::Ok)
}, |
createWallGeometry.js | /**
* Cesium - https://github.com/AnalyticalGraphicsInc/cesium
*
* Copyright 2011-2017 Cesium Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Columbus View (Pat. Pend.)
*
* Portions licensed separately.
* See https://github.com/AnalyticalGraphicsInc/cesium/blob/master/LICENSE.md for full licensing details.
*/
define(['./when-a55a8a4c', './Check-bc1d37d9', './Math-edfe2d1c', './Cartesian2-f0158650', './BoundingSphere-02d3af5e', './RuntimeError-7c184ac0', './WebGLConstants-4c11ee5f', './ComponentDatatype-919a7463', './GeometryAttribute-de0e4f68', './PrimitiveType-97893bc7', './FeatureDetection-bac17d71', './Transforms-a6232c91', './GeometryAttributes-1c7ce91d', './VertexFormat-7f136973', './IndexDatatype-18a8cae6', './IntersectionTests-b4e5ec49', './Plane-a550aa8c', './EllipsoidTangentPlane-0da38d24', './EllipsoidRhumbLine-ff66f54b', './earcut-2.2.1-b404d9e6', './PolygonPipeline-c1e5a14e', './EllipsoidGeodesic-5bacea45', './PolylinePipeline-69e6af68', './WallGeometryLibrary-921013e1'], function (when, Check, _Math, Cartesian2, BoundingSphere, RuntimeError, WebGLConstants, ComponentDatatype, GeometryAttribute, PrimitiveType, FeatureDetection, Transforms, GeometryAttributes, VertexFormat, IndexDatatype, IntersectionTests, Plane, EllipsoidTangentPlane, EllipsoidRhumbLine, earcut2_2_1, PolygonPipeline, EllipsoidGeodesic, PolylinePipeline, WallGeometryLibrary) { 'use strict';
var scratchCartesian3Position1 = new Cartesian2.Cartesian3();
var scratchCartesian3Position2 = new Cartesian2.Cartesian3();
var scratchCartesian3Position3 = new Cartesian2.Cartesian3();
var scratchCartesian3Position4 = new Cartesian2.Cartesian3();
var scratchCartesian3Position5 = new Cartesian2.Cartesian3();
var scratchBitangent = new Cartesian2.Cartesian3();
var scratchTangent = new Cartesian2.Cartesian3();
var scratchNormal = new Cartesian2.Cartesian3();
/**
* A description of a wall, which is similar to a KML line string. A wall is defined by a series of points,
* which extrude down to the ground. Optionally, they can extrude downwards to a specified height.
*
* @alias WallGeometry
* @constructor
*
* @param {Object} options Object with the following properties:
* @param {Cartesian3[]} options.positions An array of Cartesian objects, which are the points of the wall.
* @param {Number} [options.granularity=CesiumMath.RADIANS_PER_DEGREE] The distance, in radians, between each latitude and longitude. Determines the number of positions in the buffer.
* @param {Number[]} [options.maximumHeights] An array parallel to <code>positions</code> that give the maximum height of the
* wall at <code>positions</code>. If undefined, the height of each position is used.
* @param {Number[]} [options.minimumHeights] An array parallel to <code>positions</code> that give the minimum height of the
* wall at <code>positions</code>. If undefined, the height at each position is 0.0.
* @param {Ellipsoid} [options.ellipsoid=Ellipsoid.WGS84] The ellipsoid for coordinate manipulation
* @param {VertexFormat} [options.vertexFormat=VertexFormat.DEFAULT] The vertex attributes to be computed.
*
* @exception {DeveloperError} positions length must be greater than or equal to 2.
* @exception {DeveloperError} positions and maximumHeights must have the same length.
* @exception {DeveloperError} positions and minimumHeights must have the same length.
*
* @see WallGeometry#createGeometry
* @see WallGeometry#fromConstantHeight
*
* @demo {@link https://sandcastle.cesium.com/index.html?src=Wall.html|Cesium Sandcastle Wall Demo}
*
* @example
* // create a wall that spans from ground level to 10000 meters
* var wall = new Cesium.WallGeometry({
* positions : Cesium.Cartesian3.fromDegreesArrayHeights([
* 19.0, 47.0, 10000.0,
* 19.0, 48.0, 10000.0,
* 20.0, 48.0, 10000.0,
* 20.0, 47.0, 10000.0,
* 19.0, 47.0, 10000.0
* ])
* });
* var geometry = Cesium.WallGeometry.createGeometry(wall);
*/
function | (options) {
options = when.defaultValue(options, when.defaultValue.EMPTY_OBJECT);
var wallPositions = options.positions;
var maximumHeights = options.maximumHeights;
var minimumHeights = options.minimumHeights;
//>>includeStart('debug', pragmas.debug);
if (!when.defined(wallPositions)) {
throw new Check.DeveloperError('options.positions is required.');
}
if (when.defined(maximumHeights) && maximumHeights.length !== wallPositions.length) {
throw new Check.DeveloperError('options.positions and options.maximumHeights must have the same length.');
}
if (when.defined(minimumHeights) && minimumHeights.length !== wallPositions.length) {
throw new Check.DeveloperError('options.positions and options.minimumHeights must have the same length.');
}
//>>includeEnd('debug');
var vertexFormat = when.defaultValue(options.vertexFormat, VertexFormat.VertexFormat.DEFAULT);
var granularity = when.defaultValue(options.granularity, _Math.CesiumMath.RADIANS_PER_DEGREE);
var ellipsoid = when.defaultValue(options.ellipsoid, Cartesian2.Ellipsoid.WGS84);
this._positions = wallPositions;
this._minimumHeights = minimumHeights;
this._maximumHeights = maximumHeights;
this._vertexFormat = VertexFormat.VertexFormat.clone(vertexFormat);
this._granularity = granularity;
this._ellipsoid = Cartesian2.Ellipsoid.clone(ellipsoid);
this._workerName = 'createWallGeometry';
var numComponents = 1 + wallPositions.length * Cartesian2.Cartesian3.packedLength + 2;
if (when.defined(minimumHeights)) {
numComponents += minimumHeights.length;
}
if (when.defined(maximumHeights)) {
numComponents += maximumHeights.length;
}
/**
* The number of elements used to pack the object into an array.
* @type {Number}
*/
this.packedLength = numComponents + Cartesian2.Ellipsoid.packedLength + VertexFormat.VertexFormat.packedLength + 1;
}
/**
* Stores the provided instance into the provided array.
*
* @param {WallGeometry} value The value to pack.
* @param {Number[]} array The array to pack into.
* @param {Number} [startingIndex=0] The index into the array at which to start packing the elements.
*
* @returns {Number[]} The array that was packed into
*/
WallGeometry.pack = function(value, array, startingIndex) {
//>>includeStart('debug', pragmas.debug);
if (!when.defined(value)) {
throw new Check.DeveloperError('value is required');
}
if (!when.defined(array)) {
throw new Check.DeveloperError('array is required');
}
//>>includeEnd('debug');
startingIndex = when.defaultValue(startingIndex, 0);
var i;
var positions = value._positions;
var length = positions.length;
array[startingIndex++] = length;
for (i = 0; i < length; ++i, startingIndex += Cartesian2.Cartesian3.packedLength) {
Cartesian2.Cartesian3.pack(positions[i], array, startingIndex);
}
var minimumHeights = value._minimumHeights;
length = when.defined(minimumHeights) ? minimumHeights.length : 0;
array[startingIndex++] = length;
if (when.defined(minimumHeights)) {
for (i = 0; i < length; ++i) {
array[startingIndex++] = minimumHeights[i];
}
}
var maximumHeights = value._maximumHeights;
length = when.defined(maximumHeights) ? maximumHeights.length : 0;
array[startingIndex++] = length;
if (when.defined(maximumHeights)) {
for (i = 0; i < length; ++i) {
array[startingIndex++] = maximumHeights[i];
}
}
Cartesian2.Ellipsoid.pack(value._ellipsoid, array, startingIndex);
startingIndex += Cartesian2.Ellipsoid.packedLength;
VertexFormat.VertexFormat.pack(value._vertexFormat, array, startingIndex);
startingIndex += VertexFormat.VertexFormat.packedLength;
array[startingIndex] = value._granularity;
return array;
};
var scratchEllipsoid = Cartesian2.Ellipsoid.clone(Cartesian2.Ellipsoid.UNIT_SPHERE);
var scratchVertexFormat = new VertexFormat.VertexFormat();
var scratchOptions = {
positions : undefined,
minimumHeights : undefined,
maximumHeights : undefined,
ellipsoid : scratchEllipsoid,
vertexFormat : scratchVertexFormat,
granularity : undefined
};
/**
* Retrieves an instance from a packed array.
*
* @param {Number[]} array The packed array.
* @param {Number} [startingIndex=0] The starting index of the element to be unpacked.
* @param {WallGeometry} [result] The object into which to store the result.
* @returns {WallGeometry} The modified result parameter or a new WallGeometry instance if one was not provided.
*/
WallGeometry.unpack = function(array, startingIndex, result) {
//>>includeStart('debug', pragmas.debug);
if (!when.defined(array)) {
throw new Check.DeveloperError('array is required');
}
//>>includeEnd('debug');
startingIndex = when.defaultValue(startingIndex, 0);
var i;
var length = array[startingIndex++];
var positions = new Array(length);
for (i = 0; i < length; ++i, startingIndex += Cartesian2.Cartesian3.packedLength) {
positions[i] = Cartesian2.Cartesian3.unpack(array, startingIndex);
}
length = array[startingIndex++];
var minimumHeights;
if (length > 0) {
minimumHeights = new Array(length);
for (i = 0; i < length; ++i) {
minimumHeights[i] = array[startingIndex++];
}
}
length = array[startingIndex++];
var maximumHeights;
if (length > 0) {
maximumHeights = new Array(length);
for (i = 0; i < length; ++i) {
maximumHeights[i] = array[startingIndex++];
}
}
var ellipsoid = Cartesian2.Ellipsoid.unpack(array, startingIndex, scratchEllipsoid);
startingIndex += Cartesian2.Ellipsoid.packedLength;
var vertexFormat = VertexFormat.VertexFormat.unpack(array, startingIndex, scratchVertexFormat);
startingIndex += VertexFormat.VertexFormat.packedLength;
var granularity = array[startingIndex];
if (!when.defined(result)) {
scratchOptions.positions = positions;
scratchOptions.minimumHeights = minimumHeights;
scratchOptions.maximumHeights = maximumHeights;
scratchOptions.granularity = granularity;
return new WallGeometry(scratchOptions);
}
result._positions = positions;
result._minimumHeights = minimumHeights;
result._maximumHeights = maximumHeights;
result._ellipsoid = Cartesian2.Ellipsoid.clone(ellipsoid, result._ellipsoid);
result._vertexFormat = VertexFormat.VertexFormat.clone(vertexFormat, result._vertexFormat);
result._granularity = granularity;
return result;
};
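// Illustrative round-trip sketch (not part of the original source): pack
// serializes a WallGeometry into a flat Number array (e.g., for handing off
// to a web worker) and unpack reconstructs it. `wall` and `packed` below are
// hypothetical names.
//
//   var packed = [];
//   WallGeometry.pack(wall, packed, 0);
//   var restored = WallGeometry.unpack(packed, 0);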
/**
* A description of a wall, which is similar to a KML line string. A wall is defined by a series of points,
* which extrude down to the ground. Optionally, they can extrude downwards to a specified height.
*
* @param {Object} options Object with the following properties:
* @param {Cartesian3[]} options.positions An array of Cartesian objects, which are the points of the wall.
* @param {Number} [options.maximumHeight] A constant that defines the maximum height of the
 *        wall at <code>positions</code>. If undefined, the height of each position is used.
* @param {Number} [options.minimumHeight] A constant that defines the minimum height of the
* wall at <code>positions</code>. If undefined, the height at each position is 0.0.
* @param {Ellipsoid} [options.ellipsoid=Ellipsoid.WGS84] The ellipsoid for coordinate manipulation
* @param {VertexFormat} [options.vertexFormat=VertexFormat.DEFAULT] The vertex attributes to be computed.
* @returns {WallGeometry}
*
*
* @example
* // create a wall that spans from 10000 meters to 20000 meters
* var wall = Cesium.WallGeometry.fromConstantHeights({
* positions : Cesium.Cartesian3.fromDegreesArray([
* 19.0, 47.0,
* 19.0, 48.0,
* 20.0, 48.0,
* 20.0, 47.0,
* 19.0, 47.0,
* ]),
 *     minimumHeight : 10000.0,
 *     maximumHeight : 20000.0
* });
* var geometry = Cesium.WallGeometry.createGeometry(wall);
*
* @see WallGeometry#createGeometry
*/
WallGeometry.fromConstantHeights = function(options) {
options = when.defaultValue(options, when.defaultValue.EMPTY_OBJECT);
var positions = options.positions;
//>>includeStart('debug', pragmas.debug);
if (!when.defined(positions)) {
throw new Check.DeveloperError('options.positions is required.');
}
//>>includeEnd('debug');
var minHeights;
var maxHeights;
var min = options.minimumHeight;
var max = options.maximumHeight;
var doMin = when.defined(min);
var doMax = when.defined(max);
if (doMin || doMax) {
var length = positions.length;
minHeights = (doMin) ? new Array(length) : undefined;
maxHeights = (doMax) ? new Array(length) : undefined;
for (var i = 0; i < length; ++i) {
if (doMin) {
minHeights[i] = min;
}
if (doMax) {
maxHeights[i] = max;
}
}
}
var newOptions = {
positions : positions,
maximumHeights : maxHeights,
minimumHeights : minHeights,
ellipsoid : options.ellipsoid,
vertexFormat : options.vertexFormat
};
return new WallGeometry(newOptions);
};
/**
* Computes the geometric representation of a wall, including its vertices, indices, and a bounding sphere.
*
* @param {WallGeometry} wallGeometry A description of the wall.
* @returns {Geometry|undefined} The computed vertices and indices.
*/
WallGeometry.createGeometry = function(wallGeometry) {
var wallPositions = wallGeometry._positions;
var minimumHeights = wallGeometry._minimumHeights;
var maximumHeights = wallGeometry._maximumHeights;
var vertexFormat = wallGeometry._vertexFormat;
var granularity = wallGeometry._granularity;
var ellipsoid = wallGeometry._ellipsoid;
var pos = WallGeometryLibrary.WallGeometryLibrary.computePositions(ellipsoid, wallPositions, maximumHeights, minimumHeights, granularity, true);
if (!when.defined(pos)) {
return;
}
var bottomPositions = pos.bottomPositions;
var topPositions = pos.topPositions;
var numCorners = pos.numCorners;
var length = topPositions.length;
var size = length * 2;
var positions = vertexFormat.position ? new Float64Array(size) : undefined;
var normals = vertexFormat.normal ? new Float32Array(size) : undefined;
var tangents = vertexFormat.tangent ? new Float32Array(size) : undefined;
var bitangents = vertexFormat.bitangent ? new Float32Array(size) : undefined;
var textureCoordinates = vertexFormat.st ? new Float32Array(size / 3 * 2) : undefined;
var positionIndex = 0;
var normalIndex = 0;
var bitangentIndex = 0;
var tangentIndex = 0;
var stIndex = 0;
// add lower and upper points one after the other, lower
// points being even and upper points being odd
var normal = scratchNormal;
var tangent = scratchTangent;
var bitangent = scratchBitangent;
var recomputeNormal = true;
length /= 3;
var i;
var s = 0;
var ds = 1/(length - wallPositions.length + 1);
for (i = 0; i < length; ++i) {
var i3 = i * 3;
var topPosition = Cartesian2.Cartesian3.fromArray(topPositions, i3, scratchCartesian3Position1);
var bottomPosition = Cartesian2.Cartesian3.fromArray(bottomPositions, i3, scratchCartesian3Position2);
if (vertexFormat.position) {
// insert the lower point
positions[positionIndex++] = bottomPosition.x;
positions[positionIndex++] = bottomPosition.y;
positions[positionIndex++] = bottomPosition.z;
// insert the upper point
positions[positionIndex++] = topPosition.x;
positions[positionIndex++] = topPosition.y;
positions[positionIndex++] = topPosition.z;
}
if (vertexFormat.st) {
textureCoordinates[stIndex++] = s;
textureCoordinates[stIndex++] = 0.0;
textureCoordinates[stIndex++] = s;
textureCoordinates[stIndex++] = 1.0;
}
if (vertexFormat.normal || vertexFormat.tangent || vertexFormat.bitangent) {
var nextPosition;
var nextTop = Cartesian2.Cartesian3.clone(Cartesian2.Cartesian3.ZERO, scratchCartesian3Position5);
var groundPosition = ellipsoid.scaleToGeodeticSurface(Cartesian2.Cartesian3.fromArray(topPositions, i3, scratchCartesian3Position2), scratchCartesian3Position2);
if (i + 1 < length) {
nextPosition = ellipsoid.scaleToGeodeticSurface(Cartesian2.Cartesian3.fromArray(topPositions, i3 + 3, scratchCartesian3Position3), scratchCartesian3Position3);
nextTop = Cartesian2.Cartesian3.fromArray(topPositions, i3 + 3, scratchCartesian3Position5);
}
if (recomputeNormal) {
var scalednextPosition = Cartesian2.Cartesian3.subtract(nextTop, topPosition, scratchCartesian3Position4);
var scaledGroundPosition = Cartesian2.Cartesian3.subtract(groundPosition, topPosition, scratchCartesian3Position1);
normal = Cartesian2.Cartesian3.normalize(Cartesian2.Cartesian3.cross(scaledGroundPosition, scalednextPosition, normal), normal);
recomputeNormal = false;
}
if (Cartesian2.Cartesian3.equalsEpsilon(nextPosition, groundPosition, _Math.CesiumMath.EPSILON10)) {
recomputeNormal = true;
} else {
s += ds;
if (vertexFormat.tangent) {
tangent = Cartesian2.Cartesian3.normalize(Cartesian2.Cartesian3.subtract(nextPosition, groundPosition, tangent), tangent);
}
if (vertexFormat.bitangent) {
bitangent = Cartesian2.Cartesian3.normalize(Cartesian2.Cartesian3.cross(normal, tangent, bitangent), bitangent);
}
}
if (vertexFormat.normal) {
normals[normalIndex++] = normal.x;
normals[normalIndex++] = normal.y;
normals[normalIndex++] = normal.z;
normals[normalIndex++] = normal.x;
normals[normalIndex++] = normal.y;
normals[normalIndex++] = normal.z;
}
if (vertexFormat.tangent) {
tangents[tangentIndex++] = tangent.x;
tangents[tangentIndex++] = tangent.y;
tangents[tangentIndex++] = tangent.z;
tangents[tangentIndex++] = tangent.x;
tangents[tangentIndex++] = tangent.y;
tangents[tangentIndex++] = tangent.z;
}
if (vertexFormat.bitangent) {
bitangents[bitangentIndex++] = bitangent.x;
bitangents[bitangentIndex++] = bitangent.y;
bitangents[bitangentIndex++] = bitangent.z;
bitangents[bitangentIndex++] = bitangent.x;
bitangents[bitangentIndex++] = bitangent.y;
bitangents[bitangentIndex++] = bitangent.z;
}
}
}
var attributes = new GeometryAttributes.GeometryAttributes();
if (vertexFormat.position) {
attributes.position = new GeometryAttribute.GeometryAttribute({
componentDatatype : ComponentDatatype.ComponentDatatype.DOUBLE,
componentsPerAttribute : 3,
values : positions
});
}
if (vertexFormat.normal) {
attributes.normal = new GeometryAttribute.GeometryAttribute({
componentDatatype : ComponentDatatype.ComponentDatatype.FLOAT,
componentsPerAttribute : 3,
values : normals
});
}
if (vertexFormat.tangent) {
attributes.tangent = new GeometryAttribute.GeometryAttribute({
componentDatatype : ComponentDatatype.ComponentDatatype.FLOAT,
componentsPerAttribute : 3,
values : tangents
});
}
if (vertexFormat.bitangent) {
attributes.bitangent = new GeometryAttribute.GeometryAttribute({
componentDatatype : ComponentDatatype.ComponentDatatype.FLOAT,
componentsPerAttribute : 3,
values : bitangents
});
}
if (vertexFormat.st) {
attributes.st = new GeometryAttribute.GeometryAttribute({
componentDatatype : ComponentDatatype.ComponentDatatype.FLOAT,
componentsPerAttribute : 2,
values : textureCoordinates
});
}
// prepare the side walls, two triangles for each wall
//
// A (i+1) B (i+3) E
// +--------+-------+
// | / | /| triangles: A C B
// | / | / | B C D
// | / | / |
// | / | / |
// | / | / |
// | / | / |
// +--------+-------+
// C (i) D (i+2) F
//
var numVertices = size / 3;
size -= 6 * (numCorners + 1);
var indices = IndexDatatype.IndexDatatype.createTypedArray(numVertices, size);
var edgeIndex = 0;
for (i = 0; i < numVertices - 2; i += 2) {
var LL = i;
var LR = i + 2;
var pl = Cartesian2.Cartesian3.fromArray(positions, LL * 3, scratchCartesian3Position1);
var pr = Cartesian2.Cartesian3.fromArray(positions, LR * 3, scratchCartesian3Position2);
if (Cartesian2.Cartesian3.equalsEpsilon(pl, pr, _Math.CesiumMath.EPSILON10)) {
continue;
}
var UL = i + 1;
var UR = i + 3;
indices[edgeIndex++] = UL;
indices[edgeIndex++] = LL;
indices[edgeIndex++] = UR;
indices[edgeIndex++] = UR;
indices[edgeIndex++] = LL;
indices[edgeIndex++] = LR;
}
return new GeometryAttribute.Geometry({
attributes : attributes,
indices : indices,
primitiveType : PrimitiveType.PrimitiveType.TRIANGLES,
boundingSphere : BoundingSphere.BoundingSphere.fromVertices(positions)
});
};
function createWallGeometry(wallGeometry, offset) {
if (when.defined(offset)) {
wallGeometry = WallGeometry.unpack(wallGeometry, offset);
}
wallGeometry._ellipsoid = Cartesian2.Ellipsoid.clone(wallGeometry._ellipsoid);
return WallGeometry.createGeometry(wallGeometry);
}
return createWallGeometry;
});
project_resource_handler_test.go

package handler
import (
"bytes"
"encoding/json"
"errors"
"github.com/gin-gonic/gin"
errors2 "github.com/keptn/keptn/resource-service/errors"
handler_mock "github.com/keptn/keptn/resource-service/handler/fake"
"github.com/keptn/keptn/resource-service/models"
"github.com/stretchr/testify/require"
"net/http"
"net/http/httptest"
"testing"
)
const createResourcesTestPayload = `{
"resources": [
{
"resourceURI": "resource.yaml",
"resourceContent": "c3RyaW5n"
}
]
}`
const createResourcesWithoutBase64EncodingTestPayload = `{
"resources": [
{
"resourceURI": "resource.yaml",
"resourceContent": "string"
}
]
}`
const createResourcesInvalidResourceURITestPayload = `{
"resources": [
{
"resourceURI": "../resource.yaml",
"resourceContent": "c3RyaW5n"
}
]
}`
const updateResourceTestPayload = `{
"resourceContent": "c3RyaW5n"
}`
const updateResourceWithoutBase64EncodingTestPayload = `{
"resourceContent": "string"
}`
var testGetResourceResponse = models.GetResourceResponse{
Resource: models.Resource{
ResourceContent: "resource.yaml",
ResourceURI: "c3RyaW5n",
},
Metadata: models.Version{
Branch: "master",
UpstreamURL: "http://upstream-url.git",
Version: "commit-id",
},
}
var testGetResourceCommitResponse = models.GetResourceResponse{
Resource: models.Resource{
ResourceContent: "resource.yaml",
ResourceURI: "c3RyaW5n",
},
Metadata: models.Version{
Branch: "master",
UpstreamURL: "http://upstream-url.git",
Version: "my-amazing-commit-id",
},
}
var testGetResourcesResponse = models.GetResourcesResponse{
NextPageKey: "1",
PageSize: 1,
Resources: []models.GetResourceResponse{
testGetResourceResponse,
},
TotalCount: 2,
}
func TestProjectResourceHandler_CreateProjectResources(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.CreateResourcesParams
wantStatus int
}{
{
name: "create resource successful",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return &models.WriteResourceResponse{CommitID: "my-commit-id"}, nil
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.CreateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
CreateResourcesPayload: models.CreateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusCreated,
},
{
name: "resource content not base64 encoded",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesWithoutBase64EncodingTestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "resourceUri contains invalid string",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesInvalidResourceURITestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "project not found",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors2.ErrProjectNotFound
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.CreateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
CreateResourcesPayload: models.CreateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusNotFound,
},
{
name: "internal error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("oops")
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.CreateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
CreateResourcesPayload: models.CreateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusInternalServerError,
},
{
name: "invalid payload",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{CreateResourcesFunc: func(project models.CreateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPost, "/project/my-project/resource", bytes.NewBuffer([]byte("invalid"))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.POST("/project/:projectName/resource", ph.CreateProjectResources)
resp := performRequest(router, tt.request)
require.Equal(t, tt.wantStatus, resp.Code)
if tt.wantParams != nil {
require.Len(t, tt.fields.ProjectResourceManager.CreateResourcesCalls(), 1)
require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.CreateResourcesCalls()[0].Params)
} else {
require.Empty(t, tt.fields.ProjectResourceManager.CreateResourcesCalls())
}
})
}
}
func TestProjectResourceHandler_UpdateProjectResources(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.UpdateResourcesParams
wantStatus int
}{
{
name: "update resource successful",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return &models.WriteResourceResponse{CommitID: "my-commit-id"}, nil
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.UpdateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
UpdateResourcesPayload: models.UpdateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusOK,
},
{
name: "resource content not base64 encoded",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesWithoutBase64EncodingTestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "resourceUri contains invalid string",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesInvalidResourceURITestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "project not found",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors2.ErrProjectNotFound
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.UpdateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
UpdateResourcesPayload: models.UpdateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusNotFound,
},
{
name: "internal error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("oops")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte(createResourcesTestPayload))),
wantParams: &models.UpdateResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
UpdateResourcesPayload: models.UpdateResourcesPayload{
Resources: []models.Resource{
{
ResourceURI: "resource.yaml",
ResourceContent: "c3RyaW5n",
},
},
},
},
wantStatus: http.StatusInternalServerError,
},
{
name: "invalid payload",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourcesFunc: func(project models.UpdateResourcesParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource", bytes.NewBuffer([]byte("invalid"))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.PUT("/project/:projectName/resource", ph.UpdateProjectResources)
resp := performRequest(router, tt.request)
require.Equal(t, tt.wantStatus, resp.Code)
if tt.wantParams != nil {
require.Len(t, tt.fields.ProjectResourceManager.UpdateResourcesCalls(), 1)
require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.UpdateResourcesCalls()[0].Params)
} else {
require.Empty(t, tt.fields.ProjectResourceManager.UpdateResourcesCalls())
}
})
}
}
func TestProjectResourceHandler_GetProjectResources(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.GetResourcesParams
wantResult *models.GetResourcesResponse
wantStatus int
}{
{
name: "get resource list",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return &testGetResourcesResponse, nil
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource?gitCommitID=commit-id&pageSize=3&nextPageKey=2", nil),
wantParams: &models.GetResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
GetResourcesQuery: models.GetResourcesQuery{
GitCommitID: "commit-id",
PageSize: 3,
NextPageKey: "2",
},
},
wantResult: &testGetResourcesResponse,
wantStatus: http.StatusOK,
},
{
name: "get resource list - use default pageSize",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return &testGetResourcesResponse, nil
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource?gitCommitID=commit-id", nil),
wantParams: &models.GetResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
GetResourcesQuery: models.GetResourcesQuery{
GitCommitID: "commit-id",
PageSize: 20,
},
},
wantResult: &testGetResourcesResponse,
wantStatus: http.StatusOK,
},
{
name: "get resource list - use default pageSize and no git commit ID",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return &testGetResourcesResponse, nil
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource", nil),
wantParams: &models.GetResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
GetResourcesQuery: models.GetResourcesQuery{
PageSize: 20,
},
},
wantResult: &testGetResourcesResponse,
wantStatus: http.StatusOK,
},
{
name: "get resource list - invalid value for pageSize",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return nil, errors.New("should not have been called")
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource?pageSize=invalid", nil),
wantParams: nil,
wantResult: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "project not found",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return nil, errors2.ErrProjectNotFound
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource?gitCommitID=commit-id&pageSize=3&nextPageKey=2", nil),
wantParams: &models.GetResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
GetResourcesQuery: models.GetResourcesQuery{
GitCommitID: "commit-id",
PageSize: 3,
NextPageKey: "2",
},
},
wantResult: nil,
wantStatus: http.StatusNotFound,
},
{
name: "random error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return nil, errors.New("oops")
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource?gitCommitID=commit-id&pageSize=3&nextPageKey=2", nil),
wantParams: &models.GetResourcesParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
GetResourcesQuery: models.GetResourcesQuery{
GitCommitID: "commit-id",
PageSize: 3,
NextPageKey: "2",
},
},
wantResult: nil,
wantStatus: http.StatusInternalServerError,
},
{
name: "project not set",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourcesFunc: func(params models.GetResourcesParams) (*models.GetResourcesResponse, error) {
return nil, errors.New("oops")
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/%20/resource?gitCommitID=commit-id&pageSize=3&nextPageKey=2", nil),
wantParams: nil,
wantResult: nil,
wantStatus: http.StatusBadRequest,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.GET("/project/:projectName/resource", ph.GetProjectResources)
resp := performRequest(router, tt.request)
			if tt.wantParams != nil {
				require.Len(t, tt.fields.ProjectResourceManager.GetResourcesCalls(), 1)
				require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.GetResourcesCalls()[0].Params)
			} else {
require.Empty(t, tt.fields.ProjectResourceManager.GetResourcesCalls())
}
require.Equal(t, tt.wantStatus, resp.Code)
if tt.wantResult != nil {
result := &models.GetResourcesResponse{}
err := json.Unmarshal(resp.Body.Bytes(), result)
require.Nil(t, err)
require.Equal(t, tt.wantResult, result)
}
})
}
}
func TestProjectResourceHandler_GetProjectResource(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.GetResourceParams
wantResult *models.GetResourceResponse
wantStatus int
}{
{
name: "get resource",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourceFunc: func(params models.GetResourceParams) (*models.GetResourceResponse, error) {
return &testGetResourceResponse, nil
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource/my-resource.yaml?gitCommitID=commit-id", nil),
wantParams: &models.GetResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "my-resource.yaml",
GetResourceQuery: models.GetResourceQuery{
GitCommitID: "commit-id",
},
},
wantResult: &testGetResourceResponse,
wantStatus: http.StatusOK,
},
{
			name: "get resource in parent directory - should return error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourceFunc: func(params models.GetResourceParams) (*models.GetResourceResponse, error) {
return &testGetResourceResponse, nil
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource/..my-resource.yaml?gitCommitID=commit-id", nil),
wantParams: nil,
wantResult: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "resource not found",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourceFunc: func(params models.GetResourceParams) (*models.GetResourceResponse, error) {
return nil, errors2.ErrResourceNotFound
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource/my-resource.yaml?gitCommitID=commit-id", nil),
wantParams: &models.GetResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "my-resource.yaml",
GetResourceQuery: models.GetResourceQuery{
GitCommitID: "commit-id",
},
},
wantResult: nil,
wantStatus: http.StatusNotFound,
},
{
name: "project not found",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{
GetResourceFunc: func(params models.GetResourceParams) (*models.GetResourceResponse, error) {
return nil, errors2.ErrProjectNotFound
},
},
},
request: httptest.NewRequest(http.MethodGet, "/project/my-project/resource/my-resource.yaml?gitCommitID=commit-id", nil),
wantParams: &models.GetResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "my-resource.yaml",
GetResourceQuery: models.GetResourceQuery{
GitCommitID: "commit-id",
},
},
wantResult: nil,
wantStatus: http.StatusNotFound,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.GET("/project/:projectName/resource/:resourceURI", ph.GetProjectResource)
resp := performRequest(router, tt.request)
if tt.wantParams != nil {
require.Len(t, tt.fields.ProjectResourceManager.GetResourceCalls(), 1)
require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.GetResourceCalls()[0].Params)
} else {
require.Empty(t, tt.fields.ProjectResourceManager.GetResourceCalls())
}
require.Equal(t, tt.wantStatus, resp.Code)
if tt.wantResult != nil {
result := &models.GetResourceResponse{}
err := json.Unmarshal(resp.Body.Bytes(), result)
require.Nil(t, err)
require.Equal(t, tt.wantResult, result)
}
})
}
}
func TestProjectResourceHandler_UpdateProjectResource(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.UpdateResourceParams
wantStatus int
}{
{
name: "update resource successful",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) {
return &models.WriteResourceResponse{CommitID: "my-commit-id"}, nil
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))),
wantParams: &models.UpdateResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "resource.yaml",
UpdateResourcePayload: models.UpdateResourcePayload{ResourceContent: "c3RyaW5n"},
},
wantStatus: http.StatusOK,
},
{
name: "resource content not base64 encoded",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceWithoutBase64EncodingTestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "resourceUri contains invalid string",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(params models.UpdateResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/..resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "internal error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(project models.UpdateResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("oops")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte(updateResourceTestPayload))),
wantParams: &models.UpdateResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "resource.yaml",
UpdateResourcePayload: models.UpdateResourcePayload{ResourceContent: "c3RyaW5n"},
},
wantStatus: http.StatusInternalServerError,
},
{
name: "invalid payload",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{UpdateResourceFunc: func(project models.UpdateResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("should not have been called")
}},
},
request: httptest.NewRequest(http.MethodPut, "/project/my-project/resource/resource.yaml", bytes.NewBuffer([]byte("invalid"))),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.PUT("/project/:projectName/resource/:resourceURI", ph.UpdateProjectResource)
resp := performRequest(router, tt.request)
if tt.wantParams != nil {
require.Len(t, tt.fields.ProjectResourceManager.UpdateResourceCalls(), 1)
require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.UpdateResourceCalls()[0].Params)
} else {
require.Empty(t, tt.fields.ProjectResourceManager.UpdateResourceCalls())
}
require.Equal(t, tt.wantStatus, resp.Code)
})
}
}
func TestProjectResourceHandler_DeleteProjectResource(t *testing.T) {
type fields struct {
ProjectResourceManager *handler_mock.IResourceManagerMock
}
tests := []struct {
name string
fields fields
request *http.Request
wantParams *models.DeleteResourceParams
wantStatus int
}{
{
name: "delete resource",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{DeleteResourceFunc: func(params models.DeleteResourceParams) (*models.WriteResourceResponse, error) {
return &models.WriteResourceResponse{CommitID: "my-commit-id"}, nil
}},
},
request: httptest.NewRequest(http.MethodDelete, "/project/my-project/resource/resource.yaml", nil),
wantParams: &models.DeleteResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "resource.yaml",
},
wantStatus: http.StatusOK,
},
{
name: "project name empty",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{DeleteResourceFunc: func(params models.DeleteResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("oops")
}},
},
request: httptest.NewRequest(http.MethodDelete, "/project/%20/resource/resource.yaml", nil),
wantParams: nil,
wantStatus: http.StatusBadRequest,
},
{
name: "random error",
fields: fields{
ProjectResourceManager: &handler_mock.IResourceManagerMock{DeleteResourceFunc: func(params models.DeleteResourceParams) (*models.WriteResourceResponse, error) {
return nil, errors.New("oops")
}},
},
request: httptest.NewRequest(http.MethodDelete, "/project/my-project/resource/resource.yaml", nil),
wantParams: &models.DeleteResourceParams{
ResourceContext: models.ResourceContext{
Project: models.Project{ProjectName: "my-project"},
},
ResourceURI: "resource.yaml",
},
wantStatus: http.StatusInternalServerError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ph := NewProjectResourceHandler(tt.fields.ProjectResourceManager)
router := gin.Default()
router.DELETE("/project/:projectName/resource/:resourceURI", ph.DeleteProjectResource)
resp := performRequest(router, tt.request)
if tt.wantParams != nil {
require.Len(t, tt.fields.ProjectResourceManager.DeleteResourceCalls(), 1)
require.Equal(t, *tt.wantParams, tt.fields.ProjectResourceManager.DeleteResourceCalls()[0].Params)
} else {
require.Empty(t, tt.fields.ProjectResourceManager.DeleteResourceCalls())
}
require.Equal(t, tt.wantStatus, resp.Code)
})
}
}
randomizer.rs

// Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use chrono::prelude::*;
use chrono::DateTime;
use protobuf::well_known_types::Timestamp;
use protobuf::RepeatedField;
use rand::distributions::Distribution;
use rand::seq::SliceRandom;
use rand::Rng;
use rand_distr::Normal;
use uuid::Uuid;
use crate::gen::billing::{Batch, Record, ResourceInfo};
pub static NUM_CLIENTS: u32 = 100;
pub trait Randomizer {
fn random(rng: &mut impl Rng) -> Self;
}
pub struct RecordState {
pub last_time: DateTime<Utc>,
}
fn protobuf_timestamp(time: DateTime<Utc>) -> Timestamp {
    let mut ret = Timestamp::new();
    ret.set_seconds(time.timestamp());
    ret.set_nanos(time.timestamp_subsec_nanos() as i32);
    ret
}
/// Construct a Batch that depends on `state`
///
/// In particular this will have somewhat sensible values for all fields, and
/// will be the next time slice after `state.last_time`, incrementing `last_time` to now
pub fn random_batch(rng: &mut impl Rng, state: &mut RecordState) -> Batch {
let id = Uuid::new_v4();
let dur_val = rng.gen_range(15, 1_000);
let dur = chrono::Duration::seconds(dur_val);
let interval_start_time = state.last_time.clone();
let interval_start = protobuf_timestamp(state.last_time);
state.last_time = state.last_time.checked_add_signed(dur).unwrap();
let interval_end = protobuf_timestamp(state.last_time);
let mut records = RepeatedField::<Record>::new();
for _ in 0..rng.gen_range(1, 50) {
records.push(random_record(rng, interval_start_time, dur_val));
}
let mut batch = Batch::new();
batch.set_id(id.to_string());
batch.set_interval_start(interval_start);
batch.set_interval_end(interval_end);
batch.set_records(records);
batch
}
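// Illustrative usage sketch (not part of the original file); the RNG choice
// and the starting timestamp are assumptions:
//
//   let mut rng = rand::thread_rng();
//   let mut state = RecordState { last_time: Utc::now() };
//   let batch = random_batch(&mut rng, &mut state);
//   // state.last_time has now advanced to the end of the generated interval.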
fn random_record(rng: &mut impl Rng, start_at: DateTime<Utc>, max_secs: i64) -> Record {
let start_offset = rng.gen_range(0, max_secs - 1);
let interval_start = start_at
.checked_add_signed(chrono::Duration::seconds(start_offset))
.unwrap();
let interval_end = interval_start
.checked_add_signed(chrono::Duration::seconds(
rng.gen_range(start_offset, max_secs),
))
.unwrap();
static POSSIBLE_METERS: [&str; 1] = ["execution_time_ms"];
let meter = (*POSSIBLE_METERS.choose(rng).unwrap()).to_string();
let n = Normal::new(50.0, 10.0).unwrap();
let mut val;
loop {
val = n.sample(rng);
if (1.0..1000.0).contains(&val) {
break;
}
}
let mut record = Record::new();
record.set_id(Uuid::new_v4().to_string());
record.set_interval_start(protobuf_timestamp(interval_start));
record.set_interval_end(protobuf_timestamp(interval_end));
record.set_meter(meter);
record.set_value(val as u32);
record.set_info(ResourceInfo::random(rng));
record
}
impl Randomizer for ResourceInfo {
fn random(rng: &mut impl Rng) -> ResourceInfo {
static POSSIBLE_CPUS: &[i32] = &[1, 2];
static POSSIBLE_MEM: &[i32] = &[8, 16];
static POSSIBLE_DISK: &[i32] = &[128];
let mut resource_info = ResourceInfo::new();
resource_info.set_cpu_num(*POSSIBLE_CPUS.choose(rng).unwrap());
resource_info.set_memory_gb(*POSSIBLE_MEM.choose(rng).unwrap());
resource_info.set_disk_gb(*POSSIBLE_DISK.choose(rng).unwrap());
resource_info.set_client_id(rng.gen_range(1, NUM_CLIENTS as i32));
resource_info.set_vm_id(rng.gen_range(1000, 2000));
resource_info
}
}
remote_executor_service.py

# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A generic worker binary for deployment, e.g., on GCP."""
from absl import app
from absl import flags
import grpc
import tensorflow_federated as tff
FLAGS = flags.FLAGS
flags.DEFINE_integer('port', '8000', 'port to listen on')
flags.DEFINE_integer('threads', '10', 'number of worker threads in thread pool')
flags.DEFINE_string('private_key', '', 'the private key for SSL/TLS setup')
flags.DEFINE_string('certificate_chain', '', 'the cert for SSL/TLS setup')
flags.DEFINE_integer('clients', '1', 'number of clients to host on this worker')
flags.DEFINE_integer('fanout', '100',
'max fanout in the hierarchy of local executors')
def main(argv):
  del argv
  executor_factory = tff.framework.local_executor_factory(
      num_clients=FLAGS.clients, max_fanout=FLAGS.fanout)
  if FLAGS.private_key:
    if FLAGS.certificate_chain:
      with open(FLAGS.private_key, 'rb') as f:
        private_key = f.read()
      with open(FLAGS.certificate_chain, 'rb') as f:
        certificate_chain = f.read()
      credentials = grpc.ssl_server_credentials(((
          private_key,
          certificate_chain,
      ),))
    else:
      raise ValueError(
          'Private key has been specified, but the certificate chain is missing.')
  else:
    credentials = None
  tff.simulation.run_server(
      executor_factory.create_executor({}), FLAGS.threads, FLAGS.port,
      credentials)
if __name__ == '__main__':
app.run(main)
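# Illustrative invocation sketch (not part of the original file); the flag
# values are hypothetical, and the TLS flags can be supplied analogously:
#
#   python remote_executor_service.py --port=8000 --threads=10 --clients=5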
plot_ohc_drift.py

"""
Filename: plot_ohc_drift.py
Author: Damien Irving, [email protected]
Description: Create a bar chart showing drift in ocean heat content
and its thermal and barystatic components
"""
# Import general Python modules
import sys
import os
import re
import pdb
import argparse
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import cmdline_provenance as cmdprov
cwd = os.getcwd()
repo_dir = '/'
for directory in cwd.split('/')[1:]:
repo_dir = os.path.join(repo_dir, directory)
if directory == 'ocean-analysis':
break
import matplotlib as mpl
mpl.rcParams['axes.labelsize'] = 'large'
mpl.rcParams['axes.titlesize'] = 'x-large'
mpl.rcParams['xtick.labelsize'] = 'medium'
mpl.rcParams['ytick.labelsize'] = 'large'
mpl.rcParams['legend.fontsize'] = 'large'
# Define functions
def get_quartiles(df, column_name, df_project, units):
"""Get the ensemble quartiles"""
assert len(df) == len(df_project)
quartiles = ['# ' + column_name + ' quartiles']
for project in ['cmip6', 'cmip5']:
df_subset = df[df_project == project]
upper_quartile = df_subset[column_name].abs().quantile(0.75)
median = df_subset[column_name].abs().quantile(0.5)
lower_quartile = df_subset[column_name].abs().quantile(0.25)
upper_quartile_text = "%s upper quartile: %f %s" %(project, upper_quartile, units)
median_text = "%s median: %f %s" %(project, median, units)
lower_quartile_text = "%s lower quartile: %f %s" %(project, lower_quartile, units)
quartiles.append(upper_quartile_text)
quartiles.append(median_text)
quartiles.append(lower_quartile_text)
return quartiles
def main(inargs):
"""Run the program."""
df = pd.read_csv(inargs.infile)
df.set_index(df['model'], drop=True, inplace=True)
#df.set_index(df['model'] + ' (' + df['run'] + ')', drop=True, inplace=True)
x = np.arange(df.shape[0])
ncmip5 = df['project'].value_counts()['cmip5']
df_ohc = df[['OHC (J yr-1)', 'thermal OHC (J yr-1)', 'barystatic OHC (J yr-1)']]
sec_in_year = 365.25 * 24 * 60 * 60
earth_surface_area = 5.1e14
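    # Convert J yr-1 to an equivalent planetary energy imbalance in W m-2:
    # dividing by seconds per year gives watts, and dividing by Earth's
    # surface area (~5.1e14 m2) gives watts per square metre.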
df_ohc = (df_ohc / sec_in_year) / earth_surface_area
df_ohc = df_ohc.rename(columns={"OHC (J yr-1)": "change in OHC ($dH/dt$)",
"thermal OHC (J yr-1)": "change in OHC temperature component ($dH_T/dt$)",
"barystatic OHC (J yr-1)": "change in OHC barystatic component ($dH_m/dt$)"})
df_ohc.plot.bar(figsize=(18,6), color=['#272727', 'tab:red', 'tab:blue'], width=0.9, zorder=2)
plt.axhspan(0.4, 1.0, color='0.95', zorder=1)
plt.axvline(x=ncmip5 - 0.5, color='0.5', linewidth=2.0)
units = 'equivalent planetary energy imbalance (W m$^{-2}$)'
plt.ylabel(units)
plt.axvline(x=x[0]-0.5, color='0.5', linewidth=0.1)
for val in x:
plt.axvline(x=val+0.5, color='0.5', linewidth=0.1)
quartiles = get_quartiles(df_ohc, "change in OHC ($dH/dt$)", df['project'], units)
plt.savefig(inargs.outfile, bbox_inches='tight', dpi=400)
log_file = re.sub('.png', '.met', inargs.outfile)
log_text = cmdprov.new_log(git_repo=repo_dir, extra_notes=quartiles)
cmdprov.write_log(log_file, log_text)
if __name__ == '__main__':
extra_info ="""
author:
Damien Irving, [email protected]
"""
description = 'Create a bar chart showing drift in ocean heat content'
parser = argparse.ArgumentParser(description=description,
epilog=extra_info,
argument_default=argparse.SUPPRESS,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("infile", type=str, help="Input file name")
parser.add_argument("outfile", type=str, help="Output file name")
args = parser.parse_args()
    main(args)
bp_doc_gen.py

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
import glob
import os
import sys
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
import carla
COLOR_LIST = '#498efc'
def join(elem, separator=''):
return separator.join(elem)
def color(col, buf):
return join(['<font color="', col, '">', buf, '</font>'])
def valid_dic_val(dic, value):
return value in dic and dic[value]
def italic(buf):
return join(['_', buf, '_'])
def bold(buf):
return join(['**', buf, '**'])
def parentheses(buf):
return join(['(', buf, ')'])
def sub(buf):
return join(['<sub>', buf, '</sub>'])
def code(buf):
return join(['`', buf, '`'])
class MarkdownFile:
def __init__(self):
self._data = ""
self._list_depth = 0
self.endl = ' \n'
def data(self):
return self._data
def list_push(self, buf=''):
if buf:
self.text(join([
' ' * self._list_depth if self._list_depth != 0 else '', '- ', buf]))
self._list_depth = (self._list_depth + 1)
def list_pushn(self, buf):
self.list_push(join([buf, self.endl]))
def list_pop(self):
self._list_depth = max(self._list_depth - 1, 0)
def list_popn(self):
self.list_pop()
self._data = join([self._data, '\n'])
def list_depth(self):
        if self._data.strip()[-1:] != '\n' or self._list_depth == 0:
            return ''
        return join([' ' * self._list_depth])
def text(self, buf):
self._data = join([self._data, buf])
def textn(self, buf):
self._data = join([self._data, self.list_depth(), buf, self.endl])
def not_title(self, buf):
self._data = join([
self._data, '\n', self.list_depth(), '<h1>', buf, '</h1>', '\n'])
def title(self, strongness, buf):
self._data = join([
self._data, '\n', self.list_depth(), '#' * strongness, ' ', buf, '\n'])
def new_line(self):
self._data = join([self._data, self.endl])
def code_block(self, buf, language=''):
return join(['```', language, '\n', self.list_depth(), buf, '\n', self.list_depth(), '```\n'])
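# Illustrative sketch (not part of the original script): MarkdownFile builds the
# document incrementally, with list_push/list_pop tracking nesting depth so that
# nested bullets indent correctly. The values below are hypothetical.
#
#   md = MarkdownFile()
#   md.title(3, 'vehicle')
#   md.list_pushn('**vehicle.audi.a2**')
#   md.list_pop()
#   print(md.data())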
def main():
"""Generates markdown file"""
client = carla.Client('127.0.0.1', 2000)
client.set_timeout(2.0)
world = client.get_world()
bp_dict = {}
blueprints = [bp for bp in world.get_blueprint_library().filter('*')] # Returns list of all blueprints
blueprint_ids = [bp.id for bp in world.get_blueprint_library().filter('*')] # Returns list of all blueprint ids
# Creates a dict key = walker, static, prop, vehicle, sensor, controller; value = [bp_id, blueprint]
for bp_id in sorted(blueprint_ids):
bp_type = bp_id.split('.')[0]
value = []
for bp in blueprints:
if bp.id == bp_id:
value = [bp_id, bp]
if bp_type in bp_dict:
bp_dict[bp_type].append(value)
else:
bp_dict[bp_type] = [value]
# Actual documentation
md = MarkdownFile()
md.not_title('Blueprint Library')
md.textn(
"The Blueprint Library ([`carla.BlueprintLibrary`](../python_api/#carlablueprintlibrary-class)) " +
"is a summary of all [`carla.ActorBlueprint`](../python_api/#carla.ActorBlueprint) " +
"and its attributes ([`carla.ActorAttribute`](../python_api/#carla.ActorAttribute)) " +
"available to the user in CARLA.")
md.textn("\nHere is an example code for printing all actor blueprints and their attributes:")
md.textn(md.code_block("blueprints = [bp for bp in world.get_blueprint_library().filter('*')]\n"
"for blueprint in blueprints:\n"
" print(blueprint.id)\n"
" for attr in blueprint:\n"
" print(' - {}'.format(attr))", "py"))
md.textn("Check out our [blueprint tutorial](../python_api_tutorial/#blueprints).")
for key, value in bp_dict.items(): # bp types, bp's
md.title(3, key) # Key = walker, static, controller, sensor, vehicle
for bp in sorted(value): # Value = bp[0]= name bp[1]= blueprint
md.list_pushn(bold(color(COLOR_LIST, bp[0]))) # bp name
md.list_push(bold('Attributes:') + '\n')
for attr in sorted(bp[1], key=lambda x: x.id): # for attribute in blueprint
md.list_push(code(attr.id))
md.text(' ' + parentheses(italic(str(attr.type))))
if attr.is_modifiable:
md.text(sub(italic(' – Modifiable')))
md.list_popn()
md.list_pop()
md.list_pop()
md.list_pop()
return md.data()
if __name__ == '__main__':
try:
script_path = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(script_path, '../../Docs/bp_library.md'), 'w') as md_file:
md_file.write(main())
print("Done!")
except KeyboardInterrupt:
print('\nCancelled by user. Bye!')
index.js

import { Helmet } from 'react-helmet';
import { withRouter } from "react-router-dom";
import StandardTooltip from '~/components/StandardTooltip';
import Main from '~/appComponents/Main';
import Loading from '~/components/Loading';
import { TextInput, Select } from '~/components/_standardForm';
import CourseCategoryFormLine from '~/appComponents/CourseCategoryFormLine';
import CourseModel from '~/models/CourseModel';
import CourseApi from '~/api/CourseApi';
import css from './index.css';
@withRouter
class Page_courses_new extends React.Component {
static propTypes = {
history: PropTypes.object.isRequired
}
state = {
speSave: { status: 'success' },
formState: {
title: '',
description: '',
courseCategoryId: 1, // Other
ifPublic: true
},
formValidation: {}
}
apiCreateCourse = (event) => {
event.preventDefault();
const formValidation = CourseModel.validateForm(this.state.formState);
if (formValidation === true) {
CourseApi.create(
spe => this.setState({ speSave: spe }),
this.state.formState
)
.then((course) => this.props.history.push(`/courses/${course.id}`));
} else {
this.setState({ formValidation });
}
}
inputProps = () => ({
formState: this.state.formState,
updateFormState: (formState) => this.setState({ formState }),
formValidation: this.state.formValidation
})
render = () =>
<Main className={css.main}>
<div className="space"/>
<div className="container">
<div className="standard-title-and-description">
<h2 className="title">Create Course</h2>
<article className="description">
Create, study, share your own flashcards!<br/>
You'll be able to import flashcards from Excel after creation.
</article>
</div>

<form className="standard-form -bordered" onSubmit={this.apiCreateCourse}>
<div className="form-insides">
<TextInput {...this.inputProps()} label="* Title" name="title" autoFocus/>
<div className="two-form-lines-in-row">
<CourseCategoryFormLine {...this.inputProps()} label="Category" name="courseCategoryId"/>
<Select
{...this.inputProps()}
label={
<span>
Public/Private <StandardTooltip tooltipEl="Private courses won't be listed in /courses. Consider making your course public if other people may want to use it!"/>
</span>
}
name="ifPublic"
possibleValues={{ true: 'Public', false: "Private" }}
/>
</div>
</div>
<button
className="button -purple standard-submit-button"
type="submit"
>Create</button>
<Loading spe={this.state.speSave}/>
</form>
</div>
<Helmet>
<title>New Course</title>
        <meta name="description" content="Create and memorize your personal flashcards."/>
</Helmet>
</Main>
}
export default Page_courses_new; | |
OneTimePasswordRegistrationView.ts | import { connect } from 'react-redux';
import OneTimePasswordRegistrationView from '../../../views/OneTimePasswordRegistrationView/OneTimePasswordRegistrationView';
import { RootState } from '../../../reducers';
import { Dispatch } from 'redux';
import {to} from 'await-to-js';
import { generateTotpSecret, generateTotpSecretSuccess, generateTotpSecretFailure } from '../../../reducers/Portal/OneTimePasswordRegistration/actions';
import { push } from 'connected-react-router';
const mapStateToProps = (state: RootState) => ({
error: state.oneTimePasswordRegistration.error,
secret: state.oneTimePasswordRegistration.secret,
});
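// POST the identity-verification token to the backend; resolves with the parsed
// JSON body and throws on a non-200 status or an embedded "error" field.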
async function checkIdentity(token: string) {
return fetch(`/api/secondfactor/totp/identity/finish?token=${token}`, {
method: 'POST',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
},
})
.then(async (res) => {
if (res.status !== 200) {
throw new Error('Status code ' + res.status);
}
const body = await res.json();
if ('error' in body) {
throw new Error(body['error']);
}
return body;
});
}
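// Verify the identity token and dispatch success or failure for TOTP secret
// generation. await-to-js resolves the promise into an [err, result] tuple
// instead of throwing; the failure dispatch is delayed by 2s, presumably to
// keep the pending state visible.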
async function tryGenerateTotpSecret(dispatch: Dispatch, token: string) {
  dispatch(generateTotpSecret());
  const [err, result] = await to(checkIdentity(token));
if (err) {
const e = err;
setTimeout(() => {
dispatch(generateTotpSecretFailure(e.message));
}, 2000);
return;
}
dispatch(generateTotpSecretSuccess(result));
}
const mapDispatchToProps = (dispatch: Dispatch) => {
let internalToken: string; | },
onRetryClicked: async () => {
await tryGenerateTotpSecret(dispatch, internalToken);
},
onCancelClicked: () => {
dispatch(push('/'));
},
onLoginClicked: () => {
dispatch(push('/'));
}
}
}
export default connect(mapStateToProps, mapDispatchToProps)(OneTimePasswordRegistrationView); | return {
onInit: async (token: string) => {
internalToken = token;
await tryGenerateTotpSecret(dispatch, internalToken); |
solution_test.go | package main
import "testing"
func runSample(t *testing.T, n int, A []int, expect int) {
res := solve(n, A)
if res != expect {
t.Errorf("sample %d %v, expect %d, but got %d", n, A, expect, res)
}
}
func TestSample1(t *testing.T) |
func TestSample2(t *testing.T) {
n := 4
A := []int{4, 1, 2, 3}
runSample(t, n, A, 2)
}
| {
n := 3
A := []int{1, 2, 3}
runSample(t, n, A, 1)
} |
helper.go | /*
Copyright 2021 ToucanSoftware. |
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util | |
txvalidator_test.go | package blockchain
import (
"bytes"
"crypto/elliptic"
"crypto/rand"
"fmt"
"math"
"testing"
"github.com/elastos/Elastos.ELA/common"
"github.com/elastos/Elastos.ELA/common/config"
"github.com/elastos/Elastos.ELA/common/log"
"github.com/elastos/Elastos.ELA/core/contract"
"github.com/elastos/Elastos.ELA/core/contract/program"
"github.com/elastos/Elastos.ELA/core/types"
"github.com/elastos/Elastos.ELA/core/types/outputpayload"
"github.com/elastos/Elastos.ELA/core/types/payload"
"github.com/elastos/Elastos.ELA/crypto"
"github.com/elastos/Elastos.ELA/dpos/state"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)
type txValidatorTestSuite struct {
suite.Suite
ELA int64
foundationAddress common.Uint168
HeightVersion1 uint32
Chain *BlockChain
OriginalLedger *Ledger
}
func (s *txValidatorTestSuite) SetupSuite() {
config.Parameters = config.ConfigParams{Configuration: &config.Template}
log.NewDefault(
config.Parameters.PrintLevel,
config.Parameters.MaxPerLogSize,
config.Parameters.MaxLogsSize,
)
foundation, err := common.Uint168FromAddress("8VYXVxKKSAxkmRrfmGpQR2Kc66XhG6m3ta")
if err != nil {
s.Error(err)
}
FoundationAddress = *foundation
s.foundationAddress = FoundationAddress
chainStore, err := NewChainStore("Chain_UnitTest1",
config.DefaultParams.GenesisBlock)
if err != nil {
s.Error(err)
}
s.Chain, err = New(chainStore, &config.DefaultParams, state.NewState(&config.DefaultParams, nil))
if err != nil {
s.Error(err)
}
s.OriginalLedger = DefaultLedger
DefaultLedger = &Ledger{}
}
func (s *txValidatorTestSuite) TearDownSuite() {
s.Chain.db.Close()
DefaultLedger = s.OriginalLedger
}
func (s *txValidatorTestSuite) TestCheckTransactionSize() {
tx := buildTx()
buf := new(bytes.Buffer)
err := tx.Serialize(buf)
if !s.NoError(err) {
return
}
// normal
err = checkTransactionSize(tx)
s.NoError(err, "[CheckTransactionSize] passed normal size")
}
func (s *txValidatorTestSuite) TestCheckTransactionInput() {
// coinbase transaction
tx := newCoinBaseTransaction(new(payload.CoinBase), 0)
tx.Inputs[0].Previous.Index = math.MaxUint16
err := checkTransactionInput(tx)
s.NoError(err)
// invalid coinbase refer index
tx.Inputs[0].Previous.Index = 0
err = checkTransactionInput(tx)
s.EqualError(err, "invalid coinbase input")
// invalid coinbase refer id
tx.Inputs[0].Previous.Index = math.MaxUint16
rand.Read(tx.Inputs[0].Previous.TxID[:])
err = checkTransactionInput(tx)
s.EqualError(err, "invalid coinbase input")
// multiple coinbase inputs
tx.Inputs = append(tx.Inputs, &types.Input{})
err = checkTransactionInput(tx)
s.EqualError(err, "coinbase must has only one input")
// normal transaction
tx = buildTx()
err = checkTransactionInput(tx)
s.NoError(err)
// no inputs
tx.Inputs = nil
err = checkTransactionInput(tx)
s.EqualError(err, "transaction has no inputs")
// normal transaction with coinbase input
tx.Inputs = append(tx.Inputs, &types.Input{Previous: *types.NewOutPoint(common.EmptyHash, math.MaxUint16)})
err = checkTransactionInput(tx)
s.EqualError(err, "invalid transaction input")
// duplicated inputs
tx = buildTx()
tx.Inputs = append(tx.Inputs, tx.Inputs[0])
err = checkTransactionInput(tx)
s.EqualError(err, "duplicated transaction inputs")
}
func (s *txValidatorTestSuite) TestCheckTransactionOutput() {
// coinbase
tx := newCoinBaseTransaction(new(payload.CoinBase), 0)
tx.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress},
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress},
}
err := checkTransactionOutput(s.HeightVersion1, tx)
s.NoError(err)
// outputs < 2
tx.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress},
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.EqualError(err, "coinbase output is not enough, at least 2")
// invalid asset id
tx.Outputs = []*types.Output{
{AssetID: common.EmptyHash, ProgramHash: s.foundationAddress},
{AssetID: common.EmptyHash, ProgramHash: s.foundationAddress},
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.EqualError(err, "Asset ID in coinbase is invalid")
// reward to foundation in coinbase = 30% (CheckTxOut version)
totalReward := config.DefaultParams.RewardPerBlock
fmt.Printf("Block reward amount %s", totalReward.String())
foundationReward := common.Fixed64(float64(totalReward) * 0.3)
fmt.Printf("Foundation reward amount %s", foundationReward.String())
tx.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress, Value: foundationReward},
{AssetID: config.ELAAssetID, ProgramHash: common.Uint168{}, Value: totalReward - foundationReward},
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.NoError(err)
// reward to foundation in coinbase < 30% (CheckTxOut version)
foundationReward = common.Fixed64(float64(totalReward) * 0.299999)
fmt.Printf("Foundation reward amount %s", foundationReward.String())
tx.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress, Value: foundationReward},
{AssetID: config.ELAAssetID, ProgramHash: common.Uint168{}, Value: totalReward - foundationReward},
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.EqualError(err, "Reward to foundation in coinbase < 30%")
// normal transaction
tx = buildTx()
for _, output := range tx.Outputs {
output.AssetID = config.ELAAssetID
output.ProgramHash = common.Uint168{}
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.NoError(err)
// outputs < 1
tx.Outputs = nil
err = checkTransactionOutput(s.HeightVersion1, tx)
s.EqualError(err, "transaction has no outputs")
// invalid asset ID
tx.Outputs = randomOutputs()
for _, output := range tx.Outputs {
output.AssetID = common.EmptyHash
output.ProgramHash = common.Uint168{}
}
err = checkTransactionOutput(s.HeightVersion1, tx)
s.EqualError(err, "asset ID in output is invalid")
// invalid program hash
tx.Outputs = randomOutputs()
for _, output := range tx.Outputs {
output.AssetID = config.ELAAssetID
address := common.Uint168{}
address[0] = 0x23
output.ProgramHash = address
}
}
func (s *txValidatorTestSuite) TestCheckAmountPrecision() {
// precision check
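	// Assuming checkAmountPrecise(a, p) requires a to be a multiple of
	// 10^(8-p): for a = 10^i this holds exactly when p >= 8-i, so a precision
	// of byte(8-i) passes while byte(8-i-1) fails.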
for i := 8; i >= 0; i-- {
amount := common.Fixed64(math.Pow(10, float64(i)))
fmt.Printf("Amount %s", amount.String())
s.Equal(true, checkAmountPrecise(amount, byte(8-i)))
s.Equal(false, checkAmountPrecise(amount, byte(8-i-1)))
}
}
func (s *txValidatorTestSuite) TestCheckAttributeProgram() {
// valid attributes
tx := buildTx()
usages := []types.AttributeUsage{
types.Nonce,
types.Script,
types.Description,
types.DescriptionUrl,
types.Memo,
}
for _, usage := range usages {
attr := types.NewAttribute(usage, nil)
tx.Attributes = append(tx.Attributes, &attr)
}
err := checkAttributeProgram(s.HeightVersion1, tx)
s.EqualError(err, "no programs found in transaction")
// invalid attributes
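	// getInvalidUsage rejection-samples a random byte until it is NOT a valid
	// attribute usage, so the loop below always exercises the error path.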
getInvalidUsage := func() types.AttributeUsage {
var usage = make([]byte, 1)
NEXT:
rand.Read(usage)
if types.IsValidAttributeType(types.AttributeUsage(usage[0])) {
goto NEXT
}
return types.AttributeUsage(usage[0])
}
for i := 0; i < 10; i++ {
attr := types.NewAttribute(getInvalidUsage(), nil)
tx.Attributes = []*types.Attribute{&attr}
err := checkAttributeProgram(s.HeightVersion1, tx)
s.EqualError(err, fmt.Sprintf("invalid attribute usage %v", attr.Usage))
}
tx.Attributes = nil
// empty programs
tx.Programs = []*program.Program{}
err = checkAttributeProgram(s.HeightVersion1, tx)
s.EqualError(err, "no programs found in transaction")
// nil program code
p := &program.Program{}
tx.Programs = append(tx.Programs, p)
err = checkAttributeProgram(s.HeightVersion1, tx)
s.EqualError(err, "invalid program code nil")
// nil program parameter
var code = make([]byte, 21)
rand.Read(code)
p = &program.Program{Code: code}
tx.Programs = []*program.Program{p}
err = checkAttributeProgram(s.HeightVersion1, tx)
s.EqualError(err, "invalid program parameter nil")
}
func (s *txValidatorTestSuite) TestCheckTransactionPayload() {
// normal
tx := new(types.Transaction)
payload := &payload.RegisterAsset{
Asset: payload.Asset{
Name: "ELA",
Precision: 0x08,
AssetType: payload.Token,
},
Amount: 3300 * 10000 * 10000000,
}
tx.Payload = payload
err := checkTransactionPayload(tx)
s.NoError(err)
// invalid precision
payload.Asset.Precision = 9
err = checkTransactionPayload(tx)
s.EqualError(err, "Invalide asset Precision.")
// invalid amount
payload.Asset.Precision = 0
payload.Amount = 1234567
err = checkTransactionPayload(tx)
s.EqualError(err, "Invalide asset value,out of precise.")
}
func (s *txValidatorTestSuite) TestCheckDuplicateSidechainTx() {
hashStr1 := "8a6cb4b5ff1a4f8368c6513a536c663381e3fdeff738e9b437bd8fce3fb30b62"
hashBytes1, _ := common.HexStringToBytes(hashStr1)
hash1, _ := common.Uint256FromBytes(hashBytes1)
hashStr2 := "cc62e14f5f9526b7f4ff9d34dcd0643dacb7886707c57f49ec97b95ec5c4edac"
hashBytes2, _ := common.HexStringToBytes(hashStr2)
hash2, _ := common.Uint256FromBytes(hashBytes2)
	// 1. Generate an ill-formed withdraw transaction that contains a duplicate sidechain tx hash
txn := new(types.Transaction)
txn.TxType = types.WithdrawFromSideChain
txn.Payload = &payload.WithdrawFromSideChain{
BlockHeight: 100,
GenesisBlockAddress: "eb7adb1fea0dd6185b09a43bdcd4924bb22bff7151f0b1b4e08699840ab1384b",
SideChainTransactionHashes: []common.Uint256{
*hash1,
*hash2,
*hash1, // duplicate tx hash
},
}
// 2. Run CheckDuplicateSidechainTx
err := checkDuplicateSidechainTx(txn)
s.EqualError(err, "Duplicate sidechain tx detected in a transaction")
}
func (s *txValidatorTestSuite) TestCheckTransactionBalance() {
// WithdrawFromSideChain will pass check in any condition
tx := new(types.Transaction)
tx.TxType = types.WithdrawFromSideChain
// single output
outputValue1 := common.Fixed64(100 * s.ELA)
deposit := newCoinBaseTransaction(new(payload.CoinBase), 0)
deposit.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress, Value: outputValue1},
}
references := map[*types.Input]*types.Output{
&types.Input{}: {Value: outputValue1},
}
s.EqualError(checkTransactionFee(tx, references), "transaction fee not enough")
references = map[*types.Input]*types.Output{
&types.Input{}: {Value: outputValue1 + common.Fixed64(config.Parameters.PowConfiguration.MinTxFee)},
}
s.NoError(checkTransactionFee(tx, references))
// multiple output
outputValue1 = common.Fixed64(30 * s.ELA)
outputValue2 := common.Fixed64(70 * s.ELA)
tx.Outputs = []*types.Output{
{AssetID: config.ELAAssetID, ProgramHash: s.foundationAddress, Value: outputValue1},
{AssetID: config.ELAAssetID, ProgramHash: common.Uint168{}, Value: outputValue2},
}
references = map[*types.Input]*types.Output{
&types.Input{}: {Value: outputValue1 + outputValue2},
}
s.EqualError(checkTransactionFee(tx, references), "transaction fee not enough")
references = map[*types.Input]*types.Output{
&types.Input{}: {Value: outputValue1 + outputValue2 + common.Fixed64(config.Parameters.PowConfiguration.MinTxFee)},
}
s.NoError(checkTransactionFee(tx, references))
}
func (s *txValidatorTestSuite) TestCheckSideChainPowConsensus() {
// 1. Generate a side chain pow transaction
txn := new(types.Transaction)
txn.TxType = types.SideChainPow
txn.Payload = &payload.SideChainPow{
SideBlockHash: common.Uint256{1, 1, 1},
SideGenesisHash: common.Uint256{2, 2, 2},
BlockHeight: uint32(10),
}
	// 2. Get two arbitrators
password1 := "1234"
privateKey1, _ := common.HexStringToBytes(password1)
publicKey := new(crypto.PublicKey)
publicKey.X, publicKey.Y = elliptic.P256().ScalarBaseMult(privateKey1)
arbitrator1, _ := publicKey.EncodePoint(true)
password2 := "5678"
privateKey2, _ := common.HexStringToBytes(password2)
publicKey2 := new(crypto.PublicKey)
publicKey2.X, publicKey2.Y = elliptic.P256().ScalarBaseMult(privateKey2)
arbitrator2, _ := publicKey2.EncodePoint(true)
	// 3. Sign the transaction payload with arbitrator1's private key
buf := new(bytes.Buffer)
txn.Payload.Serialize(buf, payload.SideChainPowVersion)
signature, _ := crypto.Sign(privateKey1, buf.Bytes()[0:68])
txn.Payload.(*payload.SideChainPow).SignedData = signature
	// 4. Run CheckSideChainPowConsensus
s.NoError(CheckSideChainPowConsensus(txn, arbitrator1), "TestCheckSideChainPowConsensus failed.")
s.Error(CheckSideChainPowConsensus(txn, arbitrator2), "TestCheckSideChainPowConsensus failed.")
}
func (s *txValidatorTestSuite) TestCheckDestructionAddress() {
destructionAddress := "ELANULLXXXXXXXXXXXXXXXXXXXXXYvs3rr"
txID, _ := common.Uint256FromHexString("7e8863a503e90e6464529feb1c25d98c903e01bec00ccfea2475db4e37d7328b")
programHash, _ := common.Uint168FromAddress(destructionAddress)
reference := map[*types.Input]*types.Output{
&types.Input{Previous: types.OutPoint{*txID, 1234}, Sequence: 123456}: &types.Output{ProgramHash: *programHash},
}
err := checkDestructionAddress(reference)
	s.EqualError(err, "cannot use utxo in the Elastos foundation destruction address")
}
func (s *txValidatorTestSuite) TestCheckRegisterProducerTransaction() {
// Generate a register producer transaction
publicKeyStr1 := "03c77af162438d4b7140f8544ad6523b9734cca9c7a62476d54ed5d1bddc7a39c3"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
privateKeyStr1 := "7638c2a799d93185279a4a6ae84a5b76bd89e41fa9f465d9ae9b2120533983a1"
privateKey1, _ := common.HexStringToBytes(privateKeyStr1)
publicKeyStr2 := "027c4f35081821da858f5c7197bac5e33e77e5af4a3551285f8a8da0a59bd37c45"
publicKey2, _ := common.HexStringToBytes(publicKeyStr2)
errPublicKeyStr := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd4"
errPublicKey, _ := common.HexStringToBytes(errPublicKeyStr)
txn := new(types.Transaction)
txn.TxType = types.RegisterProducer
rpPayload := &payload.ProducerInfo{
OwnerPublicKey: publicKey1,
NickName: "nickname 1",
Url: "http://www.elastos_test.com",
Location: 1,
NetAddress: "127.0.0.1:20338",
}
rpSignBuf := new(bytes.Buffer)
err := rpPayload.SerializeUnsigned(rpSignBuf, payload.ProducerInfoVersion)
s.NoError(err)
rpSig, err := crypto.Sign(privateKey1, rpSignBuf.Bytes())
s.NoError(err)
rpPayload.Signature = rpSig
txn.Payload = rpPayload
txn.Programs = []*program.Program{&program.Program{
Code: getCode(publicKeyStr1),
Parameter: nil,
}}
publicKeyDeposit1, _ := contract.PublicKeyToDepositProgramHash(publicKey1)
txn.Outputs = []*types.Output{&types.Output{
AssetID: common.Uint256{},
Value: 5000 * 100000000,
OutputLock: 0,
ProgramHash: *publicKeyDeposit1,
}}
err = s.Chain.checkRegisterProducerTransaction(txn)
s.NoError(err)
// Give an invalid public key in payload
txn.Payload.(*payload.ProducerInfo).OwnerPublicKey = errPublicKey
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "invalid public key in payload")
	// Invalidate the signature by swapping in a mismatching public key
txn.Payload.(*payload.ProducerInfo).OwnerPublicKey = publicKey2
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "invalid signature in payload")
// Give an invalid url in payload
txn.Payload.(*payload.ProducerInfo).OwnerPublicKey = publicKey1
txn.Payload.(*payload.ProducerInfo).Url = ""
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "Field Url has invalid string length.")
// Give a mismatching deposit address
rpPayload.OwnerPublicKey = publicKey1
rpPayload.Url = "www.test.com"
rpSignBuf = new(bytes.Buffer)
err = rpPayload.SerializeUnsigned(rpSignBuf, payload.ProducerInfoVersion)
s.NoError(err)
rpSig, err = crypto.Sign(privateKey1, rpSignBuf.Bytes())
s.NoError(err)
rpPayload.Signature = rpSig
txn.Payload = rpPayload
publicKeyDeposit2, _ := contract.PublicKeyToDepositProgramHash(publicKey2)
txn.Outputs = []*types.Output{&types.Output{
AssetID: common.Uint256{},
Value: 5000 * 100000000,
OutputLock: 0,
ProgramHash: *publicKeyDeposit2,
}}
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "deposit address does not match the public key in payload")
	// Give an insufficient deposit amount
txn.Outputs = []*types.Output{&types.Output{
AssetID: common.Uint256{},
Value: 4000,
OutputLock: 0,
ProgramHash: *publicKeyDeposit1,
}}
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "producer deposit amount is insufficient")
// Multi deposit addresses
txn.Outputs = []*types.Output{
&types.Output{
AssetID: common.Uint256{},
Value: 5000 * 100000000,
OutputLock: 0,
ProgramHash: *publicKeyDeposit1,
},
&types.Output{
AssetID: common.Uint256{},
Value: 5000 * 100000000,
OutputLock: 0,
ProgramHash: *publicKeyDeposit1,
}}
err = s.Chain.checkRegisterProducerTransaction(txn)
s.EqualError(err, "there must be only one deposit address in outputs")
}
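// getCode builds a standard single-signature redeem script from a hex-encoded
// public key; errors are ignored because the inputs are fixed test vectors.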
func getCode(publicKey string) []byte {
pkBytes, _ := common.HexStringToBytes(publicKey)
pk, _ := crypto.DecodePoint(pkBytes)
redeemScript, _ := contract.CreateStandardRedeemScript(pk)
return redeemScript
}
func (s *txValidatorTestSuite) TestCheckVoteProducerOutput() {
// 1. Generate a vote output
publicKeyStr1 := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd47e944507292ea08dd"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
outputs := []*types.Output{
&types.Output{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: common.Uint168{123},
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
outputpayload.VoteContent{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{
publicKey1,
},
},
},
},
},
&types.Output{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: common.Uint168{123},
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
outputpayload.VoteContent{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{},
},
},
},
},
&types.Output{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: common.Uint168{123},
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
outputpayload.VoteContent{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{
publicKey1,
publicKey1,
},
},
},
},
},
}
// 2. Check output payload
err := outputs[0].Payload.(*outputpayload.VoteOutput).Validate()
s.NoError(err)
err = outputs[1].Payload.(*outputpayload.VoteOutput).Validate()
s.EqualError(err, "invalid public key count")
err = outputs[2].Payload.(*outputpayload.VoteOutput).Validate()
s.EqualError(err, "duplicate candidate")
}
func (s *txValidatorTestSuite) TestCheckUpdateProducerTransaction() {
publicKeyStr1 := "03c77af162438d4b7140f8544ad6523b9734cca9c7a62476d54ed5d1bddc7a39c3"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
privateKeyStr1 := "7638c2a799d93185279a4a6ae84a5b76bd89e41fa9f465d9ae9b2120533983a1"
privateKey1, _ := common.HexStringToBytes(privateKeyStr1)
publicKeyStr2 := "027c4f35081821da858f5c7197bac5e33e77e5af4a3551285f8a8da0a59bd37c45"
publicKey2, _ := common.HexStringToBytes(publicKeyStr2)
errPublicKeyStr := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd4"
errPublicKey, _ := common.HexStringToBytes(errPublicKeyStr)
txn := new(types.Transaction)
txn.TxType = types.RegisterProducer
updatePayload := &payload.ProducerInfo{
OwnerPublicKey: publicKey1,
NickName: "",
Url: "",
Location: 1,
NetAddress: "",
}
txn.Payload = updatePayload
txn.Programs = []*program.Program{{
Code: getCode(publicKeyStr1),
Parameter: nil,
}}
block := &types.Block{
Transactions: []*types.Transaction{
txn,
},
}
s.Chain.state.ProcessBlock(block, nil)
s.EqualError(s.Chain.checkUpdateProducerTransaction(txn), "Field NickName has invalid string length.")
updatePayload.NickName = "nick name"
s.EqualError(s.Chain.checkUpdateProducerTransaction(txn), "Field Url has invalid string length.")
updatePayload.Url = "www.elastos.org"
updatePayload.OwnerPublicKey = errPublicKey
s.EqualError(s.Chain.checkUpdateProducerTransaction(txn), "invalid public key in payload")
updatePayload.OwnerPublicKey = publicKey2
s.EqualError(s.Chain.checkUpdateProducerTransaction(txn), "invalid signature in payload")
updatePayload.OwnerPublicKey = publicKey1
updateSignBuf := new(bytes.Buffer)
err := updatePayload.SerializeUnsigned(updateSignBuf, payload.ProducerInfoVersion)
s.NoError(err)
updateSig, err := crypto.Sign(privateKey1, updateSignBuf.Bytes())
s.NoError(err)
updatePayload.Signature = updateSig
s.NoError(s.Chain.checkUpdateProducerTransaction(txn))
	// The remaining checks are covered by the chain tests.
}
func (s *txValidatorTestSuite) TestCheckCancelProducerTransaction() {
publicKeyStr1 := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd47e944507292ea08dd"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
publicKeyStr2 := "027c4f35081821da858f5c7197bac5e33e77e5af4a3551285f8a8da0a59bd37c45"
publicKey2, _ := common.HexStringToBytes(publicKeyStr2)
errPublicKeyStr := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd4"
errPublicKey, _ := common.HexStringToBytes(errPublicKeyStr)
txn := new(types.Transaction)
txn.TxType = types.CancelProducer
cancelPayload := &payload.ProcessProducer{
OwnerPublicKey: publicKey1,
}
txn.Payload = cancelPayload
txn.Programs = []*program.Program{{
Code: getCode(publicKeyStr1),
Parameter: nil,
}}
cancelPayload.OwnerPublicKey = errPublicKey
s.EqualError(s.Chain.checkCancelProducerTransaction(txn), "invalid public key in payload")
cancelPayload.OwnerPublicKey = publicKey2
s.EqualError(s.Chain.checkCancelProducerTransaction(txn), "invalid signature in payload")
}
func (s *txValidatorTestSuite) TestCheckActivateProducerTransaction() {
publicKeyStr1 := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd47e944507292ea08dd"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
publicKeyStr2 := "027c4f35081821da858f5c7197bac5e33e77e5af4a3551285f8a8da0a59bd37c45"
publicKey2, _ := common.HexStringToBytes(publicKeyStr2)
errPublicKeyStr := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd4"
errPublicKey, _ := common.HexStringToBytes(errPublicKeyStr)
txn := new(types.Transaction)
txn.TxType = types.ActivateProducer
activatePayload := &payload.ProcessProducer{
OwnerPublicKey: publicKey1,
}
txn.Payload = activatePayload
txn.Programs = []*program.Program{{
Code: getCode(publicKeyStr1),
Parameter: nil,
}}
activatePayload.OwnerPublicKey = errPublicKey
s.EqualError(s.Chain.checkActivateProducerTransaction(txn, 0),
"invalid public key in payload")
activatePayload.OwnerPublicKey = publicKey2
s.EqualError(s.Chain.checkActivateProducerTransaction(txn, 0),
"invalid signature in payload")
}
func (s *txValidatorTestSuite) TestCheckStringField() {
s.NoError(checkStringField("Normal", "test"))
s.EqualError(checkStringField("", "test"), "Field test has invalid string length.")
s.EqualError(checkStringField("I am more than 100, 1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890", "test"), "Field test has invalid string length.")
}
func (s *txValidatorTestSuite) TestCheckTransactionDepositUTXO() {
references := make(map[*types.Input]*types.Output)
input := &types.Input{}
var txn types.Transaction
// Use the deposit UTXO in a TransferAsset transaction
depositHash, _ := common.Uint168FromAddress("DVgnDnVfPVuPa2y2E4JitaWjWgRGJDuyrD")
depositOutput := &types.Output{
ProgramHash: *depositHash,
}
references[input] = depositOutput
txn.TxType = types.TransferAsset
err := checkTransactionDepositUTXO(&txn, references)
s.EqualError(err, "only the ReturnDepositCoin transaction can use the deposit UTXO")
// Use the deposit UTXO in a ReturnDepositCoin transaction
txn.TxType = types.ReturnDepositCoin
err = checkTransactionDepositUTXO(&txn, references)
s.NoError(err)
// Use the standard UTXO in a ReturnDepositCoin transaction
normalHash, _ := common.Uint168FromAddress("EJMzC16Eorq9CuFCGtyMrq4Jmgw9jYCHQR")
normalOutput := &types.Output{
ProgramHash: *normalHash,
}
references[input] = normalOutput
txn.TxType = types.ReturnDepositCoin
err = checkTransactionDepositUTXO(&txn, references)
s.EqualError(err, "the ReturnDepositCoin transaction can only use the deposit UTXO")
}
func (s *txValidatorTestSuite) TestCheckOutputPayload() {
publicKeyStr1 := "02b611f07341d5ddce51b5c4366aca7b889cfe0993bd63fd47e944507292ea08dd"
publicKey1, _ := common.HexStringToBytes(publicKeyStr1)
programHash, _ := common.Uint168FromAddress("EJMzC16Eorq9CuFCGtyMrq4Jmgw9jYCHQR")
outputs := []*types.Output{
{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: *programHash,
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{
publicKey1,
},
},
},
},
},
{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: *programHash,
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{},
},
},
},
},
{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: *programHash,
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{
publicKey1,
publicKey1,
},
},
},
},
},
{
AssetID: common.Uint256{},
Value: 1.0,
OutputLock: 0,
ProgramHash: common.Uint168{123},
Type: types.OTVote,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: outputpayload.Delegate,
Candidates: [][]byte{
publicKey1,
},
},
},
},
},
}
err := checkOutputPayload(types.TransferAsset, outputs[0])
s.NoError(err)
err = checkOutputPayload(types.RechargeToSideChain, outputs[0])
s.EqualError(err, "transaction type dose not match the output payload type")
err = checkOutputPayload(types.TransferAsset, outputs[1])
s.EqualError(err, "invalid public key count")
err = checkOutputPayload(types.TransferAsset, outputs[2])
s.EqualError(err, "duplicate candidate")
err = checkOutputPayload(types.TransferAsset, outputs[3])
s.EqualError(err, "output address should be standard")
}
func (s *txValidatorTestSuite) TestCheckVoteProducerOutputs() {
outputs := []*types.Output{
{
Type: types.OTNone,
},
}
references := make(map[*types.Input]*types.Output)
s.NoError(checkVoteProducerOutputs(outputs, references, nil))
publicKey1 := "023a133480176214f88848c6eaa684a54b316849df2b8570b57f3a917f19bbc77a"
publicKey2 := "030a26f8b4ab0ea219eb461d1e454ce5f0bd0d289a6a64ffc0743dab7bd5be0be9"
candidate1, _ := common.HexStringToBytes(publicKey1)
candidate2, _ := common.HexStringToBytes(publicKey2)
producers := [][]byte{candidate1}
hashStr := "21c5656c65028fe21f2222e8f0cd46a1ec734cbdb6"
hashByte, _ := common.HexStringToBytes(hashStr)
hash, _ := common.Uint168FromBytes(hashByte)
outputs = append(outputs, &types.Output{
Type: types.OTVote,
ProgramHash: *hash,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: 0,
Candidates: [][]byte{candidate1},
},
},
},
})
s.Error(checkVoteProducerOutputs(outputs, references, producers))
references[&types.Input{}] = &types.Output{
ProgramHash: *hash,
}
s.NoError(checkVoteProducerOutputs(outputs, references, producers))
outputs = append(outputs, &types.Output{
Type: types.OTVote,
ProgramHash: *hash,
Payload: &outputpayload.VoteOutput{
Version: 0,
Contents: []outputpayload.VoteContent{
{
VoteType: 0,
Candidates: [][]byte{candidate2},
},
},
},
})
s.Error(checkVoteProducerOutputs(outputs, references, producers))
}
func | (t *testing.T) {
suite.Run(t, new(txValidatorTestSuite))
}
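// newCoinBaseTransaction assembles a minimal coinbase transaction with a single
// input referencing the empty hash at index 0, locked at the given height.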
func newCoinBaseTransaction(coinBasePayload *payload.CoinBase,
currentHeight uint32) *types.Transaction {
return &types.Transaction{
Version: 0,
TxType: types.CoinBase,
PayloadVersion: payload.CoinBaseVersion,
Payload: coinBasePayload,
Inputs: []*types.Input{
{
Previous: types.OutPoint{
TxID: common.EmptyHash,
Index: 0x0000,
},
Sequence: 0x00000000,
},
},
Attributes: []*types.Attribute{},
LockTime: currentHeight,
Programs: []*program.Program{},
}
}
func TestCheckOutputProgramHash(t *testing.T) {
programHash := common.Uint168{}
// empty program hash should pass
assert.NoError(t, checkOutputProgramHash(88813, programHash))
// prefix standard program hash should pass
programHash[0] = uint8(contract.PrefixStandard)
assert.NoError(t, checkOutputProgramHash(88813, programHash))
// prefix multisig program hash should pass
programHash[0] = uint8(contract.PrefixMultiSig)
assert.NoError(t, checkOutputProgramHash(88813, programHash))
// prefix crosschain program hash should pass
programHash[0] = uint8(contract.PrefixCrossChain)
assert.NoError(t, checkOutputProgramHash(88813, programHash))
// other prefix program hash should not pass
programHash[0] = 0x34
assert.Error(t, checkOutputProgramHash(88813, programHash))
// other prefix program hash should pass in old version
programHash[0] = 0x34
assert.NoError(t, checkOutputProgramHash(88811, programHash))
}
| TestTxValidatorSuite |
url.go | package requester | "net/url"
"github.com/JabinGP/mdout/model"
"github.com/JabinGP/mdout/tool"
)
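// buildURLReq prepares a Request for a URL input. The URL is query-escaped only
// to derive the output file name; the raw URL stays as the input path, and the
// output type is forced to pdf.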
func buildURLReq(inPath string, params model.Params) (*Request, error) {
escapedURL := url.QueryEscape(inPath)
absOutPath, err := tool.GetOutFullName(escapedURL, params)
if err != nil {
return nil, err
}
var req = Request{
Params: params,
InType: "url",
InPath: inPath,
AbsInPath: inPath,
}
req.OutType = "pdf" // Must be pdf when input type is url
req.AbsOutPath = absOutPath
return &req, nil
} |
import ( |
Opengauss_Function_Tools_gs_check_Case0378.py | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : server-side tools
Case Name   : routine inspection skipping multiple specified check items
Description :
    Routine inspection that skips multiple specified check items:
    gs_check -e inspect --skip-items CheckSshdService,CheckSshdConfig,
    CheckHashIndex,CheckFilehandle
Expect      :
    the inspection completes successfully
History :
"""
import os
import unittest
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
from yat.test import Node
from yat.test import macro
class Tools(unittest.TestCase):
def setUp(self):
self.log = Logger()
        self.log.info('---Opengauss_Function_Tools_gs_check_Case0378_start---')
self.dbuser_node = Node('dbuser')
self.root_node = Node('default')
self.clear_path = os.path.join(
os.path.dirname(macro.DB_INSTANCE_PATH), 'tool', 'script',
'gspylib', 'inspection', 'output', 'CheckReport*')
self.Constant = Constant()
        self.skip_options = 'CheckSshdService,CheckSshdConfig,' \
                            'CheckHashIndex,CheckFilehandle'
def test_server_tools1(self):
        text = '------step1: routine inspection skipping multiple check items; expect: inspection completes------'
| fo(text)
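        # gs_check prompts for the root user name and password of the cluster
        # nodes; the expect script answers both prompts non-interactively.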
check_cmd = f'''su - {self.dbuser_node.ssh_user} -c "
source {macro.DB_ENV_PATH};
expect -c \\\"set timeout -1
spawn gs_check -e inspect --skip-items {self.skip_options}
expect *]:
send {self.root_node.ssh_user}\\n
expect *]:
send {self.root_node.ssh_password}\\n
expect eof\\\""'''
self.log.info(check_cmd)
shell_res = os.popen(check_cmd)
str_res = ''.join(shell_res.readlines())
self.log.info(str_res)
flag = (self.Constant.GS_CHECK_SUCCESS_MSG2[0] in str_res or
self.Constant.GS_CHECK_SUCCESS_MSG2[1] in str_res) and \
self.Constant.GS_CHECK_SUCCESS_MSG2[2] in str_res
        self.assertTrue(flag, 'execution failed: ' + text)
def tearDown(self):
        text = '----------clean up the environment----------'
self.log.info(text)
clear_cmd = f'rm -rf {self.clear_path};'
self.log.info(clear_cmd)
clear_msg = self.root_node.sh(clear_cmd).result()
self.log.info(clear_msg)
        self.assertEqual('', clear_msg, 'execution failed: ' + text)
        self.log.info(
            '---Opengauss_Function_Tools_gs_check_Case0378_end---')
| self.log.in |
config_test.go | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"sync"
"testing"
"text/template"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
pipelinev1alpha1 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1"
v1 "k8s.io/api/core/v1"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/apimachinery/pkg/util/sets"
utilpointer "k8s.io/utils/pointer"
prowapi "k8s.io/test-infra/prow/apis/prowjobs/v1"
prowjobv1 "k8s.io/test-infra/prow/apis/prowjobs/v1"
"k8s.io/test-infra/prow/config/secret"
gerrit "k8s.io/test-infra/prow/gerrit/client"
"k8s.io/test-infra/prow/github"
"k8s.io/test-infra/prow/github/fakegithub"
"k8s.io/test-infra/prow/kube"
"k8s.io/test-infra/prow/pod-utils/decorate"
"k8s.io/test-infra/prow/pod-utils/downwardapi"
)
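// TestDefaultJobBase exercises defaultJobBase with table-driven cases: each case
// mutates a fully populated JobBase and declares the expected defaulted result.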
func TestDefaultJobBase(t *testing.T) {
bar := "bar"
filled := JobBase{
Agent: "foo",
Namespace: &bar,
Cluster: "build",
}
cases := []struct {
name string
config ProwConfig
base func(j *JobBase)
expected func(j *JobBase)
}{
{
name: "no changes when fields are already set",
},
{
name: "empty agent results in kubernetes",
base: func(j *JobBase) {
j.Agent = ""
},
expected: func(j *JobBase) {
j.Agent = string(prowapi.KubernetesAgent)
},
},
{
name: "nil namespace becomes PodNamespace",
config: ProwConfig{
PodNamespace: "pod-namespace",
ProwJobNamespace: "wrong",
},
base: func(j *JobBase) {
j.Namespace = nil
},
expected: func(j *JobBase) {
p := "pod-namespace"
j.Namespace = &p
},
},
{
name: "empty namespace becomes PodNamespace",
config: ProwConfig{
PodNamespace: "new-pod-namespace",
ProwJobNamespace: "still-wrong",
},
base: func(j *JobBase) {
var empty string
j.Namespace = &empty
},
expected: func(j *JobBase) {
p := "new-pod-namespace"
j.Namespace = &p
},
},
{
name: "empty cluster becomes DefaultClusterAlias",
base: func(j *JobBase) {
j.Cluster = ""
},
expected: func(j *JobBase) {
j.Cluster = kube.DefaultClusterAlias
},
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
actual := filled
if tc.base != nil {
tc.base(&actual)
}
expected := actual
if tc.expected != nil {
tc.expected(&expected)
}
tc.config.defaultJobBase(&actual)
if !reflect.DeepEqual(actual, expected) {
t.Errorf("expected %#v\n!=\nactual %#v", expected, actual)
}
})
}
}
func TestSpyglassConfig(t *testing.T) {
testCases := []struct {
name string
spyglassConfig string
expectedViewers map[string][]string
expectedRegexMatches map[string][]string
expectedSizeLimit int64
expectError bool
}{
{
name: "Default: build log, metadata, junit",
spyglassConfig: `
deck:
spyglass:
size_limit: 500e+6
viewers:
"started.json|finished.json":
- "metadata"
"build-log.txt":
- "buildlog"
"artifacts/junit.*\\.xml":
- "junit"
`,
expectedViewers: map[string][]string{
"started.json|finished.json": {"metadata"},
"build-log.txt": {"buildlog"},
"artifacts/junit.*\\.xml": {"junit"},
},
expectedRegexMatches: map[string][]string{
"started.json|finished.json": {"started.json", "finished.json"},
"build-log.txt": {"build-log.txt"},
"artifacts/junit.*\\.xml": {"artifacts/junit01.xml", "artifacts/junit_runner.xml"},
},
expectedSizeLimit: 500e6,
expectError: false,
},
{
name: "Backwards compatibility",
spyglassConfig: `
deck:
spyglass:
size_limit: 500e+6
viewers:
"started.json|finished.json":
- "metadata-viewer"
"build-log.txt":
- "build-log-viewer"
"artifacts/junit.*\\.xml":
- "junit-viewer"
`,
expectedViewers: map[string][]string{
"started.json|finished.json": {"metadata"},
"build-log.txt": {"buildlog"},
"artifacts/junit.*\\.xml": {"junit"},
},
expectedSizeLimit: 500e6,
expectError: false,
},
{
name: "Invalid spyglass size limit",
spyglassConfig: `
deck:
spyglass:
size_limit: -4
viewers:
"started.json|finished.json":
- "metadata-viewer"
"build-log.txt":
- "build-log-viewer"
"artifacts/junit.*\\.xml":
- "junit-viewer"
`,
expectError: true,
},
{
name: "Invalid Spyglass regexp",
spyglassConfig: `
deck:
spyglass:
size_limit: 5
viewers:
"started.json\|]finished.json":
- "metadata-viewer"
`,
expectError: true,
},
}
for _, tc := range testCases {
// save the config
spyglassConfigDir, err := ioutil.TempDir("", "spyglassConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(spyglassConfigDir)
spyglassConfig := filepath.Join(spyglassConfigDir, "config.yaml")
if err := ioutil.WriteFile(spyglassConfig, []byte(tc.spyglassConfig), 0666); err != nil {
t.Fatalf("fail to write spyglass config: %v", err)
}
cfg, err := Load(spyglassConfig, "")
if (err != nil) != tc.expectError {
t.Fatalf("tc %s: expected error: %v, got: %v, error: %v", tc.name, tc.expectError, (err != nil), err)
}
if err != nil {
continue
}
got := cfg.Deck.Spyglass.Viewers
for re, viewNames := range got {
expected, ok := tc.expectedViewers[re]
if !ok {
t.Errorf("With re %s, got %s, was not found in expected.", re, viewNames)
continue
}
if !reflect.DeepEqual(expected, viewNames) {
t.Errorf("With re %s, got %s, expected view name %s", re, viewNames, expected)
}
}
for re, viewNames := range tc.expectedViewers {
gotNames, ok := got[re]
if !ok {
t.Errorf("With re %s, expected %s, was not found in got.", re, viewNames)
continue
}
if !reflect.DeepEqual(gotNames, viewNames) {
t.Errorf("With re %s, got %s, expected view name %s", re, gotNames, viewNames)
}
}
for expectedRegex, matches := range tc.expectedRegexMatches {
compiledRegex, ok := cfg.Deck.Spyglass.RegexCache[expectedRegex]
if !ok {
t.Errorf("tc %s, regex %s was not found in the spyglass regex cache", tc.name, expectedRegex)
continue
}
for _, match := range matches {
if !compiledRegex.MatchString(match) {
t.Errorf("tc %s expected compiled regex %s to match %s, did not match.", tc.name, expectedRegex, match)
}
}
}
if cfg.Deck.Spyglass.SizeLimit != tc.expectedSizeLimit {
t.Errorf("%s expected SizeLimit %d, got %d", tc.name, tc.expectedSizeLimit, cfg.Deck.Spyglass.SizeLimit)
}
}
}
func TestDecorationRawYaml(t *testing.T) {
var testCases = []struct {
name string
expectError bool
rawConfig string
expected *prowapi.DecorationConfig
}{
{
name: "no default",
expectError: true,
rawConfig: `
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
},
{
name: "with bad default",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
# clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
expectError: true,
},
{
name: "repo should inherit from default config",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
'org/inherit':
timeout: 2h
grace_period: 15s
utility_images: {}
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
},
{
name: "with default and repo, use default",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
'random/repo':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:random"
initupload: "initupload:random"
entrypoint: "entrypoint:random"
sidecar: "sidecar:org"
gcs_configuration:
bucket: "ignore"
path_strategy: "legacy"
default_org: "random"
default_repo: "repo"
gcs_credentials_secret: "random-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
expected: &prowapi.DecorationConfig{
Timeout: &prowapi.Duration{Duration: 2 * time.Hour},
GracePeriod: &prowapi.Duration{Duration: 15 * time.Second},
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:default",
InitUpload: "initupload:default",
Entrypoint: "entrypoint:default",
Sidecar: "sidecar:default",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "default-bucket",
PathStrategy: prowapi.PathStrategyLegacy,
DefaultOrg: "kubernetes",
DefaultRepo: "kubernetes",
},
GCSCredentialsSecret: "default-service-account",
},
},
{
name: "with default, no explicit decorate",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
expected: &prowapi.DecorationConfig{
Timeout: &prowapi.Duration{Duration: 2 * time.Hour},
GracePeriod: &prowapi.Duration{Duration: 15 * time.Second},
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:default",
InitUpload: "initupload:default",
Entrypoint: "entrypoint:default",
Sidecar: "sidecar:default",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "default-bucket",
PathStrategy: prowapi.PathStrategyLegacy,
DefaultOrg: "kubernetes",
DefaultRepo: "kubernetes",
},
GCSCredentialsSecret: "default-service-account",
},
},
{
name: "with default, has explicit decorate",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
gcs_credentials_secret: "default-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
decoration_config:
timeout: 1
grace_period: 1
utility_images:
clonerefs: "clonerefs:explicit"
initupload: "initupload:explicit"
entrypoint: "entrypoint:explicit"
sidecar: "sidecar:explicit"
gcs_configuration:
bucket: "explicit-bucket"
path_strategy: "explicit"
gcs_credentials_secret: "explicit-service-account"
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
expected: &prowapi.DecorationConfig{
Timeout: &prowapi.Duration{Duration: 1 * time.Nanosecond},
GracePeriod: &prowapi.Duration{Duration: 1 * time.Nanosecond},
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:explicit",
InitUpload: "initupload:explicit",
Entrypoint: "entrypoint:explicit",
Sidecar: "sidecar:explicit",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "explicit-bucket",
PathStrategy: prowapi.PathStrategyExplicit,
DefaultOrg: "kubernetes",
DefaultRepo: "kubernetes",
},
GCSCredentialsSecret: "explicit-service-account",
},
},
{
name: "with default, configures bucket explicitly",
rawConfig: `
plank:
default_decoration_configs:
'*':
timeout: 2h
grace_period: 15s
utility_images:
clonerefs: "clonerefs:default"
initupload: "initupload:default"
entrypoint: "entrypoint:default"
sidecar: "sidecar:default"
gcs_configuration:
bucket: "default-bucket"
path_strategy: "legacy"
default_org: "kubernetes"
default_repo: "kubernetes"
mediaTypes:
log: text/plain
gcs_credentials_secret: "default-service-account"
periodics:
- name: kubernetes-defaulted-decoration
interval: 1h
decorate: true
decoration_config:
gcs_configuration:
bucket: "explicit-bucket"
gcs_credentials_secret: "explicit-service-account"
spec:
containers:
- image: golang:latest
args:
- "test"
- "./..."`,
expected: &prowapi.DecorationConfig{
Timeout: &prowapi.Duration{Duration: 2 * time.Hour},
GracePeriod: &prowapi.Duration{Duration: 15 * time.Second},
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:default",
InitUpload: "initupload:default",
Entrypoint: "entrypoint:default",
Sidecar: "sidecar:default",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "explicit-bucket",
PathStrategy: prowapi.PathStrategyLegacy,
DefaultOrg: "kubernetes",
DefaultRepo: "kubernetes",
MediaTypes: map[string]string{"log": "text/plain"},
},
GCSCredentialsSecret: "explicit-service-account",
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// save the config
prowConfigDir, err := ioutil.TempDir("", "prowConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(prowConfigDir)
prowConfig := filepath.Join(prowConfigDir, "config.yaml")
if err := ioutil.WriteFile(prowConfig, []byte(tc.rawConfig), 0666); err != nil {
t.Fatalf("fail to write prow config: %v", err)
}
cfg, err := Load(prowConfig, "")
if tc.expectError && err == nil {
t.Errorf("tc %s: Expect error, but got nil", tc.name)
} else if !tc.expectError && err != nil {
t.Fatalf("tc %s: Expect no error, but got error %v", tc.name, err)
}
if tc.expected != nil {
if len(cfg.Periodics) != 1 {
t.Fatalf("tc %s: Expect to have one periodic job, got none", tc.name)
}
if diff := cmp.Diff(cfg.Periodics[0].DecorationConfig, tc.expected, cmpopts.EquateEmpty()); diff != "" {
t.Errorf("got diff: %s", diff)
}
}
})
}
}
func TestValidateAgent(t *testing.T) {
jenk := string(prowjobv1.JenkinsAgent)
k := string(prowjobv1.KubernetesAgent)
ns := "default"
base := JobBase{
Agent: k,
Namespace: &ns,
Spec: &v1.PodSpec{},
UtilityConfig: UtilityConfig{
DecorationConfig: &prowapi.DecorationConfig{},
},
}
cases := []struct {
name string
base func(j *JobBase)
pass bool
}{
{
name: "accept unknown agent",
base: func(j *JobBase) {
j.Agent = "random-agent"
},
pass: true,
},
{
name: "kubernetes agent requires spec",
base: func(j *JobBase) {
j.Spec = nil
},
},
{
name: "non-nil namespace required",
base: func(j *JobBase) {
j.Namespace = nil
},
},
{
name: "filled namespace required",
base: func(j *JobBase) {
var s string
j.Namespace = &s
},
},
{
name: "custom namespace requires knative-build agent",
base: func(j *JobBase) {
s := "custom-namespace"
j.Namespace = &s
},
},
{
name: "accept kubernetes agent",
pass: true,
},
{
name: "accept kubernetes agent without decoration",
base: func(j *JobBase) {
j.DecorationConfig = nil
},
pass: true,
},
{
name: "accept jenkins agent",
base: func(j *JobBase) {
j.Agent = jenk
j.Spec = nil
j.DecorationConfig = nil
},
pass: true,
},
{
name: "error_on_eviction allowed for kubernetes agent",
base: func(j *JobBase) {
j.ErrorOnEviction = true
},
pass: true,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
jb := base
if tc.base != nil {
tc.base(&jb)
}
switch err := validateAgent(jb, ns); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
func TestValidatePodSpec(t *testing.T) {
periodEnv := sets.NewString(downwardapi.EnvForType(prowapi.PeriodicJob)...)
postEnv := sets.NewString(downwardapi.EnvForType(prowapi.PostsubmitJob)...)
preEnv := sets.NewString(downwardapi.EnvForType(prowapi.PresubmitJob)...)
cases := []struct {
name string
jobType prowapi.ProwJobType
spec func(s *v1.PodSpec)
noSpec bool
pass bool
}{
{
name: "allow nil spec",
noSpec: true,
pass: true,
},
{
name: "happy case",
pass: true,
},
{
name: "reject init containers",
spec: func(s *v1.PodSpec) {
s.InitContainers = []v1.Container{
{},
}
},
},
{
name: "reject 0 containers",
spec: func(s *v1.PodSpec) {
s.Containers = nil
},
},
{
name: "reject 2 containers",
spec: func(s *v1.PodSpec) {
s.Containers = append(s.Containers, v1.Container{})
},
},
{
name: "reject reserved presubmit env",
jobType: prowapi.PresubmitJob,
spec: func(s *v1.PodSpec) {
// find a presubmit value
for n := range preEnv.Difference(postEnv).Difference(periodEnv) {
s.Containers[0].Env = append(s.Containers[0].Env, v1.EnvVar{Name: n, Value: "whatever"})
}
if len(s.Containers[0].Env) == 0 {
t.Fatal("empty env")
}
},
},
{
name: "reject reserved postsubmit env",
jobType: prowapi.PostsubmitJob,
spec: func(s *v1.PodSpec) {
// find a postsubmit value
for n := range postEnv.Difference(periodEnv) {
s.Containers[0].Env = append(s.Containers[0].Env, v1.EnvVar{Name: n, Value: "whatever"})
}
if len(s.Containers[0].Env) == 0 {
t.Fatal("empty env")
}
},
},
{
name: "reject reserved periodic env",
jobType: prowapi.PeriodicJob,
spec: func(s *v1.PodSpec) {
				// find a periodic value
for n := range periodEnv {
s.Containers[0].Env = append(s.Containers[0].Env, v1.EnvVar{Name: n, Value: "whatever"})
}
if len(s.Containers[0].Env) == 0 {
t.Fatal("empty env")
}
},
},
{
name: "reject reserved mount name",
spec: func(s *v1.PodSpec) {
s.Containers[0].VolumeMounts = append(s.Containers[0].VolumeMounts, v1.VolumeMount{
Name: decorate.VolumeMounts()[0],
MountPath: "/whatever",
})
},
},
{
name: "reject reserved mount path",
spec: func(s *v1.PodSpec) {
s.Containers[0].VolumeMounts = append(s.Containers[0].VolumeMounts, v1.VolumeMount{
Name: "fun",
MountPath: decorate.VolumeMountPaths()[0],
})
},
},
{
name: "accept conflicting mount path parent",
spec: func(s *v1.PodSpec) {
s.Containers[0].VolumeMounts = append(s.Containers[0].VolumeMounts, v1.VolumeMount{
Name: "foo",
MountPath: filepath.Dir(decorate.VolumeMountPaths()[0]),
})
},
},
{
name: "accept conflicting mount path child",
spec: func(s *v1.PodSpec) {
s.Containers[0].VolumeMounts = append(s.Containers[0].VolumeMounts, v1.VolumeMount{
Name: "foo",
MountPath: filepath.Join(decorate.VolumeMountPaths()[0], "extra"),
})
},
},
{
name: "reject reserved volume",
spec: func(s *v1.PodSpec) {
s.Volumes = append(s.Volumes, v1.Volume{Name: decorate.VolumeMounts()[0]})
},
},
{
name: "reject duplicate env",
spec: func(s *v1.PodSpec) {
s.Containers[0].Env = append(s.Containers[0].Env, v1.EnvVar{Name: "foo", Value: "bar"})
s.Containers[0].Env = append(s.Containers[0].Env, v1.EnvVar{Name: "foo", Value: "baz"})
},
},
{
name: "reject duplicate volume",
spec: func(s *v1.PodSpec) {
s.Volumes = append(s.Volumes, v1.Volume{Name: "foo"})
s.Volumes = append(s.Volumes, v1.Volume{Name: "foo"})
},
},
{
name: "reject undefined volume reference",
spec: func(s *v1.PodSpec) {
s.Containers[0].VolumeMounts = append(s.Containers[0].VolumeMounts, v1.VolumeMount{Name: "foo", MountPath: "/not-used-by-decoration-utils"})
},
},
}
spec := v1.PodSpec{
Containers: []v1.Container{
{},
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
jt := prowapi.PresubmitJob
if tc.jobType != "" {
jt = tc.jobType
}
current := spec.DeepCopy()
if tc.noSpec {
current = nil
} else if tc.spec != nil {
tc.spec(current)
}
switch err := validatePodSpec(jt, current); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
func TestValidatePipelineRunSpec(t *testing.T) {
cases := []struct {
name string
jobType prowapi.ProwJobType
spec func(s *pipelinev1alpha1.PipelineRunSpec)
extraRefs []prowapi.Refs
noSpec bool
pass bool
}{
{
name: "allow nil spec",
noSpec: true,
pass: true,
},
{
name: "happy case",
pass: true,
},
{
name: "reject implicit ref for periodic",
jobType: prowapi.PeriodicJob,
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_IMPLICIT_GIT_REF"},
})
},
pass: false,
},
{
name: "allow implicit ref for presubmit",
jobType: prowapi.PresubmitJob,
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_IMPLICIT_GIT_REF"},
})
},
pass: true,
},
{
name: "allow implicit ref for postsubmit",
jobType: prowapi.PostsubmitJob,
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_IMPLICIT_GIT_REF"},
})
},
pass: true,
},
{
name: "reject extra refs usage with no extra refs",
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_EXTRA_GIT_REF_0"},
})
},
pass: false,
},
{
name: "allow extra refs usage with extra refs",
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_EXTRA_GIT_REF_0"},
})
},
extraRefs: []prowapi.Refs{{Org: "o", Repo: "r"}},
pass: true,
},
{
name: "reject wrong extra refs index usage",
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_EXTRA_GIT_REF_1"},
})
},
extraRefs: []prowapi.Refs{{Org: "o", Repo: "r"}},
pass: false,
},
{
name: "reject extra refs without usage",
extraRefs: []prowapi.Refs{{Org: "o", Repo: "r"}},
pass: false,
},
{
name: "allow unrelated resource refs",
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "some-other-ref"},
})
},
pass: true,
},
{
name: "reject leading zeros when extra ref usage is otherwise valid",
spec: func(s *pipelinev1alpha1.PipelineRunSpec) {
s.Resources = append(s.Resources, pipelinev1alpha1.PipelineResourceBinding{
Name: "git ref",
ResourceRef: &pipelinev1alpha1.PipelineResourceRef{Name: "PROW_EXTRA_GIT_REF_000"},
})
},
extraRefs: []prowapi.Refs{{Org: "o", Repo: "r"}},
pass: false,
},
}
spec := pipelinev1alpha1.PipelineRunSpec{}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
jt := prowapi.PresubmitJob
if tc.jobType != "" {
jt = tc.jobType
}
current := spec.DeepCopy()
if tc.noSpec {
current = nil
} else if tc.spec != nil {
tc.spec(current)
}
switch err := ValidatePipelineRunSpec(jt, tc.extraRefs, current); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
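// TestValidateDecoration verifies that a decorated container is accepted only
// when the decoration config is complete and the container declares an
// explicit command or args.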
func TestValidateDecoration(t *testing.T) {
defCfg := prowapi.DecorationConfig{
UtilityImages: &prowjobv1.UtilityImages{
CloneRefs: "clone-me",
InitUpload: "upload-me",
Entrypoint: "enter-me",
Sidecar: "official-drink-of-the-org",
},
GCSCredentialsSecret: "upload-secret",
GCSConfiguration: &prowjobv1.GCSConfiguration{
PathStrategy: prowjobv1.PathStrategyExplicit,
DefaultOrg: "so-org",
DefaultRepo: "very-repo",
},
}
cases := []struct {
name string
container v1.Container
config *prowapi.DecorationConfig
pass bool
}{
{
name: "allow no decoration",
pass: true,
},
{
name: "happy case with cmd",
config: &defCfg,
container: v1.Container{
Command: []string{"hello", "world"},
},
pass: true,
},
{
name: "happy case with args",
config: &defCfg,
container: v1.Container{
Args: []string{"hello", "world"},
},
pass: true,
},
{
name: "reject invalid decoration config",
config: &prowapi.DecorationConfig{},
container: v1.Container{
Command: []string{"hello", "world"},
},
},
{
name: "reject container that has no cmd, no args",
config: &defCfg,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
switch err := validateDecoration(tc.container, tc.config); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
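// TestValidateLabels verifies that user-supplied labels are rejected when they
// collide with the reserved decoration labels or are not valid Kubernetes
// label keys or values.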
func TestValidateLabels(t *testing.T) {
cases := []struct {
name string
labels map[string]string
pass bool
}{
{
name: "happy case",
pass: true,
},
{
name: "reject reserved label",
labels: map[string]string{
decorate.Labels()[0]: "anything",
},
},
{
name: "reject bad label key",
labels: map[string]string{
"_underscore-prefix": "annoying",
},
},
{
name: "reject bad label value",
labels: map[string]string{
"whatever": "_private-is-rejected",
},
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
switch err := validateLabels(tc.labels); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
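// TestValidateJobBase covers the checks shared by all job types: a valid job
// name, a non-negative max_concurrency, a usable pod spec for Kubernetes jobs,
// a complete decoration config, valid labels, and a consistent rerun auth
// config.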
func TestValidateJobBase(t *testing.T) {
ka := string(prowjobv1.KubernetesAgent)
ja := string(prowjobv1.JenkinsAgent)
goodSpec := v1.PodSpec{
Containers: []v1.Container{
{},
},
}
ns := "target-namespace"
cases := []struct {
name string
base JobBase
pass bool
}{
{
name: "valid kubernetes job",
base: JobBase{
Name: "name",
Agent: ka,
Spec: &goodSpec,
Namespace: &ns,
},
pass: true,
},
{
name: "valid jenkins job",
base: JobBase{
Name: "name",
Agent: ja,
Namespace: &ns,
},
pass: true,
},
{
name: "invalid concurrency",
base: JobBase{
Name: "name",
MaxConcurrency: -1,
Agent: ka,
Spec: &goodSpec,
Namespace: &ns,
},
},
{
name: "invalid pod spec",
base: JobBase{
Name: "name",
Agent: ka,
Namespace: &ns,
Spec: &v1.PodSpec{}, // no containers
},
},
{
name: "invalid decoration",
base: JobBase{
Name: "name",
Agent: ka,
Spec: &goodSpec,
UtilityConfig: UtilityConfig{
DecorationConfig: &prowjobv1.DecorationConfig{}, // missing many fields
},
Namespace: &ns,
},
},
{
name: "invalid labels",
base: JobBase{
Name: "name",
Agent: ka,
Spec: &goodSpec,
Labels: map[string]string{
"_leading_underscore": "_rejected",
},
Namespace: &ns,
},
},
{
name: "invalid name",
base: JobBase{
Name: "a/b",
Agent: ka,
Spec: &goodSpec,
Namespace: &ns,
},
pass: false,
},
{
name: "valid complex name",
base: JobBase{
Name: "a-b.c",
Agent: ka,
Spec: &goodSpec,
Namespace: &ns,
},
pass: true,
},
{
name: "invalid rerun_permissions",
base: JobBase{
RerunAuthConfig: &prowapi.RerunAuthConfig{
AllowAnyone: true,
GitHubUsers: []string{"user"},
},
},
pass: false,
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
switch err := validateJobBase(tc.base, prowjobv1.PresubmitJob, ns); {
case err == nil && !tc.pass:
t.Error("validation failed to raise an error")
case err != nil && tc.pass:
t.Errorf("validation should have passed, got: %v", err)
}
})
}
}
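// TestValidateRefs verifies that extra_refs may not repeat the repo the job is
// already configured for, and that every duplicated org/repo pair is reported
// in the error.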
func TestValidateRefs(t *testing.T) {
cases := []struct {
name string
extraRefs []prowapi.Refs
expected error
}{
{
name: "validation error for extra ref specifying the same repo for which the job is configured",
extraRefs: []prowapi.Refs{
{
Org: "org",
Repo: "repo",
},
},
expected: fmt.Errorf("Invalid job test on repo org/repo: the following refs specified more than once: %s",
"org/repo"),
},
{
name: "validation error lists all duplications",
extraRefs: []prowapi.Refs{
{
Org: "org",
Repo: "repo",
},
{
Org: "org",
Repo: "foo",
},
{
Org: "org",
Repo: "bar",
},
{
Org: "org",
Repo: "foo",
},
},
expected: fmt.Errorf("Invalid job test on repo org/repo: the following refs specified more than once: %s",
"org/foo,org/repo"),
},
{
name: "no errors if there are no duplications",
extraRefs: []prowapi.Refs{
{
Org: "org",
Repo: "foo",
},
},
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
job := JobBase{
Name: "test",
UtilityConfig: UtilityConfig{
ExtraRefs: tc.extraRefs,
},
}
if err := ValidateRefs("org/repo", job); !reflect.DeepEqual(err, tc.expected) {
t.Errorf("expected %#v\n!=\nactual %#v", tc.expected, err)
}
})
}
}
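// TestValidateReportingWithGerritLabel verifies, for both presubmits and
// postsubmits, that a job carrying a non-empty Gerrit report label cannot also
// be configured to skip reporting.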
func TestValidateReportingWithGerritLabel(t *testing.T) {
cases := []struct {
name string
labels map[string]string
reporter Reporter
expected error
}{
{
name: "no errors if job is set to report",
reporter: Reporter{
Context: "context",
},
labels: map[string]string{
gerrit.GerritReportLabel: "label",
},
},
{
name: "no errors if Gerrit report label is not defined",
reporter: Reporter{SkipReport: true},
labels: map[string]string{
"label": "value",
},
},
{
name: "no errors if job is set to skip report and Gerrit report label is empty",
reporter: Reporter{SkipReport: true},
labels: map[string]string{
gerrit.GerritReportLabel: "",
},
},
{
name: "error if job is set to skip report and Gerrit report label is set to non-empty",
reporter: Reporter{SkipReport: true},
labels: map[string]string{
gerrit.GerritReportLabel: "label",
},
expected: fmt.Errorf("Gerrit report label %s set to non-empty string but job is configured to skip reporting.", gerrit.GerritReportLabel),
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
base := JobBase{
Name: "test-job",
Labels: tc.labels,
}
presubmits := []Presubmit{
{
JobBase: base,
Reporter: tc.reporter,
},
}
var expected error
if tc.expected != nil {
expected = fmt.Errorf("invalid presubmit job %s: %v", "test-job", tc.expected)
}
if err := validatePresubmits(presubmits, "default-namespace"); !reflect.DeepEqual(err, utilerrors.NewAggregate([]error{expected})) {
t.Errorf("did not get expected validation result:\n%v", cmp.Diff(expected, err))
}
postsubmits := []Postsubmit{
{
JobBase: base,
Reporter: tc.reporter,
},
}
if tc.expected != nil {
expected = fmt.Errorf("invalid postsubmit job %s: %v", "test-job", tc.expected)
}
if err := validatePostsubmits(postsubmits, "default-namespace"); !reflect.DeepEqual(err, utilerrors.NewAggregate([]error{expected})) {
t.Errorf("did not get expected validation result:\n%v", cmp.Diff(expected, err))
}
})
}
}
// TestValidConfigLoading is an integration test for fake config loading.
func TestValidConfigLoading(t *testing.T) {
var testCases = []struct {
name string
prowConfig string
jobConfigs []string
expectError bool
expectPodNameSpace string
expectEnv map[string][]v1.EnvVar
verify func(*Config) error
}{
{
name: "one config",
prowConfig: ``,
},
{
name: "reject invalid kubernetes periodic",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
agent: kubernetes
build_spec:
name: foo`,
},
expectError: true,
},
{
name: "one periodic",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
agent: kubernetes
name: foo
spec:
containers:
- image: alpine`,
},
},
{
name: "one periodic no agent, should default",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
name: foo
spec:
containers:
- image: alpine`,
},
},
{
name: "two periodics",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
agent: kubernetes
name: foo
spec:
containers:
- image: alpine`,
`
periodics:
- interval: 10m
agent: kubernetes
name: bar
spec:
containers:
- image: alpine`,
},
},
{
name: "duplicated periodics",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
agent: kubernetes
name: foo
spec:
containers:
- image: alpine`,
`
periodics:
- interval: 10m
agent: kubernetes
name: foo
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "one presubmit no context should default",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
spec:
containers:
- image: alpine`,
},
},
{
name: "one presubmit no agent should default",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- context: bar
name: presubmit-bar
spec:
containers:
- image: alpine`,
},
},
{
name: "one presubmit, ok",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine`,
},
},
{
name: "two presubmits",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine`,
`
presubmits:
foo/baz:
- agent: kubernetes
name: presubmit-baz
context: baz
spec:
containers:
- image: alpine`,
},
},
{
name: "dup presubmits, one file",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "dup presubmits, two files",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine`,
`
presubmits:
foo/bar:
- agent: kubernetes
context: bar
name: presubmit-bar
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "dup presubmits not the same branch, two files",
prowConfig: ``,
jobConfigs: []string{
`
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
branches:
- master
spec:
containers:
- image: alpine`,
`
presubmits:
foo/bar:
- agent: kubernetes
context: bar
branches:
- other
name: presubmit-bar
spec:
containers:
- image: alpine`,
},
expectError: false,
},
{
name: "dup presubmits main file",
prowConfig: `
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
spec:
containers:
- image: alpine
- agent: kubernetes
context: bar
name: presubmit-bar
spec:
containers:
- image: alpine`,
expectError: true,
},
{
name: "dup presubmits main file not on the same branch",
prowConfig: `
presubmits:
foo/bar:
- agent: kubernetes
name: presubmit-bar
context: bar
branches:
- other
spec:
containers:
- image: alpine
- agent: kubernetes
context: bar
branches:
- master
name: presubmit-bar
spec:
containers:
- image: alpine`,
expectError: false,
},
{
name: "one postsubmit, ok",
prowConfig: ``,
jobConfigs: []string{
`
postsubmits:
foo/bar:
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine`,
},
},
{
name: "one postsubmit no agent, should default",
prowConfig: ``,
jobConfigs: []string{
`
postsubmits:
foo/bar:
- name: postsubmit-bar
spec:
containers:
- image: alpine`,
},
},
{
name: "two postsubmits",
prowConfig: ``,
jobConfigs: []string{
`
postsubmits:
foo/bar:
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine`,
`
postsubmits:
foo/baz:
- agent: kubernetes
name: postsubmit-baz
spec:
containers:
- image: alpine`,
},
},
{
name: "dup postsubmits, one file",
prowConfig: ``,
jobConfigs: []string{
`
postsubmits:
foo/bar:
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "dup postsubmits, two files",
prowConfig: ``,
jobConfigs: []string{
`
postsubmits:
foo/bar:
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine`,
`
postsubmits:
foo/bar:
- agent: kubernetes
name: postsubmit-bar
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "test valid presets in main config",
prowConfig: `
presets:
- labels:
preset-baz: "true"
env:
- name: baz
value: fejtaverse`,
jobConfigs: []string{
`periodics:
- interval: 10m
agent: kubernetes
name: foo
labels:
preset-baz: "true"
spec:
containers:
- image: alpine`,
`
periodics:
- interval: 10m
agent: kubernetes
name: bar
labels:
preset-baz: "true"
spec:
containers:
- image: alpine`,
},
expectEnv: map[string][]v1.EnvVar{
"foo": {
{
Name: "baz",
Value: "fejtaverse",
},
},
"bar": {
{
Name: "baz",
Value: "fejtaverse",
},
},
},
},
{
name: "test valid presets in job configs",
prowConfig: ``,
jobConfigs: []string{
`
presets:
- labels:
preset-baz: "true"
env:
- name: baz
value: fejtaverse
periodics:
- interval: 10m
agent: kubernetes
name: foo
labels:
preset-baz: "true"
spec:
containers:
- image: alpine`,
`
periodics:
- interval: 10m
agent: kubernetes
name: bar
labels:
preset-baz: "true"
spec:
containers:
- image: alpine`,
},
expectEnv: map[string][]v1.EnvVar{
"foo": {
{
Name: "baz",
Value: "fejtaverse",
},
},
"bar": {
{
Name: "baz",
Value: "fejtaverse",
},
},
},
},
{
name: "test valid presets in both main & job configs",
prowConfig: `
presets:
- labels:
preset-baz: "true"
env:
- name: baz
value: fejtaverse`,
jobConfigs: []string{
`
presets:
- labels:
preset-k8s: "true"
env:
- name: k8s
value: kubernetes
periodics:
- interval: 10m
agent: kubernetes
name: foo
labels:
preset-baz: "true"
preset-k8s: "true"
spec:
containers:
- image: alpine`,
`
periodics:
- interval: 10m
agent: kubernetes
name: bar
labels:
preset-baz: "true"
spec:
containers:
- image: alpine`,
},
expectEnv: map[string][]v1.EnvVar{
"foo": {
{
Name: "baz",
Value: "fejtaverse",
},
{
Name: "k8s",
Value: "kubernetes",
},
},
"bar": {
{
Name: "baz",
Value: "fejtaverse",
},
},
},
},
{
name: "decorated periodic missing `command`",
prowConfig: ``,
jobConfigs: []string{
`
periodics:
- interval: 10m
agent: kubernetes
name: foo
decorate: true
spec:
containers:
- image: alpine`,
},
expectError: true,
},
{
name: "all repos contains repos from tide, presubmits and postsubmits",
prowConfig: `
tide:
queries:
- repos:
- stranded/fish`,
jobConfigs: []string{`
presubmits:
k/k:
- name: my-job
spec:
containers:
- name: lost-vessel
image: vessel:latest
command: ["ride"]`,
`
postsubmits:
k/test-infra:
- name: my-job
spec:
containers:
- name: lost-vessel
image: vessel:latest
command: ["ride"]`,
},
verify: func(c *Config) error {
if diff := c.AllRepos.Difference(sets.NewString("k/k", "k/test-infra", "stranded/fish")); len(diff) != 0 {
return fmt.Errorf("expected no diff, got %q", diff)
}
return nil
},
},
{
name: "no jobs doesn't make AllRepos a nilpointer",
verify: func(c *Config) error {
if c.AllRepos == nil {
return errors.New("config.AllRepos is nil")
}
return nil
},
},
{
name: "prowYAMLGetter gets set",
verify: func(c *Config) error {
if c.ProwYAMLGetter == nil {
return errors.New("config.ProwYAMLGetter is nil")
}
return nil
},
},
{
name: "InRepoConfigAllowedClusters gets defaulted if unset",
verify: func(c *Config) error {
if len(c.InRepoConfig.AllowedClusters) != 1 ||
len(c.InRepoConfig.AllowedClusters["*"]) != 1 ||
c.InRepoConfig.AllowedClusters["*"][0] != kube.DefaultClusterAlias {
return fmt.Errorf("expected c.InRepoConfig.AllowedClusters to contain exactly one global entry to allow the buildcluster, was %v", c.InRepoConfig.AllowedClusters)
}
return nil
},
},
{
name: "InRepoConfigAllowedClusters gets defaulted if no global setting",
prowConfig: `
in_repo_config:
allowed_clusters:
foo/bar: ["my-cluster"]
`,
verify: func(c *Config) error {
if len(c.InRepoConfig.AllowedClusters) != 2 ||
len(c.InRepoConfig.AllowedClusters["*"]) != 1 ||
c.InRepoConfig.AllowedClusters["*"][0] != kube.DefaultClusterAlias {
return fmt.Errorf("expected c.InRepoConfig.AllowedClusters to contain exactly one global entry to allow the buildcluster, was %v", c.InRepoConfig.AllowedClusters)
}
return nil
},
},
{
name: "InRepoConfigAllowedClusters doesn't get overwritten",
prowConfig: `
in_repo_config:
allowed_clusters:
foo/bar: ["my-cluster"]
`,
verify: func(c *Config) error {
if len(c.InRepoConfig.AllowedClusters) != 2 ||
len(c.InRepoConfig.AllowedClusters["foo/bar"]) != 1 ||
c.InRepoConfig.AllowedClusters["foo/bar"][0] != "my-cluster" {
return fmt.Errorf("expected c.InRepoConfig.AllowedClusters to contain exactly one entry for foo/bar, was %v", c.InRepoConfig.AllowedClusters)
}
return nil
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
// save the config
prowConfigDir, err := ioutil.TempDir("", "prowConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(prowConfigDir)
prowConfig := filepath.Join(prowConfigDir, "config.yaml")
if err := ioutil.WriteFile(prowConfig, []byte(tc.prowConfig), 0666); err != nil {
t.Fatalf("fail to write prow config: %v", err)
}
jobConfig := ""
if len(tc.jobConfigs) > 0 {
jobConfigDir, err := ioutil.TempDir("", "jobConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(jobConfigDir)
// cover both job config as a file & a dir
if len(tc.jobConfigs) == 1 {
// a single file
jobConfig = filepath.Join(jobConfigDir, "config.yaml")
if err := ioutil.WriteFile(jobConfig, []byte(tc.jobConfigs[0]), 0666); err != nil {
t.Fatalf("fail to write job config: %v", err)
}
} else {
// a dir
jobConfig = jobConfigDir
for idx, config := range tc.jobConfigs {
subConfig := filepath.Join(jobConfigDir, fmt.Sprintf("config_%d.yaml", idx))
if err := ioutil.WriteFile(subConfig, []byte(config), 0666); err != nil {
t.Fatalf("fail to write job config: %v", err)
}
}
}
}
cfg, err := Load(prowConfig, jobConfig)
if tc.expectError && err == nil {
t.Errorf("tc %s: Expect error, but got nil", tc.name)
} else if !tc.expectError && err != nil {
t.Errorf("tc %s: Expect no error, but got error %v", tc.name, err)
}
if err == nil {
if tc.expectPodNameSpace == "" {
tc.expectPodNameSpace = "default"
}
if cfg.PodNamespace != tc.expectPodNameSpace {
t.Errorf("tc %s: Expect PodNamespace %s, but got %v", tc.name, tc.expectPodNameSpace, cfg.PodNamespace)
}
if len(tc.expectEnv) > 0 {
	// NOTE: reconstructed check (assumption): presets should have merged
	// the expected env vars into each named periodic's first container.
	for _, p := range cfg.AllPeriodics() {
		if envs, ok := tc.expectEnv[p.Name]; ok && !reflect.DeepEqual(envs, p.Spec.Containers[0].Env) {
			t.Errorf("tc %s: expected env %v for job %s, got %+v", tc.name, envs, p.Name, p.Spec.Containers[0].Env)
		}
	}
}
}
if tc.verify != nil {
if err := tc.verify(cfg); err != nil {
t.Fatalf("verify failed: %v", err)
}
}
})
}
}
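// TestBrancher_Intersects checks the branch-overlap logic: explicit branch
// lists, skip lists, and regex patterns must all be honored, and Intersects
// must be symmetric in its arguments.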
func TestBrancher_Intersects(t *testing.T) {
testCases := []struct {
name string
a, b Brancher
result bool
}{
{
name: "TwodifferentBranches",
a: Brancher{
Branches: []string{"a"},
},
b: Brancher{
Branches: []string{"b"},
},
},
{
name: "Opposite",
a: Brancher{
SkipBranches: []string{"b"},
},
b: Brancher{
Branches: []string{"b"},
},
},
{
name: "BothRunOnAllBranches",
a: Brancher{},
b: Brancher{},
result: true,
},
{
name: "RunsOnAllBranchesAndSpecified",
a: Brancher{},
b: Brancher{
Branches: []string{"b"},
},
result: true,
},
{
name: "SkipBranchesAndSet",
a: Brancher{
SkipBranches: []string{"a", "b", "c"},
},
b: Brancher{
Branches: []string{"a"},
},
},
{
name: "SkipBranchesAndSet",
a: Brancher{
Branches: []string{"c"},
},
b: Brancher{
Branches: []string{"a"},
},
},
{
name: "BothSkipBranches",
a: Brancher{
SkipBranches: []string{"a", "b", "c"},
},
b: Brancher{
SkipBranches: []string{"d", "e", "f"},
},
result: true,
},
{
name: "BothSkipCommonBranches",
a: Brancher{
SkipBranches: []string{"a", "b", "c"},
},
b: Brancher{
SkipBranches: []string{"b", "e", "f"},
},
result: true,
},
{
name: "NoIntersectionBecauseRegexSkip",
a: Brancher{
SkipBranches: []string{`release-\d+\.\d+`},
},
b: Brancher{
Branches: []string{`release-1.14`, `release-1.13`},
},
result: false,
},
{
name: "IntersectionDespiteRegexSkip",
a: Brancher{
SkipBranches: []string{`release-\d+\.\d+`},
},
b: Brancher{
Branches: []string{`release-1.14`, `master`},
},
result: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(st *testing.T) {
a, err := setBrancherRegexes(tc.a)
if err != nil {
st.Fatalf("Failed to set brancher A regexes: %v", err)
}
b, err := setBrancherRegexes(tc.b)
if err != nil {
st.Fatalf("Failed to set brancher B regexes: %v", err)
}
r1 := a.Intersects(b)
r2 := b.Intersects(a)
for _, result := range []bool{r1, r2} {
if result != tc.result {
st.Errorf("Expected %v got %v", tc.result, result)
}
}
})
}
}
// TestSecretAgentLoading is an integration test for fake secrets loading in a
// secret agent. It also checks that the agent picks up changes to the secrets'
// values as expected.
func TestSecretAgentLoading(t *testing.T) {
tempTokenValue := "121f3cb3e7f70feeb35f9204f5a988d7292c7ba1"
changedTokenValue := "121f3cb3e7f70feeb35f9204f5a988d7292c7ba0"
// Create a temporary directory.
secretDir, err := ioutil.TempDir("", "secretDir")
if err != nil {
t.Fatalf("fail to create a temporary directory: %v", err)
}
defer os.RemoveAll(secretDir)
// Create the first temporary secret.
firstTempSecret := filepath.Join(secretDir, "firstTempSecret")
if err := ioutil.WriteFile(firstTempSecret, []byte(tempTokenValue), 0666); err != nil {
t.Fatalf("fail to write secret: %v", err)
}
// Create the second temporary secret.
secondTempSecret := filepath.Join(secretDir, "secondTempSecret")
if err := ioutil.WriteFile(secondTempSecret, []byte(tempTokenValue), 0666); err != nil {
t.Fatalf("fail to write secret: %v", err)
}
tempSecrets := []string{firstTempSecret, secondTempSecret}
// Start the agent and add the two temporary secrets.
secretAgent := &secret.Agent{}
if err := secretAgent.Start(tempSecrets); err != nil {
t.Fatalf("Error starting secrets agent. %v", err)
}
// Check if the values are as expected.
for _, tempSecret := range tempSecrets {
tempSecretValue := secretAgent.GetSecret(tempSecret)
if string(tempSecretValue) != tempTokenValue {
t.Fatalf("In secret %s it was expected %s but found %s",
tempSecret, tempTokenValue, tempSecretValue)
}
}
// Change the values of the files.
if err := ioutil.WriteFile(firstTempSecret, []byte(changedTokenValue), 0666); err != nil {
t.Fatalf("fail to write secret: %v", err)
}
if err := ioutil.WriteFile(secondTempSecret, []byte(changedTokenValue), 0666); err != nil {
t.Fatalf("fail to write secret: %v", err)
}
retries := 10
var errs []string
// Check if the values changed as expected.
for _, tempSecret := range tempSecrets {
// Reset counter
counter := 0
for counter <= retries {
tempSecretValue := secretAgent.GetSecret(tempSecret)
if string(tempSecretValue) != changedTokenValue {
if counter == retries {
errs = append(errs, fmt.Sprintf("expected secret %s to contain %s, but found %s\n",
	tempSecret, changedTokenValue, tempSecretValue))
} else {
// The secret agent needs some time to pick up the new values, so wait and retry.
time.Sleep(400 * time.Millisecond)
}
} else {
break
}
counter++
}
}
if len(errs) > 0 {
	t.Fatal(errs)
}
}
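// TestValidGitHubReportType verifies that github_reporter.job_types_to_report
// defaults to presubmit and postsubmit, and that unsupported job types such as
// batch are rejected at load time.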
func TestValidGitHubReportType(t *testing.T) {
var testCases = []struct {
name string
prowConfig string
expectError bool
expectTypes []prowapi.ProwJobType
}{
{
name: "empty config should default to report for both presubmit and postsubmit",
prowConfig: ``,
expectTypes: []prowapi.ProwJobType{prowapi.PresubmitJob, prowapi.PostsubmitJob},
},
{
name: "reject unsupported job types",
prowConfig: `
github_reporter:
job_types_to_report:
- presubmit
- batch
`,
expectError: true,
},
{
name: "accept valid job types",
prowConfig: `
github_reporter:
job_types_to_report:
- presubmit
- postsubmit
`,
expectTypes: []prowapi.ProwJobType{prowapi.PresubmitJob, prowapi.PostsubmitJob},
},
}
for _, tc := range testCases {
// save the config
prowConfigDir, err := ioutil.TempDir("", "prowConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(prowConfigDir)
prowConfig := filepath.Join(prowConfigDir, "config.yaml")
if err := ioutil.WriteFile(prowConfig, []byte(tc.prowConfig), 0666); err != nil {
t.Fatalf("fail to write prow config: %v", err)
}
cfg, err := Load(prowConfig, "")
if tc.expectError && err == nil {
t.Errorf("tc %s: Expect error, but got nil", tc.name)
} else if !tc.expectError && err != nil {
t.Errorf("tc %s: Expect no error, but got error %v", tc.name, err)
}
if err == nil {
if !reflect.DeepEqual(cfg.GitHubReporter.JobTypesToReport, tc.expectTypes) {
t.Errorf("tc %s: expected %#v\n!=\nactual %#v", tc.name, tc.expectTypes, cfg.GitHubReporter.JobTypesToReport)
}
}
}
}
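// TestValidRerunAuthConfig verifies that deck.rerun_auth_config rejects a user
// whitelist combined with allow_anyone: true, while empty or one-sided configs
// load cleanly.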
func TestValidRerunAuthConfig(t *testing.T) {
var testCases = []struct {
name string
prowConfig string
expectError bool
}{
{
name: "valid rerun auth config",
prowConfig: `
deck:
rerun_auth_config:
allow_anyone: false
github_users:
- someperson
- someotherperson
`,
expectError: false,
},
{
name: "allow anyone and whitelist specified",
prowConfig: `
deck:
rerun_auth_config:
allow_anyone: true
github_users:
- someperson
- anotherperson
`,
expectError: true,
},
{
name: "empty config",
prowConfig: `
deck:
rerun_auth_config:
`,
expectError: false,
},
{
name: "allow anyone with empty whitelist",
prowConfig: `
deck:
rerun_auth_config:
allow_anyone: true
github_users:
`,
expectError: false,
},
}
for _, tc := range testCases {
// save the config
prowConfigDir, err := ioutil.TempDir("", "prowConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(prowConfigDir)
prowConfig := filepath.Join(prowConfigDir, "config.yaml")
if err := ioutil.WriteFile(prowConfig, []byte(tc.prowConfig), 0666); err != nil {
t.Fatalf("fail to write prow config: %v", err)
}
_, err = Load(prowConfig, "")
if tc.expectError && err == nil {
t.Errorf("tc %s: Expect error, but got nil", tc.name)
} else if !tc.expectError && err != nil {
t.Errorf("tc %s: Expect no error, but got error %v", tc.name, err)
}
}
}
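// TestRerunAuthConfigsGetRerunAuthConfig verifies the lookup precedence for
// per-repo rerun auth configs: an org/repo entry beats an org entry, which in
// turn beats the "*" wildcard; nil refs fall back to the wildcard.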
func TestRerunAuthConfigsGetRerunAuthConfig(t *testing.T) {
var testCases = []struct {
name string
configs RerunAuthConfigs
refs *prowapi.Refs
expected prowapi.RerunAuthConfig
}{
{
name: "default to an empty config",
configs: RerunAuthConfigs{},
refs: &prowapi.Refs{Org: "my-default-org", Repo: "my-default-repo"},
expected: prowapi.RerunAuthConfig{},
},
{
name: "unknown org or org/repo return wildcard",
configs: RerunAuthConfigs{"*": prowapi.RerunAuthConfig{GitHubUsers: []string{"clarketm"}}},
refs: &prowapi.Refs{Org: "my-default-org", Repo: "my-default-repo"},
expected: prowapi.RerunAuthConfig{GitHubUsers: []string{"clarketm"}},
},
{
name: "no refs return wildcard",
configs: RerunAuthConfigs{"*": prowapi.RerunAuthConfig{GitHubUsers: []string{"leonardo"}}},
refs: nil,
expected: prowapi.RerunAuthConfig{GitHubUsers: []string{"leonardo"}},
},
{
name: "use org if defined",
configs: RerunAuthConfigs{
"*": prowapi.RerunAuthConfig{GitHubUsers: []string{"clarketm"}},
"istio": prowapi.RerunAuthConfig{GitHubUsers: []string{"scoobydoo"}},
"istio/test-infra": prowapi.RerunAuthConfig{GitHubUsers: []string{"billybob"}},
},
refs: &prowapi.Refs{Org: "istio", Repo: "istio"},
expected: prowapi.RerunAuthConfig{GitHubUsers: []string{"scoobydoo"}},
},
{
name: "use org/repo if defined",
configs: RerunAuthConfigs{
"*": prowapi.RerunAuthConfig{GitHubUsers: []string{"clarketm"}},
"istio/istio": prowapi.RerunAuthConfig{GitHubUsers: []string{"skywalker"}},
},
refs: &prowapi.Refs{Org: "istio", Repo: "istio"},
expected: prowapi.RerunAuthConfig{GitHubUsers: []string{"skywalker"}},
},
{
name: "org/repo takes precedence over org",
configs: RerunAuthConfigs{
"*": prowapi.RerunAuthConfig{GitHubUsers: []string{"clarketm"}},
"istio": prowapi.RerunAuthConfig{GitHubUsers: []string{"scrappydoo"}},
"istio/istio": prowapi.RerunAuthConfig{GitHubUsers: []string{"airbender"}},
},
refs: &prowapi.Refs{Org: "istio", Repo: "istio"},
expected: prowapi.RerunAuthConfig{GitHubUsers: []string{"airbender"}},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if actual := tc.configs.GetRerunAuthConfig(tc.refs); !reflect.DeepEqual(actual, tc.expected) {
t.Errorf("Expected %v, got %v", tc.expected, actual)
}
})
}
}
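// TestMergeCommitTemplateLoading verifies that tide.merge_commit_template
// entries are parsed into title and body templates at load time and that
// malformed templates fail loading.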
func TestMergeCommitTemplateLoading(t *testing.T) {
var testCases = []struct {
name string
prowConfig string
expectError bool
expect map[string]TideMergeCommitTemplate
}{
{
name: "no template",
prowConfig: `
tide:
merge_commit_template:
`,
expect: nil,
},
{
name: "empty template",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
`,
expect: map[string]TideMergeCommitTemplate{
"kubernetes/ingress": {},
},
},
{
name: "two proper templates",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
title: "{{ .Title }}"
body: "{{ .Body }}"
`,
expect: map[string]TideMergeCommitTemplate{
"kubernetes/ingress": {
TitleTemplate: "{{ .Title }}",
BodyTemplate: "{{ .Body }}",
Title: template.Must(template.New("CommitTitle").Parse("{{ .Title }}")),
Body: template.Must(template.New("CommitBody").Parse("{{ .Body }}")),
},
},
},
{
name: "only title template",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
title: "{{ .Title }}"
`,
expect: map[string]TideMergeCommitTemplate{
"kubernetes/ingress": {
TitleTemplate: "{{ .Title }}",
BodyTemplate: "",
Title: template.Must(template.New("CommitTitle").Parse("{{ .Title }}")),
Body: nil,
},
},
},
{
name: "only body template",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
body: "{{ .Body }}"
`,
expect: map[string]TideMergeCommitTemplate{
"kubernetes/ingress": {
TitleTemplate: "",
BodyTemplate: "{{ .Body }}",
Title: nil,
Body: template.Must(template.New("CommitBody").Parse("{{ .Body }}")),
},
},
},
{
name: "malformed title template",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
title: "{{ .Title"
`,
expectError: true,
},
{
name: "malformed body template",
prowConfig: `
tide:
merge_commit_template:
kubernetes/ingress:
body: "{{ .Body"
`,
expectError: true,
},
}
for _, tc := range testCases {
// save the config
prowConfigDir, err := ioutil.TempDir("", "prowConfig")
if err != nil {
t.Fatalf("fail to make tempdir: %v", err)
}
defer os.RemoveAll(prowConfigDir)
prowConfig := filepath.Join(prowConfigDir, "config.yaml")
if err := ioutil.WriteFile(prowConfig, []byte(tc.prowConfig), 0666); err != nil {
t.Fatalf("fail to write prow config: %v", err)
}
cfg, err := Load(prowConfig, "")
if tc.expectError && err == nil {
t.Errorf("tc %s: Expect error, but got nil", tc.name)
} else if !tc.expectError && err != nil {
t.Errorf("tc %s: Expect no error, but got error %v", tc.name, err)
}
if err == nil {
if !reflect.DeepEqual(cfg.Tide.MergeTemplate, tc.expect) {
t.Errorf("tc %s: expected %#v\n!=\nactual %#v", tc.name, tc.expect, cfg.Tide.MergeTemplate)
}
}
}
}
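// TestPlankJobURLPrefix verifies GetJobURLPrefix lookup precedence (org/repo,
// then org, then the "*" default) and that a trailing "gcs/" view segment is
// trimmed from the configured prefix.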
func TestPlankJobURLPrefix(t *testing.T) {
testCases := []struct {
name string
plank Plank
refs *prowapi.Refs
expectedJobURLPrefix string
}{
{
name: "Nil refs returns default JobURLPrefix",
plank: Plank{JobURLPrefixConfig: map[string]string{"*": "https://my-prow"}},
expectedJobURLPrefix: "https://my-prow",
},
{
name: "No matching refs returns default JobURLPrefx",
plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-org": "https://my-alternate-prow",
},
},
refs: &prowapi.Refs{Org: "my-default-org", Repo: "my-default-repo"},
expectedJobURLPrefix: "https://my-prow",
},
{
name: "Matching repo returns JobURLPrefix from repo",
plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-alternate-org": "https://my-third-prow",
"my-alternate-org/my-repo": "https://my-alternate-prow",
},
},
refs: &prowapi.Refs{Org: "my-alternate-org", Repo: "my-repo"},
expectedJobURLPrefix: "https://my-alternate-prow",
},
{
name: "Matching org and not matching repo returns JobURLPrefix from org",
plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-alternate-org": "https://my-third-prow",
"my-alternate-org/my-repo": "https://my-alternate-prow",
},
},
refs: &prowapi.Refs{Org: "my-alternate-org", Repo: "my-second-repo"},
expectedJobURLPrefix: "https://my-third-prow",
},
{
name: "Matching org without url returns default JobURLPrefix",
plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-alternate-org/my-repo": "https://my-alternate-prow",
},
},
refs: &prowapi.Refs{Org: "my-alternate-org", Repo: "my-second-repo"},
expectedJobURLPrefix: "https://my-prow",
},
{
name: "gcs/ suffix in JobURLPrefix will be automatically trimmed",
plank: Plank{JobURLPrefixConfig: map[string]string{"*": "https://my-prow/view/gcs/"}},
expectedJobURLPrefix: "https://my-prow/view/",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if prefix := tc.plank.GetJobURLPrefix(tc.refs); prefix != tc.expectedJobURLPrefix {
t.Errorf("expected JobURLPrefix to be %q but was %q", tc.expectedJobURLPrefix, prefix)
}
})
}
}
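// TestValidateComponentConfig verifies validateComponentConfig: every
// job_url_prefix_config value must be a valid URL, and deck may set
// rerun_auth_config or rerun_auth_configs, but not both, with each variant
// validated on its own.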
func TestValidateComponentConfig(t *testing.T) {
testCases := []struct {
name string
config *Config
errExpected bool
}{
{
name: "Valid default URL, no err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{"*": "https://my-prow"}}}},
errExpected: false,
},
{
name: "Invalid default URL, err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{"*": "https:// my-prow"}}}},
errExpected: true,
},
{
name: "Org config, valid URLs, no err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-org": "https://my-alternate-prow",
},
}}},
errExpected: false,
},
{
name: "Org override, invalid default jobURLPrefix URL, err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https:// my-prow",
"my-org": "https://my-alternate-prow",
},
}}},
errExpected: true,
},
{
name: "Org override, invalid org URL, err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-org": "https:// my-alternate-prow",
},
}}},
errExpected: true,
},
{
name: "Org override, invalid URLs, err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https:// my-prow",
"my-org": "https:// my-alternate-prow",
},
}}},
errExpected: true,
},
{
name: "Repo override, valid URLs, no err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-org": "https://my-alternate-prow",
"my-org/my-repo": "https://my-third-prow",
}}}},
errExpected: false,
},
{
name: "Repo override, invalid repo URL, err",
config: &Config{ProwConfig: ProwConfig{Plank: Plank{
JobURLPrefixConfig: map[string]string{
"*": "https://my-prow",
"my-org": "https://my-alternate-prow",
"my-org/my-repo": "https:// my-third-prow",
}}}},
errExpected: true,
},
{
name: "Both RerunAuthConfig and RerunAuthConfigs are invalid, err",
config: &Config{ProwConfig: ProwConfig{Deck: Deck{
RerunAuthConfig: &prowapi.RerunAuthConfig{AllowAnyone: true},
RerunAuthConfigs: RerunAuthConfigs{"*": prowapi.RerunAuthConfig{AllowAnyone: true}},
}}},
errExpected: true,
},
{
name: "RerunAuthConfig and not RerunAuthConfigs is valid, no err",
config: &Config{ProwConfig: ProwConfig{Deck: Deck{
RerunAuthConfig: &prowapi.RerunAuthConfig{AllowAnyone: false, GitHubUsers: []string{"grantsmith"}},
}}},
errExpected: false,
},
{
name: "RerunAuthConfig only and validation fails, err",
config: &Config{ProwConfig: ProwConfig{Deck: Deck{
RerunAuthConfig: &prowapi.RerunAuthConfig{AllowAnyone: true, GitHubUsers: []string{"grantsmith"}},
}}},
errExpected: true,
},
{
name: "RerunAuthConfigs and not RerunAuthConfig is valid, no err",
config: &Config{ProwConfig: ProwConfig{Deck: Deck{
RerunAuthConfigs: RerunAuthConfigs{
"*": prowapi.RerunAuthConfig{AllowAnyone: true},
"kubernetes": prowapi.RerunAuthConfig{GitHubUsers: []string{"easterbunny"}},
"kubernetes/kubernetes": prowapi.RerunAuthConfig{GitHubOrgs: []string{"kubernetes", "kubernetes-sigs"}},
},
}}},
errExpected: false,
},
{
name: "RerunAuthConfigs only and validation fails, err",
config: &Config{ProwConfig: ProwConfig{Deck: Deck{
RerunAuthConfigs: RerunAuthConfigs{
"*": prowapi.RerunAuthConfig{AllowAnyone: true},
"kubernetes": prowapi.RerunAuthConfig{GitHubUsers: []string{"easterbunny"}},
"kubernetes/kubernetes": prowapi.RerunAuthConfig{AllowAnyone: true, GitHubOrgs: []string{"kubernetes", "kubernetes-sigs"}},
},
}}},
errExpected: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
if hasErr := tc.config.validateComponentConfig() != nil; hasErr != tc.errExpected {
t.Errorf("expected err: %t but was %t", tc.errExpected, hasErr)
}
})
}
}
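// TestSlackReporterValidation verifies that slack reporter configs require a
// channel, that report templates parse and reference valid fields, and that
// the legacy slack_reporter field cannot be combined with
// slack_reporter_configs.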
func TestSlackReporterValidation(t *testing.T) {
testCases := []struct {
name string
config func() Config
successExpected bool
}{
{
name: "Valid config w/ slack_reporter - no error",
config: func() Config {
slack := &SlackReporter{
Channel: "my-channel",
}
return Config{
ProwConfig: ProwConfig{
SlackReporter: slack,
},
}
},
successExpected: true,
},
{
name: "Valid config w/ wildcard slack_reporter_configs - no error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"*": {
Channel: "my-channel",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: true,
},
{
name: "Valid config w/ org/repo slack_reporter_configs - no error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"istio/proxy": {
Channel: "my-channel",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: true,
},
{
name: "Valid config w/ repo slack_reporter_configs - no error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"proxy": {
Channel: "my-channel",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: true,
},
{
name: "Invalid config b/c both slack_reporter and slack_reporter_configs - error",
config: func() Config {
slack := &SlackReporter{
Channel: "my-channel",
}
slackCfg := map[string]SlackReporter{
"*": {
Channel: "my-channel",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporter: slack,
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: false,
},
{
name: "No channel w/ slack_reporter - error",
config: func() Config {
slack := &SlackReporter{}
return Config{
ProwConfig: ProwConfig{
SlackReporter: slack,
},
}
},
successExpected: false,
},
{
name: "No channel w/ slack_reporter_configs - error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"*": {
JobTypesToReport: []prowapi.ProwJobType{"presubmit"},
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: false,
},
{
name: "Empty config - no error",
config: func() Config {
slackCfg := map[string]SlackReporter{}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: true,
},
{
name: "Invalid template - error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"*": {
Channel: "my-channel",
ReportTemplate: "{{ if .Spec.Name}}",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: false,
},
{
name: "Template accessed invalid property - error",
config: func() Config {
slackCfg := map[string]SlackReporter{
"*": {
Channel: "my-channel",
ReportTemplate: "{{ .Undef}}",
},
}
return Config{
ProwConfig: ProwConfig{
SlackReporterConfigs: slackCfg,
},
}
},
successExpected: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
cfg := tc.config()
if err := cfg.validateComponentConfig(); (err == nil) != tc.successExpected {
t.Errorf("Expected success=%t but got err=%v", tc.successExpected, err)
}
if tc.successExpected {
for _, config := range cfg.SlackReporterConfigs {
if config.ReportTemplate == "" {
t.Errorf("expected default ReportTemplate to be set")
}
if config.Channel == "" {
t.Errorf("expected Channel to be required")
}
}
}
})
}
}
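// TestManagedHmacEntityValidation verifies that managed webhook entries may
// not declare a token_created_after timestamp that lies in the future.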
func TestManagedHmacEntityValidation(t *testing.T) {
testCases := []struct {
name string
prowConfig Config
shouldFail bool
}{
{
name: "Missing managed HmacEntities",
prowConfig: Config{ProwConfig: ProwConfig{ManagedWebhooks: ManagedWebhooks{}}},
shouldFail: false,
},
{
name: "Config with all valid dates",
prowConfig: Config{ProwConfig: ProwConfig{
ManagedWebhooks: ManagedWebhooks{
OrgRepoConfig: map[string]ManagedWebhookInfo{
"foo/bar": {TokenCreatedAfter: time.Now()},
"foo/baz": {TokenCreatedAfter: time.Now()},
},
},
}},
shouldFail: false,
},
{
name: "Config with one invalid dates",
prowConfig: Config{ProwConfig: ProwConfig{
ManagedWebhooks: ManagedWebhooks{
OrgRepoConfig: map[string]ManagedWebhookInfo{
"foo/bar": {TokenCreatedAfter: time.Now()},
"foo/baz": {TokenCreatedAfter: time.Now().Add(time.Hour)},
},
},
}},
shouldFail: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
err := tc.prowConfig.validateComponentConfig()
if tc.shouldFail != (err != nil) {
t.Errorf("%s: Unexpected outcome. Error expected %v, Error found %s", tc.name, tc.shouldFail, err)
}
})
}
}
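// TestValidateTriggering verifies that a presubmit must set trigger and
// rerun_command together or leave both unset.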
func TestValidateTriggering(t *testing.T) {
testCases := []struct {
name string
presubmit Presubmit
errExpected bool
}{
{
name: "Trigger set, rerun command unset, err",
presubmit: Presubmit{
Trigger: "my-trigger",
Reporter: Reporter{
Context: "my-context",
},
},
errExpected: true,
},
{
name: "Triger unset, rerun command set, err",
presubmit: Presubmit{
RerunCommand: "my-rerun-command",
Reporter: Reporter{
Context: "my-context",
},
},
errExpected: true,
},
{
name: "Both trigger and rerun command set, no err",
presubmit: Presubmit{
Trigger: "my-trigger",
RerunCommand: "my-rerun-command",
Reporter: Reporter{
Context: "my-context",
},
},
errExpected: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
err := validateTriggering(tc.presubmit)
if (err != nil) != tc.errExpected {
t.Errorf("Expected err: %t but got err %v", tc.errExpected, err)
}
})
}
}
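// TestRefGetterForGitHubPullRequest verifies that the getter lazily fetches
// and caches the pull request and its base SHA, and that already-cached values
// are returned without another fetch.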
func TestRefGetterForGitHubPullRequest(t *testing.T) {
testCases := []struct {
name string
rg *RefGetterForGitHubPullRequest
verify func(*RefGetterForGitHubPullRequest) error
}{
{
name: "Existing PullRequest is returned",
rg: &RefGetterForGitHubPullRequest{pr: &github.PullRequest{ID: 123456}},
verify: func(rg *RefGetterForGitHubPullRequest) error {
if rg.pr == nil || rg.pr.ID != 123456 {
return fmt.Errorf("Expected refGetter to contain pr with id 123456, pr was %v", rg.pr)
}
return nil
},
},
{
name: "PullRequest is fetched, stored and returned",
rg: &RefGetterForGitHubPullRequest{
ghc: &fakegithub.FakeClient{
PullRequests: map[int]*github.PullRequest{0: {ID: 123456}}},
},
verify: func(rg *RefGetterForGitHubPullRequest) error {
pr, err := rg.PullRequest()
if err != nil {
return fmt.Errorf("failed to fetch PullRequest: %v", err)
}
if rg.pr == nil || rg.pr.ID != 123456 {
return fmt.Errorf("expected agent to contain pr with id 123456, pr was %v", rg.pr)
}
if pr.ID != 123456 {
return fmt.Errorf("expected returned pr.ID to be 123456, was %d", pr.ID)
}
return nil
},
},
{
name: "Existing baseSHA is returned",
rg: &RefGetterForGitHubPullRequest{baseSHA: "12345", pr: &github.PullRequest{}},
verify: func(rg *RefGetterForGitHubPullRequest) error {
baseSHA, err := rg.BaseSHA()
if err != nil {
return fmt.Errorf("error calling baseSHA: %v", err)
}
if rg.baseSHA != "12345" {
return fmt.Errorf("expected agent baseSHA to be 12345, was %q", rg.baseSHA)
}
if baseSHA != "12345" {
return fmt.Errorf("expected returned baseSHA to be 12345, was %q", baseSHA)
}
return nil
},
},
{
name: "BaseSHA is fetched, stored and returned",
rg: &RefGetterForGitHubPullRequest{
ghc: &fakegithub.FakeClient{
PullRequests: map[int]*github.PullRequest{0: {}},
},
},
verify: func(rg *RefGetterForGitHubPullRequest) error {
baseSHA, err := rg.BaseSHA()
if err != nil {
return fmt.Errorf("expected err to be nil, was %v", err)
}
if rg.baseSHA != fakegithub.TestRef {
return fmt.Errorf("expected baseSHA on agent to be %q, was %q", fakegithub.TestRef, rg.baseSHA)
}
if baseSHA != fakegithub.TestRef {
return fmt.Errorf("expected returned baseSHA to be %q, was %q", fakegithub.TestRef, baseSHA)
}
return nil
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
tc.rg.lock = &sync.Mutex{}
if err := tc.verify(tc.rg); err != nil {
t.Fatal(err)
}
})
}
}
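// TestSetDecorationDefaults verifies how decoration configs are layered: an
// explicit config on the job wins, then plank's org/repo default, then the org
// default, then the "*" wildcard. decorate_all_jobs enables decoration by
// default but can be opted out of per job with decorate: false.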
func TestSetDecorationDefaults(t *testing.T) {
yes := true
no := false
testCases := []struct {
id string
repo string
config *Config
utilityConfig UtilityConfig
expected *prowapi.DecorationConfig
}{
{
id: "no dc in presubmit or in plank's config, expect no changes",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{ProwConfig: ProwConfig{}},
expected: nil,
},
{
id: "no dc in presubmit or in plank's by repo config, expect plank's defaults",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
{
id: "no dc in presubmit, part of plank's by repo config, expect merged by repo config and defaults",
utilityConfig: UtilityConfig{Decorate: &yes},
repo: "org/repo",
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
"org/repo": {
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-repo",
PathStrategy: "single-by-repo",
DefaultOrg: "org-by-repo",
DefaultRepo: "repo-by-repo",
},
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-repo",
PathStrategy: "single-by-repo",
DefaultOrg: "org-by-repo",
DefaultRepo: "repo-by-repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
{
id: "dc in presubmit and plank's defaults, expect presubmit's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{
Decorate: &yes,
DecorationConfig: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-from-ps",
InitUpload: "initupload:test-from-ps",
Entrypoint: "entrypoint:test-from-ps",
Sidecar: "sidecar:test-from-ps",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-from-ps",
PathStrategy: "single-from-ps",
DefaultOrg: "org-from-ps",
DefaultRepo: "repo-from-ps",
},
GCSCredentialsSecret: "credentials-gcs-from-ps",
},
},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-from-ps",
InitUpload: "initupload:test-from-ps",
Entrypoint: "entrypoint:test-from-ps",
Sidecar: "sidecar:test-from-ps",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-from-ps",
PathStrategy: "single-from-ps",
DefaultOrg: "org-from-ps",
DefaultRepo: "repo-from-ps",
},
GCSCredentialsSecret: "credentials-gcs-from-ps",
},
},
{
id: "dc in presubmit, plank's by repo config and defaults, expected presubmit's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{
Decorate: &yes,
DecorationConfig: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-from-ps",
InitUpload: "initupload:test-from-ps",
Entrypoint: "entrypoint:test-from-ps",
Sidecar: "sidecar:test-from-ps",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-from-ps",
PathStrategy: "single-from-ps",
DefaultOrg: "org-from-ps",
DefaultRepo: "repo-from-ps",
},
GCSCredentialsSecret: "credentials-gcs-from-ps",
},
},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
"org/repo": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-repo",
InitUpload: "initupload:test-by-repo",
Entrypoint: "entrypoint:test-by-repo",
Sidecar: "sidecar:test-by-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-repo",
PathStrategy: "single",
DefaultOrg: "org-test",
DefaultRepo: "repo-test",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-from-ps",
InitUpload: "initupload:test-from-ps",
Entrypoint: "entrypoint:test-from-ps",
Sidecar: "sidecar:test-from-ps",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-from-ps",
PathStrategy: "single-from-ps",
DefaultOrg: "org-from-ps",
DefaultRepo: "repo-from-ps",
},
GCSCredentialsSecret: "credentials-gcs-from-ps",
},
},
{
id: "no dc in presubmit, dc in plank's by repo config and defaults, expect by repo config's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
"org/repo": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-repo",
InitUpload: "initupload:test-by-repo",
Entrypoint: "entrypoint:test-by-repo",
Sidecar: "sidecar:test-by-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-repo",
PathStrategy: "single-by-repo",
DefaultOrg: "org-by-repo",
DefaultRepo: "repo-by-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-repo",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-repo",
InitUpload: "initupload:test-by-repo",
Entrypoint: "entrypoint:test-by-repo",
Sidecar: "sidecar:test-by-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-repo",
PathStrategy: "single-by-repo",
DefaultOrg: "org-by-repo",
DefaultRepo: "repo-by-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-repo",
},
},
{
id: "no dc in presubmit, dc in plank's by repo config and defaults, expect by org config's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
"org": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
{
id: "no dc in presubmit, dc in plank's by repo config and defaults, expect by * config's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
{
id: "no dc in presubmit, dc in plank's by repo config org and org/repo co-exists, expect by org/repo config's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
"org": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
"org/repo": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org-repo",
InitUpload: "initupload:test-by-org-repo",
Entrypoint: "entrypoint:test-by-org-repo",
Sidecar: "sidecar:test-by-org-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org-repo",
PathStrategy: "single-by-org-repo",
DefaultOrg: "org-by-org-repo",
DefaultRepo: "repo-by-org-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-org-repo",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org-repo",
InitUpload: "initupload:test-by-org-repo",
Entrypoint: "entrypoint:test-by-org-repo",
Sidecar: "sidecar:test-by-org-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org-repo",
PathStrategy: "single-by-org-repo",
DefaultOrg: "org-by-org-repo",
DefaultRepo: "repo-by-org-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-org-repo",
},
},
{
			id:            "no dc in presubmit, dc in plank's by repo config with org and * co-existing, expect by 'org' config's dc",
repo: "org/repo",
utilityConfig: UtilityConfig{Decorate: &yes},
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
"org": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
{
id: "decorate_all_jobs set, no dc in presubmit or in plank's by repo config, expect plank's defaults",
config: &Config{
JobConfig: JobConfig{
DecorateAllJobs: true,
},
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
{
id: "opt out of decorate_all_jobs by setting decorated to false",
utilityConfig: UtilityConfig{Decorate: &no},
config: &Config{
JobConfig: JobConfig{
DecorateAllJobs: true,
},
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test",
InitUpload: "initupload:test",
Entrypoint: "entrypoint:test",
Sidecar: "sidecar:test",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket",
PathStrategy: "single",
DefaultOrg: "org",
DefaultRepo: "repo",
},
GCSCredentialsSecret: "credentials-gcs",
},
},
},
},
},
},
}
for _, tc := range testCases {
t.Run(tc.id, func(t *testing.T) {
presubmit := &Presubmit{JobBase: JobBase{UtilityConfig: tc.utilityConfig}}
postsubmit := &Postsubmit{JobBase: JobBase{UtilityConfig: tc.utilityConfig}}
setPresubmitDecorationDefaults(tc.config, presubmit, tc.repo)
if diff := cmp.Diff(presubmit.DecorationConfig, tc.expected, cmpopts.EquateEmpty()); diff != "" {
t.Errorf("presubmit: %s", diff)
}
setPostsubmitDecorationDefaults(tc.config, postsubmit, tc.repo)
if diff := cmp.Diff(postsubmit.DecorationConfig, tc.expected, cmpopts.EquateEmpty()); diff != "" {
t.Errorf("postsubmit: %s", diff)
}
})
}
}
func TestSetPeriodicDecorationDefaults(t *testing.T) {
yes := true
no := false
testCases := []struct {
id string
config *Config
utilityConfig UtilityConfig
expected *prowapi.DecorationConfig
}{
{
id: "extraRefs[0] not defined, changes from defaultDecorationConfigs[*] expected",
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
},
},
},
utilityConfig: UtilityConfig{Decorate: &yes},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
{
			id: "extraRefs[0] defined, only 'org' exists in config, changes from defaultDecorationConfigs[org] expected",
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
"org": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
},
},
},
utilityConfig: UtilityConfig{
Decorate: &yes,
ExtraRefs: []prowapi.Refs{
{
Org: "org",
Repo: "repo",
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org",
InitUpload: "initupload:test-by-org",
Entrypoint: "entrypoint:test-by-org",
Sidecar: "sidecar:test-by-org",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org",
PathStrategy: "single-by-org",
DefaultOrg: "org-by-org",
DefaultRepo: "repo-by-org",
},
GCSCredentialsSecret: "credentials-gcs-by-org",
},
},
{
id: "extraRefs[0] defined and org/repo of defaultDecorationConfigs exists, changes from defaultDecorationConfigs[org/repo] expected",
config: &Config{
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
"org/repo": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org-repo",
InitUpload: "initupload:test-by-org-repo",
Entrypoint: "entrypoint:test-by-org-repo",
Sidecar: "sidecar:test-by-org-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org-repo",
PathStrategy: "single-by-org-repo",
DefaultOrg: "org-by-org-repo",
DefaultRepo: "repo-by-org-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-org-repo",
},
},
},
},
},
utilityConfig: UtilityConfig{
Decorate: &yes,
ExtraRefs: []prowapi.Refs{
{
Org: "org",
Repo: "repo",
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-org-repo",
InitUpload: "initupload:test-by-org-repo",
Entrypoint: "entrypoint:test-by-org-repo",
Sidecar: "sidecar:test-by-org-repo",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-org-repo",
PathStrategy: "single-by-org-repo",
DefaultOrg: "org-by-org-repo",
DefaultRepo: "repo-by-org-repo",
},
GCSCredentialsSecret: "credentials-gcs-by-org-repo",
},
},
{
id: "decorate_all_jobs set, plank's default decoration config expected",
config: &Config{
JobConfig: JobConfig{
DecorateAllJobs: true,
},
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
},
},
},
expected: &prowapi.DecorationConfig{
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
{
id: "opt out of decorate_all_jobs by specifying undecorated",
utilityConfig: UtilityConfig{Decorate: &no},
config: &Config{
JobConfig: JobConfig{
DecorateAllJobs: true,
},
ProwConfig: ProwConfig{
Plank: Plank{
DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
UtilityImages: &prowapi.UtilityImages{
CloneRefs: "clonerefs:test-by-*",
InitUpload: "initupload:test-by-*",
Entrypoint: "entrypoint:test-by-*",
Sidecar: "sidecar:test-by-*",
},
GCSConfiguration: &prowapi.GCSConfiguration{
Bucket: "test-bucket-by-*",
PathStrategy: "single-by-*",
DefaultOrg: "org-by-*",
DefaultRepo: "repo-by-*",
},
GCSCredentialsSecret: "credentials-gcs-by-*",
},
},
},
},
},
},
}
for _, tc := range testCases {
t.Run(tc.id, func(t *testing.T) {
periodic := &Periodic{JobBase: JobBase{UtilityConfig: tc.utilityConfig}}
setPeriodicDecorationDefaults(tc.config, periodic)
if diff := cmp.Diff(periodic.DecorationConfig, tc.expected, cmpopts.EquateEmpty()); diff != "" {
t.Error(diff)
}
})
}
}
func TestDecorationRequested(t *testing.T) {
yes := true
no := false
testCases := []struct {
name string
decorateAll bool
presubmits map[string][]Presubmit
postsubmits map[string][]Postsubmit
periodics []Periodic
expected bool
}{
{
name: "decorate_all_jobs set",
decorateAll: true,
presubmits: map[string][]Presubmit{
"org/repo": {
{JobBase: JobBase{Name: "presubmit-job"}},
},
},
expected: true,
},
{
			name: "at least one job is decorated",
presubmits: map[string][]Presubmit{
"org/repo": {
{JobBase: JobBase{Name: "presubmit-job"}},
},
},
postsubmits: map[string][]Postsubmit{
"org/repo": {
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &yes}}},
},
},
expected: true,
},
{
			name:        "decorate_all_jobs set, at least one job does not opt out",
decorateAll: true,
presubmits: map[string][]Presubmit{
"org/repo": {
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &no}}},
},
},
postsubmits: map[string][]Postsubmit{
"org/repo": {
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &no}}},
},
},
periodics: []Periodic{
{JobBase: JobBase{Name: "periodic-job"}},
},
expected: true,
},
{
			name:        "decorate_all_jobs set, all jobs opt out",
			decorateAll: true,
presubmits: map[string][]Presubmit{
"org/repo": {
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &no}}},
},
},
postsubmits: map[string][]Postsubmit{
"org/repo": {
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &no}}},
},
},
periodics: []Periodic{
{JobBase: JobBase{UtilityConfig: UtilityConfig{Decorate: &no}}},
},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
jobConfig := &JobConfig{
DecorateAllJobs: tc.decorateAll,
PresubmitsStatic: tc.presubmits,
PostsubmitsStatic: tc.postsubmits,
Periodics: tc.periodics,
}
if actual := jobConfig.decorationRequested(); actual != tc.expected {
t.Errorf("expected %t got %t", tc.expected, actual)
}
})
}
}
func TestInRepoConfigEnabled(t *testing.T) {
testCases := []struct {
name string
config Config
expected bool
}{
{
name: "Exact match",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"org/repo": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Orgname matches",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"org": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Globally enabled",
config: Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{
Enabled: map[string]*bool{
"*": utilpointer.BoolPtr(true),
},
},
},
},
expected: true,
},
{
name: "Disabled by default",
expected: false,
},
}
for idx := range testCases {
tc := testCases[idx]
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
if result := tc.config.InRepoConfigEnabled("org/repo"); result != tc.expected {
t.Errorf("Expected %t, got %t", tc.expected, result)
}
})
}
}
func TestGetProwYAMLDoesNotCallRefGettersWhenInrepoconfigIsDisabled(t *testing.T) {
t.Parallel()
var baseSHAGetterCalled, headSHAGetterCalled bool
baseSHAGetter := func() (string, error) {
baseSHAGetterCalled = true
return "", nil
}
headSHAGetter := func() (string, error) {
headSHAGetterCalled = true
return "", nil
}
c := &Config{}
if _, err := c.getProwYAML(nil, "test", baseSHAGetter, headSHAGetter); err != nil {
t.Fatalf("error calling GetProwYAML: %v", err)
}
if baseSHAGetterCalled {
t.Error("baseSHAGetter got called")
}
if headSHAGetterCalled {
t.Error("headSHAGetter got called")
}
}
func TestGetPresubmitsReturnsStaticAndInrepoconfigPresubmits(t *testing.T) {
t.Parallel()
org, repo := "org", "repo"
c := &Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{Enabled: map[string]*bool{"*": utilpointer.BoolPtr(true)}},
},
JobConfig: JobConfig{
PresubmitsStatic: map[string][]Presubmit{
org + "/" + repo: {{
JobBase: JobBase{Name: "my-static-presubmit"},
Reporter: Reporter{Context: "my-static-presubmit"},
}},
},
ProwYAMLGetter: fakeProwYAMLGetterFactory(
[]Presubmit{
{
JobBase: JobBase{Name: "hans"},
},
},
nil,
),
},
}
presubmits, err := c.GetPresubmits(nil, org+"/"+repo, func() (string, error) { return "", nil })
if err != nil {
t.Fatalf("Error calling GetPresubmits: %v", err)
}
if n := len(presubmits); n != 2 ||
presubmits[0].Name != "my-static-presubmit" ||
presubmits[1].Name != "hans" {
t.Errorf(`expected exactly two presubmits named "my-static-presubmit" and "hans", got %d (%v)`, n, presubmits)
}
}
func TestGetPostsubmitsReturnsStaticAndInrepoconfigPostsubmits(t *testing.T) {
t.Parallel()
org, repo := "org", "repo"
c := &Config{
ProwConfig: ProwConfig{
InRepoConfig: InRepoConfig{Enabled: map[string]*bool{"*": utilpointer.BoolPtr(true)}},
},
JobConfig: JobConfig{
PostsubmitsStatic: map[string][]Postsubmit{
org + "/" + repo: {{
JobBase: JobBase{Name: "my-static-postsubmits"},
Reporter: Reporter{Context: "my-static-postsubmits"},
}},
},
ProwYAMLGetter: fakeProwYAMLGetterFactory(
nil,
[]Postsubmit{
{
JobBase: JobBase{Name: "hans"},
},
},
),
},
}
postsubmits, err := c.GetPostsubmits(nil, org+"/"+repo, func() (string, error) { return "", nil })
if err != nil {
t.Fatalf("Error calling GetPostsubmits: %v", err)
}
if n := len(postsubmits); n != 2 ||
postsubmits[0].Name != "my-static-postsubmits" ||
postsubmits[1].Name != "hans" {
t.Errorf(`expected exactly two postsubmits named "my-static-postsubmits" and "hans", got %d (%v)`, n, postsubmits)
}
}
func TestInRepoConfigAllowsCluster(t *testing.T) {
const clusterName = "that-cluster"
testCases := []struct {
name string
repoIdentifier string
allowedClusters map[string][]string
expectedResult bool
}{
{
name: "Nothing configured, nothing allowed",
repoIdentifier: "foo",
expectedResult: false,
},
{
name: "Allowed on repolevel",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"foo/repo": {clusterName}},
expectedResult: true,
},
{
name: "Not allowed on repolevel",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"foo/repo": {"different-cluster"}},
expectedResult: false,
},
{
name: "Allowed for different repo",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"bar/repo": {clusterName}},
expectedResult: false,
},
{
name: "Allowed on orglevel",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"foo": {clusterName}},
expectedResult: true,
},
{
name: "Not allowed on orglevel",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"foo": {"different-cluster"}},
expectedResult: false,
},
{
name: "Allowed on for different org",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"bar": {clusterName}},
expectedResult: false,
},
{
name: "Allowed globally",
repoIdentifier: "foo/repo",
allowedClusters: map[string][]string{"*": {clusterName}},
expectedResult: true,
},
}
for idx := range testCases {
tc := testCases[idx]
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
cfg := &Config{
ProwConfig: ProwConfig{InRepoConfig: InRepoConfig{AllowedClusters: tc.allowedClusters}},
}
if actual := cfg.InRepoConfigAllowsCluster(clusterName, tc.repoIdentifier); actual != tc.expectedResult {
t.Errorf("expected result %t, got result %t", tc.expectedResult, actual)
}
})
}
}
func TestGetDefaultDecorationConfigsThreadSafety(t *testing.T) {
const repo = "repo"
p := Plank{DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{
"*": {
GCSConfiguration: &prowapi.GCSConfiguration{
MediaTypes: map[string]string{"text": "text"},
},
},
repo: {
GCSConfiguration: &prowapi.GCSConfiguration{
MediaTypes: map[string]string{"text": "text"},
},
},
}}
s1 := make(chan struct{})
s2 := make(chan struct{})
go func() {
_ = p.GetDefaultDecorationConfigs(repo)
close(s1)
}()
go func() {
_ = p.GetDefaultDecorationConfigs(repo)
close(s2)
}()
<-s1
<-s2
}
func TestDefaultAndValidateReportTemplate(t *testing.T) {
testCases := []struct {
id string
controller *Controller
expected *Controller
expectedErr bool
}{
{
id: "no report_template or report_templates specified, no changes expected",
controller: &Controller{},
expected: &Controller{},
},
{
id: "only report_template specified, expected report_template[*]=report_template",
controller: &Controller{ReportTemplateString: "test template"},
expected: &Controller{
ReportTemplateString: "test template",
ReportTemplateStrings: map[string]string{"*": "test template"},
ReportTemplates: map[string]*template.Template{
"*": func() *template.Template {
reportTmpl, _ := template.New("Report").Parse("test template")
return reportTmpl
}(),
},
},
},
{
id: "only report_templates specified, expected direct conversion",
controller: &Controller{ReportTemplateStrings: map[string]string{"*": "test template"}},
expected: &Controller{
ReportTemplateStrings: map[string]string{"*": "test template"},
ReportTemplates: map[string]*template.Template{
"*": func() *template.Template {
reportTmpl, _ := template.New("Report").Parse("test template")
return reportTmpl
}(),
},
},
},
{
id: "no '*' in report_templates specified, expected error",
controller: &Controller{ReportTemplateStrings: map[string]string{"org": "test template"}},
expectedErr: true,
},
}
for _, tc := range testCases {
t.Run(tc.id, func(t *testing.T) {
if err := defaultAndValidateReportTemplate(tc.controller); err != nil && !tc.expectedErr {
t.Fatalf("error not expected: %v", err)
}
if !reflect.DeepEqual(tc.controller, tc.expected) && !tc.expectedErr {
t.Fatalf("\nGot: %#v\nExpected: %#v", tc.controller, tc.expected)
}
})
}
}
func TestValidatePresubmits(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
presubmits []Presubmit
expectedError string
}{
{
name: "Duplicate context causes error",
presubmits: []Presubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}},
{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}},
},
expectedError: `[jobs b and a report to the same GitHub context "repeated", jobs a and b report to the same GitHub context "repeated"]`,
},
{
name: "Duplicate context on different branch doesn't cause error",
presubmits: []Presubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"master"}}},
{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"next"}}},
},
},
{
name: "Duplicate jobname causes error",
presubmits: []Presubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}},
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "bar"}},
},
expectedError: "duplicated presubmit job: a",
},
{
name: "Duplicate jobname on different branches doesn't cause error",
presubmits: []Presubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}, Brancher: Brancher{Branches: []string{"master"}}},
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}, Brancher: Brancher{Branches: []string{"next"}}},
},
},
{
name: "Invalid JobBase causes error",
presubmits: []Presubmit{{Reporter: Reporter{Context: "foo"}}},
expectedError: `invalid presubmit job : name: must match regex "^[A-Za-z0-9-._]+$"`,
},
{
name: "Invalid triggering config causes error",
presubmits: []Presubmit{{Trigger: "some-trigger", JobBase: JobBase{Name: "my-job"}, Reporter: Reporter{Context: "foo"}}},
expectedError: `Either both of job.Trigger and job.RerunCommand must be set, wasnt the case for job "my-job"`,
},
{
name: "Invalid reporting config causes error",
presubmits: []Presubmit{{JobBase: JobBase{Name: "my-job"}}},
expectedError: "invalid presubmit job my-job: job is set to report but has no context configured",
},
}
for _, tc := range testCases {
var errMsg string
err := validatePresubmits(tc.presubmits, "")
if err != nil {
errMsg = err.Error()
}
if errMsg != tc.expectedError {
t.Errorf("expected error '%s', got error '%s'", tc.expectedError, errMsg)
}
}
}
func TestValidatePostsubmits(t *testing.T) {
t.Parallel()
testCases := []struct {
name string
postsubmits []Postsubmit
expectedError string
}{
{
name: "Duplicate context causes error",
postsubmits: []Postsubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}},
{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}},
},
expectedError: `[jobs b and a report to the same GitHub context "repeated", jobs a and b report to the same GitHub context "repeated"]`,
},
{
name: "Duplicate context on different branch doesn't cause error",
postsubmits: []Postsubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"master"}}},
{JobBase: JobBase{Name: "b"}, Reporter: Reporter{Context: "repeated"}, Brancher: Brancher{Branches: []string{"next"}}},
},
},
{
name: "Duplicate jobname causes error",
postsubmits: []Postsubmit{
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "foo"}},
{JobBase: JobBase{Name: "a"}, Reporter: Reporter{Context: "bar"}},
},
expectedError: "duplicated postsubmit job: a",
},
{
name: "Invalid JobBase causes error",
postsubmits: []Postsubmit{{Reporter: Reporter{Context: "foo"}}},
expectedError: `invalid postsubmit job : name: must match regex "^[A-Za-z0-9-._]+$"`,
},
{
name: "Invalid reporting config causes error",
postsubmits: []Postsubmit{{JobBase: JobBase{Name: "my-job"}}},
expectedError: "invalid postsubmit job my-job: job is set to report but has no context configured",
},
}
for _, tc := range testCases {
var errMsg string
err := validatePostsubmits(tc.postsubmits, "")
if err != nil {
errMsg = err.Error()
}
if errMsg != tc.expectedError {
t.Errorf("expected error '%s', got error '%s'", tc.expectedError, errMsg)
}
}
}
| {
for _, j := range cfg.AllStaticPresubmits(nil) {
if envs, ok := tc.expectEnv[j.Name]; ok {
if !reflect.DeepEqual(envs, j.Spec.Containers[0].Env) {
t.Errorf("tc %s: expect env %v for job %s, got %+v", tc.name, envs, j.Name, j.Spec.Containers[0].Env)
}
}
}
for _, j := range cfg.AllStaticPostsubmits(nil) {
if envs, ok := tc.expectEnv[j.Name]; ok {
if !reflect.DeepEqual(envs, j.Spec.Containers[0].Env) {
t.Errorf("tc %s: expect env %v for job %s, got %+v", tc.name, envs, j.Name, j.Spec.Containers[0].Env)
}
}
}
for _, j := range cfg.AllPeriodics() {
if envs, ok := tc.expectEnv[j.Name]; ok {
if !reflect.DeepEqual(envs, j.Spec.Containers[0].Env) {
t.Errorf("tc %s: expect env %v for job %s, got %+v", tc.name, envs, j.Name, j.Spec.Containers[0].Env)
}
}
}
} |
boards.module.ts | import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { User } from 'src/users/user.entity';
import { UsersService } from 'src/users/users.service';
import { BoardsController } from './boards.controller';
import { BoardsService } from './boards.service';
@Module({
imports: [TypeOrmModule.forFeature([User])],
controllers: [BoardsController],
providers: [BoardsService, UsersService]
})
export class | {}
| BoardsModule |
mod.rs | /*!
Column oriented field storage for tantivy.
It is the equivalent of `Lucene`'s `DocValues`.
Fast fields are `tantivy`'s column-oriented storage.
They are designed for fast random access to some document
fields given a document id.
Fast fields are useful when a field is required for all or most of
the `DocSet`: for instance for scoring, grouping, filtering, or faceting.
Fields have to be declared as `FAST` in the schema.
Currently only 64-bit values are supported: `u64`, `i64`, `f64`,
and dates.
They are stored in a bit-packed fashion so that their
memory usage is directly linear with the amplitude of the
values stored.
Read access performance is comparable to that of an array lookup.
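
# Example

A minimal sketch (illustrative only) of declaring a `FAST` field and
reading it back by doc id. The indexing boilerplate is assumed from the
rest of this crate's API and is not specific to this module:

```ignore
use tantivy::schema::{Schema, FAST};
use tantivy::{doc, Index};

let mut schema_builder = Schema::builder();
let popularity = schema_builder.add_u64_field("popularity", FAST);
let index = Index::create_in_ram(schema_builder.build());

let mut writer = index.writer(50_000_000).unwrap();
writer.add_document(doc!(popularity => 10u64));
writer.commit().unwrap();

let searcher = index.reader().unwrap().searcher();
// Random access by doc id, served from the bit-packed column.
let fast_field = searcher.segment_reader(0).fast_fields().u64(popularity).unwrap();
assert_eq!(fast_field.get(0u32), 10u64);
```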
*/
pub use self::bytes::{BytesFastFieldReader, BytesFastFieldWriter};
pub use self::delete::write_delete_bitset;
pub use self::delete::DeleteBitSet;
pub use self::error::{FastFieldNotAvailableError, Result};
pub use self::facet_reader::FacetReader;
pub use self::multivalued::{MultiValuedFastFieldReader, MultiValuedFastFieldWriter};
pub use self::reader::FastFieldReader;
pub use self::readers::FastFieldReaders;
pub use self::serializer::FastFieldSerializer;
pub use self::writer::{FastFieldsWriter, IntFastFieldWriter};
use crate::common;
use crate::schema::Cardinality;
use crate::schema::FieldType;
use crate::schema::Value;
use crate::{
chrono::{NaiveDateTime, Utc},
schema::Type,
};
mod bytes;
mod delete;
mod error;
mod facet_reader;
mod multivalued;
mod reader;
mod readers;
mod serializer;
mod writer;
/// Trait for types that are allowed for fast fields: `u64`, `i64`, `f64`, and dates.
pub trait FastValue: Clone + Copy + Send + Sync + PartialOrd {
/// Converts a value from u64
///
/// Internally all fast field values are encoded as u64.
fn from_u64(val: u64) -> Self;
/// Converts a value to u64.
///
/// Internally all fast field values are encoded as u64.
fn to_u64(&self) -> u64;
/// Returns the fast field cardinality that can be extracted from the given
/// `FieldType`.
///
/// If the type is not a fast field, `None` is returned.
fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality>;
/// Cast value to `u64`.
/// The value is just reinterpreted in memory.
fn as_u64(&self) -> u64;
/// Build a default value. This default value is never used, so the value does not
/// really matter.
fn make_zero() -> Self {
Self::from_u64(0i64.to_u64())
}
/// Returns the `schema::Type` for this FastValue.
fn to_type() -> Type;
}
impl FastValue for u64 {
fn from_u64(val: u64) -> Self {
val
}
fn to_u64(&self) -> u64 {
*self
}
fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality> {
match *field_type {
FieldType::U64(ref integer_options) => integer_options.get_fastfield_cardinality(),
FieldType::HierarchicalFacet => Some(Cardinality::MultiValues),
_ => None,
}
}
fn as_u64(&self) -> u64 {
*self
}
fn to_type() -> Type {
Type::U64
}
}
impl FastValue for i64 {
fn from_u64(val: u64) -> Self {
common::u64_to_i64(val)
}
fn to_u64(&self) -> u64 {
common::i64_to_u64(*self)
}
fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality> {
match *field_type {
FieldType::I64(ref integer_options) => integer_options.get_fastfield_cardinality(),
_ => None,
}
}
fn as_u64(&self) -> u64 {
*self as u64
}
fn to_type() -> Type {
Type::I64
}
}
impl FastValue for f64 {
fn from_u64(val: u64) -> Self {
common::u64_to_f64(val)
}
fn to_u64(&self) -> u64 {
common::f64_to_u64(*self)
}
fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality> {
match *field_type {
FieldType::F64(ref integer_options) => integer_options.get_fastfield_cardinality(),
_ => None,
}
}
fn as_u64(&self) -> u64 {
self.to_bits()
}
fn to_type() -> Type {
Type::F64
}
}
impl FastValue for crate::DateTime {
fn from_u64(timestamp_u64: u64) -> Self {
let timestamp_i64 = i64::from_u64(timestamp_u64);
crate::DateTime::from_utc(NaiveDateTime::from_timestamp(timestamp_i64, 0), Utc)
}
fn to_u64(&self) -> u64 {
self.timestamp().to_u64()
}
fn fast_field_cardinality(field_type: &FieldType) -> Option<Cardinality> {
match *field_type {
FieldType::Date(ref integer_options) => integer_options.get_fastfield_cardinality(),
_ => None,
}
}
fn as_u64(&self) -> u64 {
self.timestamp().as_u64()
}
fn to_type() -> Type {
Type::Date
}
}
fn value_to_u64(value: &Value) -> u64 {
match *value {
Value::U64(ref val) => *val,
Value::I64(ref val) => common::i64_to_u64(*val),
Value::F64(ref val) => common::f64_to_u64(*val),
Value::Date(ref datetime) => common::i64_to_u64(datetime.timestamp()),
        _ => panic!("Expected a u64/i64/f64/date field, got {:?}", value),
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::common::CompositeFile;
use crate::directory::{Directory, RAMDirectory, WritePtr};
use crate::fastfield::FastFieldReader;
use crate::merge_policy::NoMergePolicy;
use crate::schema::Field;
use crate::schema::Schema;
use crate::schema::FAST;
use crate::schema::{Document, IntOptions};
use crate::{Index, SegmentId, SegmentReader};
use common::HasLen;
use once_cell::sync::Lazy;
use rand::prelude::SliceRandom;
use rand::rngs::StdRng;
use rand::SeedableRng;
use std::collections::HashMap;
use std::path::Path;
pub static SCHEMA: Lazy<Schema> = Lazy::new(|| {
let mut schema_builder = Schema::builder();
schema_builder.add_u64_field("field", FAST);
schema_builder.build()
});
pub static FIELD: Lazy<Field> = Lazy::new(|| SCHEMA.get_field("field").unwrap());
#[test]
pub fn test_fastfield() |
#[test]
pub fn test_fastfield_i64_u64() {
let datetime = crate::DateTime::from_utc(NaiveDateTime::from_timestamp(0i64, 0), Utc);
assert_eq!(i64::from_u64(datetime.to_u64()), 0i64);
}
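    // A hedged sanity check added for illustration (not part of the
    // original suite): the i64 <-> u64 encoding used by fast fields is
    // lossless and order-preserving, which is what lets min/max tracking
    // and range scans on signed columns operate on the unsigned storage.
    #[test]
    pub fn test_fastfield_i64_order_preserving() {
        assert_eq!(i64::from_u64((-3i64).to_u64()), -3i64);
        assert!(std::i64::MIN.to_u64() < (-1i64).to_u64());
        assert!((-1i64).to_u64() < 0i64.to_u64());
        assert!(0i64.to_u64() < std::i64::MAX.to_u64());
    }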
#[test]
fn test_intfastfield_small() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
fast_field_writers.add_document(&doc!(*FIELD=>13u64));
fast_field_writers.add_document(&doc!(*FIELD=>14u64));
fast_field_writers.add_document(&doc!(*FIELD=>2u64));
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
assert_eq!(file.len(), 36 as usize);
let composite_file = CompositeFile::open(&file)?;
let file = composite_file.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(file)?;
assert_eq!(fast_field_reader.get(0), 13u64);
assert_eq!(fast_field_reader.get(1), 14u64);
assert_eq!(fast_field_reader.get(2), 2u64);
Ok(())
}
#[test]
fn test_intfastfield_large() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test"))?;
let mut serializer = FastFieldSerializer::from_write(write)?;
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
fast_field_writers.add_document(&doc!(*FIELD=>4u64));
fast_field_writers.add_document(&doc!(*FIELD=>14_082_001u64));
fast_field_writers.add_document(&doc!(*FIELD=>3_052u64));
fast_field_writers.add_document(&doc!(*FIELD=>9_002u64));
fast_field_writers.add_document(&doc!(*FIELD=>15_001u64));
fast_field_writers.add_document(&doc!(*FIELD=>777u64));
fast_field_writers.add_document(&doc!(*FIELD=>1_002u64));
fast_field_writers.add_document(&doc!(*FIELD=>1_501u64));
fast_field_writers.add_document(&doc!(*FIELD=>215u64));
fast_field_writers.serialize(&mut serializer, &HashMap::new())?;
serializer.close()?;
}
let file = directory.open_read(&path)?;
assert_eq!(file.len(), 61 as usize);
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 4u64);
assert_eq!(fast_field_reader.get(1), 14_082_001u64);
assert_eq!(fast_field_reader.get(2), 3_052u64);
assert_eq!(fast_field_reader.get(3), 9002u64);
assert_eq!(fast_field_reader.get(4), 15_001u64);
assert_eq!(fast_field_reader.get(5), 777u64);
assert_eq!(fast_field_reader.get(6), 1_002u64);
assert_eq!(fast_field_reader.get(7), 1_501u64);
assert_eq!(fast_field_reader.get(8), 215u64);
}
Ok(())
}
#[test]
fn test_intfastfield_null_amplitude() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
for _ in 0..10_000 {
fast_field_writers.add_document(&doc!(*FIELD=>100_000u64));
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
assert_eq!(file.len(), 34 as usize);
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data)?;
for doc in 0..10_000 {
assert_eq!(fast_field_reader.get(doc), 100_000u64);
}
}
Ok(())
}
#[test]
fn test_intfastfield_large_numbers() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
// forcing the amplitude to be high
fast_field_writers.add_document(&doc!(*FIELD=>0u64));
for i in 0u64..10_000u64 {
fast_field_writers.add_document(&doc!(*FIELD=>5_000_000_000_000_000_000u64 + i));
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
assert_eq!(file.len(), 80042 as usize);
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data)?;
assert_eq!(fast_field_reader.get(0), 0u64);
for doc in 1..10_001 {
assert_eq!(
fast_field_reader.get(doc),
5_000_000_000_000_000_000u64 + doc as u64 - 1u64
);
}
}
Ok(())
}
#[test]
fn test_signed_intfastfield() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
let mut schema_builder = Schema::builder();
let i64_field = schema_builder.add_i64_field("field", FAST);
let schema = schema_builder.build();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&schema);
for i in -100i64..10_000i64 {
let mut doc = Document::default();
doc.add_i64(i64_field, i);
fast_field_writers.add_document(&doc);
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
assert_eq!(file.len(), 17709 as usize);
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(i64_field).unwrap();
let fast_field_reader = FastFieldReader::<i64>::open(data)?;
assert_eq!(fast_field_reader.min_value(), -100i64);
assert_eq!(fast_field_reader.max_value(), 9_999i64);
for (doc, i) in (-100i64..10_000i64).enumerate() {
assert_eq!(fast_field_reader.get(doc as u32), i);
}
let mut buffer = vec![0i64; 100];
fast_field_reader.get_range(53, &mut buffer[..]);
for i in 0..100 {
assert_eq!(buffer[i], -100i64 + 53i64 + i as i64);
}
}
Ok(())
}
#[test]
fn test_signed_intfastfield_default_val() -> crate::Result<()> {
let path = Path::new("test");
let directory: RAMDirectory = RAMDirectory::create();
let mut schema_builder = Schema::builder();
let i64_field = schema_builder.add_i64_field("field", FAST);
let schema = schema_builder.build();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&schema);
let doc = Document::default();
fast_field_writers.add_document(&doc);
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(i64_field).unwrap();
let fast_field_reader = FastFieldReader::<i64>::open(data)?;
assert_eq!(fast_field_reader.get(0u32), 0i64);
}
Ok(())
}
// Warning: this generates the same permutation at each call
pub fn generate_permutation() -> Vec<u64> {
let mut permutation: Vec<u64> = (0u64..100_000u64).collect();
permutation.shuffle(&mut StdRng::from_seed([1u8; 32]));
permutation
}
#[test]
fn test_intfastfield_permutation() -> crate::Result<()> {
let path = Path::new("test");
let permutation = generate_permutation();
let n = permutation.len();
let directory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test"))?;
let mut serializer = FastFieldSerializer::from_write(write)?;
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
for &x in &permutation {
fast_field_writers.add_document(&doc!(*FIELD=>x));
}
fast_field_writers.serialize(&mut serializer, &HashMap::new())?;
serializer.close()?;
}
let file = directory.open_read(&path)?;
{
let fast_fields_composite = CompositeFile::open(&file)?;
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data)?;
let mut a = 0u64;
for _ in 0..n {
assert_eq!(fast_field_reader.get(a as u32), permutation[a as usize]);
a = fast_field_reader.get(a as u32);
}
}
Ok(())
}
#[test]
fn test_merge_missing_date_fast_field() {
let mut schema_builder = Schema::builder();
let date_field = schema_builder.add_date_field("date", FAST);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests().unwrap();
index_writer.set_merge_policy(Box::new(NoMergePolicy));
index_writer.add_document(doc!(date_field =>crate::chrono::prelude::Utc::now()));
index_writer.commit().unwrap();
index_writer.add_document(doc!());
index_writer.commit().unwrap();
let reader = index.reader().unwrap();
let segment_ids: Vec<SegmentId> = reader
.searcher()
.segment_readers()
.iter()
.map(SegmentReader::segment_id)
.collect();
assert_eq!(segment_ids.len(), 2);
let merge_future = index_writer.merge(&segment_ids[..]);
let merge_res = futures::executor::block_on(merge_future);
assert!(merge_res.is_ok());
assert!(reader.reload().is_ok());
assert_eq!(reader.searcher().segment_readers().len(), 1);
}
#[test]
fn test_default_datetime() {
assert_eq!(crate::DateTime::make_zero().timestamp(), 0i64);
}
#[test]
fn test_datefastfield() {
use crate::fastfield::FastValue;
let mut schema_builder = Schema::builder();
let date_field = schema_builder.add_date_field("date", FAST);
let multi_date_field = schema_builder.add_date_field(
"multi_date",
IntOptions::default().set_fast(Cardinality::MultiValues),
);
let schema = schema_builder.build();
let index = Index::create_in_ram(schema);
let mut index_writer = index.writer_for_tests().unwrap();
index_writer.set_merge_policy(Box::new(NoMergePolicy));
index_writer.add_document(doc!(
date_field => crate::DateTime::from_u64(1i64.to_u64()),
multi_date_field => crate::DateTime::from_u64(2i64.to_u64()),
multi_date_field => crate::DateTime::from_u64(3i64.to_u64())
));
index_writer.add_document(doc!(
date_field => crate::DateTime::from_u64(4i64.to_u64())
));
index_writer.add_document(doc!(
multi_date_field => crate::DateTime::from_u64(5i64.to_u64()),
multi_date_field => crate::DateTime::from_u64(6i64.to_u64())
));
index_writer.commit().unwrap();
let reader = index.reader().unwrap();
let searcher = reader.searcher();
assert_eq!(searcher.segment_readers().len(), 1);
let segment_reader = searcher.segment_reader(0);
let fast_fields = segment_reader.fast_fields();
let date_fast_field = fast_fields.date(date_field).unwrap();
let dates_fast_field = fast_fields.dates(multi_date_field).unwrap();
let mut dates = vec![];
{
assert_eq!(date_fast_field.get(0u32).timestamp(), 1i64);
dates_fast_field.get_vals(0u32, &mut dates);
assert_eq!(dates.len(), 2);
assert_eq!(dates[0].timestamp(), 2i64);
assert_eq!(dates[1].timestamp(), 3i64);
}
{
assert_eq!(date_fast_field.get(1u32).timestamp(), 4i64);
dates_fast_field.get_vals(1u32, &mut dates);
assert!(dates.is_empty());
}
{
assert_eq!(date_fast_field.get(2u32).timestamp(), 0i64);
dates_fast_field.get_vals(2u32, &mut dates);
assert_eq!(dates.len(), 2);
assert_eq!(dates[0].timestamp(), 5i64);
assert_eq!(dates[1].timestamp(), 6i64);
}
}
}
#[cfg(all(test, feature = "unstable"))]
mod bench {
use super::tests::FIELD;
use super::tests::{generate_permutation, SCHEMA};
use super::*;
use crate::common::CompositeFile;
use crate::directory::{Directory, RAMDirectory, WritePtr};
use crate::fastfield::FastFieldReader;
use std::collections::HashMap;
use std::path::Path;
use test::{self, Bencher};
#[bench]
fn bench_intfastfield_linear_veclookup(b: &mut Bencher) {
let permutation = generate_permutation();
b.iter(|| {
let n = test::black_box(7000u32);
let mut a = 0u64;
for i in (0u32..n / 7).map(|v| v * 7) {
a ^= permutation[i as usize];
}
a
});
}
#[bench]
fn bench_intfastfield_veclookup(b: &mut Bencher) {
let permutation = generate_permutation();
b.iter(|| {
let n = test::black_box(1000u32);
let mut a = 0u64;
for _ in 0u32..n {
a = permutation[a as usize];
}
a
});
}
#[bench]
fn bench_intfastfield_linear_fflookup(b: &mut Bencher) {
let path = Path::new("test");
let permutation = generate_permutation();
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
for &x in &permutation {
fast_field_writers.add_document(&doc!(*FIELD=>x));
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data).unwrap();
b.iter(|| {
let n = test::black_box(7000u32);
let mut a = 0u64;
for i in (0u32..n / 7).map(|val| val * 7) {
a ^= fast_field_reader.get(i);
}
a
});
}
}
#[bench]
fn bench_intfastfield_fflookup(b: &mut Bencher) {
let path = Path::new("test");
let permutation = generate_permutation();
let directory: RAMDirectory = RAMDirectory::create();
{
let write: WritePtr = directory.open_write(Path::new("test")).unwrap();
let mut serializer = FastFieldSerializer::from_write(write).unwrap();
let mut fast_field_writers = FastFieldsWriter::from_schema(&SCHEMA);
for &x in &permutation {
fast_field_writers.add_document(&doc!(*FIELD=>x));
}
fast_field_writers
.serialize(&mut serializer, &HashMap::new())
.unwrap();
serializer.close().unwrap();
}
let file = directory.open_read(&path).unwrap();
{
let fast_fields_composite = CompositeFile::open(&file).unwrap();
let data = fast_fields_composite.open_read(*FIELD).unwrap();
let fast_field_reader = FastFieldReader::<u64>::open(data).unwrap();
b.iter(|| {
let n = test::black_box(1000u32);
let mut a = 0u32;
for _ in 0u32..n {
a = fast_field_reader.get(a) as u32;
}
a
});
}
}
}
| {
let test_fastfield = FastFieldReader::<u64>::from(vec![100, 200, 300]);
assert_eq!(test_fastfield.get(0), 100);
assert_eq!(test_fastfield.get(1), 200);
assert_eq!(test_fastfield.get(2), 300);
} |
cloudshell-gen.go | // Copyright 2019 Google LLC.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated file. DO NOT EDIT.
// Package cloudshell provides access to the Cloud Shell API.
//
// For product documentation, see: https://cloud.google.com/shell/docs/
//
// Creating a client
//
// Usage example:
//
// import "google.golang.org/api/cloudshell/v1alpha1"
// ...
// ctx := context.Background()
// cloudshellService, err := cloudshell.NewService(ctx)
//
// In this example, Google Application Default Credentials are used for authentication.
//
// For information on how to create and obtain Application Default Credentials, see https://developers.google.com/identity/protocols/application-default-credentials.
//
// Other authentication options
//
// To use an API key for authentication (note: some APIs do not support API keys), use option.WithAPIKey:
//
// cloudshellService, err := cloudshell.NewService(ctx, option.WithAPIKey("AIza..."))
//
// To use an OAuth token (e.g., a user token obtained via a three-legged OAuth flow), use option.WithTokenSource:
//
// config := &oauth2.Config{...}
// // ...
// token, err := config.Exchange(ctx, ...)
// cloudshellService, err := cloudshell.NewService(ctx, option.WithTokenSource(config.TokenSource(ctx, token)))
//
// See https://godoc.org/google.golang.org/api/option/ for details on options.
package cloudshell // import "google.golang.org/api/cloudshell/v1alpha1"
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
googleapi "google.golang.org/api/googleapi"
gensupport "google.golang.org/api/internal/gensupport"
option "google.golang.org/api/option"
htransport "google.golang.org/api/transport/http"
)
// Always reference these packages, just in case the auto-generated code
// below doesn't.
var _ = bytes.NewBuffer
var _ = strconv.Itoa
var _ = fmt.Sprintf
var _ = json.NewDecoder
var _ = io.Copy
var _ = url.Parse
var _ = gensupport.MarshalJSON
var _ = googleapi.Version
var _ = errors.New
var _ = strings.Replace
var _ = context.Canceled
const apiId = "cloudshell:v1alpha1"
const apiName = "cloudshell"
const apiVersion = "v1alpha1"
const basePath = "https://cloudshell.googleapis.com/"
// OAuth2 scopes used by this API.
const (
// View and manage your data across Google Cloud Platform services
CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform"
)
// NewService creates a new Service.
func NewService(ctx context.Context, opts ...option.ClientOption) (*Service, error) {
scopesOption := option.WithScopes(
"https://www.googleapis.com/auth/cloud-platform",
)
// NOTE: prepend, so we don't override user-specified scopes.
opts = append([]option.ClientOption{scopesOption}, opts...)
client, endpoint, err := htransport.NewClient(ctx, opts...)
if err != nil {
return nil, err
}
s, err := New(client)
if err != nil {
return nil, err
}
if endpoint != "" {
s.BasePath = endpoint
}
return s, nil
}
// New creates a new Service. It uses the provided http.Client for requests.
//
// Deprecated: please use NewService instead.
// To provide a custom HTTP client, use option.WithHTTPClient.
// If you are using google.golang.org/api/googleapis/transport.APIKey, use option.WithAPIKey with NewService instead.
func New(client *http.Client) (*Service, error) {
if client == nil {
return nil, errors.New("client is nil")
}
s := &Service{client: client, BasePath: basePath}
s.Users = NewUsersService(s)
return s, nil
}
type Service struct {
client *http.Client
BasePath string // API endpoint base URL
UserAgent string // optional additional User-Agent fragment
Users *UsersService
}
func (s *Service) userAgent() string {
if s.UserAgent == "" {
return googleapi.UserAgent
}
return googleapi.UserAgent + " " + s.UserAgent
}
func NewUsersService(s *Service) *UsersService {
rs := &UsersService{s: s}
rs.Environments = NewUsersEnvironmentsService(s)
return rs
}
type UsersService struct {
s *Service
Environments *UsersEnvironmentsService
}
func NewUsersEnvironmentsService(s *Service) *UsersEnvironmentsService {
rs := &UsersEnvironmentsService{s: s}
rs.PublicKeys = NewUsersEnvironmentsPublicKeysService(s)
return rs
}
type UsersEnvironmentsService struct {
s *Service
PublicKeys *UsersEnvironmentsPublicKeysService
}
func NewUsersEnvironmentsPublicKeysService(s *Service) *UsersEnvironmentsPublicKeysService {
rs := &UsersEnvironmentsPublicKeysService{s: s}
return rs
}
type UsersEnvironmentsPublicKeysService struct {
s *Service
}
// AuthorizeEnvironmentRequest: Request message for
// AuthorizeEnvironment.
type AuthorizeEnvironmentRequest struct {
// AccessToken: The OAuth access token that should be sent to the
// environment.
AccessToken string `json:"accessToken,omitempty"`
// ExpireTime: The time when the credentials expire. If not set,
// defaults to one hour from
// when the server received the request.
ExpireTime string `json:"expireTime,omitempty"`
// IdToken: The OAuth ID token that should be sent to the environment.
IdToken string `json:"idToken,omitempty"`
// ForceSendFields is a list of field names (e.g. "AccessToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AccessToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *AuthorizeEnvironmentRequest) MarshalJSON() ([]byte, error) {
type NoMethod AuthorizeEnvironmentRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
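// A brief illustration (added here for clarity; not part of the generated
// code): to explicitly send an empty access token, list the field in
// ForceSendFields before marshaling.
//
//	req := &AuthorizeEnvironmentRequest{
//		ForceSendFields: []string{"AccessToken"},
//	}
//	body, _ := req.MarshalJSON() // yields {"accessToken":""}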
// CreatePublicKeyRequest: Request message for CreatePublicKey.
type CreatePublicKeyRequest struct {
// Key: Key that should be added to the environment.
Key *PublicKey `json:"key,omitempty"`
// ForceSendFields is a list of field names (e.g. "Key") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Key") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *CreatePublicKeyRequest) MarshalJSON() ([]byte, error) {
type NoMethod CreatePublicKeyRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Empty: A generic empty message that you can re-use to avoid defining
// duplicated
// empty messages in your APIs. A typical example is to use it as the
// request
// or the response type of an API method. For instance:
//
// service Foo {
// rpc Bar(google.protobuf.Empty) returns
// (google.protobuf.Empty);
// }
//
// The JSON representation for `Empty` is empty JSON object `{}`.
type Empty struct {
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
}
// Environment: A Cloud Shell environment, which is defined as the
// combination of a Docker
// image specifying what is installed on the environment and a home
// directory
// containing the user's data that will remain across sessions. Each
// user has a
// single environment with the ID "default".
type Environment struct {
// DockerImage: Required. Full path to the Docker image used to run this
// environment, e.g.
// "gcr.io/dev-con/cloud-devshell:latest".
DockerImage string `json:"dockerImage,omitempty"`
// Id: Output only. The environment's identifier, unique among the
// user's
// environments.
Id string `json:"id,omitempty"`
// Name: Output only. Full name of this resource, in the
// format
// `users/{owner_email}/environments/{environment_id}`. `{owner_email}`
// is the
// email address of the user to whom this environment belongs,
// and
// `{environment_id}` is the identifier of this environment. For
// example,
// `users/[email protected]/environments/default`.
Name string `json:"name,omitempty"`
// PublicKeys: Output only. Public keys associated with the environment.
// Clients can
// connect to this environment via SSH only if they possess a private
// key
// corresponding to at least one of these public keys. Keys can be added
// to or
// removed from the environment using the CreatePublicKey and
// DeletePublicKey
// methods.
PublicKeys []*PublicKey `json:"publicKeys,omitempty"`
// Size: Indicates the size of the backing VM running the environment.
// If set to
// something other than DEFAULT, it will be reverted to the default VM
// size
// after vm_size_expire_time.
//
// Possible values:
// "VM_SIZE_UNSPECIFIED" - The VM size is unknown.
// "DEFAULT" - The default VM size.
// "BOOSTED" - The boosted VM size.
Size string `json:"size,omitempty"`
// SshHost: Output only. Host to which clients can connect to initiate
// SSH sessions
// with the environment.
SshHost string `json:"sshHost,omitempty"`
// SshPort: Output only. Port to which clients can connect to initiate
// SSH sessions
// with the environment.
SshPort int64 `json:"sshPort,omitempty"`
// SshUsername: Output only. Username that clients should use when
// initiating SSH sessions
// with the environment.
SshUsername string `json:"sshUsername,omitempty"`
// State: Output only. Current execution state of this environment.
//
// Possible values:
	//   "STATE_UNSPECIFIED" - The environment's state is unknown.
// "DISABLED" - The environment is not running and can't be connected
// to. Starting the
// environment will transition it to the STARTING state.
// "STARTING" - The environment is being started but is not yet ready
// to accept
// connections.
// "RUNNING" - The environment is running and ready to accept
// connections. It will
// automatically transition back to DISABLED after a period of
// inactivity or
// if another environment is started.
State string `json:"state,omitempty"`
// VmSizeExpireTime: Output only. The time when the Environment will
// expire back to the default
// VM size.
VmSizeExpireTime string `json:"vmSizeExpireTime,omitempty"`
// WebHost: Output only. Host to which clients can connect to initiate
// HTTPS or WSS
// connections with the environment.
WebHost string `json:"webHost,omitempty"`
// WebPorts: Output only. Ports to which clients can connect to initiate
// HTTPS or WSS
// connections with the environment.
WebPorts []int64 `json:"webPorts,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "DockerImage") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "DockerImage") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Environment) MarshalJSON() ([]byte, error) {
type NoMethod Environment
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
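// Illustrative sketch (not part of the generated surface): ForceSendFields
// lets a Patch payload carry an explicitly empty field instead of omitting it.
//
//	env := &Environment{Size: ""}
//	env.ForceSendFields = []string{"Size"} // serialize "" rather than dropping it
//	data, err := env.MarshalJSON()         // data contains {"size":""}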
// Operation: This resource represents a long-running operation that is
// the result of a
// network API call.
type Operation struct {
// Done: If the value is `false`, it means the operation is still in
// progress.
// If `true`, the operation is completed, and either `error` or
// `response` is
// available.
Done bool `json:"done,omitempty"`
// Error: The error result of the operation in case of failure or
// cancellation.
Error *Status `json:"error,omitempty"`
// Metadata: Service-specific metadata associated with the operation.
// It typically
// contains progress information and common metadata such as create
// time.
// Some services might not provide such metadata. Any method that
// returns a
// long-running operation should document the metadata type, if any.
Metadata googleapi.RawMessage `json:"metadata,omitempty"`
// Name: The server-assigned name, which is only unique within the same
// service that
// originally returns it. If you use the default HTTP mapping,
// the
// `name` should be a resource name ending with
// `operations/{unique_id}`.
Name string `json:"name,omitempty"`
// Response: The normal response of the operation in case of success.
// If the original
// method returns no data on success, such as `Delete`, the response
// is
// `google.protobuf.Empty`. If the original method is
// standard
// `Get`/`Create`/`Update`, the response should be the resource. For
// other
// methods, the response should have the type `XxxResponse`, where
// `Xxx`
// is the original method name. For example, if the original method
// name
// is `TakeSnapshot()`, the inferred response type
// is
// `TakeSnapshotResponse`.
Response googleapi.RawMessage `json:"response,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Done") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Done") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Operation) MarshalJSON() ([]byte, error) {
type NoMethod Operation
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// PublicKey: A public SSH key, corresponding to a private SSH key held
// by the client.
type PublicKey struct {
// Format: Required. Format of this key's content.
//
// Possible values:
// "FORMAT_UNSPECIFIED" - Unknown format. Do not use.
// "SSH_DSS" - `ssh-dss` key format (see RFC4253).
// "SSH_RSA" - `ssh-rsa` key format (see RFC4253).
// "ECDSA_SHA2_NISTP256" - `ecdsa-sha2-nistp256` key format (see
// RFC5656).
// "ECDSA_SHA2_NISTP384" - `ecdsa-sha2-nistp384` key format (see
// RFC5656).
// "ECDSA_SHA2_NISTP521" - `ecdsa-sha2-nistp521` key format (see
// RFC5656).
Format string `json:"format,omitempty"`
// Key: Required. Content of this key.
Key string `json:"key,omitempty"`
// Name: Output only. Full name of this resource, in the format
// `users/{owner_email}/environments/{environment_id}/publicKeys/{key_id}`.
// `{owner_email}` is the email address of the user to whom the key belongs.
// `{environment_id}` is the identifier of the environment to which the key
// grants access. `{key_id}` is the unique identifier of the key. For example,
// `users/[email protected]/environments/default/publicKeys/myKey`.
Name string `json:"name,omitempty"`
// ServerResponse contains the HTTP response code and headers from the
// server.
googleapi.ServerResponse `json:"-"`
// ForceSendFields is a list of field names (e.g. "Format") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Format") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *PublicKey) MarshalJSON() ([]byte, error) {
type NoMethod PublicKey
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// StartEnvironmentMetadata: Message included in the metadata field of
// operations returned from
// StartEnvironment.
type StartEnvironmentMetadata struct {
// State: Current state of the environment being started.
//
// Possible values:
// "STATE_UNSPECIFIED" - The environment's start state is unknown.
// "STARTING" - The environment is in the process of being started,
// but no additional
// details are available.
// "UNARCHIVING_DISK" - Startup is waiting for the user's disk to be
// unarchived. This can happen
// when the user returns to Cloud Shell after not having used it for a
// while, and suggests that startup will take longer than normal.
// "FINISHED" - Startup is complete and the user should be able to
// establish an SSH
// connection to their environment.
State string `json:"state,omitempty"`
// ForceSendFields is a list of field names (e.g. "State") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "State") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *StartEnvironmentMetadata) MarshalJSON() ([]byte, error) {
type NoMethod StartEnvironmentMetadata
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// StartEnvironmentRequest: Request message for StartEnvironment.
type StartEnvironmentRequest struct {
// AccessToken: The initial access token passed to the environment. If
// this is present and
// valid, the environment will be pre-authenticated with gcloud so that
// the
// user can run gcloud commands in Cloud Shell without having to log in.
// This
// code can be updated later by calling AuthorizeEnvironment.
AccessToken string `json:"accessToken,omitempty"`
// ForceSendFields is a list of field names (e.g. "AccessToken") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "AccessToken") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *StartEnvironmentRequest) MarshalJSON() ([]byte, error) {
type NoMethod StartEnvironmentRequest
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// StartEnvironmentResponse: Message included in the response field of
// operations returned from
// StartEnvironment once the
// operation is complete.
type StartEnvironmentResponse struct {
// Environment: Environment that was started.
Environment *Environment `json:"environment,omitempty"`
// ForceSendFields is a list of field names (e.g. "Environment") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Environment") to include
// in API requests with the JSON null value. By default, fields with
// empty values are omitted from API requests. However, any field with
// an empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *StartEnvironmentResponse) MarshalJSON() ([]byte, error) {
type NoMethod StartEnvironmentResponse
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// Status: The `Status` type defines a logical error model that is
// suitable for
// different programming environments, including REST APIs and RPC APIs.
// It is
// used by [gRPC](https://github.com/grpc). Each `Status` message
// contains
// three pieces of data: error code, error message, and error
// details.
//
// You can find out more about this error model and how to work with it
// in the
// [API Design Guide](https://cloud.google.com/apis/design/errors).
type Status struct {
// Code: The status code, which should be an enum value of
// google.rpc.Code.
Code int64 `json:"code,omitempty"`
// Details: A list of messages that carry the error details. There is a
// common set of
// message types for APIs to use.
Details []googleapi.RawMessage `json:"details,omitempty"`
// Message: A developer-facing error message, which should be in
// English. Any
// user-facing error message should be localized and sent in
// the
// google.rpc.Status.details field, or localized by the client.
Message string `json:"message,omitempty"`
// ForceSendFields is a list of field names (e.g. "Code") to
// unconditionally include in API requests. By default, fields with
// empty values are omitted from API requests. However, any non-pointer,
// non-interface field appearing in ForceSendFields will be sent to the
// server regardless of whether the field is empty or not. This may be
// used to include empty fields in Patch requests.
ForceSendFields []string `json:"-"`
// NullFields is a list of field names (e.g. "Code") to include in API
// requests with the JSON null value. By default, fields with empty
// values are omitted from API requests. However, any field with an
// empty value appearing in NullFields will be sent to the server as
// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests.
NullFields []string `json:"-"`
}
func (s *Status) MarshalJSON() ([]byte, error) {
type NoMethod Status
raw := NoMethod(*s)
return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}
// method id "cloudshell.users.environments.authorize":
type UsersEnvironmentsAuthorizeCall struct {
s *Service
name string
authorizeenvironmentrequest *AuthorizeEnvironmentRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Authorize: Sends OAuth credentials to a running environment on behalf
// of a user. When
// this completes, the environment will be authorized to run various
// Google
// Cloud command line tools without requiring the user to
// manually
// authenticate.
func (r *UsersEnvironmentsService) Authorize(name string, authorizeenvironmentrequest *AuthorizeEnvironmentRequest) *UsersEnvironmentsAuthorizeCall {
c := &UsersEnvironmentsAuthorizeCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.authorizeenvironmentrequest = authorizeenvironmentrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsAuthorizeCall) Fields(s ...googleapi.Field) *UsersEnvironmentsAuthorizeCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsAuthorizeCall) Context(ctx context.Context) *UsersEnvironmentsAuthorizeCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsAuthorizeCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsAuthorizeCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.authorizeenvironmentrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}:authorize")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.authorize" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *UsersEnvironmentsAuthorizeCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Empty{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Sends OAuth credentials to a running environment on behalf of a user. When\nthis completes, the environment will be authorized to run various Google\nCloud command line tools without requiring the user to manually\nauthenticate.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}:authorize",
// "httpMethod": "POST",
// "id": "cloudshell.users.environments.authorize",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "Name of the resource that should receive the credentials, for example\n`users/me/environments/default` or\n`users/[email protected]/environments/default`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}:authorize",
// "request": {
// "$ref": "AuthorizeEnvironmentRequest"
// },
// "response": {
// "$ref": "Empty"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
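// Illustrative usage (assumes a configured *Service named svc, a context ctx,
// and the AuthorizeEnvironmentRequest type defined earlier in this file):
//
//	_, err := svc.Users.Environments.Authorize(
//		"users/me/environments/default",
//		&AuthorizeEnvironmentRequest{}).Context(ctx).Do()
//	if err != nil {
//		// handle the error
//	}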
// method id "cloudshell.users.environments.get":
type UsersEnvironmentsGetCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ifNoneMatch_ string
ctx_ context.Context
header_ http.Header
}
// Get: Gets an environment. Returns NOT_FOUND if the environment does
// not exist.
func (r *UsersEnvironmentsService) Get(name string) *UsersEnvironmentsGetCall {
c := &UsersEnvironmentsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsGetCall) Fields(s ...googleapi.Field) *UsersEnvironmentsGetCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// IfNoneMatch sets the optional parameter which makes the operation
// fail if the object's ETag matches the given value. This is useful for
// getting updates only after the object has changed since the last
// request. Use googleapi.IsNotModified to check whether the response
// error from Do is the result of If-None-Match.
func (c *UsersEnvironmentsGetCall) IfNoneMatch(entityTag string) *UsersEnvironmentsGetCall {
c.ifNoneMatch_ = entityTag
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsGetCall) Context(ctx context.Context) *UsersEnvironmentsGetCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsGetCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsGetCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
if c.ifNoneMatch_ != "" {
reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
}
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("GET", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.get" call.
// Exactly one of *Environment or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Environment.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *UsersEnvironmentsGetCall) Do(opts ...googleapi.CallOption) (*Environment, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Environment{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Gets an environment. Returns NOT_FOUND if the environment does not exist.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}",
// "httpMethod": "GET",
// "id": "cloudshell.users.environments.get",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "Name of the requested resource, for example `users/me/environments/default`\nor `users/[email protected]/environments/default`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Environment"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
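// Illustrative conditional read (svc is an assumed, configured *Service; etag
// is a previously observed ETag value):
//
//	env, err := svc.Users.Environments.Get(
//		"users/me/environments/default").IfNoneMatch(etag).Do()
//	if googleapi.IsNotModified(err) {
//		// the cached Environment is still current
//	}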
// method id "cloudshell.users.environments.patch":
type UsersEnvironmentsPatchCall struct {
s *Service
name string
environment *Environment
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Patch: Updates an existing environment.
func (r *UsersEnvironmentsService) Patch(name string, environment *Environment) *UsersEnvironmentsPatchCall {
c := &UsersEnvironmentsPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.environment = environment
return c
}
// UpdateMask sets the optional parameter "updateMask": Mask specifying
// which fields in the environment should be updated.
func (c *UsersEnvironmentsPatchCall) UpdateMask(updateMask string) *UsersEnvironmentsPatchCall {
c.urlParams_.Set("updateMask", updateMask)
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsPatchCall) Fields(s ...googleapi.Field) *UsersEnvironmentsPatchCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsPatchCall) Context(ctx context.Context) *UsersEnvironmentsPatchCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsPatchCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsPatchCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.environment)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("PATCH", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.patch" call.
// Exactly one of *Environment or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Environment.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *UsersEnvironmentsPatchCall) Do(opts ...googleapi.CallOption) (*Environment, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Environment{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Updates an existing environment.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}",
// "httpMethod": "PATCH",
// "id": "cloudshell.users.environments.patch",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "Name of the resource to be updated, for example\n`users/me/environments/default` or\n`users/[email protected]/environments/default`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+$",
// "required": true,
// "type": "string"
// },
// "updateMask": {
// "description": "Mask specifying which fields in the environment should be updated.",
// "format": "google-fieldmask",
// "location": "query",
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "request": {
// "$ref": "Environment"
// },
// "response": {
// "$ref": "Environment"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
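// Illustrative partial update (assumes field-mask semantics: only the fields
// named in updateMask are applied on the server):
//
//	env := &Environment{Size: "BOOSTED"}
//	updated, err := svc.Users.Environments.Patch(
//		"users/me/environments/default", env).UpdateMask("size").Do()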
// method id "cloudshell.users.environments.start":
type UsersEnvironmentsStartCall struct {
s *Service
name string
startenvironmentrequest *StartEnvironmentRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Start: Starts an existing environment, allowing clients to connect to
// it. The
// returned operation will contain an instance of
// StartEnvironmentMetadata in
// its metadata field. Users can wait for the environment to start by
// polling
// this operation via GetOperation. Once the environment has finished
// starting
// and is ready to accept connections, the operation will contain
// a
// StartEnvironmentResponse in its response field.
func (r *UsersEnvironmentsService) Start(name string, startenvironmentrequest *StartEnvironmentRequest) *UsersEnvironmentsStartCall {
c := &UsersEnvironmentsStartCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
c.startenvironmentrequest = startenvironmentrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsStartCall) Fields(s ...googleapi.Field) *UsersEnvironmentsStartCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsStartCall) Context(ctx context.Context) *UsersEnvironmentsStartCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsStartCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsStartCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.startenvironmentrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}:start")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.start" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *UsersEnvironmentsStartCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Operation{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Starts an existing environment, allowing clients to connect to it. The\nreturned operation will contain an instance of StartEnvironmentMetadata in\nits metadata field. Users can wait for the environment to start by polling\nthis operation via GetOperation. Once the environment has finished starting\nand is ready to accept connections, the operation will contain a\nStartEnvironmentResponse in its response field.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}:start",
// "httpMethod": "POST",
// "id": "cloudshell.users.environments.start",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "Name of the resource that should be started, for example\n`users/me/environments/default` or\n`users/[email protected]/environments/default`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}:start",
// "request": {
// "$ref": "StartEnvironmentRequest"
// },
// "response": {
// "$ref": "Operation"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
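// Illustrative start-and-wait flow (the operation-polling transport is assumed
// and not part of this file):
//
//	op, err := svc.Users.Environments.Start(name, &StartEnvironmentRequest{}).Do()
//	// ... poll op via GetOperation until op.Done ...
//	var resp StartEnvironmentResponse
//	if op.Done && op.Response != nil {
//		err = json.Unmarshal(op.Response, &resp) // Response is a raw JSON message
//	}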
// method id "cloudshell.users.environments.publicKeys.create":
type UsersEnvironmentsPublicKeysCreateCall struct {
s *Service
parent string
createpublickeyrequest *CreatePublicKeyRequest
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Create: Adds a public SSH key to an environment, allowing clients
// with the
// corresponding private key to connect to that environment via SSH. If
// a key
// with the same format and content already exists, this will return
// the
// existing key.
func (r *UsersEnvironmentsPublicKeysService) Create(parent string, createpublickeyrequest *CreatePublicKeyRequest) *UsersEnvironmentsPublicKeysCreateCall {
c := &UsersEnvironmentsPublicKeysCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.parent = parent
c.createpublickeyrequest = createpublickeyrequest
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsPublicKeysCreateCall) Fields(s ...googleapi.Field) *UsersEnvironmentsPublicKeysCreateCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsPublicKeysCreateCall) Context(ctx context.Context) *UsersEnvironmentsPublicKeysCreateCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsPublicKeysCreateCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsPublicKeysCreateCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
body, err := googleapi.WithoutDataWrapper.JSONReader(c.createpublickeyrequest)
if err != nil {
return nil, err
}
reqHeaders.Set("Content-Type", "application/json")
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+parent}/publicKeys")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("POST", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"parent": c.parent,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.publicKeys.create" call.
// Exactly one of *PublicKey or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *PublicKey.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *UsersEnvironmentsPublicKeysCreateCall) Do(opts ...googleapi.CallOption) (*PublicKey, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &PublicKey{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Adds a public SSH key to an environment, allowing clients with the\ncorresponding private key to connect to that environment via SSH. If a key\nwith the same format and content already exists, this will return the\nexisting key.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}/publicKeys",
// "httpMethod": "POST",
// "id": "cloudshell.users.environments.publicKeys.create",
// "parameterOrder": [
// "parent"
// ],
// "parameters": {
// "parent": {
// "description": "Parent resource name, e.g. `users/me/environments/default`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+parent}/publicKeys",
// "request": {
// "$ref": "CreatePublicKeyRequest"
// },
// "response": {
// "$ref": "PublicKey"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
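// Illustrative key upload (CreatePublicKeyRequest is defined earlier in this
// file; its exact field layout is assumed rather than shown here):
//
//	key, err := svc.Users.Environments.PublicKeys.Create(
//		"users/me/environments/default",
//		&CreatePublicKeyRequest{ /* PublicKey payload */ }).Do()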
// method id "cloudshell.users.environments.publicKeys.delete":
type UsersEnvironmentsPublicKeysDeleteCall struct {
s *Service
name string
urlParams_ gensupport.URLParams
ctx_ context.Context
header_ http.Header
}
// Delete: Removes a public SSH key from an environment. Clients will no
// longer be
// able to connect to the environment using the corresponding private
// key.
func (r *UsersEnvironmentsPublicKeysService) Delete(name string) *UsersEnvironmentsPublicKeysDeleteCall {
c := &UsersEnvironmentsPublicKeysDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)}
c.name = name
return c
}
// Fields allows partial responses to be retrieved. See
// https://developers.google.com/gdata/docs/2.0/basics#PartialResponse
// for more information.
func (c *UsersEnvironmentsPublicKeysDeleteCall) Fields(s ...googleapi.Field) *UsersEnvironmentsPublicKeysDeleteCall {
c.urlParams_.Set("fields", googleapi.CombineFields(s))
return c
}
// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *UsersEnvironmentsPublicKeysDeleteCall) Context(ctx context.Context) *UsersEnvironmentsPublicKeysDeleteCall {
c.ctx_ = ctx
return c
}
// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *UsersEnvironmentsPublicKeysDeleteCall) Header() http.Header {
if c.header_ == nil {
c.header_ = make(http.Header)
}
return c.header_
}
func (c *UsersEnvironmentsPublicKeysDeleteCall) doRequest(alt string) (*http.Response, error) {
reqHeaders := make(http.Header)
reqHeaders.Set("x-goog-api-client", "gl-go/1.11.0 gdcl/20191113")
for k, v := range c.header_ {
reqHeaders[k] = v
}
reqHeaders.Set("User-Agent", c.s.userAgent())
var body io.Reader = nil
c.urlParams_.Set("alt", alt)
c.urlParams_.Set("prettyPrint", "false")
urls := googleapi.ResolveRelative(c.s.BasePath, "v1alpha1/{+name}")
urls += "?" + c.urlParams_.Encode()
req, err := http.NewRequest("DELETE", urls, body)
if err != nil {
return nil, err
}
req.Header = reqHeaders
googleapi.Expand(req.URL, map[string]string{
"name": c.name,
})
return gensupport.SendRequest(c.ctx_, c.s.client, req)
}
// Do executes the "cloudshell.users.environments.publicKeys.delete" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *UsersEnvironmentsPublicKeysDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json")
if res != nil && res.StatusCode == http.StatusNotModified {
if res.Body != nil {
res.Body.Close()
}
return nil, &googleapi.Error{
Code: res.StatusCode,
Header: res.Header,
}
}
if err != nil {
return nil, err
}
defer googleapi.CloseBody(res)
if err := googleapi.CheckResponse(res); err != nil {
return nil, err
}
ret := &Empty{
ServerResponse: googleapi.ServerResponse{
Header: res.Header,
HTTPStatusCode: res.StatusCode,
},
}
target := &ret
if err := gensupport.DecodeResponse(target, res); err != nil {
return nil, err
}
return ret, nil
// {
// "description": "Removes a public SSH key from an environment. Clients will no longer be\nable to connect to the environment using the corresponding private key.",
// "flatPath": "v1alpha1/users/{usersId}/environments/{environmentsId}/publicKeys/{publicKeysId}",
// "httpMethod": "DELETE",
// "id": "cloudshell.users.environments.publicKeys.delete",
// "parameterOrder": [
// "name"
// ],
// "parameters": {
// "name": {
// "description": "Name of the resource to be deleted, e.g.\n`users/me/environments/default/publicKeys/my-key`.",
// "location": "path",
// "pattern": "^users/[^/]+/environments/[^/]+/publicKeys/[^/]+$",
// "required": true,
// "type": "string"
// }
// },
// "path": "v1alpha1/{+name}",
// "response": {
// "$ref": "Empty"
// },
// "scopes": [
// "https://www.googleapis.com/auth/cloud-platform"
// ]
// }
}
run_pretrain_static.py
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Pretrain GPT in static graph mode.
"""
import argparse
import math
import os
import random
import time
import sys
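# The next two path expansions are effectively no-ops: their return values are
# discarded and nothing is mutated.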
os.path.expandvars('$HOME')
os.path.expanduser('~')
import numpy as np
import paddle
import paddle.distributed.fleet as fleet
from paddle.distributed.fleet.meta_optimizers.sharding.utils import save_persistables
from modeling import GPTModel, GPTForPretraining, GPTPretrainingCriterion
from paddlenlp.transformers import GPTTokenizer, GPTChineseTokenizer
from paddlenlp.ops import guard, Topology, get_rng_state_tracker
from paddlenlp.utils.log import logger
from paddlenlp.utils import profiler
import paddlenlp.ops as ops
from visualdl import LogWriter
# Used to load the data_tools path; must be imported before the dataset module.
filepath = os.path.abspath(os.path.dirname(__file__))
sys.path.insert(0, os.path.join(filepath, "../"))
from dataset import create_pretrained_dataset
from args import parse_args
import lr
MODEL_CLASSES = {
"gpt": (GPTForPretraining, GPTTokenizer),
"gpt-cn": (GPTForPretraining, GPTChineseTokenizer),
}
def create_data_holder(args):
"""creat data holder"""
tokens = paddle.static.data(
name="tokens", shape=[-1, args.max_seq_len], dtype="int64")
loss_mask = paddle.static.data(
name="loss_mask", shape=[-1, args.max_seq_len], dtype="float32")
position_ids = paddle.static.data(
name="position_ids", shape=[-1, args.max_seq_len], dtype="int64")
labels = paddle.static.data(
name="labels", shape=[-1, args.max_seq_len], dtype="int64")
return [tokens, loss_mask, position_ids, labels]
def dist_optimizer(args, topo):
default_global_batch_size = topo.data_info.size * args.micro_batch_size
if args.global_batch_size is None:
args.global_batch_size = default_global_batch_size
bsz_per_dp = args.global_batch_size // topo.data_info.size
micro_batch_size = args.micro_batch_size
assert args.global_batch_size % micro_batch_size == 0, "global_batch_size must be divisible by micro_batch_size for gradient accumulation; got global_batch_size: {}, micro_batch_size: {}".format(
args.global_batch_size, micro_batch_size)
acc_steps = bsz_per_dp // micro_batch_size
exec_strategy = paddle.fluid.ExecutionStrategy()
exec_strategy.num_threads = 2
exec_strategy.num_iteration_per_drop_scope = 1
dist_strategy = fleet.DistributedStrategy()
dist_strategy.execution_strategy = exec_strategy
dist_strategy.nccl_comm_num = 3
dist_strategy.recompute = args.use_recompute
dist_strategy.pipeline = args.pp_degree > 1
if args.use_amp:
dist_strategy.amp = True
dist_strategy.amp_configs = {
"custom_white_list": [
'softmax', 'layer_norm', 'gelu',
"fused_softmax_mask_upper_triangle", "elementwise_add"
],
"custom_black_list":
["reduce_sum", "c_softmax_with_cross_entropy", "elementwise_div"],
"init_loss_scaling": 32768,
"use_dynamic_loss_scaling": True,
"use_pure_fp16": args.amp_level == "O2",
"use_fp16_guard": False
}
if args.use_sharding:
dist_strategy.sharding = True
dist_strategy.sharding_configs = {
"segment_broadcast_MB": 32,
"sharding_degree": args.sharding_degree,
"mp_degree": args.mp_degree,
"pp_degree": args.pp_degree,
"dp_degree": args.dp_degree,
"optimize_offload": False,
}
if args.pp_degree > 1:
dist_strategy.pipeline_configs = {
"schedule_mode": "1F1B",
"micro_micro_batch_size": micro_batch_size,
"accumulate_steps": acc_steps,
}
else:
assert acc_steps == 1, "Gradient accumulation is only supported in pipeline mode. Please set global_batch_size={}".format(
default_global_batch_size)
return dist_strategy
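# Illustrative arithmetic for dist_optimizer: with a data-parallel world size
# of 2, micro_batch_size=8 and global_batch_size=64, each rank handles
# bsz_per_dp = 64 // 2 = 32 samples per step, accumulated over
# acc_steps = 32 // 8 = 4 micro-batches (accumulation requires pipeline
# parallelism, per the assert above).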
def get_train_data_file(args):
files = [
os.path.join(args.input_dir, f) for f in os.listdir(args.input_dir)
if (os.path.isfile(os.path.join(args.input_dir, f)) and str(f).endswith(
"_idx.npz"))
]
files = [x.replace("_idx.npz", "") for x in files]
if len(files) == 0:
logger.warning(
"Not found dataset with name of xxx_ids.npy and xxx_idx.npz! Try to found old compatible xxx_ids.npz file."
)
else:
return files
files = [
os.path.join(args.input_dir, f) for f in os.listdir(args.input_dir)
if (os.path.isfile(os.path.join(args.input_dir, f)) and str(f).endswith(
"_ids.npz"))
]
files = [x.replace("_ids.npz", "") for x in files]
return files
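# Example (hypothetical file names): an input_dir holding
# "openwebtext_ids.npy" and "openwebtext_idx.npz" yields the single prefix
# ".../openwebtext", from which the dataset code can re-derive the paired files.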
def init_static_with_params(model, dygraph_params, topo, prog=None):
from paddlenlp.utils.tools import dygraph_params_to_static
static_params = dygraph_params_to_static(model, dygraph_params, topo)
if prog is None:
prog = paddle.static.default_main_program()
paddle.static.set_program_state(prog, static_params)
def run_evaluate(data_loader,
exe,
program,
iter_steps,
log_writer,
global_step,
args,
epoch,
is_last,
eval_fetch,
task_name="valid"):
all_loss = []
local_time = time.time()
for eval_step, batch in enumerate(data_loader):
loss_return = exe.run(program, feed=batch, fetch_list=eval_fetch)
if is_last:
all_loss.append(float(loss_return[0]))
if eval_step >= iter_steps - 1:
if not is_last:
break
average_loss = sum(all_loss) / len(all_loss)
logger.info(
"%s step %d, epoch: %d, batch: %d, loss: %f, speed: %.0f tokens/s"
% (task_name, global_step, epoch, eval_step, average_loss,
iter_steps * args.micro_batch_size * args.max_seq_len /
(time.time() - local_time)))
log_writer.add_scalar(task_name + "_loss", average_loss,
global_step)
break
def do_train(args):
# Initialize the paddle and paddle fleet execute environment
paddle.enable_static()
fleet.init(is_collective=True)
# Create the random seed for the worker
random.seed(args.seed)
np.random.seed(args.seed)
paddle.seed(args.seed)
get_rng_state_tracker().add('global_seed', args.seed)
get_rng_state_tracker().add('local_seed',
args.seed + fleet.worker_index() + 2021)
if args.use_amp and args.amp_level == "O2":
assert (args.mp_degree == 1 and args.pp_degree == 1
), "When amp level is O2, mp_degree and pp_degree should be 1."
assert (args.use_sharding == False
), "When amp level is O2, use_sharding should be False."
assert args.device in [
"cpu", "gpu", "xpu"
], "Invalid device! Available device should be cpu, gpu, or xpu."
place = paddle.set_device(args.device)
worker_num = fleet.worker_num()
worker_index = fleet.worker_index()
local_rank = 0 if fleet.local_rank() is None else int(fleet.local_rank())
topo = Topology(
device_rank=worker_index,
world_size=worker_num,
dp_degree=args.dp_degree,
pp_degree=args.pp_degree,
sharding_degree=args.sharding_degree,
mp_degree=args.mp_degree)
logger.info("The topo of hybrid parallelism:\n{}".format(topo))
dist_strategy = dist_optimizer(args, topo)
# Create the log writer; training results are shown on the last card of the pipeline.
if topo.is_last:
log_writer_path = os.path.join(
args.output_dir, "train_log",
"{}_globalbsz_{}_amp_{}_recompute_{}_card_{}".format(
args.model_name_or_path, args.global_batch_size, args.use_amp,
args.use_recompute, worker_index).lower())
if os.path.exists(log_writer_path):
import shutil
shutil.rmtree(log_writer_path)
log_writer = LogWriter(log_writer_path)
# Define the input data in the static mode
model_class, tokenizer_class = MODEL_CLASSES[args.model_type]
pretrained_models_list = list(
model_class.pretrained_init_configuration.keys())
data_file = get_train_data_file(args)
main_program = paddle.static.default_main_program()
startup_program = paddle.static.default_startup_program()
with paddle.static.program_guard(main_program, startup_program):
with paddle.utils.unique_name.guard():
with paddle.static.device_guard('gpu:0'):
data_holders = create_data_holder(args)
[tokens, loss_mask, position_ids, labels] = data_holders
tokenizer = tokenizer_class.from_pretrained(
args.model_name_or_path)
eos_id = tokenizer.eos_token_id
train_data_loader, valid_data_loader, test_data_loader = create_pretrained_dataset(
args,
data_file,
local_rank=local_rank,
data_world_size=topo.data_info.size,
data_world_rank=topo.data_info.rank,
eos_id=eos_id,
max_seq_len=args.max_seq_len,
places=paddle.static.cuda_places(),
data_holders=data_holders,
pipeline_mode=False, )
if args.model_name_or_path in pretrained_models_list:
model_config = model_class.pretrained_init_configuration[
args.model_name_or_path]
model_config[
"hidden_dropout_prob"] = args.hidden_dropout_prob
model_config[
"attention_probs_dropout_prob"] = args.attention_probs_dropout_prob
model_config["topo"] = topo
model = guard(f'gpu:{args.pp_degree -1}')(
GPTForPretraining)(guard(f'gpu:0')(GPTModel)(
**model_config))
else:
model, _ = GPTForPretraining.from_pretrained(
args.model_name_or_path,
hidden_dropout_prob=args.hidden_dropout_prob,
attention_probs_dropout_prob=args.
attention_probs_dropout_prob,
topo=topo)
# Create the model for GPT pretraining
preds = model(tokens, position_ids)
criterion = guard(f'gpu:{args.pp_degree -1}')(
GPTPretrainingCriterion)(topo)
loss = criterion(preds, labels, loss_mask)
# Create the learning rate scheduler and optimizer
if args.decay_steps is None:
args.decay_steps = args.max_steps
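# warmup_rate is a fraction of the decay schedule: e.g. warmup_rate=0.01 with
# decay_steps=500000 warms the learning rate up over the first 5000 steps.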
warmup_step = args.warmup_rate * args.decay_steps
# TODO @ZHUI Use paddle network to support lr scheduler
lr_scheduler = lr.CosineAnnealingWithWarmupDecay(
max_lr=args.max_lr,
min_lr=args.min_lr,
warmup_step=warmup_step,
decay_step=args.decay_steps)
clip = None
if args.grad_clip > 0:
clip = paddle.fluid.clip.GradientClipByGlobalNorm(
clip_norm=args.grad_clip)
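# Exclude biases and *norm* parameters (e.g. LayerNorm weights) from weight
# decay, a common practice when training Transformers.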
decay_param = [
p.name for n, p in model.named_parameters()
if not any(nd in n for nd in ["bias", "norm"])
]
optimizer = paddle.optimizer.AdamW(
learning_rate=lr_scheduler,
beta1=args.adam_beta1,
beta2=args.adam_beta2,
epsilon=args.adam_epsilon,
grad_clip=clip,
weight_decay=args.weight_decay,
apply_decay_param_fun=lambda x: x in decay_param)
# alias
optimizer.apply_optimize = optimizer._apply_optimize
if args.use_recompute:
dist_strategy.recompute = True
dist_strategy.recompute_configs = {
"checkpoints": model.gpt.checkpoints
}
# Use the fleet api to compile the distributed optimizer
optimizer = fleet.distributed_optimizer(
optimizer, strategy=dist_strategy)
optimizer.minimize(loss)
logger.info(f'final strategy: {fleet._final_strategy()}')
logger.info("The training meta optimizer is/are %s" %
fleet._get_applied_meta_list())
program_desc_dir = os.path.join(args.output_dir, "program_desc")
if not os.path.isdir(program_desc_dir):
os.mkdir(program_desc_dir)
with open(program_desc_dir + "/main_program.txt.%d" % worker_index,
'w') as f:
f.write(str(main_program))
with open(program_desc_dir + "/startup_program.txt.%d" % worker_index,
'w') as f:
f.write(str(startup_program))
# Define the Executor for running the static model
exe = paddle.static.Executor(place)
exe.run(startup_program)
test_program = main_program.clone(for_test=True)
if args.use_amp and args.amp_level == "O2":
optimizer.amp_init(place)
if args.model_name_or_path not in pretrained_models_list:
logger.info("Try to load checkpoint from %s " % args.model_name_or_path)
dygrah_path = os.path.join(args.model_name_or_path,
"model_state.pdparams")
static_path = os.path.join(args.model_name_or_path, "static_vars")
flag_loaded = False
if os.path.exists(static_path):
if args.mp_degree > 1:
logger.warning("MP should init with dygraph params")
else:
logger.info("Loading parameters from %s" % static_path)
paddle.static.load(main_program, static_path, exe)
flag_loaded = True
if not flag_loaded and os.path.exists(dygrah_path):
if args.sharding_degree > 1:
logger.warning("Sharding should init with static vars")
else:
logger.info("Loading parameters from %s" % dygrah_path)
init_static_with_params(
model,
paddle.load(
dygrah_path, return_numpy=True),
topo,
main_program)
flag_loaded = True
if not flag_loaded:
logger.error("No checkpoint load.")
global_step = 0
tic_train = time.time()
epoch = 0
learning_rate = main_program.global_block().vars["learning_rate_0"]
while True:
fetchs = []
if topo.is_last:
fetchs = [loss, learning_rate]
# Bug fix: if valid_data_loader() is not materialized here, each enumerate
# call would invoke valid_data_loader again and start a new randomized dataloader.
valid_data_loader = valid_data_loader()
test_data_loader = test_data_loader()
train_reader_cost = 0.0
train_run_cost = 0.0
reader_start = time.time()
for step, batch in enumerate(train_data_loader()):
train_reader_cost += time.time() - reader_start
train_start = time.time()
global_step += 1
ret = exe.run(main_program,
feed=batch,
fetch_list=fetchs,
use_program_cache=True)
# In the 2.0 API, this must be called to update the learning rate.
lr_scheduler.step()
train_run_cost += time.time() - train_start
# Profile for model benchmark
profiler.add_profiler_step(args.profiler_options)
if global_step % args.logging_freq == 0:
if topo.is_last:
loss_return, lr_return = ret
#speed = args.logging_freq / (time.time() - tic_train)
speed = args.logging_freq / (
train_reader_cost + train_run_cost)
avg_reader_cost = train_reader_cost / args.logging_freq
logger.info(
"global step %d, epoch: %d, batch: %d, loss: %.9f, avg_reader_cost: %.5f sec, avg_batch_cost: %.5f sec, speed: %.2f steps/s, ips_total: %.0f tokens/s, ips: %.0f tokens/s, learning rate: %.5e"
% (global_step, epoch, step, loss_return[0],
avg_reader_cost, 1. / speed, speed,
speed * args.global_batch_size * args.max_seq_len,
speed * args.global_batch_size * args.max_seq_len /
worker_num, lr_return[0]))
log_writer.add_scalar("loss", loss_return[0], global_step)
log_writer.add_scalar("learning_rate", lr_return[0],
global_step)
tic_train = time.time()
train_reader_cost = 0.0
train_run_cost = 0.0
if args.check_accuracy:
if global_step >= args.max_steps:
return
else:
continue
if global_step % args.eval_freq == 0:
# TODO, check the input data of validation
eval_fetch = []
if topo.is_last:
eval_fetch = [loss]
run_evaluate(valid_data_loader, exe, test_program,
args.eval_iters, log_writer, global_step, args,
epoch, topo.is_last, eval_fetch, "valid")
tic_train = time.time()
if global_step % args.save_steps == 0 or global_step >= args.max_steps:
output_dir = os.path.join(args.output_dir,
"model_%d" % global_step)
logger.debug("saving models to {}".format(output_dir))
save_persistables(exe,
os.path.join(output_dir, "static_vars"),
main_program)
if global_step <= args.save_steps:
model.init_config["init_args"][0].init_config.pop("topo",
None)
model.save_pretrained(output_dir)
tokenizer.save_pretrained(output_dir)
tic_train = time.time()
if global_step >= args.max_steps:
eval_fetch = []
if topo.is_last:
eval_fetch = [loss]
run_evaluate(test_data_loader, exe, test_program,
args.test_iters, log_writer, global_step, args,
epoch, topo.is_last, eval_fetch, "test")
del train_data_loader
return
reader_start = time.time()
epoch += 1
if __name__ == "__main__":
config = parse_args(MODEL_CLASSES)
do_train(config)
test_metrics.go
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package bundle
import (
"fmt"
"k8s.io/klog"
"k8s.io/perf-tests/clusterloader2/pkg/errors"
"k8s.io/perf-tests/clusterloader2/pkg/measurement"
"k8s.io/perf-tests/clusterloader2/pkg/util"
)
const (
testMetricsMeasurementName = "TestMetrics"
)
func init() {
if err := measurement.Register(testMetricsMeasurementName, createTestMetricsMeasurement); err != nil {
klog.Fatalf("Cannot register %s: %v", testMetricsMeasurementName, err)
}
}
func createTestMetricsMeasurement() measurement.Measurement {
var metrics testMetrics
var err error
if metrics.etcdMetrics, err = measurement.CreateMeasurement("EtcdMetrics"); err != nil {
klog.Errorf("%s: etcdMetrics creation error: %v", metrics, err)
}
if metrics.schedulingMetrics, err = measurement.CreateMeasurement("SchedulingMetrics"); err != nil {
klog.Errorf("%s: schedulingMetrics creation error: %v", metrics, err)
}
if metrics.metricsForE2E, err = measurement.CreateMeasurement("MetricsForE2E"); err != nil {
klog.Errorf("%s: metricsForE2E creation error: %v", metrics, err)
}
if metrics.resourceUsageSummary, err = measurement.CreateMeasurement("ResourceUsageSummary"); err != nil {
klog.Errorf("%s: resourceUsageSummary creation error: %v", metrics, err)
}
if metrics.etcdCPUProfile, err = measurement.CreateMeasurement("CPUProfile"); err != nil {
klog.Errorf("%s: etcdCPUProfile creation error: %v", metrics, err)
}
if metrics.etcdMemoryProfile, err = measurement.CreateMeasurement("MemoryProfile"); err != nil {
klog.Errorf("%s: etcdMemoryProfile creation error: %v", metrics, err)
}
if metrics.etcdMutexProfile, err = measurement.CreateMeasurement("MutexProfile"); err != nil {
klog.Errorf("%s: etcdMutexProfile creation error: %v", metrics, err)
}
if metrics.apiserverCPUProfile, err = measurement.CreateMeasurement("CPUProfile"); err != nil {
klog.Errorf("%s: apiserverCPUProfile creation error: %v", metrics, err)
}
if metrics.apiserverMemoryProfile, err = measurement.CreateMeasurement("MemoryProfile"); err != nil {
klog.Errorf("%s: apiserverMemoryProfile creation error: %v", metrics, err)
}
if metrics.schedulerCPUProfile, err = measurement.CreateMeasurement("CPUProfile"); err != nil {
klog.Errorf("%s: schedulerCPUProfile creation error: %v", metrics, err)
}
if metrics.schedulerMemoryProfile, err = measurement.CreateMeasurement("MemoryProfile"); err != nil {
klog.Errorf("%s: schedulerMemoryProfile creation error: %v", metrics, err)
}
if metrics.controllerManagerCPUProfile, err = measurement.CreateMeasurement("CPUProfile"); err != nil {
klog.Errorf("%s: controllerManagerCPUProfile creation error: %v", metrics, err)
}
if metrics.controllerManagerMemoryProfile, err = measurement.CreateMeasurement("MemoryProfile"); err != nil {
klog.Errorf("%s: controllerManagerMemoryProfile creation error: %v", metrics, err)
}
if metrics.systemPodMetrics, err = measurement.CreateMeasurement("SystemPodMetrics"); err != nil {
klog.Errorf("%s: systemPodMetrics creation error: %v", metrics, err)
}
return &metrics
}
type testMetrics struct {
etcdMetrics measurement.Measurement
schedulingMetrics measurement.Measurement
metricsForE2E measurement.Measurement
resourceUsageSummary measurement.Measurement
etcdCPUProfile measurement.Measurement
etcdMemoryProfile measurement.Measurement
etcdMutexProfile measurement.Measurement
apiserverCPUProfile measurement.Measurement
apiserverMemoryProfile measurement.Measurement
schedulerCPUProfile measurement.Measurement
schedulerMemoryProfile measurement.Measurement
controllerManagerCPUProfile measurement.Measurement
controllerManagerMemoryProfile measurement.Measurement
systemPodMetrics measurement.Measurement
}
// Execute supports two actions: start - which sets up all metrics,
// and gather - which stops all metrics and collects all measurements.
func (t *testMetrics) Execute(config *measurement.MeasurementConfig) ([]measurement.Summary, error) {
var summaries []measurement.Summary
errList := errors.NewErrorList()
action, err := util.GetString(config.Params, "action")
if err != nil {
return summaries, err
}
actionStartConfig := createConfig(config, map[string]interface{}{
"action": "start",
})
actionResetConfig := createConfig(config, map[string]interface{}{
"action": "reset",
})
actionGatherConfig := createConfig(config, map[string]interface{}{
"action": "gather",
})
etcdStartConfig := createConfig(config, map[string]interface{}{
"action": "start",
"componentName": "etcd",
})
etcdGatherConfig := createConfig(config, map[string]interface{}{
"action": "gather",
"componentName": "etcd",
})
kubeApiserverStartConfig := createConfig(config, map[string]interface{}{
"action": "start",
"componentName": "kube-apiserver",
})
kubeApiserverGatherConfig := createConfig(config, map[string]interface{}{
"action": "gather",
"componentName": "kube-apiserver",
})
kubeSchedulerStartConfig := createConfig(config, map[string]interface{}{
"action": "start",
"componentName": "kube-scheduler",
})
kubeSchedulerGatherConfig := createConfig(config, map[string]interface{}{
"action": "gather",
"componentName": "kube-scheduler",
})
kubeControllerManagerStartConfig := createConfig(config, map[string]interface{}{
"action": "start",
"componentName": "kube-controller-manager",
})
kubeControllerManagerGatherConfig := createConfig(config, map[string]interface{}{
"action": "gather",
"componentName": "kube-controller-manager",
})
switch action {
case "start":
summary, err := execute(t.etcdMetrics, actionStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulingMetrics, actionResetConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.resourceUsageSummary, actionStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdCPUProfile, etcdStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdMemoryProfile, etcdStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdMutexProfile, etcdStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.apiserverCPUProfile, kubeApiserverStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.apiserverMemoryProfile, kubeApiserverStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulerCPUProfile, kubeSchedulerStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulerMemoryProfile, kubeSchedulerStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.controllerManagerCPUProfile, kubeControllerManagerStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.controllerManagerMemoryProfile, kubeControllerManagerStartConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.systemPodMetrics, config)
appendResults(&summaries, errList, summary, err)
case "gather":
summary, err := execute(t.etcdMetrics, actionGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulingMetrics, actionGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.metricsForE2E, config)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.resourceUsageSummary, actionGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdCPUProfile, etcdGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdMemoryProfile, etcdGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.etcdMutexProfile, etcdGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.apiserverCPUProfile, kubeApiserverGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.apiserverMemoryProfile, kubeApiserverGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulerCPUProfile, kubeSchedulerGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.schedulerMemoryProfile, kubeSchedulerGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.controllerManagerCPUProfile, kubeControllerManagerGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.controllerManagerMemoryProfile, kubeControllerManagerGatherConfig)
appendResults(&summaries, errList, summary, err)
summary, err = execute(t.systemPodMetrics, config)
appendResults(&summaries, errList, summary, err)
default:
return summaries, fmt.Errorf("unknown action %v", action)
}
if !errList.IsEmpty() {
klog.Errorf("%s: %v", t, errList.String())
return summaries, errList
}
return summaries, nil
}
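// Illustrative only (field names assumed, not taken from this file): in a
// clusterloader2 test config this measurement is typically invoked twice,
// once per action, e.g.
//
//   measurements:
//   - Identifier: TestMetrics
//     Method: TestMetrics
//     Params:
//       action: start
//   # ... test phases run here ...
//   - Identifier: TestMetrics
//     Method: TestMetrics
//     Params:
//       action: gather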
// Dispose cleans up after the measurement.
func (t *testMetrics) Dispose() {
t.etcdMetrics.Dispose()
t.schedulingMetrics.Dispose()
t.metricsForE2E.Dispose()
t.resourceUsageSummary.Dispose()
t.etcdCPUProfile.Dispose()
t.etcdMemoryProfile.Dispose()
t.etcdMutexProfile.Dispose()
t.apiserverCPUProfile.Dispose()
t.apiserverMemoryProfile.Dispose()
t.schedulerCPUProfile.Dispose()
t.schedulerMemoryProfile.Dispose()
t.controllerManagerCPUProfile.Dispose()
t.controllerManagerMemoryProfile.Dispose()
}
// String returns a string representation of the measurement.
func (*testMetrics) String() string {
return testMetricsMeasurementName
}
func createConfig(config *measurement.MeasurementConfig, overrides map[string]interface{}) *measurement.MeasurementConfig {
params := make(map[string]interface{})
for k, v := range config.Params {
params[k] = v
}
for k, v := range overrides {
params[k] = v
}
return &measurement.MeasurementConfig{
ClusterFramework: config.ClusterFramework,
PrometheusFramework: config.PrometheusFramework,
Params: params,
TemplateProvider: config.TemplateProvider,
CloudProvider: config.CloudProvider,
}
}
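// For example, overriding {"action": "gather"} on top of base params
// {"action": "start", "componentName": "etcd"} yields
// {"action": "gather", "componentName": "etcd"}: a shallow merge where
// override keys win and the original config is left untouched.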
func execute(m measurement.Measurement, config *measurement.MeasurementConfig) ([]measurement.Summary, error) {
if m == nil {
return nil, fmt.Errorf("uninitialized metric")
}
return m.Execute(config)
}
func | (summaries *[]measurement.Summary, errList *errors.ErrorList, summaryResults []measurement.Summary, errResult error) {
if errResult != nil {
errList.Append(errResult)
}
*summaries = append(*summaries, summaryResults...)
}
| appendResults |
text.py | class Text:
| """ The Plot Text Text Template
"""
def __init__(self, text=""):
"""
Initializes the plot text Text
:param text: plot text text
:type text: str
"""
self.text = text
self.template = '\ttext = "{text}";\n'
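    # Example (illustrative): Text("Welcome").to_str() returns
    # '\ttext = "Welcome";\n', i.e. a single z-Tree text property line.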
def to_str(self):
"""
Converts the plot text text instance to a z-tree text property declaration.
:return: plot text text property declaration
:rtype: str
"""
return self.template.format(text=self.text) |
|
simpleservice_transheaderstype_easyjson.go | // Code generated by zanzibar
// @generated
// Checksum : vEe2VtOwomuaVyMmKmRxmQ==
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
package baz
import (
json "encoding/json"
fmt "fmt"
easyjson "github.com/mailru/easyjson"
jlexer "github.com/mailru/easyjson/jlexer"
jwriter "github.com/mailru/easyjson/jwriter"
)
// suppress unused package warning
var (
_ *json.RawMessage
_ *jlexer.Lexer
_ *jwriter.Writer
_ easyjson.Marshaler
)
func easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(in *jlexer.Lexer, out *SimpleService_TransHeadersType_Result) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "success":
if in.IsNull() {
in.Skip()
out.Success = nil
} else {
if out.Success == nil {
out.Success = new(TransHeader)
}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(in, &*out.Success)
}
case "authErr":
if in.IsNull() {
in.Skip()
out.AuthErr = nil
} else {
if out.AuthErr == nil {
out.AuthErr = new(AuthErr)
}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz1(in, &*out.AuthErr)
}
case "otherAuthErr":
if in.IsNull() {
in.Skip()
out.OtherAuthErr = nil
} else {
if out.OtherAuthErr == nil {
out.OtherAuthErr = new(OtherAuthErr)
}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz2(in, &*out.OtherAuthErr)
}
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(out *jwriter.Writer, in SimpleService_TransHeadersType_Result) {
out.RawByte('{')
first := true
_ = first
if in.Success != nil {
const prefix string = ",\"success\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(out, *in.Success)
}
if in.AuthErr != nil {
const prefix string = ",\"authErr\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz1(out, *in.AuthErr)
}
if in.OtherAuthErr != nil {
const prefix string = ",\"otherAuthErr\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz2(out, *in.OtherAuthErr)
}
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v SimpleService_TransHeadersType_Result) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v SimpleService_TransHeadersType_Result) MarshalEasyJSON(w *jwriter.Writer) {
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *SimpleService_TransHeadersType_Result) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *SimpleService_TransHeadersType_Result) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType(l, v)
}
func easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz2(in *jlexer.Lexer, out *OtherAuthErr) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
var MessageSet bool
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "message":
out.Message = string(in.String())
MessageSet = true
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
if !MessageSet {
in.AddError(fmt.Errorf("key 'message' is required"))
}
}
func easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz2(out *jwriter.Writer, in OtherAuthErr) {
out.RawByte('{')
first := true
_ = first
{
const prefix string = ",\"message\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Message))
}
out.RawByte('}')
}
func easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz1(in *jlexer.Lexer, out *AuthErr) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
var MessageSet bool
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() |
switch key {
case "message":
out.Message = string(in.String())
MessageSet = true
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
if !MessageSet {
in.AddError(fmt.Errorf("key 'message' is required"))
}
}
func easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz1(out *jwriter.Writer, in AuthErr) {
out.RawByte('{')
first := true
_ = first
{
const prefix string = ",\"message\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
out.String(string(in.Message))
}
out.RawByte('}')
}
func easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(in *jlexer.Lexer, out *TransHeader) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(out *jwriter.Writer, in TransHeader) {
out.RawByte('{')
first := true
_ = first
out.RawByte('}')
}
func easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(in *jlexer.Lexer, out *SimpleService_TransHeadersType_Args) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
var ReqSet bool
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "req":
if in.IsNull() {
in.Skip()
out.Req = nil
} else {
if out.Req == nil {
out.Req = new(TransHeader)
}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(in, &*out.Req)
}
ReqSet = true
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
if !ReqSet {
in.AddError(fmt.Errorf("key 'req' is required"))
}
}
func easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(out *jwriter.Writer, in SimpleService_TransHeadersType_Args) {
out.RawByte('{')
first := true
_ = first
{
const prefix string = ",\"req\":"
if first {
first = false
out.RawString(prefix[1:])
} else {
out.RawString(prefix)
}
if in.Req == nil {
out.RawString("null")
} else {
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBaz(out, *in.Req)
}
}
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v SimpleService_TransHeadersType_Args) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v SimpleService_TransHeadersType_Args) MarshalEasyJSON(w *jwriter.Writer) {
easyjson77db7ff2EncodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *SimpleService_TransHeadersType_Args) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *SimpleService_TransHeadersType_Args) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson77db7ff2DecodeGithubComUberZanzibarExamplesExampleGatewayBuildGenCodeEndpointsBazBazSimpleServiceTransHeadersType1(l, v)
}
| {
in.Skip()
in.WantComma()
continue
} |
ListServicesRequest.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RoaRequest
from aliyunsdkeas.endpoint import endpoint_data
class ListServicesRequest(RoaRequest):
def __init__(self):
RoaRequest.__init__(self, 'eas', '2021-07-01', 'ListServices','eas')
self.set_uri_pattern('/api/v2/services')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Filter(self):
return self.get_query_params().get('Filter')
def set_Filter(self,Filter):
|
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_Sort(self):
return self.get_query_params().get('Sort')
def set_Sort(self,Sort):
self.add_query_param('Sort',Sort)
def get_PageNumber(self):
return self.get_query_params().get('PageNumber')
def set_PageNumber(self,PageNumber):
self.add_query_param('PageNumber',PageNumber)
def get_Order(self):
return self.get_query_params().get('Order')
def set_Order(self,Order):
self.add_query_param('Order',Order) | self.add_query_param('Filter',Filter) |
main.ts | import * as core from '@actions/core'
import {KubeflowConfig, getKubeflowConfig, getKfctl} from './kind-kf'
async function run() {
try {
const cfg: KubeflowConfig = getKubeflowConfig();
let toolPath: string = await getKfctl(cfg.version);
core.addPath(toolPath);
await cfg.deployKubeflow();
} catch (error) { | core.setFailed(error.message)
}
}
run(); | |
switches.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ByteSwitchComponent } from './byte_switch/byte_switch.component';
import { TactileSwitchComponent } from './tact_switch/tact_switch.component';
@NgModule({
declarations: [
ByteSwitchComponent,
TactileSwitchComponent
],
exports: [
ByteSwitchComponent,
TactileSwitchComponent
],
imports: [CommonModule]
}) | export class SwitchesModule { } |
|
2017-08-22_v0.0.1.py | # The MIT License (MIT)
# Copyright (c) 2014-2017 University of Bristol
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from hyperstream import Tool, StreamInstance
from hyperstream.utils import check_input_stream_count
from datetime import datetime, timedelta
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import label_binarize
import numpy as np
from pytz import UTC
class Dataset(Tool):
def __init__(self, dataset, shuffle=True, epochs=1, seed=None):
"""
Converts a static dataset into a stream with timestamps
Parameters
==========
dataset: data structure with the following attributes
data: matrix
Matrix with one row per sample and one column per feature
target: array of int
Array of integers with one label per sample
shuffle: boolean
Value indicating if the data needs to be shuffled
epochs: Integer
Number of iterations that the data will be repeated
seed: Integer
seed for the shuffling process
"""
super(Dataset, self).__init__(dataset=dataset, shuffle=shuffle,
epochs=epochs, seed=seed)
@check_input_stream_count(0)
def | (self, sources, alignment_stream, interval):
"""
Processes the input data and produces streamed data
        yields
        ======
        stream : with date and a dictionary with the following entries
x_tr: array of float
Training values for the given data stream
y_tr: array of int
Training binary label corresponding to the given data stream
x_te: array of float
Test values for the given data stream
y_te: array of int
Test binary label corresponding to the given data stream
"""
x = self.dataset.data
y = self.dataset.target
# Binarize data
classes = np.unique(y)
y = label_binarize(y, classes)
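        # e.g. with classes [0, 1, 2], a label of 1 becomes the row [0, 1, 0]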
j = 0
start_dt = datetime.utcfromtimestamp(0).replace(tzinfo=UTC)
for i in range(self.epochs):
X_tr, X_te, Y_tr, Y_te = train_test_split(
x, y, shuffle=self.shuffle, train_size=0.5, stratify=y,
random_state=self.seed)
for x_tr, y_tr in zip(X_tr, Y_tr):
x_te, y_te = X_te[j % len(X_te)], Y_te[j % len(Y_te)]
j += 1
dt = (start_dt + timedelta(minutes=j)).replace(tzinfo=UTC)
yield StreamInstance(dt, dict(x_tr=x_tr.reshape(1, -1),
x_te=x_te.reshape(1, -1),
y_tr=y_tr.reshape(1, -1),
y_te=y_te.reshape(1, -1)))
| _execute |
_0977_squares_of_a_sorted_array.rs | struct Solution;
impl Solution {
fn sorted_squares(a: Vec<i32>) -> Vec<i32> {
let mut squared: Vec<i32> = a.iter().map(|a| a * a).collect();
squared.sort();
squared
}
}
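// A minimal alternative sketch (not used by the tests below): because the
// input is sorted, a two-pointer pass can fill the result from the largest
// square down in O(n) instead of the O(n log n) sort above.
#[allow(dead_code)]
fn sorted_squares_two_pointers(a: &[i32]) -> Vec<i32> {
    let mut out = vec![0; a.len()];
    let (mut lo, mut hi) = (0, a.len());
    for k in (0..a.len()).rev() {
        // Compare the squares at both ends; the larger one goes at slot k.
        let (l, r) = (a[lo] * a[lo], a[hi - 1] * a[hi - 1]);
        if l > r {
            out[k] = l;
            lo += 1;
        } else {
            out[k] = r;
            hi -= 1;
        }
    }
    out
}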
#[test]
fn test() {
assert_eq!(
Solution::sorted_squares(vec![-4, -1, 0, 3, 10]),
vec![0, 1, 9, 16, 100]
);
assert_eq!( | );
} | Solution::sorted_squares(vec![-7, -3, 2, 3, 11]),
vec![4, 9, 9, 49, 121] |
balancing.go | package proxy
import (
"context"
"net/url"
"strings"
"github.com/devopsfaith/krakend/config" | "github.com/devopsfaith/krakend/sd"
)
// NewLoadBalancedMiddleware creates proxy middleware adding the most performant balancer
// over a default subscriber
func NewLoadBalancedMiddleware(remote *config.Backend) Middleware {
return NewLoadBalancedMiddlewareWithSubscriber(sd.GetSubscriber(remote))
}
// NewLoadBalancedMiddlewareWithSubscriber creates proxy middleware adding the most performant balancer
// over the received subscriber
func NewLoadBalancedMiddlewareWithSubscriber(subscriber sd.Subscriber) Middleware {
return newLoadBalancedMiddleware(sd.NewBalancer(subscriber))
}
// NewRoundRobinLoadBalancedMiddleware creates proxy middleware adding a round robin balancer
// over a default subscriber
func NewRoundRobinLoadBalancedMiddleware(remote *config.Backend) Middleware {
return NewRoundRobinLoadBalancedMiddlewareWithSubscriber(sd.GetSubscriber(remote))
}
// NewRandomLoadBalancedMiddleware creates proxy middleware adding a random balancer
// over a default subscriber
func NewRandomLoadBalancedMiddleware(remote *config.Backend) Middleware {
return NewRandomLoadBalancedMiddlewareWithSubscriber(sd.GetSubscriber(remote))
}
// NewRoundRobinLoadBalancedMiddlewareWithSubscriber creates proxy middleware adding a round robin
// balancer over the received subscriber
func NewRoundRobinLoadBalancedMiddlewareWithSubscriber(subscriber sd.Subscriber) Middleware {
return newLoadBalancedMiddleware(sd.NewRoundRobinLB(subscriber))
}
// NewRandomLoadBalancedMiddlewareWithSubscriber creates proxy middleware adding a random
// balancer over the received subscriber
func NewRandomLoadBalancedMiddlewareWithSubscriber(subscriber sd.Subscriber) Middleware {
return newLoadBalancedMiddleware(sd.NewRandomLB(subscriber))
}
func newLoadBalancedMiddleware(lb sd.Balancer) Middleware {
return func(next ...Proxy) Proxy {
if len(next) > 1 {
panic(ErrTooManyProxies)
}
return func(ctx context.Context, request *Request) (*Response, error) {
host, err := lb.Host()
if err != nil {
return nil, err
}
r := request.Clone()
var b strings.Builder
b.WriteString(host)
b.WriteString(r.Path)
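			// e.g. host "http://backend-1:8080" joined with path "/v1/users"
			// produces "http://backend-1:8080/v1/users" before query merging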
r.URL, err = url.Parse(b.String())
if err != nil {
return nil, err
}
if len(r.Query) > 0 {
r.URL.RawQuery += "&" + r.Query.Encode()
}
return next[0](ctx, &r)
}
}
} | |
paginate-function.js | /**
 * Front-end pagination helper functions (shared JS file)
 *
 * Created: 2016-11-09 10:06:03
 * Author: Steve Jrong
 * Version: 1.0
*/ | function pageSkip(num,type){
if (type === "front") {
$("input[name*='pageIndex']").val(parseInt($("#currentPageNum").text())-1);
}else if (type === "back") {
$("input[name*='pageIndex']").val(parseInt($("#currentPageNum").text())+1);
}else if (type === "last") {
$("input[name*='pageIndex']").val($("#lastPageNum").text()*1);
}else if (type === "first") {
$("input[name*='pageIndex']").val($("#homePageNum").text()*1);
}
window.location.href=$("#pagination_redirect_urlrewrite").val()+$("input[name*='pageIndex']").val();
} | |
configuration.rs | use std::collections::HashMap;
use serde_derive::{
Serialize,
Deserialize
};
use crate::env_variables::{
EnvVariables,
Shell
};
#[derive(Debug, Serialize, Deserialize)]
struct ConfigFile {
shell: Option<Shell>,
sets: HashMap<String,Vec<EnvVariables>>,
}
#[derive(Debug, Serialize)]
pub struct Config {
pub shell: Shell,
pub sets: HashMap<String,Vec<EnvVariables>>,
}
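// Illustrative config file shape this maps to (format assumed; the config
// crate auto-detects by extension, e.g. for a TOML file):
//
//   shell = "Posix"
//
//   [sets]
//   dev = [ ... EnvVariables entries ... ]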
pub fn get_config(path: &str) -> Config | {
let config = {
let mut configger = config::Config::default();
configger.merge(config::File::with_name(path)).unwrap();
let read_config = configger.try_into::<ConfigFile>().unwrap();
Config {
shell: read_config.shell.unwrap_or(Shell::Posix),
sets: read_config.sets,
}
};
config
} |
|
resnet_mldg_smm.py | from __future__ import absolute_import
import torch
from torch import nn
from torch.nn import functional as F
from torch.nn import init
import torchvision
from collections import OrderedDict
from ..models.layers.adain import SMMBlock
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
'resnet152', 'resnet50_mldg_smm']
class ResNet(nn.Module):
__factory = {
18: torchvision.models.resnet18,
34: torchvision.models.resnet34,
50: torchvision.models.resnet50,
101: torchvision.models.resnet101,
152: torchvision.models.resnet152,
}
def __init__(self, depth, pretrained=True, cut_at_pooling=False,
num_features=0, norm=False, dropout=0, num_classes=None):
super(ResNet, self).__init__()
self.pretrained = pretrained
self.depth = depth
self.cut_at_pooling = cut_at_pooling
# Construct base (pretrained) resnet
if depth not in ResNet.__factory:
raise KeyError("Unsupported depth:", depth)
resnet = ResNet.__factory[depth](pretrained=pretrained)
resnet.layer4[0].conv2.stride = (1,1)
resnet.layer4[0].downsample[0].stride = (1,1)
# self.base = nn.Sequential(
# resnet.conv1, resnet.bn1, resnet.relu, resnet.maxpool,
# resnet.layer1, resnet.layer2, resnet.layer3, resnet.layer4)
self.conv = nn.Sequential(OrderedDict([
('conv1', resnet.conv1),
('bn1', resnet.bn1),
('relu', resnet.relu),
('maxpool', resnet.maxpool)]))
self.layer1 = resnet.layer1
self.layer2 = resnet.layer2
self.layer3 = resnet.layer3
self.layer4 = resnet.layer4
self.gap = nn.AdaptiveAvgPool2d(1)
self.smm_block = SMMBlock(1, rand=False, learnable=False)
if not self.cut_at_pooling:
self.num_features = num_features
self.norm = norm
self.dropout = dropout
self.has_embedding = num_features > 0
self.num_classes = num_classes
out_planes = resnet.fc.in_features
# Append new layers
if self.has_embedding:
self.feat = nn.Linear(out_planes, self.num_features)
self.feat_bn = nn.BatchNorm1d(self.num_features)
init.kaiming_normal_(self.feat.weight, mode='fan_out')
init.constant_(self.feat.bias, 0)
else:
# Change the num_features to CNN output channels
self.num_features = out_planes
self.feat_bn = nn.BatchNorm1d(self.num_features)
self.feat_bn.bias.requires_grad_(False)
if self.dropout > 0:
self.drop = nn.Dropout(self.dropout)
self.classifier = nn.Linear(self.num_features, self.num_classes, bias=False)
init.normal_(self.classifier.weight, std=0.001)
init.constant_(self.feat_bn.weight, 1)
init.constant_(self.feat_bn.bias, 0)
if not pretrained:
self.reset_params()
def forward(self, x, meta_train=True, output_prob=False, return_featuremaps=False):
if self.training:
num_domains = len(x)
x = torch.cat(x, dim=0)
x = self.conv(x)
# NOTE: change to 'if self.training and meta_train:'
if meta_train:
mixed_x, _ = self.smm_block(x)
if return_featuremaps:
return [x, mixed_x]
x = mixed_x
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.gap(x)
x = x.view(x.size(0), -1)
if self.cut_at_pooling:
return x
if self.has_embedding:
bn_x = self.feat_bn(self.feat(x))
else:
bn_x = self.feat_bn(x)
if self.training is False and output_prob is False:
bn_x = F.normalize(bn_x)
return bn_x
if self.norm:
norm_bn_x = F.normalize(bn_x)
elif self.has_embedding:
bn_x = F.relu(bn_x)
if self.dropout > 0:
bn_x = self.drop(bn_x)
prob = self.classifier(bn_x)
# prob, mixed_prob = torch.chunk(prob, 2, dim=0)
prob = torch.chunk(prob, num_domains, dim=0)
# mixed_prob = torch.chunk(mixed_prob, num_domains, dim=0)
# x, mixed_x = torch.chunk(x, 2, dim=0)
x = torch.chunk(x, num_domains, dim=0)
# mixed_x = torch.chunk(mixed_x, num_domains, dim=0)
return prob, x
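    # Illustrative usage (shapes assumed): during training, `x` is a list of
    # per-domain batches, e.g.
    #   probs, feats = model([x_d1, x_d2, x_d3], meta_train=True)
    # and both outputs come back as one chunk per source domain.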
def reset_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
init.kaiming_normal_(m.weight, mode='fan_out')
if m.bias is not None:
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm1d):
init.constant_(m.weight, 1)
init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
init.normal_(m.weight, std=0.001)
if m.bias is not None:
init.constant_(m.bias, 0)
def | (self):
for param in self.parameters():
if param.requires_grad:
yield param
# def train(self, mode=True):
# """
# Override the default train() to freeze the BN parameters
# """
# super().train(mode)
# self.freeze_bn()
#
# def freeze_bn(self):
# for m in self.modules():
# if isinstance(m, nn.BatchNorm1d):
# m.eval()
# if isinstance(m, nn.BatchNorm2d):
# m.eval()
def resnet18(**kwargs):
return ResNet(18, **kwargs)
def resnet34(**kwargs):
return ResNet(34, **kwargs)
def resnet50(**kwargs):
return ResNet(50, **kwargs)
def resnet101(**kwargs):
return ResNet(101, **kwargs)
def resnet152(**kwargs):
return ResNet(152, **kwargs)
def resnet50_mde(**kwargs):
return ResNet(50, **kwargs)
def resnet50_mldg_smm(**kwargs):
return ResNet(50, **kwargs)
| get_params |
event_processor.rs | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
#[cfg(test)]
use crate::chained_bft::safety::safety_rules::ConsensusState;
use crate::{
chained_bft::{
block_storage::{
BlockReader, BlockStore, InsertError, NeedFetchResult, VoteReceptionResult,
},
common::{Author, Payload, Round},
consensus_types::{
block::Block,
proposal_msg::ProposalMsg,
quorum_cert::QuorumCert,
sync_info::SyncInfo,
timeout_msg::{PacemakerTimeout, PacemakerTimeoutCertificate, TimeoutMsg},
vote_data::VoteData,
vote_msg::VoteMsg,
},
epoch_manager::EpochManager,
liveness::{
pacemaker::{NewRoundEvent, NewRoundReason, Pacemaker},
proposal_generator::{ProposalGenerationError, ProposalGenerator},
proposer_election::ProposerElection,
},
network::{BlockRetrievalRequest, BlockRetrievalResponse, ConsensusNetworkImpl},
persistent_storage::PersistentStorage,
safety::safety_rules::SafetyRules,
sync_manager::{SyncManager, SyncMgrContext},
},
counters,
state_replication::{StateComputer, TxnManager},
util::time_service::{
duration_since_epoch, wait_if_possible, TimeService, WaitingError, WaitingSuccess,
},
};
use logger::prelude::*;
use network::proto::BlockRetrievalStatus;
use std::{sync::Arc, time::Duration};
use termion::color::*;
use types::crypto_proxies::LedgerInfoWithSignatures;
#[cfg(test)]
#[path = "event_processor_test.rs"]
mod event_processor_test;
#[cfg(any(feature = "fuzzing", test))]
#[path = "event_processor_fuzzing.rs"]
pub mod event_processor_fuzzing;
/// Consensus SMR works in an event-based fashion: EventProcessor is responsible for
/// processing the individual events (e.g., process_new_round, process_proposal, process_vote,
/// etc.). It exposes the async processing functions for each event type.
/// The caller is responsible for running the event loops and driving the execution via some
/// executors.
pub struct EventProcessor<T> {
author: Author,
block_store: Arc<BlockStore<T>>,
pacemaker: Pacemaker,
proposer_election: Box<dyn ProposerElection<T> + Send + Sync>,
proposal_generator: ProposalGenerator<T>,
safety_rules: SafetyRules,
state_computer: Arc<dyn StateComputer<Payload = T>>,
txn_manager: Arc<dyn TxnManager<Payload = T>>,
network: ConsensusNetworkImpl,
storage: Arc<dyn PersistentStorage<T>>,
sync_manager: SyncManager<T>,
time_service: Arc<dyn TimeService>,
enforce_increasing_timestamps: bool,
// Cache of the last sent vote message.
last_vote_sent: Option<(VoteMsg, Round)>,
epoch_mgr: Arc<EpochManager>,
}
impl<T: Payload> EventProcessor<T> {
pub fn new(
author: Author,
block_store: Arc<BlockStore<T>>,
pacemaker: Pacemaker,
proposer_election: Box<dyn ProposerElection<T> + Send + Sync>,
proposal_generator: ProposalGenerator<T>,
safety_rules: SafetyRules,
state_computer: Arc<dyn StateComputer<Payload = T>>,
txn_manager: Arc<dyn TxnManager<Payload = T>>,
network: ConsensusNetworkImpl,
storage: Arc<dyn PersistentStorage<T>>,
time_service: Arc<dyn TimeService>,
enforce_increasing_timestamps: bool,
epoch_mgr: Arc<EpochManager>,
) -> Self {
let sync_manager = SyncManager::new(
Arc::clone(&block_store),
Arc::clone(&storage),
network.clone(),
Arc::clone(&state_computer),
);
Self {
author,
block_store,
pacemaker,
proposer_election,
proposal_generator,
safety_rules,
state_computer,
txn_manager,
network,
storage,
sync_manager,
time_service,
enforce_increasing_timestamps,
last_vote_sent: None,
epoch_mgr,
}
}
/// Leader:
///
/// This event is triggered by a new quorum certificate at the previous round or a
/// timeout certificate at the previous round. In either case, if this replica is the new
/// proposer for this round, it is ready to propose and guarantee that it can create a proposal
/// that all honest replicas can vote for. While this method should only be invoked at most
    /// once per round, we ensure that at most one proposal can get generated per round to
/// avoid accidental equivocation of proposals.
///
/// Replica:
///
/// Do nothing
async fn process_new_round_event(&self, new_round_event: NewRoundEvent) {
debug!("Processing {}", new_round_event);
counters::CURRENT_ROUND.set(new_round_event.round as i64);
counters::ROUND_TIMEOUT_MS.set(new_round_event.timeout.as_millis() as i64);
match new_round_event.reason {
NewRoundReason::QCReady => {
counters::QC_ROUNDS_COUNT.inc();
}
NewRoundReason::Timeout { .. } => {
counters::TIMEOUT_ROUNDS_COUNT.inc();
}
};
if self
.proposer_election
.is_valid_proposer(self.author, new_round_event.round)
.is_none()
{
return;
}
let proposal_msg = match self.generate_proposal(new_round_event).await {
Ok(x) => x,
Err(e) => {
error!("Error while generating proposal: {:?}", e);
return;
}
};
let mut network = self.network.clone();
network.broadcast_proposal(proposal_msg).await;
counters::PROPOSALS_COUNT.inc();
}
async fn generate_proposal(
&self,
new_round_event: NewRoundEvent,
) -> Result<ProposalMsg<T>, ProposalGenerationError> {
// Proposal generator will ensure that at most one proposal is generated per round
let proposal = self
.proposal_generator
.generate_proposal(
new_round_event.round,
self.pacemaker.current_round_deadline(),
)
.await?;
debug!("Propose {}", proposal);
// should we include a TC?
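        // Only if the TC certifies a round higher than the proposal's own QC does
        // it carry liveness information the QC does not already imply.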
let timeout_certificate = match &new_round_event.reason {
NewRoundReason::Timeout { cert }
if cert.round() > proposal.quorum_cert().certified_block_round() =>
{
Some(cert.clone())
}
_ => None,
};
let sync_info = SyncInfo::new(
(*proposal.quorum_cert()).clone(),
(*self.block_store.highest_ledger_info()).clone(),
timeout_certificate,
);
// return proposal
Ok(ProposalMsg::new(proposal, sync_info))
}
/// Process a ProposalMsg, pre_process would bring all the dependencies and filter out invalid
/// proposal, process_proposed_block would execute and decide whether to vote for it.
pub async fn process_proposal_msg(&mut self, proposal_msg: ProposalMsg<T>) {
if let Some(block) = self.pre_process_proposal(proposal_msg).await {
self.process_proposed_block(block).await
}
}
/// The function is responsible for processing the incoming proposals and the Quorum
/// Certificate.
/// 1. sync up to the SyncInfo including committing to the committed state the HLI carries
/// and fetch all the blocks from the committed state to the HQC
/// 2. forwarding the proposals to the ProposerElection queue,
/// which is going to eventually trigger one winning proposal per round
async fn pre_process_proposal(&mut self, proposal_msg: ProposalMsg<T>) -> Option<Block<T>> {
debug!("EventProcessor: receive proposal {}", proposal_msg);
// Pacemaker is going to be updated with all the proposal certificates later,
// but it's known that the pacemaker's round is not going to decrease so we can already
// filter out the proposals from old rounds.
let current_round = self.pacemaker.current_round();
if proposal_msg.round() < current_round {
warn!(
"Proposal {} is ignored because its round {} < current round {}",
proposal_msg,
proposal_msg.round(),
current_round
);
return None;
}
if self
.proposer_election
.is_valid_proposer(proposal_msg.proposer(), proposal_msg.round())
.is_none()
{
warn!(
"Proposer {} for block {} is not a valid proposer for this round",
proposal_msg.proposer(),
proposal_msg.proposal()
);
return None;
}
if let Err(e) = self
.sync_up(proposal_msg.sync_info(), proposal_msg.proposer(), true)
.await
{
warn!(
"Dependencies of proposal {} could not be added to the block store: {:?}",
proposal_msg, e
);
return None;
}
// pacemaker may catch up with the SyncInfo, check again
let current_round = self.pacemaker.current_round();
if proposal_msg.round() != current_round {
warn!(
"Proposal {} is ignored because its round {} != current round {}",
proposal_msg,
proposal_msg.round(),
current_round
);
return None;
}
self.proposer_election
.process_proposal(proposal_msg.take_proposal())
}
/// Upon receiving TimeoutMsg, ensure that any branches with higher quorum certificates are
/// populated to this replica prior to processing the pacemaker timeout. This ensures that when
/// a pacemaker timeout certificate is formed with 2f+1 timeouts, the next proposer will be
/// able to chain a proposal block to a highest quorum certificate such that all honest replicas
/// can vote for it.
pub async fn process_remote_timeout_msg(&mut self, timeout_msg: TimeoutMsg) {
debug!(
"Received timeout msg for round {} from {}",
timeout_msg.pacemaker_timeout().round(),
timeout_msg.author().short_str()
);
if self
.sync_up(timeout_msg.sync_info(), timeout_msg.author(), true)
.await
.is_err()
{
warn!("Stop timeout msg processing because of sync up error.");
return;
};
if let Some(vote) = timeout_msg.pacemaker_timeout().vote_msg() {
self.add_vote(vote.clone(), self.epoch_mgr.quorum_size())
.await;
}
if let Some(new_round_event) = self.pacemaker.process_remote_timeout(
timeout_msg.pacemaker_timeout().clone(),
self.epoch_mgr.quorum_size(),
) {
self.process_new_round_event(new_round_event).await;
}
}
/// In case some peer's round or HQC is stale, send a SyncInfo message to that peer.
async fn help_remote_if_stale(
&self,
peer: Author,
remote_round: Round,
remote_hqc_round: Round,
) {
if self.author == peer {
return;
}
// pacemaker's round is sync_info.highest_round() + 1
if remote_round + 1 < self.pacemaker.current_round()
|| remote_hqc_round
< self
.block_store
.highest_quorum_cert()
.certified_block_round()
{
let sync_info = SyncInfo::new(
self.block_store.highest_quorum_cert().as_ref().clone(),
self.block_store.highest_ledger_info().as_ref().clone(),
self.pacemaker.highest_timeout_certificate(),
);
debug!(
"Peer {} is at round {} with hqc round {}, sending it {}",
peer.short_str(),
remote_round,
remote_hqc_round,
sync_info,
);
counters::SYNC_INFO_MSGS_SENT_COUNT.inc();
self.network.send_sync_info(sync_info, peer).await;
}
}
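    // Worked example: with a local pacemaker round of 10 and a local HQC round
    // of 9, a peer reporting round 8 (since 8 + 1 < 10) or HQC round 7 (< 9)
    // would be sent our SyncInfo.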
/// The function makes sure that it brings the missing dependencies from the QC and LedgerInfo
/// of the given sync info and update the pacemaker with the certificates if succeed.
/// Returns Error in case sync mgr failed to bring the missing dependencies.
/// We'll try to help the remote if the SyncInfo lags behind and the flag is set.
async fn sync_up(
&mut self,
sync_info: &SyncInfo,
author: Author,
help_remote: bool,
) -> failure::Result<()> {
if help_remote {
self.help_remote_if_stale(author, sync_info.highest_round(), sync_info.hqc_round())
.await;
}
let current_hqc_round = self
.block_store
.highest_quorum_cert()
.certified_block_round();
if current_hqc_round < sync_info.hqc_round() {
debug!(
"Starting sync: current_hqc_round = {}, sync_info_hqc_round = {}",
current_hqc_round,
sync_info.hqc_round(),
);
let deadline = self.pacemaker.current_round_deadline();
let sync_mgr_context = SyncMgrContext::new(sync_info, author);
self.sync_manager
.sync_to(deadline, sync_mgr_context)
.await
.map_err(|e| {
warn!(
"Fail to sync up to HQC @ round {}: {:?}",
sync_info.hqc_round(),
e
);
e
})?;
debug!("Caught up to HQC at round {}", sync_info.hqc_round());
}
self.process_certificates(
&sync_info.highest_quorum_cert(),
sync_info.highest_timeout_certificate(),
)
.await;
Ok(())
}
/// Process the SyncInfo sent by peers to catch up to latest state.
pub async fn process_sync_info_msg(&mut self, sync_info: SyncInfo, peer: Author) {
debug!("Received a sync info msg: {}", sync_info);
counters::SYNC_INFO_MSGS_RECEIVED_COUNT.inc();
// To avoid a ping-pong cycle between two peers that move forward together.
if let Err(e) = self.sync_up(&sync_info, peer, false).await {
error!("Fail to process sync info: {:?}", e);
}
}
/// The replica stops voting for this round and saves its consensus state. Voting is halted
/// to ensure that the next proposer can make a proposal that can be voted on by all replicas.
/// Saving the consensus state ensures that on restart, the replicas will not waste time
/// on previous rounds.
pub async fn process_local_timeout(&mut self, round: Round) {
if !self.pacemaker.process_local_timeout(round) {
return;
}
let last_vote_round = self.safety_rules.consensus_state().last_vote_round();
warn!(
"Round {} timed out: {}, expected round proposer was {:?}, broadcasting new round to all replicas",
round,
if last_vote_round == round { "already executed and voted at this round" } else { "will try to generate a backup vote" },
self.proposer_election.get_valid_proposers(round).iter().map(|p| p.short_str()).collect::<Vec<String>>(),
);
let vote_msg_to_attach = match self.last_vote_sent.as_ref() {
Some((vote, vote_round)) if (*vote_round == round) => Some(vote.clone()),
_ => {
// Try to generate a backup vote
match self.gen_backup_vote(round).await {
Ok(backup_vote_msg) => {
self.last_vote_sent
.replace((backup_vote_msg.clone(), round));
Some(backup_vote_msg)
}
Err(e) => {
warn!("Failed to generate a backup vote: {}", e);
None
}
}
}
};
// Stop voting at this round, persist the consensus state to support restarting from
// a recent round (i.e. > the last vote round) and then send the SyncInfo
let consensus_state = self.safety_rules.increase_last_vote_round(round);
if let Some(consensus_state) = consensus_state {
if let Err(e) = self.storage.save_consensus_state(consensus_state) {
error!("Failed to persist consensus state after increasing the last vote round due to {:?}", e);
return;
}
}
self.network
.broadcast_timeout_msg(TimeoutMsg::new(
SyncInfo::new(
self.block_store.highest_quorum_cert().as_ref().clone(),
self.block_store.highest_ledger_info().as_ref().clone(),
self.pacemaker.highest_timeout_certificate(),
),
PacemakerTimeout::new(round, self.block_store.signer(), vote_msg_to_attach),
self.block_store.signer(),
))
.await;
}
async fn gen_backup_vote(&mut self, round: Round) -> failure::Result<VoteMsg> {
// We generally assume that this function is called only if no votes have been sent in this
// round, but having a duplicate proposal here would work ok because block store makes
// sure the calls to `execute_and_insert_block` are idempotent.
// Either use the best proposal received in this round or a NIL block if nothing available.
let block = match self.proposer_election.take_backup_proposal(round) {
Some(b) => {
debug!("Planning to vote for a backup proposal {}", b);
counters::VOTE_SECONDARY_PROPOSAL_COUNT.inc();
b
}
None => {
let nil_block = self.proposal_generator.generate_nil_block(round)?;
debug!("Planning to vote for a NIL block {}", nil_block);
counters::VOTE_NIL_COUNT.inc();
nil_block
}
};
self.execute_and_vote(block).await
}
async fn process_certificates(
&mut self,
qc: &QuorumCert,
tc: Option<&PacemakerTimeoutCertificate>,
) {
self.safety_rules.update(qc);
let mut highest_committed_proposal_round = None;
if let Some(new_commit) = qc.committed_block_id() {
if let Some(block) = self.block_store.get_block(new_commit) {
let finality_proof = qc.ledger_info().clone();
// We don't want to use NIL commits for pacemaker round interval calculations.
if !block.is_nil_block() {
highest_committed_proposal_round = Some(block.round());
}
self.process_commit(block, finality_proof).await;
}
}
if let Some(new_round_event) = self.pacemaker.process_certificates(
qc.certified_block_round(),
highest_committed_proposal_round,
tc,
) {
self.process_new_round_event(new_round_event).await;
}
}
/// This function processes a proposal that was chosen as a representative of its round:
/// 1. Add it to a block store.
/// 2. Try to vote for it following the safety rules.
/// 3. In case a validator chooses to vote, send the vote to the representatives at the next
/// position.
async fn process_proposed_block(&mut self, proposal: Block<T>) {
if let Some(time_to_receival) =
duration_since_epoch().checked_sub(Duration::from_micros(proposal.timestamp_usecs()))
{
counters::CREATION_TO_RECEIVAL_S.observe_duration(time_to_receival);
}
let proposal_round = proposal.round();
let vote_msg = match self.execute_and_vote(proposal).await {
Err(_) => {
return;
}
Ok(vote_msg) => vote_msg,
};
self.last_vote_sent
.replace((vote_msg.clone(), proposal_round));
let recipients = self
.proposer_election
.get_valid_proposers(proposal_round + 1);
debug!("{}Voted: {} {}", Fg(Green), Fg(Reset), vote_msg);
self.network.send_vote(vote_msg, recipients).await;
}
async fn wait_before_vote_if_needed(
&self,
block_timestamp_us: u64,
) -> Result<(), WaitingError> {
let current_round_deadline = self.pacemaker.current_round_deadline();
if self.enforce_increasing_timestamps {
match wait_if_possible(
self.time_service.as_ref(),
Duration::from_micros(block_timestamp_us),
current_round_deadline,
)
.await
{
Ok(waiting_success) => {
debug!("Success with {:?} for being able to vote", waiting_success);
match waiting_success {
WaitingSuccess::WaitWasRequired { wait_duration, .. } => {
counters::VOTE_SUCCESS_WAIT_S.observe_duration(wait_duration);
counters::VOTE_WAIT_WAS_REQUIRED_COUNT.inc();
}
WaitingSuccess::NoWaitRequired { .. } => {
counters::VOTE_SUCCESS_WAIT_S.observe_duration(Duration::new(0, 0));
counters::VOTE_NO_WAIT_REQUIRED_COUNT.inc();
}
}
}
Err(waiting_error) => {
match waiting_error {
WaitingError::MaxWaitExceeded => {
error!(
"Waiting until proposal block timestamp usecs {:?} would exceed the round duration {:?}, hence will not vote for this round",
block_timestamp_us,
current_round_deadline);
counters::VOTE_FAILURE_WAIT_S.observe_duration(Duration::new(0, 0));
counters::VOTE_MAX_WAIT_EXCEEDED_COUNT.inc();
}
WaitingError::WaitFailed {
current_duration_since_epoch,
wait_duration,
} => {
error!(
"Even after waiting for {:?}, proposal block timestamp usecs {:?} >= current timestamp usecs {:?}, will not vote for this round",
wait_duration,
block_timestamp_us,
current_duration_since_epoch);
counters::VOTE_FAILURE_WAIT_S.observe_duration(wait_duration);
counters::VOTE_WAIT_FAILED_COUNT.inc();
}
};
return Err(waiting_error);
}
}
}
Ok(())
}
/// The function generates a VoteMsg for a given proposed_block:
/// * first execute the block and add it to the block store
/// * then verify the voting rules
/// * save the updated state to consensus DB
/// * return a VoteMsg with the LedgerInfo to be committed in case the vote gathers QC.
///
/// This function assumes that it might be called from different tasks concurrently.
async fn execute_and_vote(&mut self, proposed_block: Block<T>) -> failure::Result<VoteMsg> {
let block = self
.sync_manager
.execute_and_insert_block(proposed_block)
.await
.map_err(|e| {
debug!("Failed to execute_and_insert the block: {:?}", e);
e
})?;
// Checking pacemaker round again, because multiple proposed_block can now race
// during async block retrieval
if self.pacemaker.current_round() != block.round() {
debug!(
"Proposal {} rejected because round is incorrect. Pacemaker: {}, proposed_block: {}",
block,
self.pacemaker.current_round(),
block.round(),
);
return Err(InsertError::InvalidBlockRound.into());
}
self.wait_before_vote_if_needed(block.timestamp_usecs())
.await?;
let vote_info = self.safety_rules.voting_rule(&block).map_err(|e| {
debug!("{}Rejected{} {}: {:?}", Fg(Red), Fg(Reset), block, e);
e
})?;
self.storage
.save_consensus_state(vote_info.consensus_state().clone())
.map_err(|e| {
debug!("Fail to persist consensus state: {:?}", e);
e
})?;
let proposal_id = vote_info.proposal_id();
let executed_state_id = self
.block_store
.get_compute_result(proposal_id)
.expect("Block proposed_block: no execution state found for inserted block.")
.executed_state
.state_id;
let ledger_info_placeholder = self
.block_store
.ledger_info_placeholder(vote_info.potential_commit_id());
Ok(VoteMsg::new(
VoteData::new(
proposal_id,
executed_state_id,
block.round(),
vote_info.parent_block_id(),
vote_info.parent_block_round(),
vote_info.grandparent_block_id(),
vote_info.grandparent_block_round(),
),
self.author,
ledger_info_placeholder,
self.block_store.signer(),
))
}
/// Upon new vote:
/// 1. Filter out votes for rounds that should not be processed by this validator (to avoid
/// potential attacks).
/// 2. Add the vote to the store and check whether it finishes a QC.
/// 3. Once the QC successfully formed, notify the Pacemaker.
pub async fn process_vote(&mut self, vote: VoteMsg) {
// Check whether this validator is a valid recipient of the vote.
let next_round = vote.block_round() + 1;
if self
.proposer_election
.is_valid_proposer(self.author, next_round)
.is_none()
{
debug!(
"Received {}, but I am not a valid proposer for round {}, ignore.",
vote, next_round
);
security_log(SecurityEvent::InvalidConsensusVote)
.error("InvalidProposer")
.data(vote)
.data(next_round)
.log();
return;
}
self.add_vote(vote, self.epoch_mgr.quorum_size()).await;
}
/// Add a vote. Fetch missing dependencies if required.
    /// Duplicate votes are tolerated, since some of the votes might
    /// be duplicated (e.g., when the votes are attached to the timeout messages).
/// If a QC is formed then
/// 1) fetch missing dependencies if required, and then
/// 2) pass the new QC to the pacemaker, which can generate a new round in return.
/// The function returns an Option for a newly generate QuorumCert in case it's been
/// successfully added with all its dependencies.
async fn add_vote(&mut self, vote: VoteMsg, quorum_size: usize) -> Option<Arc<QuorumCert>> {
let deadline = self.pacemaker.current_round_deadline();
let preferred_peer = vote.author();
// TODO [Reconfiguration] Verify epoch of the vote message.
// Add the vote and check whether it completes a new QC.
if let VoteReceptionResult::NewQuorumCertificate(qc) =
self.block_store.insert_vote(vote, quorum_size)
{
if self.block_store.need_fetch_for_quorum_cert(&qc) == NeedFetchResult::NeedFetch {
if let Err(e) = self
.sync_manager
.fetch_quorum_cert(qc.as_ref().clone(), preferred_peer, deadline)
.await
{
error!("Error syncing to qc {}: {:?}", qc, e);
return None;
}
} else if let Err(e) = self
.block_store
.insert_single_quorum_cert(qc.as_ref().clone())
{
error!("Error inserting qc {}: {:?}", qc, e);
return None;
}
self.process_certificates(qc.as_ref(), None).await;
return Some(qc);
};
None
}
/// Upon (potentially) new commit:
/// 0. Verify that this commit is newer than the current root.
/// 1. Notify state computer with the finality proof.
/// 2. After the state is finalized, update the txn manager with the status of the committed
/// transactions.
/// 3. Prune the tree.
async fn process_commit(
&self,
committed_block: Arc<Block<T>>,
finality_proof: LedgerInfoWithSignatures,
) {
// First make sure that this commit is new.
if committed_block.round() <= self.block_store.root().round() {
return;
}
// Verify that the ledger info is indeed for the block we're planning to
// commit.
assert_eq!(
finality_proof.ledger_info().consensus_block_id(),
committed_block.id()
);
if let Err(e) = self.state_computer.commit(finality_proof).await {
// We assume that state computer cannot enter an inconsistent state that might
// violate safety of the protocol. Specifically, an executor service is going to panic
// if it fails to persist the commit requests, which would crash the whole process
// including consensus.
error!(
"Failed to persist commit, mempool will not be notified: {:?}",
e
);
return;
}
// At this moment the new state is persisted and we can notify the clients.
// Multiple blocks might be committed at once: notify about all the transactions in the
// path from the old root to the new root.
for committed in self
.block_store
.path_from_root(Arc::clone(&committed_block))
.unwrap_or_else(Vec::new)
{
if let Some(time_to_commit) = duration_since_epoch()
.checked_sub(Duration::from_micros(committed.timestamp_usecs()))
{
counters::CREATION_TO_COMMIT_S.observe_duration(time_to_commit);
}
let compute_result = self
.block_store
.get_compute_result(committed.id())
.expect("Compute result of a pending block is unknown");
if let Err(e) = self
.txn_manager
.commit_txns(
committed.get_payload(),
compute_result.as_ref(),
committed.timestamp_usecs(),
)
.await
{
error!("Failed to notify mempool: {:?}", e);
}
}
counters::LAST_COMMITTED_ROUND.set(committed_block.round() as i64);
debug!("{}Committed{} {}", Fg(Blue), Fg(Reset), *committed_block);
event!("committed",
"block_id": committed_block.id().short_str(),
"round": committed_block.round(),
"parent_id": committed_block.parent_id().short_str(),
);
self.block_store.prune_tree(committed_block.id());
}
    /// Retrieve n chained blocks from the block store, starting from an initial
    /// parent id and returning fewer than n (as many as possible) if the id or
    /// its ancestors cannot be found.
    ///
    /// The current version of the function is not really async, but we keep it
    /// this way for possible future changes.
pub async fn process_block_retrieval(&self, request: BlockRetrievalRequest<T>) |
    /// Jump-start a new round with the current certificates we have.
pub async fn start(&mut self) {
let hqc = self.block_store.highest_quorum_cert();
let last_committed_round = self.block_store.root().round();
let new_round_event = self
.pacemaker
.process_certificates(
hqc.certified_block_round(),
Some(last_committed_round),
None,
)
            .expect("Cannot jump-start a new round from existing certificates.");
self.process_new_round_event(new_round_event).await;
}
/// Inspect the current consensus state.
#[cfg(test)]
pub fn consensus_state(&self) -> ConsensusState {
self.safety_rules.consensus_state()
}
}
| {
let mut blocks = vec![];
let mut status = BlockRetrievalStatus::SUCCEEDED;
let mut id = request.block_id;
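        // Walk up the parent chain from the requested id, collecting up to
        // num_blocks blocks and stopping early if an ancestor is missing.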
while (blocks.len() as u64) < request.num_blocks {
if let Some(block) = self.block_store.get_block(id) {
id = block.parent_id();
blocks.push(Block::clone(&block));
} else {
status = BlockRetrievalStatus::NOT_ENOUGH_BLOCKS;
break;
}
}
if blocks.is_empty() {
status = BlockRetrievalStatus::ID_NOT_FOUND;
}
if let Err(e) = request
.response_sender
.send(BlockRetrievalResponse { status, blocks })
{
error!("Failed to return the requested block: {:?}", e);
}
} |
watch.py | # Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Provides UI functionality for watching replays."""
from __future__ import annotations
import os
from typing import TYPE_CHECKING, cast
import _ba
import ba
if TYPE_CHECKING:
from typing import Any, Optional, Tuple, Dict
class WatchWindow(ba.Window):
"""Window for watching replays."""
def __init__(self,
transition: Optional[str] = 'in_right',
                 origin_widget: Optional[ba.Widget] = None):
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
from bastd.ui import tabs
ba.set_analytics_screen('Watch Window')
scale_origin: Optional[Tuple[float, float]]
if origin_widget is not None:
self._transition_out = 'out_scale'
scale_origin = origin_widget.get_screen_space_center()
transition = 'in_scale'
else:
self._transition_out = 'out_right'
scale_origin = None
ba.app.main_window = 'Watch'
self._tab_data: Dict[str, Any] = {}
self._my_replays_scroll_width: Optional[float] = None
self._my_replays_watch_replay_button: Optional[ba.Widget] = None
self._scrollwidget: Optional[ba.Widget] = None
self._columnwidget: Optional[ba.Widget] = None
self._my_replay_selected: Optional[str] = None
self._my_replays_rename_window: Optional[ba.Widget] = None
self._my_replay_rename_text: Optional[ba.Widget] = None
self._r = 'watchWindow'
self._width = 1240 if ba.app.small_ui else 1040
x_inset = 100 if ba.app.small_ui else 0
self._height = (578
if ba.app.small_ui else 670 if ba.app.med_ui else 800)
self._current_tab: Optional[str] = None
extra_top = 20 if ba.app.small_ui else 0
super().__init__(root_widget=ba.containerwidget(
size=(self._width, self._height + extra_top),
transition=transition,
toolbar_visibility='menu_minimal',
scale_origin_stack_offset=scale_origin,
scale=(1.3 if ba.app.small_ui else 0.97 if ba.app.med_ui else 0.8),
stack_offset=(0, -10) if ba.app.small_ui else (
0, 15) if ba.app.med_ui else (0, 0)))
if ba.app.small_ui and ba.app.toolbars:
ba.containerwidget(edit=self._root_widget,
on_cancel_call=self._back)
self._back_button = None
else:
self._back_button = btn = ba.buttonwidget(
parent=self._root_widget,
autoselect=True,
position=(70 + x_inset, self._height - 74),
size=(140, 60),
scale=1.1,
label=ba.Lstr(resource='backText'),
button_type='back',
on_activate_call=self._back)
ba.containerwidget(edit=self._root_widget, cancel_button=btn)
ba.buttonwidget(edit=btn,
button_type='backSmall',
size=(60, 60),
label=ba.charstr(ba.SpecialChar.BACK))
ba.textwidget(parent=self._root_widget,
position=(self._width * 0.5, self._height - 38),
size=(0, 0),
color=ba.app.title_color,
scale=1.5,
h_align='center',
v_align='center',
text=ba.Lstr(resource=self._r + '.titleText'),
maxwidth=400)
tabs_def = [('my_replays',
ba.Lstr(resource=self._r + '.myReplaysText'))]
scroll_buffer_h = 130 + 2 * x_inset
tab_buffer_h = 750 + 2 * x_inset
self._tab_buttons = tabs.create_tab_buttons(
self._root_widget,
tabs_def,
pos=(tab_buffer_h * 0.5, self._height - 130),
size=(self._width - tab_buffer_h, 50),
on_select_call=self._set_tab)
if ba.app.toolbars:
ba.widget(edit=self._tab_buttons[tabs_def[-1][0]],
right_widget=_ba.get_special_widget('party_button'))
if ba.app.small_ui:
bbtn = _ba.get_special_widget('back_button')
ba.widget(edit=self._tab_buttons[tabs_def[0][0]],
up_widget=bbtn,
left_widget=bbtn)
self._scroll_width = self._width - scroll_buffer_h
self._scroll_height = self._height - 180
# not actually using a scroll widget anymore; just an image
scroll_left = (self._width - self._scroll_width) * 0.5
scroll_bottom = self._height - self._scroll_height - 79 - 48
buffer_h = 10
buffer_v = 4
ba.imagewidget(parent=self._root_widget,
position=(scroll_left - buffer_h,
scroll_bottom - buffer_v),
size=(self._scroll_width + 2 * buffer_h,
self._scroll_height + 2 * buffer_v),
texture=ba.gettexture('scrollWidget'),
model_transparent=ba.getmodel('softEdgeOutside'))
self._tab_container: Optional[ba.Widget] = None
self._restore_state()
def _set_tab(self, tab: str) -> None:
# pylint: disable=too-many-locals
from bastd.ui import tabs
if self._current_tab == tab:
return
self._current_tab = tab
        # We want to preserve our current tab between runs.
cfg = ba.app.config
cfg['Watch Tab'] = tab
cfg.commit()
# Update tab colors based on which is selected.
tabs.update_tab_button_colors(self._tab_buttons, tab)
if self._tab_container:
self._tab_container.delete()
scroll_left = (self._width - self._scroll_width) * 0.5
scroll_bottom = self._height - self._scroll_height - 79 - 48
# A place where tabs can store data to get cleared when
# switching to a different tab
self._tab_data = {}
if tab == 'my_replays':
c_width = self._scroll_width
c_height = self._scroll_height - 20
sub_scroll_height = c_height - 63
self._my_replays_scroll_width = sub_scroll_width = (
680 if ba.app.small_ui else 640)
self._tab_container = cnt = ba.containerwidget(
parent=self._root_widget,
position=(scroll_left, scroll_bottom +
(self._scroll_height - c_height) * 0.5),
size=(c_width, c_height),
background=False,
selection_loop_to_parent=True)
v = c_height - 30
ba.textwidget(parent=cnt,
position=(c_width * 0.5, v),
color=(0.6, 1.0, 0.6),
scale=0.7,
size=(0, 0),
maxwidth=c_width * 0.9,
h_align='center',
v_align='center',
text=ba.Lstr(
resource='replayRenameWarningText',
subs=[('${REPLAY}',
ba.Lstr(resource='replayNameDefaultText'))
]))
b_width = 140 if ba.app.small_ui else 178
b_height = (107
if ba.app.small_ui else 142 if ba.app.med_ui else 190)
b_space_extra = (0 if ba.app.small_ui else
-2 if ba.app.med_ui else -5)
b_color = (0.6, 0.53, 0.63)
b_textcolor = (0.75, 0.7, 0.8)
btnv = c_height - (48 if ba.app.small_ui else
45 if ba.app.med_ui else 40) - b_height
btnh = 40 if ba.app.small_ui else 40
smlh = 190 if ba.app.small_ui else 225
tscl = 1.0 if ba.app.small_ui else 1.2
self._my_replays_watch_replay_button = btn1 = ba.buttonwidget(
parent=cnt,
size=(b_width, b_height),
position=(btnh, btnv),
button_type='square',
color=b_color,
textcolor=b_textcolor,
on_activate_call=self._on_my_replay_play_press,
text_scale=tscl,
label=ba.Lstr(resource=self._r + '.watchReplayButtonText'),
autoselect=True)
ba.widget(edit=btn1, up_widget=self._tab_buttons[tab])
if ba.app.small_ui and ba.app.toolbars:
ba.widget(edit=btn1,
left_widget=_ba.get_special_widget('back_button'))
btnv -= b_height + b_space_extra
ba.buttonwidget(parent=cnt,
size=(b_width, b_height),
position=(btnh, btnv),
button_type='square',
color=b_color,
textcolor=b_textcolor,
on_activate_call=self._on_my_replay_rename_press,
text_scale=tscl,
label=ba.Lstr(resource=self._r +
'.renameReplayButtonText'),
autoselect=True)
btnv -= b_height + b_space_extra
ba.buttonwidget(parent=cnt,
size=(b_width, b_height),
position=(btnh, btnv),
button_type='square',
color=b_color,
textcolor=b_textcolor,
on_activate_call=self._on_my_replay_delete_press,
text_scale=tscl,
label=ba.Lstr(resource=self._r +
'.deleteReplayButtonText'),
autoselect=True)
v -= sub_scroll_height + 23
self._scrollwidget = scrlw = ba.scrollwidget(
parent=cnt,
position=(smlh, v),
size=(sub_scroll_width, sub_scroll_height))
ba.containerwidget(edit=cnt, selected_child=scrlw)
self._columnwidget = ba.columnwidget(parent=scrlw, left_border=10)
ba.widget(edit=scrlw,
autoselect=True,
left_widget=btn1,
up_widget=self._tab_buttons[tab])
ba.widget(edit=self._tab_buttons[tab], down_widget=scrlw)
self._my_replay_selected = None
self._refresh_my_replays()
def _no_replay_selected_error(self) -> None:
ba.screenmessage(ba.Lstr(resource=self._r +
'.noReplaySelectedErrorText'),
color=(1, 0, 0))
ba.playsound(ba.getsound('error'))
def _on_my_replay_play_press(self) -> None:
if self._my_replay_selected is None:
self._no_replay_selected_error()
return
_ba.increment_analytics_count('Replay watch')
def do_it() -> None:
try:
# Reset to normal speed.
_ba.set_replay_speed_exponent(0)
_ba.fade_screen(True)
assert self._my_replay_selected is not None
_ba.new_replay_session(_ba.get_replays_dir() + '/' +
self._my_replay_selected)
except Exception:
ba.print_exception('Error running replay session.')
# Drop back into a fresh main menu session
# in case we half-launched or something.
from bastd import mainmenu
_ba.new_host_session(mainmenu.MainMenuSession)
_ba.fade_screen(False, endcall=ba.Call(ba.pushcall, do_it))
ba.containerwidget(edit=self._root_widget, transition='out_left')
def _on_my_replay_rename_press(self) -> None:
if self._my_replay_selected is None:
self._no_replay_selected_error()
return
c_width = 600
c_height = 250
self._my_replays_rename_window = cnt = ba.containerwidget(
scale=1.8 if ba.app.small_ui else 1.55 if ba.app.med_ui else 1.0,
size=(c_width, c_height),
transition='in_scale')
dname = self._get_replay_display_name(self._my_replay_selected)
ba.textwidget(parent=cnt,
size=(0, 0),
h_align='center',
v_align='center',
text=ba.Lstr(resource=self._r + '.renameReplayText',
subs=[('${REPLAY}', dname)]),
maxwidth=c_width * 0.8,
position=(c_width * 0.5, c_height - 60))
self._my_replay_rename_text = txt = ba.textwidget(
parent=cnt,
size=(c_width * 0.8, 40),
h_align='left',
v_align='center',
text=dname,
editable=True,
description=ba.Lstr(resource=self._r + '.replayNameText'),
position=(c_width * 0.1, c_height - 140),
autoselect=True,
maxwidth=c_width * 0.7,
max_chars=200)
cbtn = ba.buttonwidget(
parent=cnt,
label=ba.Lstr(resource='cancelText'),
on_activate_call=ba.Call(
lambda c: ba.containerwidget(edit=c, transition='out_scale'),
cnt),
size=(180, 60),
position=(30, 30),
autoselect=True)
okb = ba.buttonwidget(parent=cnt,
label=ba.Lstr(resource=self._r + '.renameText'),
size=(180, 60),
position=(c_width - 230, 30),
on_activate_call=ba.Call(
self._rename_my_replay,
self._my_replay_selected),
autoselect=True)
ba.widget(edit=cbtn, right_widget=okb)
ba.widget(edit=okb, left_widget=cbtn)
ba.textwidget(edit=txt, on_return_press_call=okb.activate)
ba.containerwidget(edit=cnt, cancel_button=cbtn, start_button=okb)
def _rename_my_replay(self, replay: str) -> None:
new_name = None
try:
if not self._my_replay_rename_text:
return
new_name_raw = cast(
str, ba.textwidget(query=self._my_replay_rename_text))
new_name = new_name_raw + '.brp'
# ignore attempts to change it to what it already is
# (or what it looks like to the user)
if (replay != new_name
and self._get_replay_display_name(replay) != new_name_raw):
old_name_full = (_ba.get_replays_dir() + '/' +
replay).encode('utf-8')
new_name_full = (_ba.get_replays_dir() + '/' +
new_name).encode('utf-8')
# false alarm; ba.textwidget can return non-None val
# pylint: disable=unsupported-membership-test
if os.path.exists(new_name_full):
ba.playsound(ba.getsound('error'))
ba.screenmessage(
ba.Lstr(resource=self._r +
'.replayRenameErrorAlreadyExistsText'),
color=(1, 0, 0))
elif any(char in new_name_raw for char in ['/', '\\', ':']):
ba.playsound(ba.getsound('error'))
ba.screenmessage(ba.Lstr(resource=self._r +
'.replayRenameErrorInvalidName'),
color=(1, 0, 0))
else:
_ba.increment_analytics_count('Replay rename')
os.rename(old_name_full, new_name_full)
self._refresh_my_replays()
ba.playsound(ba.getsound('gunCocking'))
except Exception:
ba.print_exception(
f"Error renaming replay '{replay}' to '{new_name}'.")
ba.playsound(ba.getsound('error'))
ba.screenmessage(
ba.Lstr(resource=self._r + '.replayRenameErrorText'),
color=(1, 0, 0),
)
ba.containerwidget(edit=self._my_replays_rename_window,
transition='out_scale')
def _on_my_replay_delete_press(self) -> None:
from bastd.ui import confirm
if self._my_replay_selected is None:
self._no_replay_selected_error()
return
confirm.ConfirmWindow(
ba.Lstr(resource=self._r + '.deleteConfirmText',
subs=[('${REPLAY}',
self._get_replay_display_name(
self._my_replay_selected))]),
ba.Call(self._delete_replay, self._my_replay_selected), 450, 150)
def _get_replay_display_name(self, replay: str) -> str:
if replay.endswith('.brp'):
replay = replay[:-4]
if replay == '__lastReplay':
return ba.Lstr(resource='replayNameDefaultText').evaluate()
return replay
def _delete_replay(self, replay: str) -> None:
try:
_ba.increment_analytics_count('Replay delete')
os.remove((_ba.get_replays_dir() + '/' + replay).encode('utf-8'))
self._refresh_my_replays()
ba.playsound(ba.getsound('shieldDown'))
if replay == self._my_replay_selected:
self._my_replay_selected = None
except Exception:
ba.print_exception(f"Error deleting replay '{replay}'.")
ba.playsound(ba.getsound('error'))
ba.screenmessage(
ba.Lstr(resource=self._r + '.replayDeleteErrorText'),
color=(1, 0, 0),
)
def _on_my_replay_select(self, replay: str) -> None:
|
def _refresh_my_replays(self) -> None:
assert self._columnwidget is not None
for child in self._columnwidget.get_children():
child.delete()
t_scale = 1.6
try:
names = os.listdir(_ba.get_replays_dir())
            # Ignore random other files in there.
names = [n for n in names if n.endswith('.brp')]
names.sort(key=lambda x: x.lower())
except Exception:
ba.print_exception('Error listing replays dir.')
names = []
assert self._my_replays_scroll_width is not None
assert self._my_replays_watch_replay_button is not None
for i, name in enumerate(names):
txt = ba.textwidget(
parent=self._columnwidget,
size=(self._my_replays_scroll_width / t_scale, 30),
selectable=True,
color=(1.0, 1, 0.4) if name == '__lastReplay.brp' else
(1, 1, 1),
always_highlight=True,
on_select_call=ba.Call(self._on_my_replay_select, name),
on_activate_call=self._my_replays_watch_replay_button.activate,
text=self._get_replay_display_name(name),
h_align='left',
v_align='center',
corner_scale=t_scale,
maxwidth=(self._my_replays_scroll_width / t_scale) * 0.93)
if i == 0:
ba.widget(edit=txt, up_widget=self._tab_buttons['my_replays'])
def _save_state(self) -> None:
try:
sel = self._root_widget.get_selected_child()
if sel == self._back_button:
sel_name = 'Back'
elif sel in list(self._tab_buttons.values()):
sel_name = 'Tab:' + list(self._tab_buttons.keys())[list(
self._tab_buttons.values()).index(sel)]
elif sel == self._tab_container:
sel_name = 'TabContainer'
else:
raise ValueError(f'unrecognized selection {sel}')
ba.app.window_states[self.__class__.__name__] = {
'sel_name': sel_name,
'tab': self._current_tab
}
except Exception:
ba.print_exception(f'Error saving state for {self}.')
def _restore_state(self) -> None:
try:
sel_name = ba.app.window_states.get(self.__class__.__name__,
{}).get('sel_name')
current_tab = ba.app.config.get('Watch Tab')
if current_tab is None or current_tab not in self._tab_buttons:
current_tab = 'my_replays'
self._set_tab(current_tab)
if sel_name == 'Back':
sel = self._back_button
elif sel_name == 'TabContainer':
sel = self._tab_container
elif isinstance(sel_name, str) and sel_name.startswith('Tab:'):
sel = self._tab_buttons[sel_name.split(':')[-1]]
else:
if self._tab_container is not None:
sel = self._tab_container
else:
sel = self._tab_buttons[current_tab]
ba.containerwidget(edit=self._root_widget, selected_child=sel)
except Exception:
ba.print_exception(f'Error restoring state for {self}.')
def _back(self) -> None:
from bastd.ui import mainmenu
self._save_state()
ba.containerwidget(edit=self._root_widget,
transition=self._transition_out)
ba.app.main_menu_window = (mainmenu.MainMenuWindow(
transition='in_left').get_root_widget())
| self._my_replay_selected = replay |
fileUtil.go | package fileUtil
import (
"crypto/md5"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
"os"
"path"
"regexp"
"strconv"
"strings"
)
var (
empty = ``
)
// FileExists returns whether a file exists
func FileExists(filePath string) bool |
// DirExists returns whether a directory exists
func DirExists(dirPath string) bool {
f, err := os.Stat(dirPath)
if err != nil {
return false
}
return f.IsDir()
}
// GetFileSize returns file size , support http(s)
func GetFileSize(uri string) (fileSize int64, err error) {
urlStruct, err := url.Parse(uri)
if err != nil {
return
}
// local file
if len(urlStruct.Scheme) == 0 {
		f, statErr := os.Stat(uri)
		if statErr != nil {
			return 0, statErr
		}
		return f.Size(), nil
}
// http(s)
resp, err := http.Get(uri)
if err != nil {
return
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
		return 0, fmt.Errorf("connection error: unexpected status %d", resp.StatusCode)
}
	if resp.ContentLength <= 0 {
		// Fall back to the (non-standard) Accept-Length header; guard the lookup
		// so a missing header does not cause an index-out-of-range panic.
		if v := resp.Header.Get("Accept-Length"); v != "" {
			fileSize, _ = strconv.ParseInt(v, 10, 64)
		}
	} else {
		fileSize = resp.ContentLength
	}
return
}
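// Illustrative usage of GetFileSize (the URL and path are hypothetical):
//
//	remote, err := GetFileSize("https://example.com/archive.zip")
//	local, err := GetFileSize("/tmp/archive.zip")
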
// GetFileName returns file name
func GetFileName(filePath string) string {
return path.Base(filePath)
}
// GetExt returns extension name
// It returns `` when given a string like `.foo` or `.foo.bar.`, etc.
func GetExt(filePath string) string {
if len(filePath) == 0 {
return empty
}
	if !strings.Contains(filePath, `.`) {
		return empty
	}
if ok, _ := regexp.MatchString(`^\.[^\.]*$`, filePath); ok {
return empty
}
if string(filePath[len(filePath)-1]) == `.` {
return empty
}
return path.Ext(filePath)
}
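// Illustrative behaviour of GetExt (file names are hypothetical):
//
//	GetExt("report.pdf")   // ".pdf"
//	GetExt(".gitignore")   // "" (leading-dot name with no extension)
//	GetExt("archive.tar.") // "" (trailing dot)
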
// ReadAll returns the file content; it returns `` on error
func ReadAll(filePath string) string {
f, err := os.Stat(filePath)
if err != nil {
return empty
}
if f.IsDir() {
return empty
}
fo, err := os.Open(filePath)
if err != nil {
return empty
}
defer fo.Close()
fd, err := ioutil.ReadAll(fo)
if err != nil {
return empty
}
return string(fd)
}
// ReadAllOk returns the file content along with any error encountered
func ReadAllOk(filePath string) (content string, err error) {
f, err := os.Stat(filePath)
if err != nil {
return
}
if f.IsDir() {
return empty, errors.New("not a file")
}
fo, err := os.Open(filePath)
if err != nil {
return
}
defer fo.Close()
fd, err := ioutil.ReadAll(fo)
if err != nil {
return
}
return string(fd), nil
}
// Truncate wraps os.Truncate
func Truncate(path string) (err error) {
	return os.Truncate(path, 0)
}
// MkdirAll wraps os.MkdirAll
func MkdirAll(path string, perm os.FileMode) error {
return os.MkdirAll(path, perm)
}
// WriteString wraps os.File.WriteString
func WriteString(path, s string, perm os.FileMode) (n int, err error) {
	f, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC, perm)
	if err != nil {
		return
	}
	// Defer the Close only after the open has succeeded.
	defer f.Close()
	return f.WriteString(s)
}
// CopyFile forked from https://github.com/koding/file/blob/master/file.go#L90
func CopyFile(src, dst string) (err error) {
sf, err := os.Open(src)
if err != nil {
return
}
defer sf.Close()
fi, err := sf.Stat()
if err != nil {
return
}
if fi.IsDir() {
return errors.New("src is a directory, please provide a file")
}
df, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, fi.Mode())
if err != nil {
return err
}
defer df.Close()
if _, err := io.Copy(df, sf); err != nil {
return err
}
return nil
}
// Md5File returns the MD5 hash of a local file
func Md5File(filePath string) (md5String string, err error) {
	if FileExists(filePath) {
		f, err := os.Open(filePath)
		if err != nil {
			return ``, err
		}
		defer f.Close()
		md5Object := md5.New()
		if _, err := io.Copy(md5Object, f); err != nil {
			return ``, err
		}
		md5String = fmt.Sprintf("%x", md5Object.Sum(nil))
		return md5String, nil
	}
	return ``, errors.New("file does not exist")
}
| {
f, err := os.Stat(filePath)
if err != nil {
return false
}
return !f.IsDir()
} |
fr-BE.js | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
(function(global) {
global.ng = global.ng || {};
global.ng.common = global.ng.common || {};
global.ng.common.locales = global.ng.common.locales || {};
const u = undefined;
function plural(n) {
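    // CLDR plural rule for French: counts 0 and 1 map to category "one"
    // (form index 1); everything else maps to "other" (index 5).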
let i = Math.floor(Math.abs(n));
if (i === 0 || i === 1) return 1;
return 5;
}
  global.ng.common.locales['fr-be'] = [
'fr-BE',
[['AM', 'PM'], u, u],
u,
[
['D', 'L', 'M', 'M', 'J', 'V', 'S'], ['dim.', 'lun.', 'mar.', 'mer.', 'jeu.', 'ven.', 'sam.'],
['dimanche', 'lundi', 'mardi', 'mercredi', 'jeudi', 'vendredi', 'samedi'],
['di', 'lu', 'ma', 'me', 'je', 've', 'sa']
],
u,
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
[
'janv.', 'févr.', 'mars', 'avr.', 'mai', 'juin', 'juil.', 'août', 'sept.', 'oct.', 'nov.',
'déc.'
],
[
'janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet', 'août', 'septembre',
'octobre', 'novembre', 'décembre'
]
],
u,
[['av. J.-C.', 'ap. J.-C.'], u, ['avant Jésus-Christ', 'après Jésus-Christ']],
1,
[6, 0],
['d/MM/yy', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'H \'h\' mm \'min\' ss \'s\' zzzz'],
['{1} {0}', '{1} \'à\' {0}', u, u],
[',', '\u202f', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0 %', '#,##0.00 ¤', '#E0'],
'€',
'euro',
{
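    // Currency display overrides: each entry is [symbol, narrow symbol];
    // `u` (undefined) keeps the default for that slot.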
'ARS': ['$AR', '$'],
'AUD': ['$AU', '$'],
'BEF': ['FB'],
'BMD': ['$BM', '$'],
'BND': ['$BN', '$'],
'BZD': ['$BZ', '$'],
'CAD': ['$CA', '$'],
'CLP': ['$CL', '$'],
'CNY': [u, '¥'],
'COP': ['$CO', '$'],
'CYP': ['£CY'],
'EGP': [u, '£E'],
'FJD': ['$FJ', '$'],
'FKP': ['£FK', '£'],
'FRF': ['F'],
'GBP': ['£GB', '£'],
'GIP': ['£GI', '£'],
'HKD': [u, '$'],
'IEP': ['£IE'],
'ILP': ['£IL'],
'ITL': ['₤IT'],
'JPY': [u, '¥'],
'KMF': [u, 'FC'],
'LBP': ['£LB', '£L'],
'MTP': ['£MT'],
'MXN': ['$MX', '$'],
'NAD': ['$NA', '$'],
'NIO': [u, '$C'],
'NZD': ['$NZ', '$'],
'RHD': ['$RH'],
'RON': [u, 'L'],
'RWF': [u, 'FR'],
'SBD': ['$SB', '$'],
'SGD': ['$SG', '$'],
'SRD': ['$SR', '$'],
'TOP': [u, '$T'],
'TTD': ['$TT', '$'], | 'WST': ['$WS'],
'XCD': [u, '$'],
'XPF': ['FCFP'],
'ZMW': [u, 'Kw']
},
plural,
[
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], u,
['minuit', 'midi', 'du matin', 'de l’après-midi', 'du soir', 'du matin']
],
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], u,
['minuit', 'midi', 'matin', 'après-midi', 'soir', 'nuit']
],
[
'00:00', '12:00', ['04:00', '12:00'], ['12:00', '18:00'], ['18:00', '24:00'],
['00:00', '04:00']
]
]
];
})(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global ||
typeof window !== 'undefined' && window); | 'TWD': [u, 'NT$'],
'USD': ['$US', '$'],
'UYU': ['$UY', '$'], |
list.rs | use std::rc::Rc;
/// Linked List
///
/// # Examples
///
/// ```
/// use structures::list;
///
/// let xs = list![1, 2, 3];
///
/// println!("{:?}", xs);
/// ```
#[derive(PartialEq)]
pub struct | <T> {
head: Option<Rc<Node<T>>>,
}
#[derive(PartialEq)]
struct Node<T> {
next: Option<Rc<Node<T>>>,
data: T,
}
#[macro_export]
macro_rules! list {
() => ($crate::list::List::nil());
($x:expr) => ($crate::list::List::cons($x, &list![]));
($x:expr, $($xs:expr),*) => ($crate::list::List::cons($x, &list![$($xs),*]));
}
impl<T> List<T> {
pub fn nil() -> Self {
List { head: None }
}
pub fn cons(data: T, next: &Self) -> Self {
let node = Node { data, next: next.head.clone() };
List { head: Some(Rc::new(node)) }
}
pub fn decons(&self) -> Option<(&T, Self)> {
self.head.as_ref().map(|node| (&node.data, List { head: node.next.clone() }))
}
pub fn head(&self) -> Option<&T> {
self.head.as_ref().map(|node| &node.data)
}
pub fn tail(&self) -> Option<Self> {
self.head.as_ref().map(|node| List { head: node.next.clone() })
}
pub fn is_empty(&self) -> bool {
self.head.is_none()
}
pub fn len(&self) -> usize {
self.iter().count()
}
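    // `iter` walks the node chain with `std::iter::successors`, starting at the
    // head and following each node's `next`, yielding `&T` borrowed from `self`.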
pub fn iter(&self) -> impl Iterator<Item = &T> {
std::iter::successors(self.head.as_ref(), |node| node.next.as_ref()).map(|node| &node.data)
}
}
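// A manual, iterative Drop: dropping a long list through the default recursive
// destructor could overflow the stack. The loop stops at the first node that is
// still shared with another list (`Rc::try_unwrap` fails), since that node and
// its tail must stay alive.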
impl<T> Drop for List<T> {
fn drop(&mut self) {
let mut next = self.head.take();
while let Some(node) = next {
if let Ok(mut node) = Rc::try_unwrap(node) {
next = node.next.take();
} else {
break;
}
}
}
}
impl<T: std::fmt::Debug> std::fmt::Debug for List<T> {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.debug_list().entries(self.iter()).finish()
}
}
#[cfg(test)]
mod tests {
use super::List;
#[test]
fn macro_list() {
assert_eq!(list![], List::<()>::nil());
assert_eq!(list![1], List::cons(1, &List::nil()));
assert_eq!(list![1, 2], List::cons(1, &List::cons(2, &List::nil())));
}
#[test]
fn decons() {
assert_eq!((list![] as List<()>).decons(), None);
assert_eq!(list![1].decons(), Some((&1, list![])));
assert_eq!(list![1, 2].decons(), Some((&1, list![2])));
}
#[test]
fn head() {
assert_eq!((list![] as List<()>).head(), None);
assert_eq!(list![1].head(), Some(&1));
assert_eq!(list![1, 2].head(), Some(&1));
}
#[test]
fn tail() {
assert_eq!((list![] as List<()>).tail(), None);
assert_eq!(list![1].tail(), Some(list![]));
assert_eq!(list![1, 2].tail(), Some(list![2]));
}
#[test]
fn is_empty() {
assert!((list![] as List<()>).is_empty());
assert!(!list![1].is_empty());
assert!(!list![1, 2].is_empty());
}
#[test]
fn len() {
assert_eq!((list![] as List<()>).len(), 0);
assert_eq!(list![1].len(), 1);
assert_eq!(list![1, 2].len(), 2);
}
#[test]
fn fmt() {
assert_eq!(format!("{:?}", list![] as List<()>), "[]");
assert_eq!(format!("{:?}", list![1]), "[1]");
assert_eq!(format!("{:?}", list![1, 2]), "[1, 2]");
}
#[test]
fn iter() {
let h = |xs: List<_>| xs.iter().cloned().collect::<Vec<_>>();
assert_eq!(h(list![]), []);
assert_eq!(h(list![1]), [1]);
assert_eq!(h(list![1, 2]), [1, 2]);
}
}
| List |
angular-sanitize.min.js | /*
AngularJS v1.3.0-beta.12
(c) 2010-2014 Google, Inc. http://angularjs.org
License: MIT
*/
(function(p,h,q){'use strict';function E(a){var d=[];s(d,h.noop).chars(a);return d.join("")}function | (a){var d={};a=a.split(",");var b;for(b=0;b<a.length;b++)d[a[b]]=!0;return d}function F(a,d){function b(a,c,b,g){c=h.lowercase(c);if(t[c])for(;f.last()&&u[f.last()];)e("",f.last());v[c]&&f.last()==c&&e("",c);(g=w[c]||!!g)||f.push(c);var l={};b.replace(G,function(a,c,d,b,e){l[c]=r(d||b||e||"")});d.start&&d.start(c,l,g)}function e(a,c){var b=0,e;if(c=h.lowercase(c))for(b=f.length-1;0<=b&&f[b]!=c;b--);
if(0<=b){for(e=f.length-1;e>=b;e--)d.end&&d.end(f[e]);f.length=b}}var c,g,f=[],l=a;for(f.last=function(){return f[f.length-1]};a;){g=!0;if(f.last()&&x[f.last()])a=a.replace(RegExp("(.*)<\\s*\\/\\s*"+f.last()+"[^>]*>","i"),function(c,a){a=a.replace(H,"$1").replace(I,"$1");d.chars&&d.chars(r(a));return""}),e("",f.last());else{if(0===a.indexOf("\x3c!--"))c=a.indexOf("--",4),0<=c&&a.lastIndexOf("--\x3e",c)===c&&(d.comment&&d.comment(a.substring(4,c)),a=a.substring(c+3),g=!1);else if(y.test(a)){if(c=a.match(y))a=
a.replace(c[0],""),g=!1}else if(J.test(a)){if(c=a.match(z))a=a.substring(c[0].length),c[0].replace(z,e),g=!1}else K.test(a)&&(c=a.match(A))&&(a=a.substring(c[0].length),c[0].replace(A,b),g=!1);g&&(c=a.indexOf("<"),g=0>c?a:a.substring(0,c),a=0>c?"":a.substring(c),d.chars&&d.chars(r(g)))}if(a==l)throw L("badparse",a);l=a}e()}function r(a){if(!a)return"";var d=M.exec(a);a=d[1];var b=d[3];if(d=d[2])n.innerHTML=d.replace(/</g,"<"),d="textContent"in n?n.textContent:n.innerText;return a+d+b}function B(a){return a.replace(/&/g,
"&").replace(N,function(a){var b=a.charCodeAt(0);a=a.charCodeAt(1);return"&#"+(1024*(b-55296)+(a-56320)+65536)+";"}).replace(O,function(a){return"&#"+a.charCodeAt(0)+";"}).replace(/</g,"<").replace(/>/g,">")}function s(a,d){var b=!1,e=h.bind(a,a.push);return{start:function(a,g,f){a=h.lowercase(a);!b&&x[a]&&(b=a);b||!0!==C[a]||(e("<"),e(a),h.forEach(g,function(b,f){var g=h.lowercase(f),k="img"===a&&"src"===g||"background"===g;!0!==P[g]||!0===D[g]&&!d(b,k)||(e(" "),e(f),e('="'),e(B(b)),e('"'))}),
e(f?"/>":">"))},end:function(a){a=h.lowercase(a);b||!0!==C[a]||(e("</"),e(a),e(">"));a==b&&(b=!1)},chars:function(a){b||e(B(a))}}}var L=h.$$minErr("$sanitize"),A=/^<\s*([\w:-]+)((?:\s+[\w:-]+(?:\s*=\s*(?:(?:"[^"]*")|(?:'[^']*')|[^>\s]+))?)*)\s*(\/?)\s*>/,z=/^<\s*\/\s*([\w:-]+)[^>]*>/,G=/([\w:-]+)(?:\s*=\s*(?:(?:"((?:[^"])*)")|(?:'((?:[^'])*)')|([^>\s]+)))?/g,K=/^</,J=/^<\s*\//,H=/\x3c!--(.*?)--\x3e/g,y=/<!DOCTYPE([^>]*?)>/i,I=/<!\[CDATA\[(.*?)]]\x3e/g,N=/[\uD800-\uDBFF][\uDC00-\uDFFF]/g,O=/([^\#-~| |!])/g,
w=k("area,br,col,hr,img,wbr");p=k("colgroup,dd,dt,li,p,tbody,td,tfoot,th,thead,tr");q=k("rp,rt");var v=h.extend({},q,p),t=h.extend({},p,k("address,article,aside,blockquote,caption,center,del,dir,div,dl,figure,figcaption,footer,h1,h2,h3,h4,h5,h6,header,hgroup,hr,ins,map,menu,nav,ol,pre,script,section,table,ul")),u=h.extend({},q,k("a,abbr,acronym,b,bdi,bdo,big,br,cite,code,del,dfn,em,font,i,img,ins,kbd,label,map,mark,q,ruby,rp,rt,s,samp,small,span,strike,strong,sub,sup,time,tt,u,var")),x=k("script,style"),
C=h.extend({},w,t,u,v),D=k("background,cite,href,longdesc,src,usemap"),P=h.extend({},D,k("abbr,align,alt,axis,bgcolor,border,cellpadding,cellspacing,class,clear,color,cols,colspan,compact,coords,dir,face,headers,height,hreflang,hspace,ismap,lang,language,nohref,nowrap,rel,rev,rows,rowspan,rules,scope,scrolling,shape,size,span,start,summary,target,title,type,valign,value,vspace,width")),n=document.createElement("pre"),M=/^(\s*)([\s\S]*?)(\s*)$/;h.module("ngSanitize",[]).provider("$sanitize",function(){this.$get=
["$$sanitizeUri",function(a){return function(d){var b=[];F(d,s(b,function(b,c){return!/^unsafe/.test(a(b,c))}));return b.join("")}}]});h.module("ngSanitize").filter("linky",["$sanitize",function(a){var d=/((ftp|https?):\/\/|(mailto:)?[A-Za-z0-9._%+-]+@)\S*[^\s.;,(){}<>]/,b=/^mailto:/;return function(e,c){function g(a){a&&m.push(E(a))}function f(a,b){m.push("<a ");h.isDefined(c)&&(m.push('target="'),m.push(c),m.push('" '));m.push('href="');m.push(a);m.push('">');g(b);m.push("</a>")}if(!e)return e;
for(var l,k=e,m=[],n,p;l=k.match(d);)n=l[0],l[2]==l[3]&&(n="mailto:"+n),p=l.index,g(k.substr(0,p)),f(n,l[0].replace(b,"")),k=k.substring(p+l[0].length);g(k);return a(m.join(""))}}])})(window,window.angular);
//# sourceMappingURL=angular-sanitize.min.js.map
| k |
presets.js | iD.presets = function(context) {
// an iD.presets.Collection with methods for
// loading new data and returning defaults
var other = iD.presets.Preset('other', {
tags: {},
geometry: ['point', 'vertex', 'line', 'area']
}),
all = iD.presets.Collection([other]),
defaults = { area: all, line: all, point: all, vertex: all },
fields = {},
universal = [],
recent = iD.presets.Collection([]);
all.load = function(d) {
if (d.fields) {
_.forEach(d.fields, function(d, id) {
fields[id] = iD.presets.Field(id, d);
if (d.universal) universal.push(fields[id]);
});
}
if (d.presets) {
_.forEach(d.presets, function(d, id) {
all.collection.push(iD.presets.Preset(id, d, fields));
});
}
if (d.categories) {
d.categories.forEach(function(d) {
all.collection.push(iD.presets.Category(d, all));
});
}
if (d.defaults) {
var getItem = _.bind(all.item, all);
defaults = {
area: iD.presets.Collection(d.defaults.area.map(getItem)),
line: iD.presets.Collection(d.defaults.line.map(getItem)),
point: iD.presets.Collection(d.defaults.point.map(getItem)),
vertex: iD.presets.Collection(d.defaults.vertex.map(getItem))
};
}
return all;
};
all.universal = function() {
return universal;
};
all.defaults = function(entity, n) {
var rec = recent.matchGeometry(entity, context.graph()).collection.slice(0, 4),
def = _.uniq(rec.concat(defaults[entity.geometry(context.graph())].collection)).slice(0, n - 1);
return iD.presets.Collection(_.unique(rec.concat(def).concat(other)));
};
all.choose = function(preset) {
if (preset !== other) {
recent = iD.presets.Collection(_.unique([preset].concat(recent.collection)));
} | return all;
};
return all;
}; | |
managed_nodegroup_test.go | // +build integration
package managed
import (
"fmt"
"strings"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/eks"
harness "github.com/dlespiau/kube-test-harness"
. "github.com/onsi/ginkgo"
. "github.com/onsi/ginkgo/extensions/table"
. "github.com/onsi/gomega"
corev1 "k8s.io/api/core/v1"
. "github.com/weaveworks/eksctl/integration/matchers"
. "github.com/weaveworks/eksctl/integration/runner"
"github.com/weaveworks/eksctl/integration/tests"
"github.com/weaveworks/eksctl/integration/utilities/kube"
api "github.com/weaveworks/eksctl/pkg/apis/eksctl.io/v1alpha5"
"github.com/weaveworks/eksctl/pkg/testutils"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/clientcmd"
)
const (
k8sUpdatePollInterval = "2s"
k8sUpdatePollTimeout = "3m"
)
var params *tests.Params
func init() {
// Call testing.Init() prior to tests.NewParams(), as otherwise -test.* will not be recognised. See also: https://golang.org/doc/go1.13#testing
testing.Init()
params = tests.NewParams("managed")
supportedVersions := api.SupportedVersions()
if len(supportedVersions) < 2 {
panic("managed nodegroup tests require at least two supported Kubernetes versions to run")
}
params.Version = supportedVersions[len(supportedVersions)-2]
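	// Use the second-newest supported version so the upgrade test below can
	// move the cluster and nodegroup to the newest one.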
}
func TestManaged(t *testing.T) {
testutils.RegisterAndRun(t)
}
var _ = Describe("(Integration) Create Managed Nodegroups", func() {
const (
initialNodeGroup = "managed-ng-0"
newPublicNodeGroup = "ng-public-1"
newPrivateNodeGroup = "ng-private-1"
)
makeClusterConfig := func() *api.ClusterConfig {
clusterConfig := api.NewClusterConfig()
clusterConfig.Metadata.Name = params.ClusterName
clusterConfig.Metadata.Region = params.Region
clusterConfig.Metadata.Version = params.Version
return clusterConfig
}
defaultTimeout := 20 * time.Minute
BeforeSuite(func() {
fmt.Fprintf(GinkgoWriter, "Using kubeconfig: %s\n", params.KubeconfigPath)
cmd := params.EksctlCreateCmd.WithArgs(
"cluster",
"--verbose", "4",
"--name", params.ClusterName,
"--tags", "alpha.eksctl.io/description=eksctl integration test",
"--managed",
"--nodegroup-name", initialNodeGroup,
"--node-labels", "ng-name="+initialNodeGroup,
"--nodes", "2",
"--version", params.Version,
"--kubeconfig", params.KubeconfigPath,
)
Expect(cmd).To(RunSuccessfully())
})
DescribeTable("Bottlerocket and Ubuntu support", func(ng *api.ManagedNodeGroup) {
clusterConfig := makeClusterConfig()
clusterConfig.ManagedNodeGroups = []*api.ManagedNodeGroup{ng}
cmd := params.EksctlCreateCmd.
WithArgs(
"nodegroup",
"--config-file", "-",
"--verbose", "4",
).
WithoutArg("--region", params.Region).
WithStdin(testutils.ClusterConfigReader(clusterConfig))
Expect(cmd).To(RunSuccessfully())
},
Entry("Bottlerocket", &api.ManagedNodeGroup{
NodeGroupBase: &api.NodeGroupBase{
Name: "bottlerocket",
VolumeSize: aws.Int(35),
AMIFamily: "Bottlerocket",
},
Taints: []api.NodeGroupTaint{
{
Key: "key2",
Value: "value2",
Effect: "PreferNoSchedule",
},
},
}),
Entry("Ubuntu", &api.ManagedNodeGroup{
NodeGroupBase: &api.NodeGroupBase{
Name: "ubuntu",
VolumeSize: aws.Int(25),
AMIFamily: "Ubuntu2004",
},
}),
)
Context("Bottlerocket nodegroups", func() {
It("should work as a node AMI family", func() {
clusterConfig := makeClusterConfig()
clusterConfig.ManagedNodeGroups = []*api.ManagedNodeGroup{
{
NodeGroupBase: &api.NodeGroupBase{
Name: "bottlerocket",
VolumeSize: aws.Int(35),
AMIFamily: "Bottlerocket",
},
Taints: []api.NodeGroupTaint{
{
Key: "key1",
Value: "value1",
Effect: "PreferNoSchedule",
},
},
},
}
cmd := params.EksctlCreateCmd.
WithArgs(
"nodegroup",
"--config-file", "-",
"--verbose", "4",
).
WithoutArg("--region", params.Region).
WithStdin(testutils.ClusterConfigReader(clusterConfig))
Expect(cmd).To(RunSuccessfully())
})
})
Context("cluster with 1 managed nodegroup", func() {
It("should have created an EKS cluster and two CloudFormation stacks", func() {
awsSession := NewSession(params.Region)
Expect(awsSession).To(HaveExistingCluster(params.ClusterName, eks.ClusterStatusActive, params.Version))
Expect(awsSession).To(HaveExistingStack(fmt.Sprintf("eksctl-%s-cluster", params.ClusterName)))
Expect(awsSession).To(HaveExistingStack(fmt.Sprintf("eksctl-%s-nodegroup-%s", params.ClusterName, initialNodeGroup)))
})
It("should have created a valid kubectl config file", func() {
config, err := clientcmd.LoadFromFile(params.KubeconfigPath)
Expect(err).ShouldNot(HaveOccurred())
err = clientcmd.ConfirmUsable(*config, "")
Expect(err).ShouldNot(HaveOccurred())
Expect(config.CurrentContext).To(ContainSubstring("eksctl"))
Expect(config.CurrentContext).To(ContainSubstring(params.ClusterName))
Expect(config.CurrentContext).To(ContainSubstring(params.Region))
})
Context("and listing clusters", func() {
It("should return the previously created cluster", func() {
cmd := params.EksctlGetCmd.WithArgs("clusters", "--all-regions")
Expect(cmd).To(RunSuccessfullyWithOutputString(ContainSubstring(params.ClusterName)))
})
})
Context("and checking the nodegroup health", func() {
It("should return healthy", func() {
cmd := params.EksctlUtilsCmd.WithArgs(
"nodegroup-health",
"--cluster", params.ClusterName,
"--name", initialNodeGroup,
)
Expect(cmd).To(RunSuccessfullyWithOutputString(ContainSubstring("active")))
})
})
Context("and scale the initial nodegroup", func() {
It("should not return an error", func() {
cmd := params.EksctlScaleNodeGroupCmd.WithArgs(
"--cluster", params.ClusterName,
"--nodes-min", "2",
"--nodes", "3",
"--nodes-max", "4",
"--name", initialNodeGroup,
)
Expect(cmd).To(RunSuccessfully())
})
})
Context("and add two managed nodegroups (one public and one private)", func() {
It("should not return an error for public node group", func() {
cmd := params.EksctlCreateCmd.WithArgs(
"nodegroup",
"--cluster", params.ClusterName,
"--nodes", "4",
"--managed",
newPublicNodeGroup,
)
Expect(cmd).To(RunSuccessfully())
})
It("should not return an error for private node group", func() {
cmd := params.EksctlCreateCmd.WithArgs(
"nodegroup",
"--cluster", params.ClusterName,
"--nodes", "2",
"--managed",
"--node-private-networking",
newPrivateNodeGroup,
)
Expect(cmd).To(RunSuccessfully())
})
Context("create test workloads", func() {
var (
err error
test *harness.Test
)
BeforeEach(func() {
test, err = kube.NewTest(params.KubeconfigPath)
Expect(err).ShouldNot(HaveOccurred())
})
AfterEach(func() {
test.Close()
Eventually(func() int {
return len(test.ListPods(test.Namespace, metav1.ListOptions{}).Items)
}, "3m", "1s").Should(BeZero())
})
It("should deploy podinfo service to the cluster and access it via proxy", func() {
d := test.CreateDeploymentFromFile(test.Namespace, "../../data/podinfo.yaml")
test.WaitForDeploymentReady(d, defaultTimeout)
pods := test.ListPodsFromDeployment(d)
Expect(len(pods.Items)).To(Equal(2))
// For each pod of the Deployment, check we receive a sensible response to a
// GET request on /version.
for _, pod := range pods.Items {
Expect(pod.Namespace).To(Equal(test.Namespace))
req := test.PodProxyGet(&pod, "", "/version")
fmt.Fprintf(GinkgoWriter, "url = %#v", req.URL())
var js map[string]interface{}
test.PodProxyGetJSON(&pod, "", "/version", &js)
Expect(js).To(HaveKeyWithValue("version", "1.5.1"))
}
})
It("should have functional DNS", func() {
d := test.CreateDaemonSetFromFile(test.Namespace, "../../data/test-dns.yaml")
test.WaitForDaemonSetReady(d, defaultTimeout)
{
ds, err := test.GetDaemonSet(test.Namespace, d.Name)
Expect(err).ShouldNot(HaveOccurred())
fmt.Fprintf(GinkgoWriter, "ds.Status = %#v", ds.Status)
}
})
It("should have access to HTTP(S) sites", func() {
d := test.CreateDaemonSetFromFile(test.Namespace, "../../data/test-http.yaml")
test.WaitForDaemonSetReady(d, defaultTimeout)
{
ds, err := test.GetDaemonSet(test.Namespace, d.Name)
Expect(err).ShouldNot(HaveOccurred())
fmt.Fprintf(GinkgoWriter, "ds.Status = %#v", ds.Status)
}
})
})
Context("and delete the managed public nodegroup", func() {
It("should not return an error", func() {
cmd := params.EksctlDeleteCmd.WithArgs(
"nodegroup",
"--verbose", "4",
"--cluster", params.ClusterName,
newPublicNodeGroup,
)
Expect(cmd).To(RunSuccessfully())
})
})
Context("and delete the managed private nodegroup", func() {
It("should not return an error", func() {
cmd := params.EksctlDeleteCmd.WithArgs(
"nodegroup",
"--verbose", "4",
"--cluster", params.ClusterName,
newPrivateNodeGroup,
)
Expect(cmd).To(RunSuccessfully())
})
})
})
Context("and upgrading a nodegroup", func() {
It("should upgrade to the next Kubernetes version", func() {
By("updating the control plane version")
cmd := params.EksctlUpgradeCmd.
WithArgs(
"cluster",
"--verbose", "4",
"--name", params.ClusterName,
"--approve",
)
Expect(cmd).To(RunSuccessfully())
var nextVersion string
{
supportedVersions := api.SupportedVersions()
nextVersion = supportedVersions[len(supportedVersions)-1]
}
By(fmt.Sprintf("checking that control plane is updated to %v", nextVersion))
config, err := clientcmd.BuildConfigFromFlags("", params.KubeconfigPath)
Expect(err).ToNot(HaveOccurred())
clientset, err := kubernetes.NewForConfig(config)
Expect(err).ToNot(HaveOccurred())
Eventually(func() string {
serverVersion, err := clientset.ServerVersion()
Expect(err).ToNot(HaveOccurred())
return fmt.Sprintf("%s.%s", serverVersion.Major, strings.TrimSuffix(serverVersion.Minor, "+"))
}, k8sUpdatePollTimeout, k8sUpdatePollInterval).Should(Equal(nextVersion))
By(fmt.Sprintf("upgrading nodegroup %s to Kubernetes version %s", initialNodeGroup, nextVersion))
cmd = params.EksctlUpgradeCmd.WithArgs(
"nodegroup",
"--verbose", "4",
"--cluster", params.ClusterName,
"--name", initialNodeGroup,
"--kubernetes-version", nextVersion,
)
Expect(cmd).To(RunSuccessfullyWithOutputString(ContainSubstring("nodegroup successfully upgraded")))
})
})
Context("and creating a nodegroup with taints", func() {
It("should create nodegroups with taints applied", func() {
taints := []api.NodeGroupTaint{
{
Key: "key1",
Value: "value1",
Effect: "NoSchedule",
},
{
Key: "key2",
Effect: "NoSchedule",
},
{
Key: "key3",
Value: "value2",
Effect: "NoExecute",
},
}
clusterConfig := makeClusterConfig()
clusterConfig.ManagedNodeGroups = []*api.ManagedNodeGroup{
{
NodeGroupBase: &api.NodeGroupBase{
Name: "taints",
},
Taints: taints,
},
} | "nodegroup",
"--config-file", "-",
"--verbose", "4",
).
WithoutArg("--region", params.Region).
WithStdin(testutils.ClusterConfigReader(clusterConfig))
Expect(cmd).To(RunSuccessfully())
config, err := clientcmd.BuildConfigFromFlags("", params.KubeconfigPath)
Expect(err).ToNot(HaveOccurred())
clientset, err := kubernetes.NewForConfig(config)
Expect(err).ToNot(HaveOccurred())
mapTaints := func(taints []api.NodeGroupTaint) []corev1.Taint {
var ret []corev1.Taint
for _, t := range taints {
ret = append(ret, corev1.Taint{
Key: t.Key,
Value: t.Value,
Effect: t.Effect,
})
}
return ret
}
tests.AssertNodeTaints(clientset, "taints", mapTaints(taints))
})
})
Context("and deleting the cluster", func() {
It("should not return an error", func() {
cmd := params.EksctlDeleteClusterCmd.WithArgs(
"--name", params.ClusterName,
)
Expect(cmd).To(RunSuccessfully())
})
})
})
})
var _ = AfterSuite(func() {
params.DeleteClusters()
}) |
cmd := params.EksctlCreateCmd.
WithArgs( |
signal-r.service.spec.ts | describe('SignalRService', () => {
let service: SignalRService;
beforeEach(() => {
TestBed.configureTestingModule({});
service = TestBed.inject(SignalRService);
});
it('should be created', () => {
expect(service).toBeTruthy();
});
}); | import { TestBed } from '@angular/core/testing';
import { SignalRService } from './signal-r.service';
|
|
output.py | from pathlib import Path
import json
# Directory (avoid shadowing the built-in `dir`)
base_dir = Path().resolve()
# Configuration
with open(base_dir / 'config.json') as config_file:
    CONFIG = json.load(config_file)
def | (config = CONFIG):
"""
Return string for default output directory path.
"""
return(config['output_dir']['path']) | output_dir |
ft2font.py | def | ():
    global __bootstrap__, __loader__, __file__
    import sys, pkg_resources, imp
    __file__ = pkg_resources.resource_filename(__name__, 'ft2font.so')
    __loader__ = None; del __bootstrap__, __loader__
    imp.load_dynamic(__name__, __file__)
__bootstrap__()
| __bootstrap__ |
pagehome.go | package pagehome
import (
"encoding/json"
"fmt"
"html/template"
"math/rand"
"net/http"
"strconv"
"time"
"github.com/dekoch/gouniversal/module/picturex/global"
"github.com/dekoch/gouniversal/module/picturex/lang"
"github.com/dekoch/gouniversal/module/picturex/typemo"
"github.com/dekoch/gouniversal/shared/alert"
"github.com/dekoch/gouniversal/shared/functions"
"github.com/dekoch/gouniversal/shared/navigation"
"github.com/dekoch/gouniversal/shared/sbool"
"github.com/dekoch/gouniversal/shared/stringarray"
)
// SSE writes Server-Sent Events to an HTTP client.
type sse struct{}
type sseMessage struct {
ClientUUID string
PairUUID string
Content string
}
type sseJSON struct {
First string
Second string
}
var (
messages = make(chan sseMessage)
clients stringarray.StringArray
streamEnabled sbool.Sbool
)
func LoadConfig() {
rand.Seed(time.Now().UnixNano())
clients.RemoveAll()
http.Handle("/picturex/sse/", &sse{})
}
func RegisterPage(page *typemo.Page, nav *navigation.Navigation) {
nav.Sitemap.Register(page.Lang.Home.Menu, "App:PictureX:Home", page.Lang.Home.Title)
pairs, err := global.PairList.GetPairsFromUser(nav.User.UUID)
if err == nil {
for _, pair := range pairs {
nav.Sitemap.Register(page.Lang.Home.Menu, "App:PictureX:Home$Pair="+pair, page.Lang.Home.Title+" ("+pair+")")
}
}
}
func Render(page *typemo.Page, nav *navigation.Navigation, r *http.Request) {
type Content struct {
Lang lang.Home
UUID template.HTML
Token template.HTML
Pair template.HTML
Link template.HTML
ShowShareLink template.HTML
ShowLinkReceived template.HTML
}
var c Content
c.Lang = page.Lang.Home
var (
err error
redirect bool
isFirstUser bool
ssej sseJSON
ssem sseMessage
firstPic string
secondPic string
uid string
partnerToken string
b []byte
)
pair := nav.Parameter("Pair")
token := global.Tokens.New(nav.User.UUID)
// need random to change urls, because we update pictures
ra := strconv.Itoa(rand.Int())
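	// Run the page setup as a staged pipeline: the switch below executes steps
	// 0..15 in order, and the shared error check after it aborts the whole
	// sequence on the first failure.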
func() {
for i := 0; i <= 15; i++ {
switch i {
case 0:
if pair == "" {
pair, err = global.PairList.NewPair(nav.User.UUID)
redirect = true
}
case 1:
switch r.FormValue("edit") {
case "newpair":
pair, err = global.PairList.NewPair(nav.User.UUID)
redirect = true
case "unlock":
global.PairList.UnlockPicture(pair, nav.User.UUID)
case "deletepair":
global.PairList.DeletePair(pair)
pair, err = global.PairList.GetFirstPairFromUser(nav.User.UUID)
if err != nil {
pair, err = global.PairList.NewPair(nav.User.UUID)
}
redirect = true
}
case 2:
isFirstUser, err = global.PairList.IsFirstUser(pair, nav.User.UUID)
case 3:
if isFirstUser {
c.ShowShareLink = template.HTML("show")
} else {
err = global.PairList.SetSecondUser(pair, nav.User.UUID)
c.ShowLinkReceived = template.HTML("show")
}
case 4:
firstPic, err = global.PairList.GetFirstPicture(pair, nav.User.UUID)
case 5:
secondPic, err = global.PairList.GetSecondPicture(pair, nav.User.UUID)
case 6:
if firstPic != "" {
ssej.First = "/picturex/req/?uuid=" + nav.User.UUID + "&token=" + token + "&name=" + firstPic + "&random=" + ra
}
if secondPic != "" {
ssej.Second = "/picturex/req/?uuid=" + nav.User.UUID + "&token=" + token + "&name=" + secondPic + "&random=" + ra
}
case 7:
b, err = json.Marshal(ssej)
case 8:
ssem.ClientUUID = nav.User.UUID
ssem.PairUUID = pair
ssem.Content = string(b)
go func(message sseMessage) {
time.Sleep(500 * time.Millisecond)
cl := clients.List()
// send message to all waiting clients
for i := 0; i < len(cl); i++ {
messages <- message
}
}(ssem)
case 9:
			// Send update to the partner.
if isFirstUser {
uid, err = global.PairList.GetSecondUser(pair)
} else {
uid, err = global.PairList.GetFirstUser(pair)
}
if uid == "" {
return
}
case 10:
partnerToken, err = global.Tokens.Get(uid)
case 11:
firstPic, err = global.PairList.GetFirstPicture(pair, uid)
case 12:
secondPic, err = global.PairList.GetSecondPicture(pair, uid)
case 13:
if firstPic != "" {
ssej.First = "/picturex/req/?uuid=" + uid + "&token=" + partnerToken + "&name=" + firstPic + "&random=" + ra
}
if secondPic != "" {
ssej.Second = "/picturex/req/?uuid=" + uid + "&token=" + partnerToken + "&name=" + secondPic + "&random=" + ra
}
case 14:
b, err = json.Marshal(ssej)
case 15:
ssem.ClientUUID = uid
ssem.PairUUID = pair
ssem.Content = string(b)
go func(message sseMessage) {
cl := clients.List()
// send message to all waiting clients
for i := 0; i < len(cl); i++ {
messages <- message
}
}(ssem)
}
if err != nil {
pair, _ = global.PairList.NewPair(nav.User.UUID)
redirect = true
alert.Message(alert.ERROR, page.Lang.Alert.Error, err, "", nav.User.UUID)
return
}
}
}()
if redirect {
nav.RedirectPath("App:PictureX:Home$Pair="+pair, false)
return
}
c.UUID = template.HTML(nav.User.UUID)
c.Token = template.HTML(token)
c.Pair = template.HTML(pair)
link := "http://"
if nav.UIConfig.HTTPS.Enabled {
link = "https://"
}
link += nav.Server.Host + "?path=App:PictureX:Home$Pair=" + pair
c.Link = template.HTML(link)
p, err := functions.PageToString(global.Config.UIFileRoot+"home.html", c)
if err == nil {
page.Content += p
} else {
nav.RedirectPath("404", true)
}
}
func (s *sse) ServeHTTP(w http.ResponseWriter, r *http.Request) {
f, ok := w.(http.Flusher)
if !ok {
http.Error(w, "cannot stream", http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "text/event-stream")
w.Header().Set("Cache-Control", "no-cache")
w.Header().Set("Connection", "keep-alive")
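	// Note: http.CloseNotifier is deprecated in newer Go releases in favour of
	// r.Context().Done(); the original mechanism is kept here unchanged.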
cn, ok := w.(http.CloseNotifier)
if !ok {
http.Error(w, "cannot stream", http.StatusInternalServerError)
return
}
id := r.FormValue("uuid")
if functions.IsEmpty(id) {
fmt.Fprintf(w, "error: UUID not set")
f.Flush()
return
}
tok := r.FormValue("token")
if functions.IsEmpty(tok) {
fmt.Fprintf(w, "error: token not set")
f.Flush()
return
}
	if !global.Tokens.Check(id, tok) {
fmt.Fprintf(w, "error: invalid token")
f.Flush()
return
}
pair := r.FormValue("pair")
clients.Add(id)
| //fmt.Println("done: closed connection")
return
case ssem := <-messages:
// check client uuid
if ssem.ClientUUID == id &&
ssem.PairUUID == pair {
fmt.Fprintf(w, "data: %s\n\n", ssem.Content)
f.Flush()
}
}
}
} | for {
select {
case <-cn.CloseNotify():
clients.Remove(id) |
error.rs | use core::fmt::{Display, Formatter};
#[cfg(feature = "dynamic")]
use alloc::string::FromUtf8Error;
/// Alias of `Result` that uses [`Error`](self::Error) as its error type
pub type Result<T> = core::result::Result<T, Error>;
/// An opaque error type, used for all errors in this crate
#[derive(Debug)]
pub struct Error {
kind: ErrorKind,
}
impl Error {
/// Returns [`ErrorKind`](self::ErrorKind) of current error
#[inline]
pub fn kind(&self) -> ErrorKind {
self.kind
}
}
impl From<ErrorKind> for Error {
#[inline]
fn from(kind: ErrorKind) -> Self {
Self { kind }
}
}
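// Illustrative usage: any `ErrorKind` converts into an `Error`, so fallible code
// in this crate can simply write `Err(ErrorKind::InvalidChar.into())`.
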
impl Display for Error {
#[inline]
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result |
}
/// Different variants of possible errors
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum ErrorKind {
/// Returned when deserialization from bytes to char fails.
InvalidChar,
/// Returned when deserialization from bytes to str fails.
#[cfg(feature = "dynamic")]
InvalidStr,
/// Returned when input slice is of invalid length.
#[cfg(feature = "dynamic")]
InvalidSliceLength,
/// Returned when input slice cannot be de-serialized into given type.
#[cfg(feature = "dynamic")]
InvalidInput,
}
impl Display for ErrorKind {
fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
match self {
ErrorKind::InvalidChar => write!(f, "Deserialization from bytes to char failed"),
#[cfg(feature = "dynamic")]
ErrorKind::InvalidStr => write!(f, "Deserialization from bytes to String failed"),
#[cfg(feature = "dynamic")]
ErrorKind::InvalidSliceLength => write!(f, "Input slice is of invalid length"),
#[cfg(feature = "dynamic")]
ErrorKind::InvalidInput => {
write!(f, "input slice cannot be de-serialized into given type")
}
}
}
}
#[cfg(feature = "dynamic")]
impl From<FromUtf8Error> for Error {
#[inline]
fn from(_: FromUtf8Error) -> Error {
Error {
kind: ErrorKind::InvalidStr,
}
}
}
| {
write!(f, "{}", self.kind)
} |
types.rs | #![allow(rustc::default_hash_types)]
use std::borrow::Cow;
use std::cmp::Ordering;
use std::collections::BTreeMap;
use if_chain::if_chain;
use rustc_ast::{LitFloatType, LitIntType, LitKind};
use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::intravisit::{walk_body, walk_expr, walk_ty, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::{
BinOpKind, Block, Body, Expr, ExprKind, FnDecl, FnRetTy, FnSig, GenericArg, GenericBounds, GenericParamKind, HirId,
ImplItem, ImplItemKind, Item, ItemKind, Lifetime, Lit, Local, MatchSource, MutTy, Mutability, Node, QPath, Stmt,
StmtKind, SyntheticTyParamKind, TraitFn, TraitItem, TraitItemKind, TyKind, UnOp,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::TypeFoldable;
use rustc_middle::ty::{self, FloatTy, InferTy, IntTy, Ty, TyCtxt, TyS, TypeAndMut, TypeckResults, UintTy};
use rustc_semver::RustcVersion;
use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::source_map::Span;
use rustc_span::symbol::sym;
use rustc_target::abi::LayoutOf;
use rustc_target::spec::abi::Abi;
use rustc_typeck::hir_ty_to_ty;
use crate::consts::{constant, Constant};
use crate::utils::paths;
use crate::utils::sugg::Sugg;
use crate::utils::{
clip, comparisons, differing_macro_contexts, higher, in_constant, indent_of, int_bits, is_hir_ty_cfg_dependant,
is_type_diagnostic_item, last_path_segment, match_def_path, match_path, meets_msrv, method_chain_args,
multispan_sugg, numeric_literal::NumericLiteral, reindent_multiline, sext, snippet, snippet_opt,
snippet_with_applicability, snippet_with_macro_callsite, span_lint, span_lint_and_help, span_lint_and_sugg,
span_lint_and_then, unsext,
};
declare_clippy_lint! {
/// **What it does:** Checks for use of `Box<Vec<_>>` anywhere in the code.
/// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
///
/// **Why is this bad?** `Vec` already keeps its contents in a separate area on
/// the heap. So if you `Box` it, you just add another level of indirection
/// without any benefit whatsoever.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// struct X {
/// values: Box<Vec<Foo>>,
/// }
/// ```
///
/// Better:
///
/// ```rust,ignore
/// struct X {
/// values: Vec<Foo>,
/// }
/// ```
pub BOX_VEC,
perf,
"usage of `Box<Vec<T>>`, vector elements are already on the heap"
}
declare_clippy_lint! {
/// **What it does:** Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
/// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
///
/// **Why is this bad?** `Vec` already keeps its contents in a separate area on
/// the heap. So if you `Box` its contents, you just add another level of indirection.
///
    /// **Known problems:** `Vec<Box<T: Sized>>` makes sense if `T` is a large type (see [#3530](https://github.com/rust-lang/rust-clippy/issues/3530),
/// 1st comment).
///
/// **Example:**
/// ```rust
/// struct X {
/// values: Vec<Box<i32>>,
/// }
/// ```
///
/// Better:
///
/// ```rust
/// struct X {
/// values: Vec<i32>,
/// }
/// ```
pub VEC_BOX,
complexity,
"usage of `Vec<Box<T>>` where T: Sized, vector elements are already on the heap"
}
declare_clippy_lint! {
/// **What it does:** Checks for use of `Option<Option<_>>` in function signatures and type
/// definitions
///
/// **Why is this bad?** `Option<_>` represents an optional value. `Option<Option<_>>`
/// represents an optional optional value which is logically the same thing as an optional
/// value but has an unneeded extra level of wrapping.
///
/// If you have a case where `Some(Some(_))`, `Some(None)` and `None` are distinct cases,
/// consider a custom `enum` instead, with clear names for each case.
///
/// **Known problems:** None.
///
    /// **Example:**
/// ```rust
/// fn get_data() -> Option<Option<u32>> {
/// None
/// }
/// ```
///
/// Better:
///
/// ```rust
/// pub enum Contents {
/// Data(Vec<u8>), // Was Some(Some(Vec<u8>))
/// NotYetFetched, // Was Some(None)
/// None, // Was None
/// }
///
/// fn get_data() -> Contents {
/// Contents::None
/// }
/// ```
pub OPTION_OPTION,
pedantic,
"usage of `Option<Option<T>>`"
}
declare_clippy_lint! {
/// **What it does:** Checks for usage of any `LinkedList`, suggesting to use a
/// `Vec` or a `VecDeque` (formerly called `RingBuf`).
///
/// **Why is this bad?** Gankro says:
///
    /// > The TL;DR of `LinkedList` is that it's built on a massive amount of
    /// > pointers and indirection. It wastes memory, it has terrible cache
    /// > locality, and is all-around slow. `RingBuf`, while "only" amortized
    /// > for push/pop, should be faster in the general case for almost every
    /// > possible workload, and isn't even amortized at all if you can predict
    /// > the capacity you need.
    /// >
    /// > `LinkedList`s are only really good if you're doing a lot of merging
    /// > or splitting of lists. This is because they can just mangle some
    /// > pointers instead of actually copying the data. Even if you're doing a
    /// > lot of insertion in the middle of the list, `RingBuf` can still be
    /// > better because of how expensive it is to seek to the middle of a
    /// > `LinkedList`.
///
/// **Known problems:** False positives – the instances where using a
/// `LinkedList` makes sense are few and far between, but they can still happen.
///
/// **Example:**
/// ```rust
/// # use std::collections::LinkedList;
/// let x: LinkedList<usize> = LinkedList::new();
/// ```
pub LINKEDLIST,
pedantic,
"usage of LinkedList, usually a vector is faster, or a more specialized data structure like a `VecDeque`"
}
declare_clippy_lint! {
/// **What it does:** Checks for use of `&Box<T>` anywhere in the code.
/// Check the [Box documentation](https://doc.rust-lang.org/std/boxed/index.html) for more information.
///
/// **Why is this bad?** Any `&Box<T>` can also be a `&T`, which is more
/// general.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// fn foo(bar: &Box<T>) { ... }
/// ```
///
/// Better:
///
/// ```rust,ignore
/// fn foo(bar: &T) { ... }
/// ```
pub BORROWED_BOX,
complexity,
"a borrow of a boxed type"
}
declare_clippy_lint! {
/// **What it does:** Checks for use of redundant allocations anywhere in the code.
///
/// **Why is this bad?** Expressions such as `Rc<&T>`, `Rc<Rc<T>>`, `Rc<Box<T>>`, `Box<&T>`
/// add an unnecessary level of indirection.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # use std::rc::Rc;
/// fn foo(bar: Rc<&usize>) {}
/// ```
///
/// Better:
///
/// ```rust
/// fn foo(bar: &usize) {}
/// ```
pub REDUNDANT_ALLOCATION,
perf,
"redundant allocation"
}
declare_clippy_lint! {
/// **What it does:** Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`.
///
/// **Why is this bad?** Expressions such as `Rc<String>` usually have no advantage over `Rc<str>`, since
/// it is larger and involves an extra level of indirection, and doesn't implement `Borrow<str>`.
///
    /// While mutating a buffer type would still be possible with `Rc::get_mut()`, it only
    /// works if there are no additional references yet, which usually defeats the purpose of
    /// enclosing it in a shared ownership type. Instead, one would normally wrap the inner
    /// type in an interior-mutability container such as `RefCell` or `Mutex` (see the
    /// sketch after the examples below).
///
/// **Known problems:** This pattern can be desirable to avoid the overhead of a `RefCell` or `Mutex` for
/// cases where mutation only happens before there are any additional references.
///
/// **Example:**
/// ```rust,ignore
/// # use std::rc::Rc;
/// fn foo(interned: Rc<String>) { ... }
/// ```
///
/// Better:
///
/// ```rust,ignore
/// fn foo(interned: Rc<str>) { ... }
/// ```
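    ///
    /// If shared mutation is actually needed, the usual alternative is an
    /// interior-mutability wrapper (a sketch; `Mutex` replaces `RefCell` in
    /// thread-safe code):
    ///
    /// ```rust,ignore
    /// # use std::cell::RefCell;
    /// # use std::rc::Rc;
    /// fn foo(shared: Rc<RefCell<String>>) { ... }
    /// ```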
pub RC_BUFFER,
restriction,
"shared ownership of a buffer type"
}
pub struct Types {
vec_box_size_threshold: u64,
}
impl_lint_pass!(Types => [BOX_VEC, VEC_BOX, OPTION_OPTION, LINKEDLIST, BORROWED_BOX, REDUNDANT_ALLOCATION, RC_BUFFER]);
impl<'tcx> LateLintPass<'tcx> for Types {
fn check_fn(&mut self, cx: &LateContext<'_>, _: FnKind<'_>, decl: &FnDecl<'_>, _: &Body<'_>, _: Span, id: HirId) {
// Skip trait implementations; see issue #605.
if let Some(hir::Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_item(id)) {
if let ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }) = item.kind {
return;
}
}
self.check_fn_decl(cx, decl);
}
fn check_struct_field(&mut self, cx: &LateContext<'_>, field: &hir::StructField<'_>) {
self.check_ty(cx, &field.ty, false);
}
fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &TraitItem<'_>) {
match item.kind {
TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => self.check_ty(cx, ty, false),
TraitItemKind::Fn(ref sig, _) => self.check_fn_decl(cx, &sig.decl),
_ => (),
}
}
fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
if let Some(ref ty) = local.ty {
self.check_ty(cx, ty, true);
}
}
}
/// Checks if the last segment of `qpath` has a type parameter whose path matches `path`.
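/// For example, given the type `Box<Vec<u8>>` and `path = &paths::VEC`, this
/// returns the span of `Vec<u8>`.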
fn match_type_parameter(cx: &LateContext<'_>, qpath: &QPath<'_>, path: &[&str]) -> Option<Span> {
let last = last_path_segment(qpath);
if_chain! {
if let Some(ref params) = last.args;
if !params.parenthesized;
if let Some(ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
});
if let TyKind::Path(ref qpath) = ty.kind;
if let Some(did) = cx.qpath_res(qpath, ty.hir_id).opt_def_id();
if match_def_path(cx, did, path);
then {
return Some(ty.span);
}
}
None
}
fn match_buffer_type(cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<&'static str> {
if match_type_parameter(cx, qpath, &paths::STRING).is_some() {
return Some("str");
}
if match_type_parameter(cx, qpath, &paths::OS_STRING).is_some() {
return Some("std::ffi::OsStr");
}
if match_type_parameter(cx, qpath, &paths::PATH_BUF).is_some() {
return Some("std::path::Path");
}
None
}
fn match_borrows_parameter(_cx: &LateContext<'_>, qpath: &QPath<'_>) -> Option<Span> {
let last = last_path_segment(qpath);
if_chain! {
if let Some(ref params) = last.args;
if !params.parenthesized;
if let Some(ty) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
});
if let TyKind::Rptr(..) = ty.kind;
then {
return Some(ty.span);
}
}
None
}
impl Types {
pub fn new(vec_box_size_threshold: u64) -> Self {
Self { vec_box_size_threshold }
}
fn check_fn_decl(&mut self, cx: &LateContext<'_>, decl: &FnDecl<'_>) {
for input in decl.inputs {
self.check_ty(cx, input, false);
}
if let FnRetTy::Return(ref ty) = decl.output {
self.check_ty(cx, ty, false);
}
}
    /// Recursively check for lints of this `Types` pass in the given type.
    /// Stop at the first lint found.
///
/// The parameter `is_local` distinguishes the context of the type; types from
/// local bindings should only be checked for the `BORROWED_BOX` lint.
#[allow(clippy::too_many_lines)]
fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, is_local: bool) {
if hir_ty.span.from_expansion() {
return;
}
match hir_ty.kind {
TyKind::Path(ref qpath) if !is_local => {
let hir_id = hir_ty.hir_id;
let res = cx.qpath_res(qpath, hir_id);
if let Some(def_id) = res.opt_def_id() {
if Some(def_id) == cx.tcx.lang_items().owned_box() {
if let Some(span) = match_borrows_parameter(cx, qpath) {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
REDUNDANT_ALLOCATION,
hir_ty.span,
"usage of `Box<&T>`",
"try",
snippet_with_applicability(cx, span, "..", &mut applicability).to_string(),
applicability,
);
return; // don't recurse into the type
}
if match_type_parameter(cx, qpath, &paths::VEC).is_some() {
span_lint_and_help(
cx,
BOX_VEC,
hir_ty.span,
"you seem to be trying to use `Box<Vec<T>>`. Consider using just `Vec<T>`",
None,
"`Vec<T>` is already on the heap, `Box<Vec<T>>` makes an extra allocation",
);
return; // don't recurse into the type
}
} else if cx.tcx.is_diagnostic_item(sym::Rc, def_id) {
if let Some(span) = match_type_parameter(cx, qpath, &paths::RC) {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
REDUNDANT_ALLOCATION,
hir_ty.span,
"usage of `Rc<Rc<T>>`",
"try",
snippet_with_applicability(cx, span, "..", &mut applicability).to_string(),
applicability,
);
return; // don't recurse into the type
}
if match_type_parameter(cx, qpath, &paths::BOX).is_some() {
let box_ty = match &last_path_segment(qpath).args.unwrap().args[0] {
GenericArg::Type(ty) => match &ty.kind {
TyKind::Path(qpath) => qpath,
_ => return,
},
_ => return,
};
let inner_span = match &last_path_segment(&box_ty).args.unwrap().args[0] {
GenericArg::Type(ty) => ty.span,
_ => return,
};
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
REDUNDANT_ALLOCATION,
hir_ty.span,
"usage of `Rc<Box<T>>`",
"try",
format!(
"Rc<{}>",
snippet_with_applicability(cx, inner_span, "..", &mut applicability)
),
applicability,
);
return; // don't recurse into the type
}
if let Some(alternate) = match_buffer_type(cx, qpath) {
span_lint_and_sugg(
cx,
RC_BUFFER,
hir_ty.span,
"usage of `Rc<T>` when T is a buffer type",
"try",
format!("Rc<{}>", alternate),
Applicability::MachineApplicable,
);
return; // don't recurse into the type
}
if match_type_parameter(cx, qpath, &paths::VEC).is_some() {
let vec_ty = match &last_path_segment(qpath).args.unwrap().args[0] {
GenericArg::Type(ty) => match &ty.kind {
TyKind::Path(qpath) => qpath,
_ => return,
},
_ => return,
};
let inner_span = match &last_path_segment(&vec_ty).args.unwrap().args[0] {
GenericArg::Type(ty) => ty.span,
_ => return,
};
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
RC_BUFFER,
hir_ty.span,
"usage of `Rc<T>` when T is a buffer type",
"try",
format!(
"Rc<[{}]>",
snippet_with_applicability(cx, inner_span, "..", &mut applicability)
),
Applicability::MachineApplicable,
);
return; // don't recurse into the type
}
if let Some(span) = match_borrows_parameter(cx, qpath) {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
REDUNDANT_ALLOCATION,
hir_ty.span,
"usage of `Rc<&T>`",
"try",
snippet_with_applicability(cx, span, "..", &mut applicability).to_string(),
applicability,
);
return; // don't recurse into the type
}
} else if cx.tcx.is_diagnostic_item(sym::Arc, def_id) {
if let Some(alternate) = match_buffer_type(cx, qpath) {
span_lint_and_sugg(
cx,
RC_BUFFER,
hir_ty.span,
"usage of `Arc<T>` when T is a buffer type",
"try",
format!("Arc<{}>", alternate),
Applicability::MachineApplicable,
);
return; // don't recurse into the type
}
if match_type_parameter(cx, qpath, &paths::VEC).is_some() {
let vec_ty = match &last_path_segment(qpath).args.unwrap().args[0] {
GenericArg::Type(ty) => match &ty.kind {
TyKind::Path(qpath) => qpath,
_ => return,
},
_ => return,
};
let inner_span = match &last_path_segment(&vec_ty).args.unwrap().args[0] {
GenericArg::Type(ty) => ty.span,
_ => return,
};
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
cx,
RC_BUFFER,
hir_ty.span,
"usage of `Arc<T>` when T is a buffer type",
"try",
format!(
"Arc<[{}]>",
snippet_with_applicability(cx, inner_span, "..", &mut applicability)
),
Applicability::MachineApplicable,
);
return; // don't recurse into the type
}
} else if cx.tcx.is_diagnostic_item(sym::vec_type, def_id) {
if_chain! {
// Get the _ part of Vec<_>
if let Some(ref last) = last_path_segment(qpath).args;
if let Some(ty) = last.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
});
// ty is now _ at this point
if let TyKind::Path(ref ty_qpath) = ty.kind;
let res = cx.qpath_res(ty_qpath, ty.hir_id);
if let Some(def_id) = res.opt_def_id();
if Some(def_id) == cx.tcx.lang_items().owned_box();
// At this point, we know ty is Box<T>, now get T
if let Some(ref last) = last_path_segment(ty_qpath).args;
if let Some(boxed_ty) = last.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
});
let ty_ty = hir_ty_to_ty(cx.tcx, boxed_ty);
if !ty_ty.has_escaping_bound_vars();
if ty_ty.is_sized(cx.tcx.at(ty.span), cx.param_env);
if let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes());
if ty_ty_size <= self.vec_box_size_threshold;
then {
span_lint_and_sugg(
cx,
VEC_BOX,
hir_ty.span,
"`Vec<T>` is already on the heap, the boxing is unnecessary",
"try",
format!("Vec<{}>", snippet(cx, boxed_ty.span, "..")),
Applicability::MachineApplicable,
);
return; // don't recurse into the type
}
}
} else if cx.tcx.is_diagnostic_item(sym::option_type, def_id) {
if match_type_parameter(cx, qpath, &paths::OPTION).is_some() {
span_lint(
cx,
OPTION_OPTION,
hir_ty.span,
"consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
enum if you need to distinguish all 3 cases",
);
return; // don't recurse into the type
}
} else if match_def_path(cx, def_id, &paths::LINKED_LIST) {
span_lint_and_help(
cx,
LINKEDLIST,
hir_ty.span,
"you seem to be using a `LinkedList`! Perhaps you meant some other data structure?",
None,
"a `VecDeque` might work",
);
return; // don't recurse into the type
}
}
match *qpath {
QPath::Resolved(Some(ref ty), ref p) => {
self.check_ty(cx, ty, is_local);
for ty in p.segments.iter().flat_map(|seg| {
seg.args
.as_ref()
.map_or_else(|| [].iter(), |params| params.args.iter())
.filter_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
})
}) {
self.check_ty(cx, ty, is_local);
}
},
QPath::Resolved(None, ref p) => {
for ty in p.segments.iter().flat_map(|seg| {
seg.args
.as_ref()
.map_or_else(|| [].iter(), |params| params.args.iter())
.filter_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
})
}) {
self.check_ty(cx, ty, is_local);
}
},
QPath::TypeRelative(ref ty, ref seg) => {
self.check_ty(cx, ty, is_local);
if let Some(ref params) = seg.args {
for ty in params.args.iter().filter_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
}) {
self.check_ty(cx, ty, is_local);
}
}
},
QPath::LangItem(..) => {},
}
},
TyKind::Rptr(ref lt, ref mut_ty) => self.check_ty_rptr(cx, hir_ty, is_local, lt, mut_ty),
// recurse
TyKind::Slice(ref ty) | TyKind::Array(ref ty, _) | TyKind::Ptr(MutTy { ref ty, .. }) => {
self.check_ty(cx, ty, is_local)
},
TyKind::Tup(tys) => {
for ty in tys {
self.check_ty(cx, ty, is_local);
}
},
_ => {},
}
}
fn check_ty_rptr(
&mut self,
cx: &LateContext<'_>,
hir_ty: &hir::Ty<'_>,
is_local: bool,
lt: &Lifetime,
mut_ty: &MutTy<'_>,
) {
match mut_ty.ty.kind {
TyKind::Path(ref qpath) => {
let hir_id = mut_ty.ty.hir_id;
let def = cx.qpath_res(qpath, hir_id);
if_chain! {
if let Some(def_id) = def.opt_def_id();
if Some(def_id) == cx.tcx.lang_items().owned_box();
if let QPath::Resolved(None, ref path) = *qpath;
if let [ref bx] = *path.segments;
if let Some(ref params) = bx.args;
if !params.parenthesized;
if let Some(inner) = params.args.iter().find_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
});
then {
if is_any_trait(inner) {
// Ignore `Box<Any>` types; see issue #1884 for details.
return;
}
let ltopt = if lt.is_elided() {
String::new()
} else {
format!("{} ", lt.name.ident().as_str())
};
if mut_ty.mutbl == Mutability::Mut {
// Ignore `&mut Box<T>` types; see issue #2907 for
// details.
return;
}
// When trait objects or opaque types have lifetime or auto-trait bounds,
// we need to add parentheses to avoid a syntax error due to its ambiguity.
// Originally reported as the issue #3128.
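                    // E.g. `&Box<dyn Foo + Send>` must be suggested as `&(dyn Foo + Send)`,
                    // because `&dyn Foo + Send` is rejected as an ambiguous `+` in a type.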
let inner_snippet = snippet(cx, inner.span, "..");
let suggestion = match &inner.kind {
TyKind::TraitObject(bounds, lt_bound) if bounds.len() > 1 || !lt_bound.is_elided() => {
format!("&{}({})", ltopt, &inner_snippet)
},
TyKind::Path(qpath)
if get_bounds_if_impl_trait(cx, qpath, inner.hir_id)
.map_or(false, |bounds| bounds.len() > 1) =>
{
format!("&{}({})", ltopt, &inner_snippet)
},
_ => format!("&{}{}", ltopt, &inner_snippet),
};
span_lint_and_sugg(
cx,
BORROWED_BOX,
hir_ty.span,
"you seem to be trying to use `&Box<T>`. Consider using just `&T`",
"try",
suggestion,
// To make this `MachineApplicable`, at least one needs to check if it isn't a trait item
// because the trait impls of it will break otherwise;
// and there may be other cases that result in invalid code.
// For example, type coercion doesn't work nicely.
Applicability::Unspecified,
);
return; // don't recurse into the type
}
};
self.check_ty(cx, &mut_ty.ty, is_local);
},
_ => self.check_ty(cx, &mut_ty.ty, is_local),
}
}
}
// Returns true if the given type is the `Any` trait.
fn is_any_trait(t: &hir::Ty<'_>) -> bool {
if_chain! {
if let TyKind::TraitObject(ref traits, _) = t.kind;
if !traits.is_empty();
// Only Send/Sync can be used as additional traits, so it is enough to
// check only the first trait.
if match_path(&traits[0].trait_ref.path, &paths::ANY_TRAIT);
then {
return true;
}
}
false
}
fn get_bounds_if_impl_trait<'tcx>(cx: &LateContext<'tcx>, qpath: &QPath<'_>, id: HirId) -> Option<GenericBounds<'tcx>> {
if_chain! {
if let Some(did) = cx.qpath_res(qpath, id).opt_def_id();
if let Some(Node::GenericParam(generic_param)) = cx.tcx.hir().get_if_local(did);
if let GenericParamKind::Type { synthetic, .. } = generic_param.kind;
if synthetic == Some(SyntheticTyParamKind::ImplTrait);
then {
Some(generic_param.bounds)
} else {
None
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for binding a unit value.
///
/// **Why is this bad?** A unit value cannot usefully be used anywhere. So
/// binding one is kind of pointless.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x = {
/// 1;
/// };
/// ```
pub LET_UNIT_VALUE,
pedantic,
"creating a `let` binding to a value of unit type, which usually can't be used afterwards"
}
declare_lint_pass!(LetUnitValue => [LET_UNIT_VALUE]);
impl<'tcx> LateLintPass<'tcx> for LetUnitValue {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
if let StmtKind::Local(ref local) = stmt.kind {
if is_unit(cx.typeck_results().pat_ty(&local.pat)) {
if in_external_macro(cx.sess(), stmt.span) || local.pat.span.from_expansion() {
return;
}
if higher::is_from_for_desugar(local) {
return;
}
span_lint_and_then(
cx,
LET_UNIT_VALUE,
stmt.span,
"this let-binding has unit value",
|diag| {
if let Some(expr) = &local.init {
let snip = snippet_with_macro_callsite(cx, expr.span, "()");
diag.span_suggestion(
stmt.span,
"omit the `let` binding",
format!("{};", snip),
Applicability::MachineApplicable, // snippet
);
}
},
);
}
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for comparisons to unit. This includes all binary
/// comparisons (like `==` and `<`) and asserts.
///
/// **Why is this bad?** Unit is always equal to itself, and thus is just a
/// clumsily written constant. Mostly this happens when someone accidentally
/// adds semicolons at the end of the operands.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # fn foo() {};
/// # fn bar() {};
/// # fn baz() {};
/// if {
/// foo();
/// } == {
/// bar();
/// } {
/// baz();
/// }
/// ```
/// is equal to
/// ```rust
/// # fn foo() {};
/// # fn bar() {};
/// # fn baz() {};
/// {
/// foo();
/// bar();
/// baz();
/// }
/// ```
///
/// For asserts:
/// ```rust
/// # fn foo() {};
/// # fn bar() {};
/// assert_eq!({ foo(); }, { bar(); });
/// ```
/// will always succeed
pub UNIT_CMP,
correctness,
"comparing unit values"
}
declare_lint_pass!(UnitCmp => [UNIT_CMP]);
impl<'tcx> LateLintPass<'tcx> for UnitCmp {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
if expr.span.from_expansion() {
if let Some(callee) = expr.span.source_callee() {
if let ExpnKind::Macro(MacroKind::Bang, symbol) = callee.kind {
if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
let op = cmp.node;
if op.is_comparison() && is_unit(cx.typeck_results().expr_ty(left)) {
let result = match &*symbol.as_str() {
"assert_eq" | "debug_assert_eq" => "succeed",
"assert_ne" | "debug_assert_ne" => "fail",
_ => return,
};
span_lint(
cx,
UNIT_CMP,
expr.span,
&format!(
"`{}` of unit values detected. This will always {}",
symbol.as_str(),
result
),
);
}
}
}
}
return;
}
if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
let op = cmp.node;
if op.is_comparison() && is_unit(cx.typeck_results().expr_ty(left)) {
let result = match op {
BinOpKind::Eq | BinOpKind::Le | BinOpKind::Ge => "true",
_ => "false",
};
span_lint(
cx,
UNIT_CMP,
expr.span,
&format!(
"{}-comparison of unit values detected. This will always be {}",
op.as_str(),
result
),
);
}
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for passing a unit value as an argument to a function without using a
/// unit literal (`()`).
///
/// **Why is this bad?** This is likely the result of an accidental semicolon.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// foo({
/// let a = bar();
/// baz(a);
/// })
/// ```
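    ///
    /// The recommended fix moves the statements out of the block and passes a
    /// unit literal instead (a sketch of the lint's suggestion):
    /// ```rust,ignore
    /// let a = bar();
    /// baz(a);
    /// foo(())
    /// ```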
pub UNIT_ARG,
complexity,
"passing unit to a function"
}
declare_lint_pass!(UnitArg => [UNIT_ARG]);
impl<'tcx> LateLintPass<'tcx> for UnitArg {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if expr.span.from_expansion() {
return;
}
        // Apparently stuff in the desugaring of `?` can trigger this lint,
        // so check for that here.
        // Only the call to `Try::from_error` is marked as desugared,
        // so we need to check both the current Expr and its parent.
if is_questionmark_desugar_marked_call(expr) {
return;
}
if_chain! {
let map = &cx.tcx.hir();
let opt_parent_node = map.find(map.get_parent_node(expr.hir_id));
if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node;
if is_questionmark_desugar_marked_call(parent_expr);
then {
return;
}
}
match expr.kind {
ExprKind::Call(_, args) | ExprKind::MethodCall(_, _, args, _) => {
let args_to_recover = args
.iter()
.filter(|arg| {
if is_unit(cx.typeck_results().expr_ty(arg)) && !is_unit_literal(arg) {
!matches!(
&arg.kind,
ExprKind::Match(.., MatchSource::TryDesugar) | ExprKind::Path(..)
)
} else {
false
}
})
.collect::<Vec<_>>();
if !args_to_recover.is_empty() {
lint_unit_args(cx, expr, &args_to_recover);
}
},
_ => (),
}
}
}
fn fmt_stmts_and_call(
cx: &LateContext<'_>,
call_expr: &Expr<'_>,
call_snippet: &str,
args_snippets: &[impl AsRef<str>],
non_empty_block_args_snippets: &[impl AsRef<str>],
) -> String {
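    // Sketch of the transformation: for `foo({ bar(); })` this builds
    // "{ bar(); };\nfoo(())", i.e. block arguments become leading statements
    // and every recovered argument in the call snippet is replaced by `()`;
    // when the call is used as a value rather than a statement, the result is
    // additionally wrapped in a block.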
let call_expr_indent = indent_of(cx, call_expr.span).unwrap_or(0);
let call_snippet_with_replacements = args_snippets
.iter()
.fold(call_snippet.to_owned(), |acc, arg| acc.replacen(arg.as_ref(), "()", 1));
let mut stmts_and_call = non_empty_block_args_snippets
.iter()
.map(|it| it.as_ref().to_owned())
.collect::<Vec<_>>();
stmts_and_call.push(call_snippet_with_replacements);
stmts_and_call = stmts_and_call
.into_iter()
.map(|v| reindent_multiline(v.into(), true, Some(call_expr_indent)).into_owned())
.collect();
let mut stmts_and_call_snippet = stmts_and_call.join(&format!("{}{}", ";\n", " ".repeat(call_expr_indent)));
// expr is not in a block statement or result expression position, wrap in a block
let parent_node = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(call_expr.hir_id));
if !matches!(parent_node, Some(Node::Block(_))) && !matches!(parent_node, Some(Node::Stmt(_))) {
let block_indent = call_expr_indent + 4;
stmts_and_call_snippet =
reindent_multiline(stmts_and_call_snippet.into(), true, Some(block_indent)).into_owned();
stmts_and_call_snippet = format!(
"{{\n{}{}\n{}}}",
" ".repeat(block_indent),
&stmts_and_call_snippet,
" ".repeat(call_expr_indent)
);
}
stmts_and_call_snippet
}
fn lint_unit_args(cx: &LateContext<'_>, expr: &Expr<'_>, args_to_recover: &[&Expr<'_>]) {
let mut applicability = Applicability::MachineApplicable;
let (singular, plural) = if args_to_recover.len() > 1 {
("", "s")
} else {
("a ", "")
};
span_lint_and_then(
cx,
UNIT_ARG,
expr.span,
&format!("passing {}unit value{} to a function", singular, plural),
|db| {
let mut or = "";
args_to_recover
.iter()
.filter_map(|arg| {
if_chain! {
if let ExprKind::Block(block, _) = arg.kind;
if block.expr.is_none();
if let Some(last_stmt) = block.stmts.iter().last();
if let StmtKind::Semi(last_expr) = last_stmt.kind;
if let Some(snip) = snippet_opt(cx, last_expr.span);
then {
Some((
last_stmt.span,
snip,
))
}
else {
None
}
}
})
.for_each(|(span, sugg)| {
db.span_suggestion(
span,
"remove the semicolon from the last statement in the block",
sugg,
Applicability::MaybeIncorrect,
);
or = "or ";
applicability = Applicability::MaybeIncorrect;
});
let arg_snippets: Vec<String> = args_to_recover
.iter()
.filter_map(|arg| snippet_opt(cx, arg.span))
.collect();
let arg_snippets_without_empty_blocks: Vec<String> = args_to_recover
.iter()
.filter(|arg| !is_empty_block(arg))
.filter_map(|arg| snippet_opt(cx, arg.span))
.collect();
if let Some(call_snippet) = snippet_opt(cx, expr.span) {
let sugg = fmt_stmts_and_call(
cx,
expr,
&call_snippet,
&arg_snippets,
&arg_snippets_without_empty_blocks,
);
if arg_snippets_without_empty_blocks.is_empty() {
db.multipart_suggestion(
&format!("use {}unit literal{} instead", singular, plural),
args_to_recover
.iter()
.map(|arg| (arg.span, "()".to_string()))
.collect::<Vec<_>>(),
applicability,
);
} else {
let plural = arg_snippets_without_empty_blocks.len() > 1;
let empty_or_s = if plural { "s" } else { "" };
let it_or_them = if plural { "them" } else { "it" };
db.span_suggestion(
expr.span,
&format!(
"{}move the expression{} in front of the call and replace {} with the unit literal `()`",
or, empty_or_s, it_or_them
),
sugg,
applicability,
);
}
}
},
);
}
fn is_empty_block(expr: &Expr<'_>) -> bool {
matches!(
expr.kind,
ExprKind::Block(
Block {
stmts: &[],
expr: None,
..
},
_,
)
)
}
fn is_questionmark_desugar_marked_call(expr: &Expr<'_>) -> bool {
use rustc_span::hygiene::DesugaringKind;
if let ExprKind::Call(ref callee, _) = expr.kind {
callee.span.is_desugaring(DesugaringKind::QuestionMark)
} else {
false
}
}
fn is_unit(ty: Ty<'_>) -> bool {
matches!(ty.kind(), ty::Tuple(slice) if slice.is_empty())
}
fn is_unit_literal(expr: &Expr<'_>) -> bool {
matches!(expr.kind, ExprKind::Tup(ref slice) if slice.is_empty())
}
declare_clippy_lint! {
/// **What it does:** Checks for casts from any numerical to a float type where
/// the receiving type cannot store all values from the original type without
/// rounding errors. This possible rounding is to be expected, so this lint is
/// `Allow` by default.
///
/// Basically, this warns on casting any integer with 32 or more bits to `f32`
/// or any 64-bit integer to `f64`.
///
/// **Why is this bad?** It's not bad at all. But in some applications it can be
/// helpful to know where precision loss can take place. This lint can help find
/// those places in the code.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let x = u64::MAX;
/// x as f64;
/// ```
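    ///
    /// The loss is observable: two distinct `u64` values can round to the same
    /// `f64`, since `f64` has only 52 mantissa bits (a sketch):
    /// ```rust
    /// assert_eq!((u64::MAX - 1) as f64, u64::MAX as f64);
    /// ```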
pub CAST_PRECISION_LOSS,
pedantic,
"casts that cause loss of precision, e.g., `x as f32` where `x: u64`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts from a signed to an unsigned numerical
/// type. In this case, negative values wrap around to large positive values,
/// which can be quite surprising in practice. However, as the cast works as
/// defined, this lint is `Allow` by default.
///
/// **Why is this bad?** Possibly surprising results. You can activate this lint
/// as a one-time check to see where numerical wrapping can arise.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let y: i8 = -1;
    /// y as u128; // will return 340282366920938463463374607431768211455
/// ```
pub CAST_SIGN_LOSS,
pedantic,
"casts from signed types to unsigned types, e.g., `x as u32` where `x: i32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts between numerical types that may
/// truncate large values. This is expected behavior, so the cast is `Allow` by
/// default.
///
/// **Why is this bad?** In some problem domains, it is good practice to avoid
/// truncation. This lint can be activated to help assess where additional
/// checks could be beneficial.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn as_u8(x: u64) -> u8 {
/// x as u8
/// }
/// ```
pub CAST_POSSIBLE_TRUNCATION,
pedantic,
"casts that may cause truncation of the value, e.g., `x as u8` where `x: u32`, or `x as i32` where `x: f32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts from an unsigned type to a signed type of
/// the same size. Performing such a cast is a 'no-op' for the compiler,
/// i.e., nothing is changed at the bit level, and the binary representation of
/// the value is reinterpreted. This can cause wrapping if the value is too big
/// for the target signed type. However, the cast works as defined, so this lint
/// is `Allow` by default.
///
/// **Why is this bad?** While such a cast is not bad in itself, the results can
/// be surprising when this is not the intended behavior, as demonstrated by the
/// example below.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// u32::MAX as i32; // will yield a value of `-1`
/// ```
pub CAST_POSSIBLE_WRAP,
pedantic,
"casts that may cause wrapping around the value, e.g., `x as i32` where `x: u32` and `x > i32::MAX`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts between numerical types that may
/// be replaced by safe conversion functions.
///
/// **Why is this bad?** Rust's `as` keyword will perform many kinds of
/// conversions, including silently lossy conversions. Conversion functions such
/// as `i32::from` will only perform lossless conversions. Using the conversion
/// functions prevents conversions from turning into silent lossy conversions if
/// the types of the input expressions ever change, and make it easier for
/// people reading the code to know that the conversion is lossless.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn as_u64(x: u8) -> u64 {
/// x as u64
/// }
/// ```
///
/// Using `::from` would look like this:
///
/// ```rust
/// fn as_u64(x: u8) -> u64 {
/// u64::from(x)
/// }
/// ```
pub CAST_LOSSLESS,
pedantic,
"casts using `as` that are known to be lossless, e.g., `x as u64` where `x: u8`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts to the same type, casts of int literals to integer types
/// and casts of float literals to float types.
///
/// **Why is this bad?** It's just unnecessary.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let _ = 2i32 as i32;
/// let _ = 0.5 as f32;
/// ```
///
/// Better:
///
/// ```rust
/// let _ = 2_i32;
/// let _ = 0.5_f32;
/// ```
pub UNNECESSARY_CAST,
complexity,
"cast to the same type, e.g., `x as i32` where `x: i32`"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts, using `as` or `pointer::cast`,
/// from a less-strictly-aligned pointer to a more-strictly-aligned pointer
///
/// **Why is this bad?** Dereferencing the resulting pointer may be undefined
/// behavior.
///
/// **Known problems:** Using `std::ptr::read_unaligned` and `std::ptr::write_unaligned` or similar
    /// on the resulting pointer is fine. This lint is over-zealous: casts with manual alignment
    /// checks, or cast chains like `u64` -> `u8` -> `u16`, can be fine. Miri is able to do a more
    /// in-depth analysis.
///
/// **Example:**
/// ```rust
/// let _ = (&1u8 as *const u8) as *const u16;
/// let _ = (&mut 1u8 as *mut u8) as *mut u16;
///
/// (&1u8 as *const u8).cast::<u16>();
/// (&mut 1u8 as *mut u8).cast::<u16>();
/// ```
pub CAST_PTR_ALIGNMENT,
pedantic,
"cast from a pointer to a more-strictly-aligned pointer"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts of function pointers to something other than usize
///
/// **Why is this bad?**
/// Casting a function pointer to anything other than usize/isize is not portable across
/// architectures, because you end up losing bits if the target type is too small or end up with a
/// bunch of extra bits that waste space and add more instructions to the final binary than
/// strictly necessary for the problem
///
/// Casting to isize also doesn't make sense since there are no signed addresses.
///
    /// **Example:**
///
/// ```rust
/// // Bad
/// fn fun() -> i32 { 1 }
/// let a = fun as i64;
///
/// // Good
/// fn fun2() -> i32 { 1 }
/// let a = fun2 as usize;
/// ```
pub FN_TO_NUMERIC_CAST,
style,
"casting a function pointer to a numeric type other than usize"
}
declare_clippy_lint! {
/// **What it does:** Checks for casts of a function pointer to a numeric type not wide enough to
/// store address.
///
/// **Why is this bad?**
/// Such a cast discards some bits of the function's address. If this is intended, it would be more
/// clearly expressed by casting to usize first, then casting the usize to the intended type (with
/// a comment) to perform the truncation.
///
    /// **Example:**
///
/// ```rust
/// // Bad
/// fn fn1() -> i16 {
/// 1
/// };
/// let _ = fn1 as i32;
///
/// // Better: Cast to usize first, then comment with the reason for the truncation
/// fn fn2() -> i16 {
/// 1
/// };
/// let fn_ptr = fn2 as usize;
/// let fn_ptr_truncated = fn_ptr as i32;
/// ```
pub FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
style,
"casting a function pointer to a numeric type not wide enough to store the address"
}
/// Returns the size in bits of an integral type.
/// Will return 0 if the type is not an int or uint variant
fn int_ty_to_nbits(typ: Ty<'_>, tcx: TyCtxt<'_>) -> u64 {
match typ.kind() {
ty::Int(i) => match i {
IntTy::Isize => tcx.data_layout.pointer_size.bits(),
IntTy::I8 => 8,
IntTy::I16 => 16,
IntTy::I32 => 32,
IntTy::I64 => 64,
IntTy::I128 => 128,
},
ty::Uint(i) => match i {
UintTy::Usize => tcx.data_layout.pointer_size.bits(),
UintTy::U8 => 8,
UintTy::U16 => 16,
UintTy::U32 => 32,
UintTy::U64 => 64,
UintTy::U128 => 128,
},
_ => 0,
}
}
fn is_isize_or_usize(typ: Ty<'_>) -> bool {
matches!(typ.kind(), ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize))
}
fn span_precision_loss_lint(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to_f64: bool) {
let mantissa_nbits = if cast_to_f64 { 52 } else { 23 };
let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
let arch_dependent_str = "on targets with 64-bit wide pointers ";
let from_nbits_str = if arch_dependent {
"64".to_owned()
} else if is_isize_or_usize(cast_from) {
"32 or 64".to_owned()
} else {
int_ty_to_nbits(cast_from, cx.tcx).to_string()
};
span_lint(
cx,
CAST_PRECISION_LOSS,
expr.span,
&format!(
"casting `{0}` to `{1}` causes a loss of precision {2}(`{0}` is {3} bits wide, \
but `{1}`'s mantissa is only {4} bits wide)",
cast_from,
if cast_to_f64 { "f64" } else { "f32" },
if arch_dependent { arch_dependent_str } else { "" },
from_nbits_str,
mantissa_nbits
),
);
}
fn should_strip_parens(op: &Expr<'_>, snip: &str) -> bool {
if let ExprKind::Binary(_, _, _) = op.kind {
if snip.starts_with('(') && snip.ends_with(')') {
return true;
}
}
false
}
fn span_lossless_lint(cx: &LateContext<'_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
// Do not suggest using From in consts/statics until it is valid to do so (see #2267).
if in_constant(cx, expr.hir_id) {
return;
}
// The suggestion is to use a function call, so if the original expression
// has parens on the outside, they are no longer needed.
let mut applicability = Applicability::MachineApplicable;
let opt = snippet_opt(cx, op.span);
let sugg = opt.as_ref().map_or_else(
|| {
applicability = Applicability::HasPlaceholders;
".."
},
|snip| {
if should_strip_parens(op, snip) {
&snip[1..snip.len() - 1]
} else {
snip.as_str()
}
},
);
span_lint_and_sugg(
cx,
CAST_LOSSLESS,
expr.span,
&format!(
"casting `{}` to `{}` may become silently lossy if you later change the type",
cast_from, cast_to
),
"try",
format!("{}::from({})", cast_to, sugg),
applicability,
);
}
enum ArchSuffix {
_32,
_64,
None,
}
fn check_loss_of_sign(cx: &LateContext<'_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
if !cast_from.is_signed() || cast_to.is_signed() {
return;
}
// don't lint for positive constants
let const_val = constant(cx, &cx.typeck_results(), op);
if_chain! {
if let Some((Constant::Int(n), _)) = const_val;
if let ty::Int(ity) = *cast_from.kind();
if sext(cx.tcx, n, ity) >= 0;
then {
return
}
}
// don't lint for the result of methods that always return non-negative values
if let ExprKind::MethodCall(ref path, _, _, _) = op.kind {
let mut method_name = path.ident.name.as_str();
let allowed_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
if_chain! {
if method_name == "unwrap";
if let Some(arglist) = method_chain_args(op, &["unwrap"]);
if let ExprKind::MethodCall(ref inner_path, _, _, _) = &arglist[0][0].kind;
then {
method_name = inner_path.ident.name.as_str();
}
}
if allowed_methods.iter().any(|&name| method_name == name) {
return;
}
}
span_lint(
cx,
CAST_SIGN_LOSS,
expr.span,
&format!(
"casting `{}` to `{}` may lose the sign of the value",
cast_from, cast_to
),
);
}
fn check_truncation_and_wrapping(cx: &LateContext<'_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let arch_64_suffix = " on targets with 64-bit wide pointers";
let arch_32_suffix = " on targets with 32-bit wide pointers";
let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
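    // Example: casting `u64` to `usize` takes the `(false, true)` arm below
    // with `from_nbits == 64`, so it warns that the cast "may truncate the
    // value on targets with 32-bit wide pointers".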
let (span_truncation, suffix_truncation, span_wrap, suffix_wrap) =
match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
(true, true) | (false, false) => (
to_nbits < from_nbits,
ArchSuffix::None,
to_nbits == from_nbits && cast_unsigned_to_signed,
ArchSuffix::None,
),
(true, false) => (
to_nbits <= 32,
if to_nbits == 32 {
ArchSuffix::_64
} else {
ArchSuffix::None
},
to_nbits <= 32 && cast_unsigned_to_signed,
ArchSuffix::_32,
),
(false, true) => (
from_nbits == 64,
ArchSuffix::_32,
cast_unsigned_to_signed,
if from_nbits == 64 {
ArchSuffix::_64
} else {
ArchSuffix::_32
},
),
};
if span_truncation {
span_lint(
cx,
CAST_POSSIBLE_TRUNCATION,
expr.span,
&format!(
"casting `{}` to `{}` may truncate the value{}",
cast_from,
cast_to,
match suffix_truncation {
ArchSuffix::_32 => arch_32_suffix,
ArchSuffix::_64 => arch_64_suffix,
ArchSuffix::None => "",
}
),
);
}
if span_wrap {
span_lint(
cx,
CAST_POSSIBLE_WRAP,
expr.span,
&format!(
"casting `{}` to `{}` may wrap around the value{}",
cast_from,
cast_to,
match suffix_wrap {
ArchSuffix::_32 => arch_32_suffix,
ArchSuffix::_64 => arch_64_suffix,
ArchSuffix::None => "",
}
),
);
}
}
fn check_lossless(cx: &LateContext<'_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
let cast_signed_to_unsigned = cast_from.is_signed() && !cast_to.is_signed();
let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
if !is_isize_or_usize(cast_from) && !is_isize_or_usize(cast_to) && from_nbits < to_nbits && !cast_signed_to_unsigned
{
span_lossless_lint(cx, expr, op, cast_from, cast_to);
}
}
declare_lint_pass!(Casts => [
CAST_PRECISION_LOSS,
CAST_SIGN_LOSS,
CAST_POSSIBLE_TRUNCATION,
CAST_POSSIBLE_WRAP,
CAST_LOSSLESS,
UNNECESSARY_CAST,
CAST_PTR_ALIGNMENT,
FN_TO_NUMERIC_CAST,
FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
]);
// Check if the given type is either `core::ffi::c_void` or
// one of the platform specific `libc::<platform>::c_void` of libc.
fn is_c_void(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
if let ty::Adt(adt, _) = ty.kind() {
let names = cx.get_def_path(adt.did);
if names.is_empty() {
return false;
}
        if (names[0] == sym::libc || names[0] == sym::core) && *names.last().unwrap() == sym!(c_void) {
return true;
}
}
false
}
/// Returns the width in bits of the mantissa of a floating-point type.
/// Returns 0 if the type is not a floating-point type.
fn fp_ty_mantissa_nbits(typ: Ty<'_>) -> u32 {
match typ.kind() {
ty::Float(FloatTy::F32) => 23,
ty::Float(FloatTy::F64) | ty::Infer(InferTy::FloatVar(_)) => 52,
_ => 0,
}
}
impl<'tcx> LateLintPass<'tcx> for Casts {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if expr.span.from_expansion() {
return;
}
if let ExprKind::Cast(ref ex, cast_to) = expr.kind {
if is_hir_ty_cfg_dependant(cx, cast_to) {
return;
}
let (cast_from, cast_to) = (cx.typeck_results().expr_ty(ex), cx.typeck_results().expr_ty(expr));
lint_fn_to_numeric_cast(cx, expr, ex, cast_from, cast_to);
if let Some(lit) = get_numeric_literal(ex) {
let literal_str = snippet_opt(cx, ex.span).unwrap_or_default();
if_chain! {
if let LitKind::Int(n, _) = lit.node;
if let Some(src) = snippet_opt(cx, lit.span);
if cast_to.is_floating_point();
if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node);
let from_nbits = 128 - n.leading_zeros();
let to_nbits = fp_ty_mantissa_nbits(cast_to);
if from_nbits != 0 && to_nbits != 0 && from_nbits <= to_nbits && num_lit.is_decimal();
then {
let literal_str = if is_unary_neg(ex) { format!("-{}", num_lit.integer) } else { num_lit.integer.into() };
show_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
return;
}
}
match lit.node {
LitKind::Int(_, LitIntType::Unsuffixed) if cast_to.is_integral() => {
show_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
},
LitKind::Float(_, LitFloatType::Unsuffixed) if cast_to.is_floating_point() => {
show_unnecessary_cast(cx, expr, &literal_str, cast_from, cast_to);
},
LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) => {},
_ => {
if cast_from.kind() == cast_to.kind() && !in_external_macro(cx.sess(), expr.span) {
span_lint(
cx,
UNNECESSARY_CAST,
expr.span,
&format!(
"casting to the same type is unnecessary (`{}` -> `{}`)",
cast_from, cast_to
),
);
}
},
}
}
if cast_from.is_numeric() && cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) {
lint_numeric_casts(cx, expr, ex, cast_from, cast_to);
}
lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
} else if let ExprKind::MethodCall(method_path, _, args, _) = expr.kind {
if_chain! {
if method_path.ident.name == sym!(cast);
if let Some(generic_args) = method_path.args;
if let [GenericArg::Type(cast_to)] = generic_args.args;
// There probably is no obvious reason to do this, just to be consistent with `as` cases.
if !is_hir_ty_cfg_dependant(cx, cast_to);
then {
let (cast_from, cast_to) =
(cx.typeck_results().expr_ty(&args[0]), cx.typeck_results().expr_ty(expr));
lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
}
}
}
}
}
fn is_unary_neg(expr: &Expr<'_>) -> bool {
matches!(expr.kind, ExprKind::Unary(UnOp::Neg, _))
}
fn get_numeric_literal<'e>(expr: &'e Expr<'e>) -> Option<&'e Lit> {
match expr.kind {
ExprKind::Lit(ref lit) => Some(lit),
ExprKind::Unary(UnOp::Neg, e) => {
if let ExprKind::Lit(ref lit) = e.kind {
Some(lit)
} else {
None
}
},
_ => None,
}
}
fn show_unnecessary_cast(cx: &LateContext<'_>, expr: &Expr<'_>, literal_str: &str, cast_from: Ty<'_>, cast_to: Ty<'_>) {
    let literal_kind_name = if cast_from.is_integral() { "integer" } else { "float" };
    span_lint_and_sugg(
        cx,
        UNNECESSARY_CAST,
        expr.span,
        &format!("casting {} literal to `{}` is unnecessary", literal_kind_name, cast_to),
        "try",
        format!("{}_{}", literal_str.trim_end_matches('.'), cast_to),
        Applicability::MachineApplicable,
    );
}
fn lint_numeric_casts<'tcx>(
cx: &LateContext<'tcx>,
expr: &Expr<'tcx>,
cast_expr: &Expr<'_>,
cast_from: Ty<'tcx>,
cast_to: Ty<'tcx>,
) {
match (cast_from.is_integral(), cast_to.is_integral()) {
(true, false) => {
let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.kind() {
32
} else {
64
};
if is_isize_or_usize(cast_from) || from_nbits >= to_nbits {
span_precision_loss_lint(cx, expr, cast_from, to_nbits == 64);
}
if from_nbits < to_nbits {
span_lossless_lint(cx, expr, cast_expr, cast_from, cast_to);
}
},
(false, true) => {
span_lint(
cx,
CAST_POSSIBLE_TRUNCATION,
expr.span,
&format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to),
);
if !cast_to.is_signed() {
span_lint(
cx,
CAST_SIGN_LOSS,
expr.span,
&format!(
"casting `{}` to `{}` may lose the sign of the value",
cast_from, cast_to
),
);
}
},
(true, true) => {
check_loss_of_sign(cx, expr, cast_expr, cast_from, cast_to);
check_truncation_and_wrapping(cx, expr, cast_from, cast_to);
check_lossless(cx, expr, cast_expr, cast_from, cast_to);
},
(false, false) => {
if let (&ty::Float(FloatTy::F64), &ty::Float(FloatTy::F32)) = (&cast_from.kind(), &cast_to.kind()) {
span_lint(
cx,
CAST_POSSIBLE_TRUNCATION,
expr.span,
"casting `f64` to `f32` may truncate the value",
);
}
if let (&ty::Float(FloatTy::F32), &ty::Float(FloatTy::F64)) = (&cast_from.kind(), &cast_to.kind()) {
span_lossless_lint(cx, expr, cast_expr, cast_from, cast_to);
}
},
}
}
fn lint_cast_ptr_alignment<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>, cast_from: Ty<'tcx>, cast_to: Ty<'tcx>) {
if_chain! {
if let ty::RawPtr(from_ptr_ty) = &cast_from.kind();
if let ty::RawPtr(to_ptr_ty) = &cast_to.kind();
if let Ok(from_layout) = cx.layout_of(from_ptr_ty.ty);
if let Ok(to_layout) = cx.layout_of(to_ptr_ty.ty);
if from_layout.align.abi < to_layout.align.abi;
// with c_void, we inherently need to trust the user
if !is_c_void(cx, from_ptr_ty.ty);
// when casting from a ZST, we don't know enough to properly lint
if !from_layout.is_zst();
then {
span_lint(
cx,
CAST_PTR_ALIGNMENT,
expr.span,
&format!(
"casting from `{}` to a more-strictly-aligned pointer (`{}`) ({} < {} bytes)",
cast_from,
cast_to,
from_layout.align.abi.bytes(),
to_layout.align.abi.bytes(),
),
);
}
}
}
fn lint_fn_to_numeric_cast(
cx: &LateContext<'_>,
expr: &Expr<'_>,
cast_expr: &Expr<'_>,
cast_from: Ty<'_>,
cast_to: Ty<'_>,
) {
// We only want to check casts to `ty::Uint` or `ty::Int`
match cast_to.kind() {
ty::Uint(_) | ty::Int(..) => { /* continue on */ },
_ => return,
}
match cast_from.kind() {
ty::FnDef(..) | ty::FnPtr(_) => {
let mut applicability = Applicability::MaybeIncorrect;
let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
if to_nbits < cx.tcx.data_layout.pointer_size.bits() {
span_lint_and_sugg(
cx,
FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
expr.span,
&format!(
"casting function pointer `{}` to `{}`, which truncates the value",
from_snippet, cast_to
),
"try",
format!("{} as usize", from_snippet),
applicability,
);
} else if *cast_to.kind() != ty::Uint(UintTy::Usize) {
span_lint_and_sugg(
cx,
FN_TO_NUMERIC_CAST,
expr.span,
&format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
"try",
format!("{} as usize", from_snippet),
applicability,
);
}
},
_ => {},
}
}
declare_clippy_lint! {
/// **What it does:** Checks for types used in structs, parameters and `let`
/// declarations above a certain complexity threshold.
///
/// **Why is this bad?** Too complex types make the code less readable. Consider
/// using a `type` definition to simplify them.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # use std::rc::Rc;
/// struct Foo {
/// inner: Rc<Vec<Vec<Box<(u32, u32, u32, u32)>>>>,
/// }
/// ```
pub TYPE_COMPLEXITY,
complexity,
"usage of very complex types that might be better factored into `type` definitions"
}
pub struct TypeComplexity {
threshold: u64,
}
impl TypeComplexity {
#[must_use]
pub fn new(threshold: u64) -> Self {
Self { threshold }
}
}
impl_lint_pass!(TypeComplexity => [TYPE_COMPLEXITY]);
impl<'tcx> LateLintPass<'tcx> for TypeComplexity {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
_: FnKind<'tcx>,
decl: &'tcx FnDecl<'_>,
_: &'tcx Body<'_>,
_: Span,
_: HirId,
) {
self.check_fndecl(cx, decl);
}
fn check_struct_field(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::StructField<'_>) {
// enum variants are also struct fields now
self.check_type(cx, &field.ty);
}
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
match item.kind {
ItemKind::Static(ref ty, _, _) | ItemKind::Const(ref ty, _) => self.check_type(cx, ty),
// functions, enums, structs, impls and traits are covered
_ => (),
}
}
fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'_>) {
match item.kind {
TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => self.check_type(cx, ty),
TraitItemKind::Fn(FnSig { ref decl, .. }, TraitFn::Required(_)) => self.check_fndecl(cx, decl),
// methods with default impl are covered by check_fn
_ => (),
}
}
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'_>) {
match item.kind {
ImplItemKind::Const(ref ty, _) | ImplItemKind::TyAlias(ref ty) => self.check_type(cx, ty),
// methods are covered by check_fn
_ => (),
}
}
fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'_>) {
if let Some(ref ty) = local.ty {
self.check_type(cx, ty);
}
}
}
impl<'tcx> TypeComplexity {
fn check_fndecl(&self, cx: &LateContext<'tcx>, decl: &'tcx FnDecl<'_>) {
for arg in decl.inputs {
self.check_type(cx, arg);
}
if let FnRetTy::Return(ref ty) = decl.output {
self.check_type(cx, ty);
}
}
fn check_type(&self, cx: &LateContext<'_>, ty: &hir::Ty<'_>) {
if ty.span.from_expansion() {
return;
}
let score = {
let mut visitor = TypeComplexityVisitor { score: 0, nest: 1 };
visitor.visit_ty(ty);
visitor.score
};
if score > self.threshold {
span_lint(
cx,
TYPE_COMPLEXITY,
ty.span,
"very complex type used. Consider factoring parts into `type` definitions",
);
}
}
}
/// Walks a type and assigns a complexity score to it.
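/// For example (an informal sketch of the scoring in `visit_ty`):
/// `Rc<Vec<u32>>` scores 10 (`Rc` at nest 1) + 20 (`Vec` at nest 2) +
/// 30 (`u32` at nest 3) = 60.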
struct TypeComplexityVisitor {
/// total complexity score of the type
score: u64,
/// current nesting level
nest: u64,
}
impl<'tcx> Visitor<'tcx> for TypeComplexityVisitor {
type Map = Map<'tcx>;
fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
let (add_score, sub_nest) = match ty.kind {
// _, &x and *x have only small overhead; don't mess with nesting level
TyKind::Infer | TyKind::Ptr(..) | TyKind::Rptr(..) => (1, 0),
// the "normal" components of a type: named types, arrays/tuples
TyKind::Path(..) | TyKind::Slice(..) | TyKind::Tup(..) | TyKind::Array(..) => (10 * self.nest, 1),
// function types bring a lot of overhead
TyKind::BareFn(ref bare) if bare.abi == Abi::Rust => (50 * self.nest, 1),
TyKind::TraitObject(ref param_bounds, _) => {
let has_lifetime_parameters = param_bounds.iter().any(|bound| {
bound
.bound_generic_params
.iter()
.any(|gen| matches!(gen.kind, GenericParamKind::Lifetime { .. }))
});
if has_lifetime_parameters {
// complex trait bounds like A<'a, 'b>
(50 * self.nest, 1)
} else {
// simple trait bounds like A + B
(20 * self.nest, 0)
}
},
_ => (0, 0),
};
self.score += add_score;
self.nest += sub_nest;
walk_ty(self, ty);
self.nest -= sub_nest;
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
declare_clippy_lint! {
/// **What it does:** Checks for expressions where a character literal is cast
/// to `u8` and suggests using a byte literal instead.
///
/// **Why is this bad?** In general, casting values to smaller types is
/// error-prone and should be avoided where possible. In the particular case of
/// converting a character literal to u8, it is easy to avoid by just using a
/// byte literal instead. As an added bonus, `b'a'` is even slightly shorter
/// than `'a' as u8`.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// 'x' as u8
/// ```
///
/// A better version, using the byte literal:
///
/// ```rust,ignore
/// b'x'
/// ```
pub CHAR_LIT_AS_U8,
complexity,
"casting a character literal to `u8` truncates"
}
declare_lint_pass!(CharLitAsU8 => [CHAR_LIT_AS_U8]);
impl<'tcx> LateLintPass<'tcx> for CharLitAsU8 {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if !expr.span.from_expansion();
if let ExprKind::Cast(e, _) = &expr.kind;
if let ExprKind::Lit(l) = &e.kind;
if let LitKind::Char(c) = l.node;
if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(expr).kind();
then {
let mut applicability = Applicability::MachineApplicable;
let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
span_lint_and_then(
cx,
CHAR_LIT_AS_U8,
expr.span,
"casting a character literal to `u8` truncates",
|diag| {
diag.note("`char` is four bytes wide, but `u8` is a single byte");
if c.is_ascii() {
diag.span_suggestion(
expr.span,
"use a byte literal instead",
format!("b{}", snippet),
applicability,
);
}
});
}
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for comparisons where one side of the relation is
/// either the minimum or maximum value for its type and warns if it involves a
/// case that is always true or always false. Only integer and boolean types are
/// checked.
///
/// **Why is this bad?** An expression like `min <= x` may misleadingly imply
/// that it is possible for `x` to be less than the minimum. Expressions like
/// `max < x` are probably mistakes.
///
    /// **Known problems:** For `usize` the size of the current compile target will
    /// be assumed (e.g., 64 bits on 64-bit systems). This means code that uses such
    /// a comparison to detect target pointer width will trigger this lint. One can
    /// use `mem::size_of` and compare its value, or conditional compilation
    /// attributes like `#[cfg(target_pointer_width = "64")] ..` instead (see the
    /// sketch after the example below).
///
/// **Example:**
///
/// ```rust
/// let vec: Vec<isize> = Vec::new();
/// if vec.len() <= 0 {}
/// if 100 > i32::MAX {}
/// ```
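    ///
    /// To branch on the target's pointer width without tripping this lint
    /// (a sketch):
    /// ```rust
    /// if std::mem::size_of::<usize>() == 8 {
    ///     // pointers are 64 bits wide here
    /// }
    /// ```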
pub ABSURD_EXTREME_COMPARISONS,
correctness,
"a comparison with a maximum or minimum value that is always true or false"
}
declare_lint_pass!(AbsurdExtremeComparisons => [ABSURD_EXTREME_COMPARISONS]);
enum ExtremeType {
Minimum,
Maximum,
}
struct ExtremeExpr<'a> {
which: ExtremeType,
expr: &'a Expr<'a>,
}
enum AbsurdComparisonResult {
AlwaysFalse,
AlwaysTrue,
InequalityImpossible,
}
fn is_cast_between_fixed_and_target<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
let precast_ty = cx.typeck_results().expr_ty(cast_exp);
let cast_ty = cx.typeck_results().expr_ty(expr);
return is_isize_or_usize(precast_ty) != is_isize_or_usize(cast_ty);
}
false
}
fn detect_absurd_comparison<'tcx>(
cx: &LateContext<'tcx>,
op: BinOpKind,
lhs: &'tcx Expr<'_>,
rhs: &'tcx Expr<'_>,
) -> Option<(ExtremeExpr<'tcx>, AbsurdComparisonResult)> {
use crate::types::AbsurdComparisonResult::{AlwaysFalse, AlwaysTrue, InequalityImpossible};
use crate::types::ExtremeType::{Maximum, Minimum};
use crate::utils::comparisons::{normalize_comparison, Rel};
// absurd comparison only makes sense on primitive types
// primitive types don't implement comparison operators with each other
if cx.typeck_results().expr_ty(lhs) != cx.typeck_results().expr_ty(rhs) {
return None;
}
    // comparisons between fixed-size types and target-dependent-size types are considered unanalyzable
if is_cast_between_fixed_and_target(cx, lhs) || is_cast_between_fixed_and_target(cx, rhs) {
return None;
}
let (rel, normalized_lhs, normalized_rhs) = normalize_comparison(op, lhs, rhs)?;
let lx = detect_extreme_expr(cx, normalized_lhs);
let rx = detect_extreme_expr(cx, normalized_rhs);
Some(match rel {
Rel::Lt => {
match (lx, rx) {
(Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, AlwaysFalse), // max < x
(_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, AlwaysFalse), // x < min
_ => return None,
}
},
Rel::Le => {
match (lx, rx) {
(Some(l @ ExtremeExpr { which: Minimum, .. }), _) => (l, AlwaysTrue), // min <= x
(Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, InequalityImpossible), // max <= x
(_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, InequalityImpossible), // x <= min
(_, Some(r @ ExtremeExpr { which: Maximum, .. })) => (r, AlwaysTrue), // x <= max
_ => return None,
}
},
Rel::Ne | Rel::Eq => return None,
})
}
fn detect_extreme_expr<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<ExtremeExpr<'tcx>> {
use crate::types::ExtremeType::{Maximum, Minimum};
let ty = cx.typeck_results().expr_ty(expr);
let cv = constant(cx, cx.typeck_results(), expr)?.0;
let which = match (ty.kind(), cv) {
(&ty::Bool, Constant::Bool(false)) | (&ty::Uint(_), Constant::Int(0)) => Minimum,
(&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::MIN >> (128 - int_bits(cx.tcx, ity)), ity) => {
Minimum
},
(&ty::Bool, Constant::Bool(true)) => Maximum,
(&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::MAX >> (128 - int_bits(cx.tcx, ity)), ity) => {
Maximum
},
(&ty::Uint(uty), Constant::Int(i)) if clip(cx.tcx, u128::MAX, uty) == i => Maximum,
_ => return None,
};
Some(ExtremeExpr { which, expr })
}
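// Worked check of the shift trick above (illustrative; assumes a toolchain
// with `assert!` in `const` contexts, stable since Rust 1.57): for an n-bit
// signed type, `i128::MIN >> (128 - n)` is its minimum and
// `i128::MAX >> (128 - n)` its maximum, e.g. for `i8` (n = 8):
const _: () = {
    assert!(i128::MIN >> (128 - 8) == i8::MIN as i128);
    assert!(i128::MAX >> (128 - 8) == i8::MAX as i128);
};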
impl<'tcx> LateLintPass<'tcx> for AbsurdExtremeComparisons {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
use crate::types::AbsurdComparisonResult::{AlwaysFalse, AlwaysTrue, InequalityImpossible};
use crate::types::ExtremeType::{Maximum, Minimum};
if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.kind {
if let Some((culprit, result)) = detect_absurd_comparison(cx, cmp.node, lhs, rhs) {
if !expr.span.from_expansion() {
let msg = "this comparison involving the minimum or maximum element for this \
type contains a case that is always true or always false";
let conclusion = match result {
AlwaysFalse => "this comparison is always false".to_owned(),
AlwaysTrue => "this comparison is always true".to_owned(),
InequalityImpossible => format!(
"the case where the two sides are not equal never occurs, consider using `{} == {}` \
instead",
snippet(cx, lhs.span, "lhs"),
snippet(cx, rhs.span, "rhs")
),
};
let help = format!(
"because `{}` is the {} value for this type, {}",
snippet(cx, culprit.expr.span, "x"),
match culprit.which {
Minimum => "minimum",
Maximum => "maximum",
},
conclusion
);
span_lint_and_help(cx, ABSURD_EXTREME_COMPARISONS, expr.span, msg, None, &help);
}
}
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for comparisons where the relation is always either
/// true or false, but where one side has been upcast so that the comparison is
/// necessary. Only integer types are checked.
///
/// **Why is this bad?** An expression like `let x : u8 = ...; (x as u32) > 300`
/// will mistakenly imply that it is possible for `x` to be outside the range of
/// `u8`.
///
/// **Known problems:**
/// https://github.com/rust-lang/rust-clippy/issues/886
///
/// **Example:**
/// ```rust
/// let x: u8 = 1;
/// (x as u32) > 300;
/// ```
pub INVALID_UPCAST_COMPARISONS,
pedantic,
"a comparison involving an upcast which is always true or false"
}
declare_lint_pass!(InvalidUpcastComparisons => [INVALID_UPCAST_COMPARISONS]);
#[derive(Copy, Clone, Debug, Eq)]
enum FullInt {
S(i128),
U(u128),
}
impl FullInt {
#[allow(clippy::cast_sign_loss)]
#[must_use]
fn cmp_s_u(s: i128, u: u128) -> Ordering {
if s < 0 {
Ordering::Less
} else if u > (i128::MAX as u128) {
            Ordering::Less
} else {
(s as u128).cmp(&u)
}
}
}
impl PartialEq for FullInt {
#[must_use]
fn eq(&self, other: &Self) -> bool {
self.partial_cmp(other).expect("`partial_cmp` only returns `Some(_)`") == Ordering::Equal
}
}
impl PartialOrd for FullInt {
#[must_use]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(match (self, other) {
(&Self::S(s), &Self::S(o)) => s.cmp(&o),
(&Self::U(s), &Self::U(o)) => s.cmp(&o),
(&Self::S(s), &Self::U(o)) => Self::cmp_s_u(s, o),
(&Self::U(s), &Self::S(o)) => Self::cmp_s_u(o, s).reverse(),
})
}
}
impl Ord for FullInt {
#[must_use]
fn cmp(&self, other: &Self) -> Ordering {
self.partial_cmp(other)
.expect("`partial_cmp` for FullInt can never return `None`")
}
}
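// Illustrative checks of the mixed-sign ordering above (a sketch; not part of
// the crate's real test suite):
#[cfg(test)]
mod full_int_ordering_tests {
    use super::FullInt;
    use std::cmp::Ordering;

    #[test]
    fn mixed_sign_ordering() {
        // Any negative signed value sorts below any unsigned value.
        assert_eq!(FullInt::S(-1).cmp(&FullInt::U(0)), Ordering::Less);
        // Values representable in both domains compare numerically.
        assert_eq!(FullInt::S(42), FullInt::U(42));
        // An unsigned value above `i128::MAX` sorts above every signed value.
        assert_eq!(FullInt::U(u128::MAX).cmp(&FullInt::S(i128::MAX)), Ordering::Greater);
    }
}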
fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_>, expr: &'a Expr<'_>) -> Option<(FullInt, FullInt)> {
if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
let pre_cast_ty = cx.typeck_results().expr_ty(cast_exp);
let cast_ty = cx.typeck_results().expr_ty(expr);
        // casts between same-sized types (e.g., i32 to u32) can wrap, which would invalidate all these checks
if cx.layout_of(pre_cast_ty).ok().map(|l| l.size) == cx.layout_of(cast_ty).ok().map(|l| l.size) {
return None;
}
match pre_cast_ty.kind() {
ty::Int(int_ty) => Some(match int_ty {
IntTy::I8 => (FullInt::S(i128::from(i8::MIN)), FullInt::S(i128::from(i8::MAX))),
IntTy::I16 => (FullInt::S(i128::from(i16::MIN)), FullInt::S(i128::from(i16::MAX))),
IntTy::I32 => (FullInt::S(i128::from(i32::MIN)), FullInt::S(i128::from(i32::MAX))),
IntTy::I64 => (FullInt::S(i128::from(i64::MIN)), FullInt::S(i128::from(i64::MAX))),
IntTy::I128 => (FullInt::S(i128::MIN), FullInt::S(i128::MAX)),
IntTy::Isize => (FullInt::S(isize::MIN as i128), FullInt::S(isize::MAX as i128)),
}),
ty::Uint(uint_ty) => Some(match uint_ty {
UintTy::U8 => (FullInt::U(u128::from(u8::MIN)), FullInt::U(u128::from(u8::MAX))),
UintTy::U16 => (FullInt::U(u128::from(u16::MIN)), FullInt::U(u128::from(u16::MAX))),
UintTy::U32 => (FullInt::U(u128::from(u32::MIN)), FullInt::U(u128::from(u32::MAX))),
UintTy::U64 => (FullInt::U(u128::from(u64::MIN)), FullInt::U(u128::from(u64::MAX))),
UintTy::U128 => (FullInt::U(u128::MIN), FullInt::U(u128::MAX)),
UintTy::Usize => (FullInt::U(usize::MIN as u128), FullInt::U(usize::MAX as u128)),
}),
_ => None,
}
} else {
None
}
}
fn node_as_const_fullint<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<FullInt> {
let val = constant(cx, cx.typeck_results(), expr)?.0;
if let Constant::Int(const_int) = val {
match *cx.typeck_results().expr_ty(expr).kind() {
ty::Int(ity) => Some(FullInt::S(sext(cx.tcx, const_int, ity))),
ty::Uint(_) => Some(FullInt::U(const_int)),
_ => None,
}
} else {
None
}
}
fn err_upcast_comparison(cx: &LateContext<'_>, span: Span, expr: &Expr<'_>, always: bool) {
if let ExprKind::Cast(ref cast_val, _) = expr.kind {
span_lint(
cx,
INVALID_UPCAST_COMPARISONS,
span,
&format!(
"because of the numeric bounds on `{}` prior to casting, this expression is always {}",
snippet(cx, cast_val.span, "the expression"),
if always { "true" } else { "false" },
),
);
}
}
fn upcast_comparison_bounds_err<'tcx>(
cx: &LateContext<'tcx>,
span: Span,
rel: comparisons::Rel,
lhs_bounds: Option<(FullInt, FullInt)>,
lhs: &'tcx Expr<'_>,
rhs: &'tcx Expr<'_>,
invert: bool,
) {
use crate::utils::comparisons::Rel;
if let Some((lb, ub)) = lhs_bounds {
if let Some(norm_rhs_val) = node_as_const_fullint(cx, rhs) {
if rel == Rel::Eq || rel == Rel::Ne {
if norm_rhs_val < lb || norm_rhs_val > ub {
err_upcast_comparison(cx, span, lhs, rel == Rel::Ne);
}
} else if match rel {
Rel::Lt => {
if invert {
norm_rhs_val < lb
} else {
ub < norm_rhs_val
}
},
Rel::Le => {
if invert {
norm_rhs_val <= lb
} else {
ub <= norm_rhs_val
}
},
Rel::Eq | Rel::Ne => unreachable!(),
} {
err_upcast_comparison(cx, span, lhs, true)
} else if match rel {
Rel::Lt => {
if invert {
norm_rhs_val >= ub
} else {
lb >= norm_rhs_val
}
},
Rel::Le => {
if invert {
norm_rhs_val > ub
} else {
lb > norm_rhs_val
}
},
Rel::Eq | Rel::Ne => unreachable!(),
} {
err_upcast_comparison(cx, span, lhs, false)
}
}
}
}
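// Worked trace (illustrative): for `let x: u8 = 1; (x as u32) > 300`,
// normalization rewrites `lhs > rhs` as `rhs < lhs`, so this function runs
// with `rel == Rel::Lt`, bounds `[0, 255]` for the cast side, the constant
// `300`, and `invert == true`; `norm_rhs_val >= ub` (300 >= 255) holds, so
// the comparison is reported as always false.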
impl<'tcx> LateLintPass<'tcx> for InvalidUpcastComparisons {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.kind {
let normalized = comparisons::normalize_comparison(cmp.node, lhs, rhs);
let (rel, normalized_lhs, normalized_rhs) = if let Some(val) = normalized {
val
} else {
return;
};
let lhs_bounds = numeric_cast_precast_bounds(cx, normalized_lhs);
let rhs_bounds = numeric_cast_precast_bounds(cx, normalized_rhs);
upcast_comparison_bounds_err(cx, expr.span, rel, lhs_bounds, normalized_lhs, normalized_rhs, false);
upcast_comparison_bounds_err(cx, expr.span, rel, rhs_bounds, normalized_rhs, normalized_lhs, true);
}
}
}
declare_clippy_lint! {
/// **What it does:** Checks for public `impl` or `fn` missing generalization
/// over different hashers and implicitly defaulting to the default hashing
/// algorithm (`SipHash`).
///
    /// **Why is this bad?** A `HashMap` or `HashSet` with a custom hasher cannot
    /// be used with such an `impl` or `fn`.
///
/// **Known problems:** Suggestions for replacing constructors can contain
/// false-positives. Also applying suggestions can require modification of other
/// pieces of code, possibly including external crates.
///
/// **Example:**
/// ```rust
/// # use std::collections::HashMap;
/// # use std::hash::{Hash, BuildHasher};
/// # trait Serialize {};
/// impl<K: Hash + Eq, V> Serialize for HashMap<K, V> { }
///
/// pub fn foo(map: &mut HashMap<i32, i32>) { }
/// ```
/// could be rewritten as
/// ```rust
/// # use std::collections::HashMap;
/// # use std::hash::{Hash, BuildHasher};
/// # trait Serialize {};
/// impl<K: Hash + Eq, V, S: BuildHasher> Serialize for HashMap<K, V, S> { }
///
/// pub fn foo<S: BuildHasher>(map: &mut HashMap<i32, i32, S>) { }
/// ```
pub IMPLICIT_HASHER,
pedantic,
"missing generalization over different hashers"
}
declare_lint_pass!(ImplicitHasher => [IMPLICIT_HASHER]);
impl<'tcx> LateLintPass<'tcx> for ImplicitHasher {
#[allow(clippy::cast_possible_truncation, clippy::too_many_lines)]
fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) {
use rustc_span::BytePos;
fn suggestion<'tcx>(
cx: &LateContext<'tcx>,
diag: &mut DiagnosticBuilder<'_>,
generics_span: Span,
generics_suggestion_span: Span,
target: &ImplicitHasherType<'_>,
vis: ImplicitHasherConstructorVisitor<'_, '_, '_>,
) {
let generics_snip = snippet(cx, generics_span, "");
// trim `<` `>`
let generics_snip = if generics_snip.is_empty() {
""
} else {
&generics_snip[1..generics_snip.len() - 1]
};
multispan_sugg(
diag,
"consider adding a type parameter",
vec![
(
generics_suggestion_span,
format!(
"<{}{}S: ::std::hash::BuildHasher{}>",
generics_snip,
if generics_snip.is_empty() { "" } else { ", " },
if vis.suggestions.is_empty() {
""
} else {
// request users to add `Default` bound so that generic constructors can be used
" + Default"
},
),
),
(
target.span(),
format!("{}<{}, S>", target.type_name(), target.type_arguments(),),
),
],
);
if !vis.suggestions.is_empty() {
multispan_sugg(diag, "...and use generic constructor", vis.suggestions);
}
}
if !cx.access_levels.is_exported(item.hir_id()) {
return;
}
match item.kind {
ItemKind::Impl(ref impl_) => {
let mut vis = ImplicitHasherTypeVisitor::new(cx);
vis.visit_ty(impl_.self_ty);
for target in &vis.found {
if differing_macro_contexts(item.span, target.span()) {
return;
}
let generics_suggestion_span = impl_.generics.span.substitute_dummy({
let pos = snippet_opt(cx, item.span.until(target.span()))
.and_then(|snip| Some(item.span.lo() + BytePos(snip.find("impl")? as u32 + 4)));
if let Some(pos) = pos {
Span::new(pos, pos, item.span.data().ctxt)
} else {
return;
}
});
let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
for item in impl_.items.iter().map(|item| cx.tcx.hir().impl_item(item.id)) {
ctr_vis.visit_impl_item(item);
}
span_lint_and_then(
cx,
IMPLICIT_HASHER,
target.span(),
&format!(
"impl for `{}` should be generalized over different hashers",
target.type_name()
),
move |diag| {
suggestion(cx, diag, impl_.generics.span, generics_suggestion_span, target, ctr_vis);
},
);
}
},
ItemKind::Fn(ref sig, ref generics, body_id) => {
let body = cx.tcx.hir().body(body_id);
for ty in sig.decl.inputs {
let mut vis = ImplicitHasherTypeVisitor::new(cx);
vis.visit_ty(ty);
for target in &vis.found {
if in_external_macro(cx.sess(), generics.span) {
continue;
}
let generics_suggestion_span = generics.span.substitute_dummy({
let pos = snippet_opt(cx, item.span.until(body.params[0].pat.span))
.and_then(|snip| {
let i = snip.find("fn")?;
Some(item.span.lo() + BytePos((i + (&snip[i..]).find('(')?) as u32))
})
.expect("failed to create span for type parameters");
Span::new(pos, pos, item.span.data().ctxt)
});
let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
ctr_vis.visit_body(body);
span_lint_and_then(
cx,
IMPLICIT_HASHER,
target.span(),
&format!(
"parameter of type `{}` should be generalized over different hashers",
target.type_name()
),
move |diag| {
suggestion(cx, diag, generics.span, generics_suggestion_span, target, ctr_vis);
},
);
}
}
},
_ => {},
}
}
}
enum ImplicitHasherType<'tcx> {
HashMap(Span, Ty<'tcx>, Cow<'static, str>, Cow<'static, str>),
HashSet(Span, Ty<'tcx>, Cow<'static, str>),
}
impl<'tcx> ImplicitHasherType<'tcx> {
/// Checks that `ty` is a target type without a `BuildHasher`.
fn new(cx: &LateContext<'tcx>, hir_ty: &hir::Ty<'_>) -> Option<Self> {
if let TyKind::Path(QPath::Resolved(None, ref path)) = hir_ty.kind {
let params: Vec<_> = path
.segments
.last()
.as_ref()?
.args
.as_ref()?
.args
.iter()
.filter_map(|arg| match arg {
GenericArg::Type(ty) => Some(ty),
_ => None,
})
.collect();
let params_len = params.len();
let ty = hir_ty_to_ty(cx.tcx, hir_ty);
if is_type_diagnostic_item(cx, ty, sym!(hashmap_type)) && params_len == 2 {
Some(ImplicitHasherType::HashMap(
hir_ty.span,
ty,
snippet(cx, params[0].span, "K"),
snippet(cx, params[1].span, "V"),
))
} else if is_type_diagnostic_item(cx, ty, sym!(hashset_type)) && params_len == 1 {
Some(ImplicitHasherType::HashSet(
hir_ty.span,
ty,
snippet(cx, params[0].span, "T"),
))
} else {
None
}
} else {
None
}
}
fn type_name(&self) -> &'static str {
match *self {
ImplicitHasherType::HashMap(..) => "HashMap",
ImplicitHasherType::HashSet(..) => "HashSet",
}
}
fn type_arguments(&self) -> String {
match *self {
ImplicitHasherType::HashMap(.., ref k, ref v) => format!("{}, {}", k, v),
ImplicitHasherType::HashSet(.., ref t) => format!("{}", t),
}
}
fn ty(&self) -> Ty<'tcx> {
match *self {
ImplicitHasherType::HashMap(_, ty, ..) | ImplicitHasherType::HashSet(_, ty, ..) => ty,
}
}
fn span(&self) -> Span {
match *self {
ImplicitHasherType::HashMap(span, ..) | ImplicitHasherType::HashSet(span, ..) => span,
}
}
}
struct ImplicitHasherTypeVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
found: Vec<ImplicitHasherType<'tcx>>,
}
impl<'a, 'tcx> ImplicitHasherTypeVisitor<'a, 'tcx> {
fn new(cx: &'a LateContext<'tcx>) -> Self {
Self { cx, found: vec![] }
}
}
impl<'a, 'tcx> Visitor<'tcx> for ImplicitHasherTypeVisitor<'a, 'tcx> {
type Map = Map<'tcx>;
fn visit_ty(&mut self, t: &'tcx hir::Ty<'_>) {
if let Some(target) = ImplicitHasherType::new(self.cx, t) {
self.found.push(target);
}
walk_ty(self, t);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}
}
/// Looks for default-hasher-dependent constructors like `HashMap::new`.
struct ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
cx: &'a LateContext<'tcx>,
maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
target: &'b ImplicitHasherType<'tcx>,
suggestions: BTreeMap<Span, String>,
}
impl<'a, 'b, 'tcx> ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
fn new(cx: &'a LateContext<'tcx>, target: &'b ImplicitHasherType<'tcx>) -> Self {
Self {
cx,
maybe_typeck_results: cx.maybe_typeck_results(),
target,
suggestions: BTreeMap::new(),
}
}
}
impl<'a, 'b, 'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
type Map = Map<'tcx>;
fn visit_body(&mut self, body: &'tcx Body<'_>) {
let old_maybe_typeck_results = self.maybe_typeck_results.replace(self.cx.tcx.typeck_body(body.id()));
walk_body(self, body);
self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
if_chain! {
if let ExprKind::Call(ref fun, ref args) = e.kind;
if let ExprKind::Path(QPath::TypeRelative(ref ty, ref method)) = fun.kind;
if let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind;
then {
if !TyS::same_type(self.target.ty(), self.maybe_typeck_results.unwrap().expr_ty(e)) {
return;
}
if match_path(ty_path, &paths::HASHMAP) {
if method.ident.name == sym::new {
self.suggestions
.insert(e.span, "HashMap::default()".to_string());
} else if method.ident.name == sym!(with_capacity) {
self.suggestions.insert(
e.span,
format!(
"HashMap::with_capacity_and_hasher({}, Default::default())",
snippet(self.cx, args[0].span, "capacity"),
),
);
}
} else if match_path(ty_path, &paths::HASHSET) {
if method.ident.name == sym::new {
self.suggestions
.insert(e.span, "HashSet::default()".to_string());
} else if method.ident.name == sym!(with_capacity) {
self.suggestions.insert(
e.span,
format!(
"HashSet::with_capacity_and_hasher({}, Default::default())",
snippet(self.cx, args[0].span, "capacity"),
),
);
}
}
}
}
walk_expr(self, e);
}
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
}
}
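// Sketch of the rewrite this visitor suggests (illustrative helper, not used
// by the lint): the generic constructor replaces the default-hasher one.
#[allow(dead_code)]
fn make_map<S: std::hash::BuildHasher + Default>() -> std::collections::HashMap<i32, i32, S> {
    // was: HashMap::with_capacity(8)
    std::collections::HashMap::with_capacity_and_hasher(8, Default::default())
}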
declare_clippy_lint! {
/// **What it does:** Checks for casts of `&T` to `&mut T` anywhere in the code.
///
/// **Why is this bad?** It’s basically guaranteed to be undefined behaviour.
/// `UnsafeCell` is the only way to obtain aliasable data that is considered
/// mutable.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust,ignore
/// fn x(r: &i32) {
/// unsafe {
/// *(r as *const _ as *mut _) += 1;
/// }
/// }
/// ```
///
/// Instead consider using interior mutability types.
///
/// ```rust
/// use std::cell::UnsafeCell;
///
/// fn x(r: &UnsafeCell<i32>) {
/// unsafe {
/// *r.get() += 1;
/// }
/// }
/// ```
pub CAST_REF_TO_MUT,
correctness,
"a cast of reference to a mutable pointer"
}
declare_lint_pass!(RefToMut => [CAST_REF_TO_MUT]);
impl<'tcx> LateLintPass<'tcx> for RefToMut {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if let ExprKind::Unary(UnOp::Deref, e) = &expr.kind;
if let ExprKind::Cast(e, t) = &e.kind;
if let TyKind::Ptr(MutTy { mutbl: Mutability::Mut, .. }) = t.kind;
if let ExprKind::Cast(e, t) = &e.kind;
if let TyKind::Ptr(MutTy { mutbl: Mutability::Not, .. }) = t.kind;
if let ty::Ref(..) = cx.typeck_results().node_type(e.hir_id).kind();
then {
span_lint(
cx,
CAST_REF_TO_MUT,
expr.span,
"casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`",
);
}
}
}
}
const PTR_AS_PTR_MSRV: RustcVersion = RustcVersion::new(1, 38, 0);
declare_clippy_lint! {
/// **What it does:**
    /// Checks for `as` casts between raw pointers without changing their mutability,
/// namely `*const T` to `*const U` and `*mut T` to `*mut U`.
///
/// **Why is this bad?**
    /// Though `as` casts between raw pointers are not terrible, `pointer::cast` is safer because
/// it cannot accidentally change the pointer's mutability nor cast the pointer to other types like `usize`.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```rust
/// let ptr: *const u32 = &42_u32;
/// let mut_ptr: *mut u32 = &mut 42_u32;
/// let _ = ptr as *const i32;
/// let _ = mut_ptr as *mut i32;
/// ```
/// Use instead:
/// ```rust
/// let ptr: *const u32 = &42_u32;
/// let mut_ptr: *mut u32 = &mut 42_u32;
/// let _ = ptr.cast::<i32>();
/// let _ = mut_ptr.cast::<i32>();
/// ```
pub PTR_AS_PTR,
pedantic,
"casting using `as` from and to raw pointers that doesn't change its mutability, where `pointer::cast` could take the place of `as`"
}
pub struct PtrAsPtr {
msrv: Option<RustcVersion>,
}
impl PtrAsPtr {
#[must_use]
pub fn new(msrv: Option<RustcVersion>) -> Self {
Self { msrv }
}
}
impl_lint_pass!(PtrAsPtr => [PTR_AS_PTR]);
impl<'tcx> LateLintPass<'tcx> for PtrAsPtr {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if !meets_msrv(self.msrv.as_ref(), &PTR_AS_PTR_MSRV) {
return;
}
if expr.span.from_expansion() {
return;
}
if_chain! {
if let ExprKind::Cast(cast_expr, cast_to_hir_ty) = expr.kind;
let (cast_from, cast_to) = (cx.typeck_results().expr_ty(cast_expr), cx.typeck_results().expr_ty(expr));
if let ty::RawPtr(TypeAndMut { mutbl: from_mutbl, .. }) = cast_from.kind();
if let ty::RawPtr(TypeAndMut { ty: to_pointee_ty, mutbl: to_mutbl }) = cast_to.kind();
if matches!((from_mutbl, to_mutbl),
(Mutability::Not, Mutability::Not) | (Mutability::Mut, Mutability::Mut));
            // The `U` in `pointer::cast` has to be `Sized`
// as explained here: https://github.com/rust-lang/rust/issues/60602.
if to_pointee_ty.is_sized(cx.tcx.at(expr.span), cx.param_env);
then {
let mut applicability = Applicability::MachineApplicable;
let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut applicability);
let turbofish = match &cast_to_hir_ty.kind {
TyKind::Infer => Cow::Borrowed(""),
TyKind::Ptr(mut_ty) if matches!(mut_ty.ty.kind, TyKind::Infer) => Cow::Borrowed(""),
_ => Cow::Owned(format!("::<{}>", to_pointee_ty)),
};
span_lint_and_sugg(
cx,
PTR_AS_PTR,
expr.span,
"`as` casting between raw pointers without changing its mutability",
"try `pointer::cast`, a safer alternative",
format!("{}.cast{}()", cast_expr_sugg.maybe_par(), turbofish),
applicability,
);
}
}
}
extract_msrv_attr!(LateContext);
}
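// Sketch of the suggestion shape produced above (illustrative): `p as *const i32`
// becomes `p.cast::<i32>()`, while `p as *const _` keeps relying on inference
// and becomes plain `p.cast()`, which is why the turbofish is omitted for
// `TyKind::Infer` targets.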
| let literal_kind_name = if cast_from.is_integral() { "integer" } else { "float" };
span_lint_and_sugg(
cx,
UNNECESSARY_CAST,
expr.span,
&format!("casting {} literal to `{}` is unnecessary", literal_kind_name, cast_to),
"try",
format!("{}_{}", literal_str.trim_end_matches('.'), cast_to),
Applicability::MachineApplicable,
);
}
|
with-deps.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate dep;
fn | () {
unsafe {
let v = dep::my_function();
if cfg!(foo) {
assert_eq!(v, 1);
} else if cfg!(bar) {
assert_eq!(v, 2);
} else {
panic!("unknown");
}
}
}
| main |
isy994.py | """
Support for ISY994 lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/isy994/
"""
import logging
from homeassistant.components.isy994 import (
HIDDEN_STRING, ISY, SENSOR_STRING, ISYDeviceABC)
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.const import STATE_OFF, STATE_ON
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the ISY994 platform."""
logger = logging.getLogger(__name__)
devs = []
if ISY is None or not ISY.connected:
logger.error('A connection has not been made to the ISY controller.')
return False
# Import dimmable nodes
for (path, node) in ISY.nodes:
if node.dimmable and SENSOR_STRING not in node.name:
if HIDDEN_STRING in path:
node.name += HIDDEN_STRING
devs.append(ISYLightDevice(node))
add_devices(devs)
class ISYLightDevice(ISYDeviceABC):
| """Representation of a ISY light."""
_domain = 'light'
_dtype = 'analog'
_attrs = {ATTR_BRIGHTNESS: 'value'}
_onattrs = [ATTR_BRIGHTNESS]
_states = [STATE_ON, STATE_OFF]
def _attr_filter(self, attr):
"""Filter brightness out of entity while off."""
if ATTR_BRIGHTNESS in attr and not self.is_on:
del attr[ATTR_BRIGHTNESS]
return attr |
|
python_tuto_functions.py | # creating a function
def my_func():
print("Hello")
my_func()
# pass an argument
def my_function(fname):
print(fname + "Refsnes")
my_function('Emil')
# pass two arguments
def | (fname, lname):
print(fname + " " + lname)
my_function('Emil', 'Refsnes')
"""
if you do not know how many arguments that will be passed
into your function,
add a * before the parameter name in the function defintion
It receives a tuple of arguments,
can access the items accordingly.
"""
def function(*kids):
print('The youngest child is ' + kids[2])
function('Emil', 'Tobias', 'Linus')
# send arguments with key = value
# order of the arguments does not matter
def my_function(child3, child2, child1):
print('The youngest child is ' + child3)
my_function(child1 = 'Emil', child2 = 'Tobias', child3 = 'Linus')
# keyword arguments are shortened into **kwargs
def my_function(**kid):
print('His name is ' + kid['lname'])
my_function(fname = 'Tobias', lname = 'Refsnes')
def my_function(mylist):
for i in mylist:
print(i)
fruits = ['apple', 'orange', 'strawberry']
my_function(fruits)
def my_function(x):
return 5 * x
print(my_function(3))
# pass statement
def my_function():
pass
# recursion
def tri_recursion(k):
if(k > 0):
result = k + tri_recursion(k - 1)
print(result)
else:
result = 0
return result
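# For tri_recursion(6), the call below prints the running totals as the stack
# unwinds: 1, 3, 6, 10, 15, 21 (the triangular numbers).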
print('\n\nRecursion Example Results')
tri_recursion(6) | my_function |
messages.py | from twilio.rest import Client
account = "ACeec37d10088b2826aa81746d709c57e3"
token = "e84982e1d3e37a080664f749f1027c0a"
client = Client(account, token)
def send_sms(number, body):
| message = client.messages.create(to="+14383996776", from_="+1 778 654 6641",
body=body)
return message.sid |
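# Example usage (a sketch; assumes the account SID/token above are valid and
# that both numbers are registered with Twilio):
# sid = send_sms('+15551234567', 'Hello from Twilio')
# print('queued message', sid)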
|
cs.rs | #[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - Key Register"]
pub cskey: crate::Reg<cskey::CSKEY_SPEC>,
#[doc = "0x04 - Control 0 Register"]
pub csctl0: crate::Reg<csctl0::CSCTL0_SPEC>,
#[doc = "0x08 - Control 1 Register"]
pub csctl1: crate::Reg<csctl1::CSCTL1_SPEC>,
#[doc = "0x0c - Control 2 Register"]
pub csctl2: crate::Reg<csctl2::CSCTL2_SPEC>,
#[doc = "0x10 - Control 3 Register"]
pub csctl3: crate::Reg<csctl3::CSCTL3_SPEC>,
_reserved5: [u8; 28usize],
#[doc = "0x30 - Clock Enable Register"]
pub csclken: crate::Reg<csclken::CSCLKEN_SPEC>,
#[doc = "0x34 - Status Register"]
pub csstat: crate::Reg<csstat::CSSTAT_SPEC>,
_reserved7: [u8; 8usize],
#[doc = "0x40 - Interrupt Enable Register"]
pub csie: crate::Reg<csie::CSIE_SPEC>,
_reserved8: [u8; 4usize],
#[doc = "0x48 - Interrupt Flag Register"]
pub csifg: crate::Reg<csifg::CSIFG_SPEC>,
_reserved9: [u8; 4usize],
#[doc = "0x50 - Clear Interrupt Flag Register"]
pub csclrifg: crate::Reg<csclrifg::CSCLRIFG_SPEC>,
_reserved10: [u8; 4usize],
#[doc = "0x58 - Set Interrupt Flag Register"]
pub cssetifg: crate::Reg<cssetifg::CSSETIFG_SPEC>,
_reserved11: [u8; 4usize],
#[doc = "0x60 - DCO External Resistor Cailbration 0 Register"]
pub csdcoercal0: crate::Reg<csdcoercal0::CSDCOERCAL0_SPEC>,
#[doc = "0x64 - DCO External Resistor Calibration 1 Register"]
pub csdcoercal1: crate::Reg<csdcoercal1::CSDCOERCAL1_SPEC>,
}
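// Illustrative access pattern for this svd2rust-style block (a sketch; the
// 0x4001_0400 base address and the 0x695A unlock key are assumptions from
// typical MSP432 usage, not defined in this file):
//
//     let cs = unsafe { &*(0x4001_0400 as *const RegisterBlock) };
//     cs.cskey.write(|w| unsafe { w.bits(0x0000_695A) }); // unlock CS registers
//     let status = cs.csstat.read().bits();               // raw status bits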
#[doc = "CSKEY register accessor: an alias for `Reg<CSKEY_SPEC>`"]
pub type CSKEY = crate::Reg<cskey::CSKEY_SPEC>;
#[doc = "Key Register"]
pub mod cskey;
#[doc = "CSCTL0 register accessor: an alias for `Reg<CSCTL0_SPEC>`"]
pub type CSCTL0 = crate::Reg<csctl0::CSCTL0_SPEC>;
#[doc = "Control 0 Register"]
pub mod csctl0;
#[doc = "CSCTL1 register accessor: an alias for `Reg<CSCTL1_SPEC>`"]
pub type CSCTL1 = crate::Reg<csctl1::CSCTL1_SPEC>;
#[doc = "Control 1 Register"]
pub mod csctl1;
#[doc = "CSCTL2 register accessor: an alias for `Reg<CSCTL2_SPEC>`"]
pub type CSCTL2 = crate::Reg<csctl2::CSCTL2_SPEC>;
#[doc = "Control 2 Register"]
pub mod csctl2;
#[doc = "CSCTL3 register accessor: an alias for `Reg<CSCTL3_SPEC>`"]
pub type CSCTL3 = crate::Reg<csctl3::CSCTL3_SPEC>;
#[doc = "Control 3 Register"]
pub mod csctl3;
#[doc = "CSCLKEN register accessor: an alias for `Reg<CSCLKEN_SPEC>`"]
pub type CSCLKEN = crate::Reg<csclken::CSCLKEN_SPEC>;
#[doc = "Clock Enable Register"]
pub mod csclken;
#[doc = "CSSTAT register accessor: an alias for `Reg<CSSTAT_SPEC>`"]
pub type CSSTAT = crate::Reg<csstat::CSSTAT_SPEC>;
#[doc = "Status Register"]
pub mod csstat;
#[doc = "CSIE register accessor: an alias for `Reg<CSIE_SPEC>`"]
pub type CSIE = crate::Reg<csie::CSIE_SPEC>;
#[doc = "Interrupt Enable Register"]
pub mod csie;
#[doc = "CSIFG register accessor: an alias for `Reg<CSIFG_SPEC>`"]
pub type CSIFG = crate::Reg<csifg::CSIFG_SPEC>;
#[doc = "Interrupt Flag Register"]
pub mod csifg;
#[doc = "CSCLRIFG register accessor: an alias for `Reg<CSCLRIFG_SPEC>`"]
pub type CSCLRIFG = crate::Reg<csclrifg::CSCLRIFG_SPEC>;
#[doc = "Clear Interrupt Flag Register"]
pub mod csclrifg;
#[doc = "CSSETIFG register accessor: an alias for `Reg<CSSETIFG_SPEC>`"]
pub type CSSETIFG = crate::Reg<cssetifg::CSSETIFG_SPEC>;
#[doc = "Set Interrupt Flag Register"] | #[doc = "CSDCOERCAL0 register accessor: an alias for `Reg<CSDCOERCAL0_SPEC>`"]
pub type CSDCOERCAL0 = crate::Reg<csdcoercal0::CSDCOERCAL0_SPEC>;
#[doc = "DCO External Resistor Cailbration 0 Register"]
pub mod csdcoercal0;
#[doc = "CSDCOERCAL1 register accessor: an alias for `Reg<CSDCOERCAL1_SPEC>`"]
pub type CSDCOERCAL1 = crate::Reg<csdcoercal1::CSDCOERCAL1_SPEC>;
#[doc = "DCO External Resistor Calibration 1 Register"]
pub mod csdcoercal1; | pub mod cssetifg; |
main.py | # Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""User module under test"""
from typing import NamedTuple
from kfp import components
from kfp.dsl import artifact
from kfp.dsl import ontology_artifacts
def test_func(
test_param: str,
test_artifact: components.InputArtifact('Dataset'),
test_output1: components.OutputArtifact('Model')
) -> NamedTuple('Outputs', [('test_output2', str)]):
assert test_param == 'hello from producer'
# In the associated test case, input artifact is produced by conventional
# KFP components, thus no concrete artifact type can be determined.
assert isinstance(test_artifact, artifact.Artifact)
assert isinstance(test_output1, ontology_artifacts.Model)
assert test_output1.uri
from collections import namedtuple
Outputs = namedtuple('Outputs', 'test_output2')
return Outputs('bye world')
def test_func2(
test_param: str,
test_artifact: components.InputArtifact('Dataset'),
test_output1: components.OutputArtifact('Model')
) -> NamedTuple('Outputs', [('test_output2', str)]):
assert test_param == 'hello from producer'
# In the associated test case, input artifact is produced by a new-styled
# KFP components with metadata, thus it's expected to be deserialized to
# Dataset object.
assert isinstance(test_artifact, ontology_artifacts.Dataset)
assert isinstance(test_output1, ontology_artifacts.Model)
assert test_output1.uri | Outputs = namedtuple('Outputs', 'test_output2')
return Outputs('bye world') | from collections import namedtuple
|
test_archives.py | # emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 noet:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Tests for customremotes archives providing dl+archive URLs handling"""
from unittest.mock import patch
import os
import os.path as op
import sys
import re
import logging
import glob
from time import sleep
from ..archives import (
ArchiveAnnexCustomRemote,
link_file_load,
)
from ..base import AnnexExchangeProtocol
from ...support.annexrepo import AnnexRepo
from ...consts import ARCHIVES_SPECIAL_REMOTE
from .test_base import (
BASE_INTERACTION_SCENARIOS,
check_interaction_scenario,
)
from ...tests.utils import (
abspath,
assert_equal,
assert_false,
assert_is_instance,
assert_not_in,
assert_true,
chpwd,
eq_,
get_most_obscure_supported_name,
in_,
known_failure_githubci_win,
ok_,
ok_file_has_content,
serve_path_via_http,
swallow_logs,
swallow_outputs,
with_tempfile,
with_tree,
)
from ...cmd import Runner, GitRunner
from ...utils import (
_path_,
on_linux,
on_osx,
unlink,
)
from . import _get_custom_runner
from ...tests.test_archives import (
fn_archive_obscure,
fn_archive_obscure_ext,
fn_in_archive_obscure,
)
#import line_profiler
#prof = line_profiler.LineProfiler()
# TODO: with_tree ATM for archives creates this nested top directory
# matching archive name, so it will be a/d/test.dat ... we don't want that probably
@known_failure_githubci_win
@with_tree(
tree=(('a.tar.gz', {'d': {fn_in_archive_obscure: '123'}}),
('simple.txt', '123'),
(fn_archive_obscure_ext, (('d', ((fn_in_archive_obscure, '123'),)),)),
(fn_archive_obscure, '123')))
@with_tempfile()
def test_basic_scenario(d, d2):
fn_archive, fn_extracted = fn_archive_obscure_ext, fn_archive_obscure
annex = AnnexRepo(d, runner=_get_custom_runner(d))
annex.init_remote(
ARCHIVES_SPECIAL_REMOTE,
['encryption=none', 'type=external', 'externaltype=%s' % ARCHIVES_SPECIAL_REMOTE,
'autoenable=true'
])
assert annex.is_special_annex_remote(ARCHIVES_SPECIAL_REMOTE)
# We want two maximally obscure names, which are also different
assert(fn_extracted != fn_in_archive_obscure)
annex.add(fn_archive)
annex.commit(msg="Added tarball")
annex.add(fn_extracted)
annex.commit(msg="Added the load file")
# Operations with archive remote URL
annexcr = ArchiveAnnexCustomRemote(path=d)
# few quick tests for get_file_url
eq_(annexcr.get_file_url(archive_key="xyz", file="a.dat"), "dl+archive:xyz#path=a.dat")
eq_(annexcr.get_file_url(archive_key="xyz", file="a.dat", size=999), "dl+archive:xyz#path=a.dat&size=999")
# see https://github.com/datalad/datalad/issues/441#issuecomment-223376906
# old style
eq_(annexcr._parse_url("dl+archive:xyz/a.dat#size=999"), ("xyz", "a.dat", {'size': 999}))
eq_(annexcr._parse_url("dl+archive:xyz/a.dat"), ("xyz", "a.dat", {})) # old format without size
# new style
eq_(annexcr._parse_url("dl+archive:xyz#path=a.dat&size=999"), ("xyz", "a.dat", {'size': 999}))
eq_(annexcr._parse_url("dl+archive:xyz#path=a.dat"), ("xyz", "a.dat", {})) # old format without size
file_url = annexcr.get_file_url(
archive_file=fn_archive,
file=fn_archive.replace('.tar.gz', '') + '/d/' + fn_in_archive_obscure)
annex.add_url_to_file(fn_extracted, file_url, ['--relaxed'])
annex.drop(fn_extracted)
list_of_remotes = annex.whereis(fn_extracted, output='descriptions')
in_('[%s]' % ARCHIVES_SPECIAL_REMOTE, list_of_remotes)
assert_false(annex.file_has_content(fn_extracted))
annex.get(fn_extracted)
assert_true(annex.file_has_content(fn_extracted))
annex.rm_url(fn_extracted, file_url)
assert_false(annex.drop(fn_extracted)['success'])
annex.add_url_to_file(fn_extracted, file_url)
annex.drop(fn_extracted)
annex.get(fn_extracted)
annex.drop(fn_extracted) # so we don't get from this one next
# Let's create a clone and verify chain of getting file through the tarball
cloned_annex = AnnexRepo.clone(d, d2, runner=_get_custom_runner(d2))
# we still need to enable manually atm that special remote for archives
# cloned_annex.enable_remote('annexed-archives')
assert_false(cloned_annex.file_has_content(fn_archive))
assert_false(cloned_annex.file_has_content(fn_extracted))
cloned_annex.get(fn_extracted)
assert_true(cloned_annex.file_has_content(fn_extracted))
# as a result it would also fetch tarball
assert_true(cloned_annex.file_has_content(fn_archive))
# Check if protocol was collected
if os.environ.get('DATALAD_TESTS_PROTOCOLREMOTE'):
assert_is_instance(annex.cmd_call_wrapper.protocol, AnnexExchangeProtocol)
protocol_file = _path_(annex.path,
'.git/bin/git-annex-remote-datalad-archive')
ok_file_has_content(protocol_file, "VERSION 1", re_=True, match=False)
ok_file_has_content(protocol_file, "GETAVAILABILITY", re_=True, match=False)
ok_file_has_content(protocol_file, "#!/bin/bash", re_=True, match=False)
else:
assert_false(isinstance(annex.cmd_call_wrapper.protocol, AnnexExchangeProtocol))
# verify that we can drop if original archive gets dropped but available online:
# -- done as part of the test_add_archive_content.py
# verify that we can't drop a file if archive key was dropped and online archive was removed or changed size! ;)
@known_failure_githubci_win
@with_tree(
tree={'a.tar.gz': {'d': {fn_in_archive_obscure: '123'}}}
)
def test_annex_get_from_subdir(topdir):
from datalad.api import add_archive_content
annex = AnnexRepo(topdir, init=True)
annex.add('a.tar.gz')
annex.commit()
add_archive_content('a.tar.gz', annex=annex, delete=True)
fpath = op.join(topdir, 'a', 'd', fn_in_archive_obscure)
with chpwd(op.join(topdir, 'a', 'd')): | runner = Runner()
runner(['git', 'annex', 'drop', '--', fn_in_archive_obscure]) # run git annex drop
assert_false(annex.file_has_content(fpath)) # and verify if file deleted from directory
runner(['git', 'annex', 'get', '--', fn_in_archive_obscure]) # run git annex get
assert_true(annex.file_has_content(fpath)) # and verify if file got into directory
@known_failure_githubci_win
def test_get_git_environ_adjusted():
gitrunner = GitRunner()
env = {"GIT_DIR": "../../.git", "GIT_WORK_TREE": "../../", "TEST_VAR": "Exists"}
# test conversion of relevant env vars from relative_path to correct absolute_path
adj_env = gitrunner.get_git_environ_adjusted(env)
assert_equal(adj_env["GIT_DIR"], abspath(env["GIT_DIR"]))
assert_equal(adj_env["GIT_WORK_TREE"], abspath(env["GIT_WORK_TREE"]))
# test if other environment variables passed to function returned unaltered
assert_equal(adj_env["TEST_VAR"], env["TEST_VAR"])
# test import of sys_env if no environment passed to function
sys_env = gitrunner.get_git_environ_adjusted()
assert_equal(sys_env["PWD"], os.environ.get("PWD"))
def test_no_rdflib_loaded():
# rely on rdflib polluting stdout to see that it is not loaded whenever we load this remote
# since that adds 300ms delay for no immediate use
from ...cmd import Runner
runner = Runner()
with swallow_outputs() as cmo:
runner.run(
[sys.executable,
'-c',
'import datalad.customremotes.archives, sys; '
'print([k for k in sys.modules if k.startswith("rdflib")])'],
log_stdout=False,
log_stderr=False)
# print cmo.out
assert_not_in("rdflib", cmo.out)
assert_not_in("rdflib", cmo.err)
@with_tree(tree={'archive.tar.gz': {'f1.txt': 'content'}})
def test_interactions(tdir):
# Just a placeholder since constructor expects a repo
repo = AnnexRepo(tdir, create=True, init=True)
repo.add('archive.tar.gz')
repo.commit('added')
for scenario in BASE_INTERACTION_SCENARIOS + [
[
('GETCOST', 'COST %d' % ArchiveAnnexCustomRemote.COST),
],
[
# by default we do not require any fancy init
# no urls supported by default
('CLAIMURL http://example.com', 'CLAIMURL-FAILURE'),
# we know that is just a single option, url, is expected so full
# one would be passed
('CLAIMURL http://example.com roguearg', 'CLAIMURL-FAILURE'),
],
# basic interaction failing to fetch content from archive
[
('TRANSFER RETRIEVE somekey somefile', 'GETURLS somekey dl+archive:'),
('VALUE dl+archive://somekey2#path', None),
('VALUE dl+archive://somekey3#path', None),
('VALUE',
re.compile(
'TRANSFER-FAILURE RETRIEVE somekey Failed to fetch any '
                r'archive containing somekey. Tried: \[\]')
)
],
# # incorrect response received from annex -- something isn't right but ... later
# [
# ('TRANSFER RETRIEVE somekey somefile', 'GETURLS somekey dl+archive:'),
# # We reply with UNSUPPORTED-REQUEST in these cases
# ('GETCOST', 'UNSUPPORTED-REQUEST'),
# ],
]:
check_interaction_scenario(ArchiveAnnexCustomRemote, tdir, scenario)
@with_tree(tree=
{'1.tar.gz':
{
'bu.dat': '52055957098986598349795121365535' * 10000,
'bu3.dat': '8236397048205454767887168342849275422' * 10000
},
'2.tar.gz':
{
'bu2.dat': '17470674346319559612580175475351973007892815102' * 10000
},
}
)
@serve_path_via_http()
@with_tempfile
def check_observe_tqdm(topdir, topurl, outdir):
# just a helper to enable/use when want quickly to get some
# repository with archives and observe tqdm
from datalad.api import add_archive_content
from datalad.api import create
ds = create(outdir)
for f in '1.tar.gz', '2.tar.gz':
with chpwd(outdir):
ds.repo.add_url_to_file(f, topurl + f)
ds.save(f)
add_archive_content(f, delete=True, drop_after=True)
files = glob.glob(op.join(outdir, '*'))
ds.drop(files) # will not drop tarballs
ds.repo.drop([], options=['--all', '--fast'])
ds.get(files)
ds.repo.drop([], options=['--all', '--fast'])
# now loop so we could play with it outside
print(outdir)
# import pdb; pdb.set_trace()
while True:
sleep(0.1)
@known_failure_githubci_win
@with_tempfile
def test_link_file_load(tempfile):
tempfile2 = tempfile + '_'
with open(tempfile, 'w') as f:
f.write("LOAD")
link_file_load(tempfile, tempfile2) # this should work in general
ok_(os.path.exists(tempfile2))
with open(tempfile2, 'r') as f:
assert_equal(f.read(), "LOAD")
def inode(fname):
with open(fname) as fd:
return os.fstat(fd.fileno()).st_ino
def stats(fname, times=True):
"""Return stats on the file which should have been preserved"""
with open(fname) as fd:
st = os.fstat(fd.fileno())
stats = (st.st_mode, st.st_uid, st.st_gid, st.st_size)
if times:
return stats + (st.st_atime, st.st_mtime)
else:
return stats
# despite copystat mtime is not copied. TODO
# st.st_mtime)
if on_linux or on_osx:
# above call should result in the hardlink
assert_equal(inode(tempfile), inode(tempfile2))
assert_equal(stats(tempfile), stats(tempfile2))
# and if we mock absence of .link
def raise_AttributeError(*args):
raise AttributeError("TEST")
with patch('os.link', raise_AttributeError):
with swallow_logs(logging.WARNING) as cm:
link_file_load(tempfile, tempfile2) # should still work
ok_("failed (TEST), copying file" in cm.out)
# should be a copy (either originally for windows, or after mocked call)
ok_(inode(tempfile) != inode(tempfile2))
with open(tempfile2, 'r') as f:
assert_equal(f.read(), "LOAD")
assert_equal(stats(tempfile, times=False), stats(tempfile2, times=False))
unlink(tempfile2) # TODO: next two with_tempfile | |
tctip-1.0.2.min.js | !function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define("tctip",[],e):"object"==typeof exports?exports.tctip=e():t.tctip=e()}(this,function(){return function(t){function e(r){if(n[r])return n[r].exports;var i=n[r]={i:r,l:!1,exports:{}};return t[r].call(i.exports,i,i.exports,e),i.l=!0,i.exports}var n={};return e.m=t,e.c=n,e.i=function(t){return t},e.d=function(t,n,r){e.o(t,n)||Object.defineProperty(t,n,{configurable:!1,enumerable:!0,get:r})},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,"a",n),n},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p="",e(e.s=46)}([function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function o(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function A(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var a=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),c=n(2),u=r(c),s=n(1),l=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t));return r._parentDom=n,r}return A(e,t),a(e,[{key:"createDom",value:function(t,e){var n=this.parentDom?this.parentDom.dom:document.body;return this._dom=(0,s.createElement)(t,e),n.appendChild(this.dom)}},{key:"appendDom",value:function(t,e){var n=(0,s.createElement)(t,e);return this.dom.appendChild(n)}},{key:"dom",get:function(){return this._dom}},{key:"parentDom",get:function(){return this._parentDom}}]),e}(u.default);e.default=l,t.exports=e.default},function(t,e,n){"use strict";function r(t){document.body?t&&t():setTimeout(function(){r(t)},0)}function i(t,e){var n=document.createElement(e||"div");for(var r in t)if("style"!==r)n[r]=t[r];else for(var i in t[r])n.style[i]=t[r][i];return n}function o(t,e){return e=e||document,e.getElementsByClassName?e.getElementsByClassName(t):function(t,e){var n=[],r=e.getElementsByTagName("*"),i=new RegExp("(^|\\s)"+t+"(\\s|$)"),o=!0,A=!1,a=void 0;try{for(var c,u=r[Symbol.iterator]();!(o=(c=u.next()).done);o=!0){var s=c.value;i.test(s.className)&&n.push(s)}}catch(t){A=!0,a=t}finally{try{!o&&u.return&&u.return()}finally{if(A)throw a}}return n}(t,e)}function A(){var t=window.navigator.userAgent.toLowerCase(),e=["msie","firefox","chrome","opera","safari"],n=!0,r=!1,i=void 0;try{for(var o,A=e[Symbol.iterator]();!(n=(o=A.next()).done);n=!0){var a=o.value;if(t.indexOf(a)>=0)return a}}catch(t){r=!0,i=t}finally{try{!n&&A.return&&A.return()}finally{if(r)throw i}}return"other"}Object.defineProperty(e,"__esModule",{value:!0});var a="innerText";"firefox"===A()&&(e.textKey=a="textContent"),e.ready=r,e.createElement=i,e.getElementsByClassName=o,e.textKey=a},function(t,e,n){"use strict";function r(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a 
function")}Object.defineProperty(e,"__esModule",{value:!0});var i=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),o=function(){function t(e){r(this,t),this.config=e}return i(t,[{key:"config",get:function(){return this._config},set:function(t){this._config=t}}]),t}();e.default=o,t.exports=e.default},function(t,e){t.exports={name:"tctip",version:"1.0.2",description:"在页面右侧生成打赏界面的开源js插件",main:"dist/tctip-1.0.1.min.js",scripts:{build:"webpack --env build",dev:"webpack --progress --colors --watch --env dev",test:"mocha --compilers js:babel-core/register --colors ./test/*.spec.js","test:watch":"mocha --compilers js:babel-core/register --colors -w ./test/*.spec.js"},devDependencies:{"babel-core":"~6.22.1","babel-eslint":"~7.1.1","babel-loader":"~6.2.10","babel-plugin-add-module-exports":"0.1.2","babel-preset-es2015":"6.22.0",chai:"3.4.1","css-loader":"^0.26.2",eslint:"^3.14.1","eslint-config-standard":"^6.2.1","eslint-loader":"^1.6.1","eslint-plugin-html":"^2.0.1","eslint-plugin-promise":"^3.4.0","eslint-plugin-standard":"^2.0.1",less:"^2.7.2","less-loader":"^2.2.3",mocha:"2.3.4","postcss-loader":"^1.3.3","style-loader":"^0.13.2","url-loader":"^0.5.8",webpack:"2.2.1",yargs:"6.6.0"},repository:{type:"git",url:"https://github.com/greedying/tctip.git"},keywords:["tctip","tip","打赏","插件library","javascript"],author:"greedying <[email protected]>",license:"MIT",bugs:{url:"https://github.com/greedying/tctip/issues"},homepage:"https://github.com/greedying/tctip",dependencies:{"qrcode-generator":"^1.1.0"}}},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}Object.defineProperty(e,"__esModule",{value:!0});var i=n(20),o=r(i),A=new o.default.EventEmitter;e.default=A,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function o(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function A(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var a=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),c=n(0),u=r(c),s=n(9),l=r(s),f=n(10),p=r(f),h=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._btnBox=void 0,r._mainBox=void 0,r.genDom(),r}return A(e,t),a(e,[{key:"genDom",value:function(){var t=this;this.createDom({className:"tctip",style:{top:this.config.top},onmouseover:function(){t.show()},onmouseout:function(){t.hide()}}),this._btnBox=new l.default(this.config,this),this._mainBox=new p.default(this.config,this)}},{key:"show",value:function(){this.dom.style.width="240px"}},{key:"hide",value:function(){this.dom.style.width="0px"}}]),e}(u.default);e.default=h,t.exports=e.default},function(t,e,n){"use 
strict";function r(t){void 0===t.stat&&(t.stat=c),t.top||(t.top=a),i(t),o(t)}function i(t){t.button||(t.button=s);var e=t.button;e.imageUrl||(e.id||(e.id=s.id),e.type&&(0,A.inArray)(e.type,u)||(e.type=s.type),e.imageUrl=l[e.type][e.id],delete e.type,delete e.id)}function o(t){if(!t.list)return void console.error("必须传入list参数");var e=[],n=!1,r=!0,i=!1,o=void 0;try{for(var a,c=t.list[Symbol.iterator]();!(r=(a=c.next()).done);r=!0){var u=a.value;if(!u.type){console.error("缺少type,相应配置文件为",u);break}if(!u.qrImg&&!u.qrContent){console.error("缺少qrImg或者qrContent参数,相应配置文件为",u);break}var s=u.type;if(f.hasOwnProperty(s)&&(0,A.mergeObject)(u,f[s],!0),n?u.active="":u.active&&(n=!0),u.index=e.length,e.push(u),e.length>=5)break}}catch(t){i=!0,o=t}finally{try{!r&&c.return&&c.return()}finally{if(i)throw o}}n||(e[0].active=!0),t.list=e}Object.defineProperty(e,"__esModule",{value:!0}),e.formatConfig=void 0;var A=n(16),a="10%",c=!0,u=["dashang","zanzhu"],s={id:1,type:"dashang"},l={zanzhu:{1:n(33),2:n(34),3:n(35),4:n(36),5:n(37),6:n(38),7:n(39),8:n(40),9:n(41)},dashang:{1:n(24),2:n(25),3:n(26),4:n(27),5:n(28),6:n(29),7:n(30),8:n(31),9:n(32)}},f={alipay:{icon:n(42),name:"支付宝",desc:"支付宝打赏"},wechat:{icon:n(45),name:"微信",desc:"微信打赏"},bitcoin:{icon:n(43),name:"比特币",desc:"比特币打赏"},tenpay:{icon:n(44),name:"财付通",desc:"财付通打赏"}};e.formatConfig=r},function(t,e,n){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(){var t=o+"/version/"+i;document.body.appendChild((0,r.createElement)({src:t},"script"))};var r=n(1),i=n(3).version,o="http://stat.tctip.com/stat/index";t.exports=e.default},function(t,e,n){var r=n(18);"string"==typeof r&&(r=[[t.i,r,""]]);n(22)(r,{});r.locals&&(t.exports=r.locals)},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function o(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function A(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var a=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),c=n(0),u=r(c),s=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._button=void 0,r.genDom(),r}return A(e,t),a(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-btn-box",href:"javascript:;"},"a"),this._button=new l(this.config,this)}}]),e}(u.default),l=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r.genDom(),r}return A(e,t),a(e,[{key:"genDom",value:function(){this.createDom({src:this.config.button.imageUrl},"img")}}]),e}(u.default);e.default=s,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function o(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't 
been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function A(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var a=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),c=n(0),u=r(c),s=n(15),l=r(s),f=n(11),p=r(f),h=n(14),g=r(h),d=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._title=void 0,r._bodyBox=void 0,r._footer=void 0,r.genDom(),r}return A(e,t),a(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-main-box"}),this._title=new l.default({},this),this._bodyBox=new p.default({list:this.config.list},this),this._footer=new g.default({},this)}}]),e}(u.default);e.default=d,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function o(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function A(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var a=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),c=n(0),u=r(c),s=n(12),l=r(s),f=n(13),p=r(f),h=function(t){function e(t,n){i(this,e);var r=o(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._iconBox=void 0,r._QRBox=void 0,r._currentItem=void 0,r.genDom(),r}return A(e,t),a(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-body-box"}),this._iconBox=new l.default({list:this.config.list},this),this._QRDetail=new p.default(this.currentItem,this)}},{key:"currentItem",get:function(){if(!this._currentItem){var t=!0,e=!1,n=void 0;try{for(var r,i=this.config.list[Symbol.iterator]();!(t=(r=i.next()).done);t=!0){var o=r.value;o.active&&(this._currentItem=o)}}catch(t){e=!0,n=t}finally{try{!t&&i.return&&i.return()}finally{if(e)throw n}}}return this._currentItem},set:function(t){this._currentItem=t}}]),e}(u.default);e.default=h,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function A(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function a(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var c=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),u=n(0),s=r(u),l=n(1),f=n(4),p=r(f),h=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._iconList=void 0,r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-icon-box"}),this._iconList=new g({list:this.config.list},this)}}]),e}(s.default),g=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._iconDoms=[],r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:this.className},"ul");var t=!0,e=!1,n=void 0;try{for(var r,i=this.config.list[Symbol.iterator]();!(t=(r=i.next()).done);t=!0){var o=r.value;this._iconDoms.push(new d(o,this))}}catch(t){e=!0,n=t}finally{try{!t&&i.return&&i.return()}finally{if(e)throw n}}}},{key:"className",get:function(){return this.config.list.length<5?"not-full":""}}]),e}(s.default),d=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._link=void 0,r._emitter=p.default,r.genDom(),r.addEvent(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:this.className},"li"),this._link=new b(this.config,this)}},{key:"addEvent",value:function(){var t=this;this._emitter.on("changeIcon",function(e){t.active=t.config.index===e.index})}},{key:"className",get:function(){return this.active?"tctip-current":""},set:function(t){this.dom.className=t}},{key:"active",get:function(){return this.config.active||!1},set:function(t){this.config.active=t,this.className=t?"tctip-current":""}}]),e}(s.default),b=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._img=void 0,r._emitter=p.default,r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){var t=this;this.createDom(i({className:this.className,href:"javascript:",onmouseover:function(){t.mouseover()}},l.textKey,this.config.name),"a"),this._img=new y(this.config,this)}},{key:"mouseover",value:function(){this._emitter.emit("changeIcon",this.config)}},{key:"className",get:function(){return 4===this.config.index?"fifth":""}}]),e}(s.default),y=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({src:this.config.icon,alt:this.config.name},"img")}}]),e}(s.default);e.default=h,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function A(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function a(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a 
function, not "+typeof e);t.prototype=Object.create(e&&e | prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var c=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),u=n(0),s=r(u),l=n(1),f=n(17),p=r(f),h=n(4),g=r(h),d=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._box=void 0,r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-qr-detail"}),this._box=new b(this.config,this)}}]),e}(s.default),b=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r._qrTitle=void 0,r._qrCode=void 0,r._qrDesc=void 0,r._emitter=g.default,r.genDom(),r.addEvent(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-qr-box"}),this.genChildren()}},{key:"genChildren",value:function(){this._qrTitle=new y({},this),this._qrCode=new v(this.config,this),this._qrDesc=new m({desc:this.config.desc||""},this)}},{key:"addEvent",value:function(){var t=this;this._emitter.on("changeIcon",function(e){t.config=e,t.dom.innerHTML="",t.genChildren()})}},{key:"regenDom",value:function(t){this._config=t,this.dom.innerHTML="",this._box=new e(this.config,this)}}]),e}(s.default),y=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom(i({className:"tctip-qr-title"},l.textKey,"扫描二维码打赏"),"p")}}]),e}(s.default),v=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom({className:"tctip-qr-code"}),this.genQR()}},{key:"genQR",value:function(){this.config.qrImg?this.appendDom({src:this.config.qrImg},"img"):this.config.qrContent?this.dom.innerHTML=(0,p.default)(this.config.qrContent):console.error("没有可展示的二维码")}}]),e}(s.default),m=function(t){function e(t,n){o(this,e);var r=A(this,(e.__proto__||Object.getPrototypeOf(e)).call(this,t,n));return r.genDom(),r}return a(e,t),c(e,[{key:"genDom",value:function(){this.createDom(i({className:"tctip-qr-desc"},l.textKey,this.config.desc),"p")}}]),e}(s.default);e.default=d,t.exports=e.default},function(t,e,n){"use strict";function r(t){return t&&t.__esModule?t:{default:t}}function i(t,e,n){return e in t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}function o(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}function A(t,e){if(!t)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return!e||"object"!=typeof e&&"function"!=typeof e?t:e}function a(t,e){if("function"!=typeof e&&null!==e)throw new TypeError("Super expression must either be null or a function, not "+typeof e);t.prototype=Object.create(e&&e.prototype,{constructor:{value:t,enumerable:!1,writable:!0,configurable:!0}}),e&&(Object.setPrototypeOf?Object.setPrototypeOf(t,e):t.__proto__=e)}Object.defineProperty(e,"__esModule",{value:!0});var c=function(){function t(t,e){for(var n=0;n<e.length;n++){var 
[Continuation: Footer ('tctip-footer', linking '了解更多' / "Learn more" to https://github.com/greedying/tctip); Title (an h1 reading '喜欢请打赏' / "If you like this, please tip"); a small util module exporting inArray and mergeObject; the QR wrapper module that turns a string into an <img> tag (type number 4, error-correction level 'L', cell size 5, margin 0); and the first half of the widget's CSS (a fixed panel pinned to the right edge, 240x332 main box, 26px title and footer bars).]
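The QR wrapper just summarized un-minifies to a one-call helper. A sketch, with qrcode standing in for the bundled generator factory (module 21; that name and the helper's name are assumptions):

// Constants as they appear in the source: type number 4, error
// correction 'L', cell size 5, margin 0.
function qrContentToImgTag(content) {   // hypothetical name
  var qr = qrcode(4, 'L');
  qr.addData(content);                  // mode defaults to 'Byte'
  qr.make();
  return qr.createImgTag(5, 0);         // '<img src="data:image/gif;base64,...">'
}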
[Continuation: the rest of the widget CSS (a 90px icon column, a 150x280 QR detail pane, and a 106x106 QR image boxed over a background image pulled from module 23); the css-loader list runtime (toString and i helpers for collected style chunks); a Node-style EventEmitter shim (addListener/on/once/emit/removeListener/removeAllListeners/listeners/listenerCount, defaultMaxListeners of 10 with the usual memory-leak warning); and the opening of the bundled QR generator's polynomial arithmetic.]
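That EventEmitter shim is what wires icon hover to QR refresh throughout the widget. A minimal sketch of the flow, with handler bodies reduced to comments and the sample payload invented for illustration:

var EventEmitter = require('events');
var emitter = new EventEmitter();   // the bundle shares one instance via module 4

// Each icon re-evaluates its highlight:
emitter.on('changeIcon', function (item) {
  // icon.active = (icon.config.index === item.index)
});

// The QR panel clears and re-renders for the hovered entry:
emitter.on('changeIcon', function (item) {
  // qrBox.config = item; qrBox.dom.innerHTML = ''; qrBox.genChildren();
});

// An icon link's onmouseover broadcasts its own config (sample data):
emitter.emit('changeIcon', { index: 1, name: 'alipay', qrImg: 'alipay.png' });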
[Continuation: the bundled QR-code generator (structurally the well-known qrcode-generator library): the QR model with module placement and mask selection, BCH type info and type number, the alignment-pattern position table, the eight mask functions, error-correction polynomial construction and lost-point mask scoring, the Reed-Solomon block table for all versions and EC levels, a BitBuffer, the Numeric/Alphanumeric/Byte/Kanji mode encoders (Kanji guarded by an SJIS support check), a base64 encoder/decoder pair, and a GIF writer feeding createImgTag; output helpers createTableTag, createSvgTag and createImgTag; UMD export. It is followed by the style-loader runtime (addStylesToDom with 'top'/'bottom' insertion, singleton <style> handling for old IE, and Blob/URL.createObjectURL-based updates when source maps are present).]
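Reading the minified generator, its public surface is the familiar qrcode-generator API; the signatures below are as they appear in the source, while the input strings are sample data:

var qr = qrcode(4, 'M');                   // explicit type number, EC level in {L, M, Q, H}
qr.addData('HELLO WORLD', 'Alphanumeric'); // modes: 'Numeric', 'Alphanumeric', 'Byte' (default), 'Kanji'
qr.make();                                 // tries all 8 mask patterns, keeps the lowest lost-point score
var size = qr.getModuleCount();
var dark = qr.isDark(0, 0);                // throws outside [0, size)
var table = qr.createTableTag(2, 8);       // cellSize, margin (margin defaults to 4 * cellSize)
var svg = qr.createSvgTag(2, 8);
var img = qr.createImgTag(2, 8);           // GIF data URI built by the in-bundle GIF writer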
[Remainder: the tail of the style-loader runtime, then image asset modules exporting base64 data URIs (one JPEG and several 34x93 PNGs carrying Adobe Photoshop XMP metadata) used for the widget's button and QR-frame artwork; the final PNG string is truncated mid-stream in the source and is left as such.]
jhDRjQxMUU0QjU4MThFMEJGNDM2OEE2NCIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDo2RThFQzBCNThDRjQxMUU0QjU4MThFMEJGNDM2OEE2NCIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M2IChXaW5kb3dzKSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkE3MzAxMjMwRkE4QkU0MTE5OUI2RjREQTQyOTVFQTYxIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkI4MTkyMzI0RjI4QkU0MTE5OUI2RjREQTQyOTVFQTYxIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+LrQk8wAABP5JREFUeNrsm89vE0cUx59/ZW2wA3Hs2KlDogSoUKMIqGhTlf6QeqiqVqByQdy491Rx4B/opYo49dA7lx64oUoVh6qHVKhAWyht06gyhAIxCXa8JHXAjo2dznfwWBP/2B+212uVfdJoHcfe+eyb73vz3kRxnZpfo3qbG30SZJcv2DjFRoINL1ls3iYQX7LLubVC2Zd6WqZMoUz58jZVtnsEwgAG2GV+s1SZva2WiIFQL032yHy2UJm9ltmi51Y/fhNzV71xAZ64bhMEB6kK8/Nb2SKVbIIQHpljgvSoWxWy0wByAtFhtwEk3usIaQXiRZ7oBxCq2M/xAqQfzAFxQBwQB8QB+d+CmG4Tjk+OkOL10G+pLKnPivaBHBjZQ5FggB7n8vaCNLOAz0NTwyFayuZ0PxsOKLR31wAtrK53HwQQH02Pm/oOoPOlcndBCuyGa5v5hvdD/gGup2a/kyEMgUzH91IsFKj9jBvDXmVaEe9vPS/TxevJhu+enT1ICtPTL/czDUthWiOY7Oh4tHE5ontqr/HEV++lrRUrwhQRIuzYRJRHzbWlVVrPF2tLY3nUIETlMAUIDBB67n45MqtsicFddHgsTBvMO5ZrpGWq3x/n4Qm79SBjvUfCLAtOhkMUGwzQ2FCwNjlscUWlvx9v0F0DGbVjkA8PjVFiKMhf5wpFnjOQS67eXe2qWHVBHj7Z5Nc/Hql8YpGkei5WLsIOhehUaC9HQkPxgyF235V/n+mGvRzqXQHZzwqfT49M1n5GCLcqEz84OEr7wkG+MQrTgzYMgoSFyfmWz3ZihHIrQ7o/WoVAefBnSjVU2xpemm9+Thq64a/LWSowaHjBTHFtGMTMTdvJuG1FDYTYTBsYPdl9ETWnX5/i0SAvFd4X5eRP/6R3FMZn2OflLaIrHsEED9VNHsInZiZ2vN+sUke0YcNUGKhez2N6aX5IrtBSZoOHJyp8LXuDlZWItu8XUw3tQ725ksnk9uX72nH+8WtjFA3t3HHhFRHSrXoYwOIzKB+EZVj4f/fXcnsaAQRuKk8iQ4hapd759csV0SgfTInVSKOkJfTP3pvuXs0KAb5zIG7qO3fSG3STJbqOwxc3QrQgW46yyh0uFmWjlkEz0A6+C7FeWXjQshkzBCK3CgARcIggLUOCk9tVrWVtu50YYQLWy6QjoYA1mVU+gkCSwnavZ/is3hK2BfKI6eTSzSV7W04c5L01FW9rInSDWpoyBSKOIVKs10nn8oa1JBq0rve+gJCfDrllgukF5yj1kQFBGwHpShUfH3xxqiTOTixvJ/zVCr7e9lWfGLlFHFckqvnGErHeU3NUYNlRrsqxM8P14hwNe8onM+M8o0JLKIi+nl/QLQNMgcjHWIig6VfCfEJRc9QyKJscvwMghgDqOLOK83e+DOFgrfaAoUi6srhce2Jc4RkMFE4zEhBe/3hnlVJN+hzDHpH3DGx4ALi9rDa9qby3YAAIJ0yRUIf1CJ4OHkB4rrOlSRno3OqBULNCP63aEsMe0dtpjRTeWoJ1jiUcEAfEAXFAHJBegGx73a6+ACn5Pf0Bkh5W3H0B8m1it6cvQM5H/Z5y2GavuM+vDOFvZV8dGR4gn42i5W5gMOdCPveNN6MK2RVB8nq8G/G7f38/rlDE77EPhHmlyMbhoM994XhMKb0dU2gi6KXdXhf1wkkujf+dmGPjJBsx6sH/TvwnwAD/lU0rvEJ+QAAAAABJRU5ErkJggg=="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAtNSURBVGhD7Vv7d1TVGeU/6Oov/VP6Q2tba7WiggJC1SREqIgKy2ptRbusRRfV+sD6qCwRq9QnQgjkOXmThDwIgQQIz/CIQEhACIE85t577j3n7u7v3MwAYR4JWqCuzFqHzNy5c8++32N/+/tmmDZt3CNszP+laViw1lTNrzcVec2mKm+7ieX+z1cSRliV9xNTt2CdqZ0fR9V8oDLvui4LJGwhiOr8Q2hbZDcPY7nXfVkgdEGBgAjLrz+AxE1PC2tzf6FjOeZGWSIJxFTmfSUxcSPccfme03RlbuuNtoYAmqaZolNALsvOKYuMT44pi0xZJBthTsXIVIxMxUg2C/xwYgRlc4HSuRRSOWnFFMof4jn3A2W/m7DgmhiPFN8HlMyBieUhXvssnC3LYCryLwNDwV06D2HRvXZzv3IRRuueh1fzFHXwQxMCM03HMgmjHGgK6vjutfCbX0IQy4caHYSrAvhViy/dMe/ebX0Nzv6voSofgWr8C5QBnN4OmOLZ3x0IyubBr34cGoD37X6ogunQI2dgAhdByTyYzffAlNzPvzPhn2xCwPNU2XzoigUI+dycbITZ8GsYWkoX3Yew7AGCSt2uZLAIP8CNvK7/8JJAcOE4guO1ML7LHQKEfS1QvdugDhbYTYKLp2C4e3CiHkFfewRk9FsYvtanWuH0dUK3rkBIF6fKqPRA6Oug+jFo9yL3VQjPH0I4fBIhQfAfmKGT8IZOwxyrgNv0EugJewwXjyIc6bNA4I/CXPwGzoU+BKNnodtXIiyeNTkghsjdjg8tEG/XR8DXP4PeOB3+8Gm7QShW2HgnTOFdcPcXwL94EkFZLrD+FvixBdZN+kQdwnU/R1B4N903gyDuHQveq92T0SJO/fNwuz6D2vEOVOvfGZCvQ3sjCLUHtX0l9I43oRqeh1O5BG7nGgTtb8Jr4Xkdq61FgrP74TUth2p7g+euhM/riJXD8gevskp6IEUz4HSXWZNrXtWaXp5rjXDstQSxe6QSQy1v2YAWKyTPCxhHY699Pkm87zf9LWWcpAfC/FfVS6Dr/gC3eYXdwD+zG4pmD0f74DmjcCufgBdbyJRdZM8Ta8h56ngDz/s9UZ6HH79g+cRseQp+zVJLB6nIMGPWoPwBoPB26xrZwNu1Bii4FeFQD9zh8xht/gdU3w6YyoXAhl9Bda6Oztv2OrBpOonkHN
QIg5RxFG5iLHV+hKD+TylJLiOhoXQO/BJmDwPRGI2g6lF7RwHTUtwj5vZHzyEonks25XveMF8PwC8lmwpnxAmCGSfA/B1vW1epoT5octB4xs3gmgfJno9B9XdEFziwHmHhHTb4bKwYA4/HFDd1dq5CwCC2biEDgyVBl8zlZzvtMfm8LIk1d9e/U6ZwaiC8G8U79EYGogscLYEp+I0lOGFRySRvy59hvr6F6Xunfe1f6IFqehHhBrqOTBpuvhtB6QPwxRUtL8K0vQK/YRm01KQUg6AM6TuPJn0N7p5P4bLIud1F8I6UQx2rhHdoE1R3MZ9XQR3la77nHKvlsaLomF3VUIdLabUNcBnEAiy0VTv1MCgzxcsHC26D2v3RpVT04vCVM7ZYc+KDXAMInIs85l56j+cF4heJoyOlBEJCyzASy1J9iV783fGevWDY34agaA6C8vl2+WU5iG99mXf8ClTNk5ZZE+8FRbMwtHONjZHR3Z9G7vruQN6Nbq2/lRecaZlRUluzxHuDx621vD1rAcaQZU15v+gejOz4wAJx9nz+fQBhCd+1KgJymlV1rGhFEuEJm56GZdeve5riaVZSvYGAhzs/sSD9o7HvxzVuxypyBKuwFDEROmRdU/hbBu1mi887RVLbdLdlTq9+mdUqId93ap/ByP5CxLex6qbJluQwL7NCi+hYZKEme3ICTZPPtHEiOiXBDYqpHBTNhjvYa+uNf+EElGQW0zUQVt14B9WciKL049OMWaPLHqLomW3lntylJbgd70Kd645AiEs6/mXjJog9DK/9baieGmjnQrL4WVB7yTO1T8KUpxfc6YFQhw63vAG3fxf0wEEEUjPGGNLWE7KmanjOysDoTnPIqLMs4QUsA17LCqgTTcnPOOeOkeAkkFOL6QyExqyoWoTAHYpKuDPI6tsV0TrFcVA8xyr7lOaWdmIMlC9VmeTmtb6aViZmH2/Sr5IZfs0S3mWeTVdxg/Qsmfqay8FJD2TIKWGp9Djp5/zZ+xpqV9soWZOm93G0OTeSc4VrSITZz78UvNmZVTa3QZZijQ8+pqjX+AKC/V/AE+kobcX302DlQpc9yCCjJhFdMm5pvr4iEyR7uj6O6os7Yuk+lT6dXPWlqSVtHSp0NXCMKr0X/lB/cqnzPfAGjiJofC5ZVUWHxNvfi9iUwlkXTzyWMruGqWh6G+0dBiPsYc53wwwegT5/mBUwtNnkNP6VwRtljwWy/b2IQ85QFE2w3YyyJkPvKxwh2kIuLFU2pA41JbOhWdz0ABsuIbWty6L6I9lEUTTa/Hqk6NgFmg23RYGbWDaAJ61H6BoCcQ5XRBW05VXLDWGMdYbpa60iBFfPjr9uKZSIpn1fwe9lhRYLDvUiPPAFwu4N0Tq4DmbvWvbFnCKkCOCsFkkAiTcut4XMiD6hIhe2tUBqlyBsfSnZt9iD4x4JzeqLiGLZuCYg8cMs4eICNuHmbBfMuX38uxehH4+ANFKTMrX9Lc+wl1kIhy2DDVZpMyR9qx9B0PKytWpw7iBFdeoAzmoRASIPd+gM25QeOAPHuXqgfS8CsvVZS93CoNh8ly2KFsg3NZQK0218+JQGcszp77KVe5IN1pUxMtL4Mpvuu2yNUYUzEDBYk0ASopgx5Xd9Yo/7h4uiICVIGfLYTpGtSbpMymqRK4KVCuyqYBWLJIHMZP9TYIGoLtGpAmQ21DYWPAFyYisTIHXmTBhIvOFFYKOk4wyrzjT9Pd4iNt2/oYoTIO3vRPWGy2//Z3RMqCCNiM4KJBGs6jjHUAe+hD60HvrAVzBsIcYD0XSbMKrt+La+QEsxHghEdbwfWaR7c2SlSTVYQvFEnwhWkYASnmps2YlQIljFNVTuotKk/9XaZ5+8OOp/JW72fBxZZN+X1w4kESNDbe/DiT0Cp3op4hWL4Q/2XAFENIoMbWyaDh6LGm2p2ATi7SWxiUWkUbtW1ySAuE0rbK8iA18ZQemB7sgk9U9GsUCTyx1HGVNKa465QCryoYIIyE4WxDQ0nzVGklkjQFhrZEPNxlvIyTZWnR9woPN4lNacLNoNL58K8XzvcFl0vI11aHLDvEhpmfW3In6wOGob294he/4R/q4PI7KS6aK4QcAwQ9xmNtoSB2cPWVBJ0mLPq3sqkyOLyY03ZYxNynZpakUdIg+ZoSbnY3J3pHzVU0tzv8tG6mloGd4IkGZWaXKHFUTCIYWc1fZFMxafoju0evfqCpzSNeAFpB0Ya+btgCUY7rejBm/7W2y4l9rZh8gCfzMbqzP7LFh1soXtBNsLfl4YNjy7Bx4tZKdLXNIJpvuiIHWMsExrpqJ7eg/nIsXwGtg+sGraAJQvCMYEtQz2XCo1G6CsrD7n8NITyzkeSSxxI2JNmT7KuDydoM4YrJp3LVxiFdhVGiL6wsBvXs4mbDf7llfGMoLxRbdoztukB5JMkamRnRRlENJZ2ols7QPlIUHaLwiu6FsEDHVHoq3I8v1O9gZrwj96yg4421dr2RusCYP5bj+UmgLyw/mWM1vQXev7UzEyFSPZYmcqRqZi5P8oRmI5LTfHz0irbhIgpib3s5vip8Z+bf5PqT2DG+2e6OfolXmfY/ujN/bn6AIETTk/NrX5nQLG/i+B6yQPL0/p5H9ZQM3iH5n6h1ebuvwz/J28rytytV2x67P+C6Wsuf7mtj9yAAAAAElFTkSuQmCC"},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAuKSURBVGhD7Vtpc1vlFc4/6PQLP6UfWtpS2kKCE5JAVtsxoQQoYSgUSghDacgEaMNSAoUBQtuULN7kPXa84zhOnMWJF+27JVmyLUuyFkuWbdl+es65lnCwtsQ0Zjq+M3dk3fvqvc973uc8Z5G8YcN3js7Rkl+0e/afarLu6zpvLr7caCm+Xm8u+p+fKRit1uL72hz7y5rt+2KN1n0gEPf0FCCtY8X3NQ2XGHsCB3DeUox6Y9E9PwUImV/VM3kAdYZ7DyC56A3NzqKf1xsKF3gr1sISKSBNtuJS5kT9GlqDwWw4byrqXWtrCJAGctF1IMu8c90i3/XQdYusWySXaq9zZJ0j6xzJZYH/H47UGHaAz3pjYcaMrs64l8Y8jlrDrryzvrx0RKXbimrDY5TFFaPV8CrajIfQYCxZBqYYNfqdUOkeRa1xF84bDqDNcBjNxhfBoPLZphxACiWhvuU8hW77WwSkBNHpSczOJNBofCa1Yl59j/1vUHvKCcRT6LS8joUEMOK/ReC2rx5IrWEnmoy/Bx9jIR3ODW7E1Mw4EvNxVGt3okJbgGr946jUboEj0CPjanX7yFr75W/nZDfODv2K7j8KlXYrAd9NoNIn6VksUkwTFGDA/R+ZdDLmgC3QgTkCsbCYwEj4ClyTV6EdU9G4rQjF3FhcBIYDXXCHbshnojNeOIJdcIV64Z7sR8/
wMQJOW5ymbsoIhInWZHoW07MhssAs/DEjwjMuLJDNF7GA0LQLoegYrP5mdFnfkgeH4y4E41ZEZjzyfm4+iuD0MCanPARqAlddH6BKt+3OgFQR8j7nl4gTkJsjX+H00E9Rpt6IyPQY5haitPdbUTr0MMo1m6DxqMgiLtTpi3BGfT/q9crWDAc68fXgz1CufoS2cbOQWSHvyu3JapF202EMuk/jmvMELtnfRo/tOGbmpjC/MINexwe0wvfRYTmMRv1B9LlOotf5PpH6bdxwfiFAvBEdWesIrjjeE2tcc50QK9cZ9qywSkYgKt1mGMYaZcLFBXmRY35+Xrig3ABM3hZcsnyovF82LpEgt0kOm1fG8nHR9pe0PMkIhE14wXgQrZY/oNt6TCYZDQ+iXrcfU7MeTMejZInn6P2TohutZhpne1vG2f0X0aD/HeKJAGIzQdGTVsuLaDY9v1RfrxTDrF5TZ9yNMu1vZGv4uDlyEufUDyA0Y0ckGkC35a/kDX1oND2Js+pfom9E2ZLL9uPEiY2YTvgwNT2BMuJRhXYTbrq+Qrv1FQKzUuSyChqrabV2FzGfvWWeROxpIeRU3Jvanqm4D1XaHXIvPhdBNO5HjW6vaEZsdgKJxCwuDx+nxXwkIEMxD6p1O1cobuatIUI1GZ+FJ3RLJtCMVtDKfosu2xGFDgsLpCEVqNHuxXXHZ0Ji5sFF21uo0lNI0O2AJ9if4gn/wVy76fpnWhdOC4RXc97wNCIxv0xk9jXQlvxalJRVdIA8qc38J3Lp+wncwxgYOY3JqJ1Avklb9IC4aYX2EQKzG+2WV9DteBNXXO+Qhx0iou68Mx2pIXm/TPFjwP012kyHYBivIw9pgmWiBbqxGhi89fR3q7znexZvh/Iq1/hsg9F7HtrRSiExA1OidvpmUFayVut3kCUexC33VykTz8zGMDs3vXTGySsmSTX9osCzc/HUPR5HtJKDAVWSHGSLwjnTgCpS0Bsjn8iE7vA1qDQUK/TU2KGzVleILstRXLK9Qy76ghA5eU+l3Ybr9pPyuX7n17Jd3wOQj2XCkUgvTbiFGL+Hzt0UUbcjMOVQHuY+RasuWLq3Byp9Aa4Nfy73BkbOrB4Ir6TP/ZlM6Jm6kWI8c6jJ+JwExMWFRRK0l8hbtgkP6ih7q9JvQd/wvxWyT1wgkq9ya1Tkijecn5FGhDAc7CQg20UDyjQPQT9eq1iKRK1c84goZ7uZPIN0olz7EFqML0PjrsZl6wcEML23pJp5uVtXhZIWsnryhyppa1Tax1J5CmsDu7JKsx2TkREBFow6CWSNuGuZepPoj5IUZW6fZvWaWv1e4QFbgVfJAnfN+TEmIiZ54ML8IhH5H8KbBsMTuOr4CFZfO3lQUO4LqJgTg57TZC0isyFzwp0RCKeAl8zvUSwZwMSUgWR9IhVBhS+kmh2W15aRkHmxTQSPveeS/ZiSPi5FXV/YRtLPJE+fTGfJR3bTdhygxCgs4Z31YiysFqnvtL4u25Mp7eOHJUG1UFRmceuxv5txfM72Ju/rBdNzdB6kVRbLFjFH2FrZ6prlXGAvUlF6WKPnGidzdzunoHHuyqdi0sx7rDyc6hsaK4Qmb8s9/lvy5gTCk7EuZDpvX/1OqmnegHrsLEXZk0Tg/d9XgVWEWv0e8phdGc/lnsCWGPD8S7wlPjslcs8qvOpKjznBGbo/YiM3HEE4NkqJjXL6p+zwRaxE3NdSUZXzkKt2JS5x4lyly59LWbeG9cEZ7JaJI/Ex+KMmBGIWejWTVyp+2Wn+s1IXE0cYSO8SkNFIv5A7H2vk9BqOM5xbKA88SrK9USYv1xTAFzUq122HJP7wtnA+2205LtedgatUbj4o15Mn10J3kY+QnFPNahxrlom7re+iyrBNyFdF7uuPmeV6u40ydOvzkjSpR0upDO2V66HpEWi8Z6H3VUI/UQndRBkGx04RgUvSEjjr1iwH0mk6Qhn9Q7T6rZKV+6IGeWCr9SB6nErJmVRR5c3Kg0WRw0Y6dc0DyAWZMUhFuHdKjYmoll41VNfG5HqH7RXRizbzy1TLPEllqpLNuYN90hVoMj9F1d9RAcmhIhOB8wYSio7DF7bDH3bI61xiRh7YaX+VXPsx8ZwK3SYJinxw8CvXbJSAyFGYD8+kWkJDOqHLA4jCkU7TUQnpVTRR2dDmb8lKQGoot2USMrn7PUoyZPDWCQiOR9zkESBUmmRq3OQNhMlaTWStJ7JyrEmSlS3yLZAt0i/hY5CyfwZSpd9OfZF35ZojcEkaNnfUH5EkaJnXdJjeRKmW3XEzSik7m1gi63IgPN7m65SHXnWcEF0RkXP+Xa5ZfK0Zc9c8LKKQ1e7vhmb8HHTeCnotpXJSKb6WA6kirrCi8vGN9Q3ZFtaO665P5Rqnlqwpd2kRBYi0HLhOSZ5LCU8SiMQUytK4/k3Mz1E2x80+yvSFN0r8UY+ek+26SyAKWa9YP0WD7ik0GZ6n12dI6u23WaSGeMNNGz4CUZukluwdDGTQfVau91Ohlqm+yWNrFCBdlmNQGQqoj0qJjv5R+GJK3tpqe0FEjk3OK+ZDKjtKGZPJts6rEPi66xPZqlVZhIFU6BRdKFU/nCJrn+tzaYGyPgSpsyigl3WFGKBxXOk8cXuCU8g7AFIsWdmZwQeg89QrW2M7gTbLH6lZ8yVs/nbpLiYP9pBkt8gbNorWcCKlWGQzdR5bZCi3LO6ovckg2iwvY8hzjmoUpVa5LY4QUSej1Hf1dZC5P0az4SVqBHtlGMs5aweTlyN12VABVQJKj4WTbiXfXVnfpOVINU3UbVP6ZnKQx4SnRyUb73V8KBVdDfU+OC2opMJqLKiVYc7AFdEeBsIK640OgS3Ei+BCjD+X6YuCtEA4OnLBNBocos5iPXnD6xI1mYAsUJJQU/OfG3v+iOI9HFm5D8+lpSRIDkXExJgEwkzdx+osGVtWr+FVs7txBrYydBMH6AuDi/YjVGwNUkH1jgDgTJ77qI2mp6UGYk/hrlG6vtnyLcoKJEm4bOmeNPxIM26vWwgMWZU9jN011/c7OVPFfPPNO6lf7ipVzB/I6n8klUeBtfqH5LOgdSDftdK6RdYtkstz1jmyzpF1juSywAqO1JsKr/wwfkb6QwHSZC06LT81XoNfft+WKrbYSn5SZypKrNXP0JNg5OfoDZbiM5eD9OXyGv4AW/ldvLPwx822kn4Gs1bblPqXhXbbMz9qcT3xxQVHyXiDuXCuwVA0fy/P/wL0Be0FiCqm0QAAAABJRU5ErkJggg=="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAArRSURBVGhD7Vv5V1PXFuY/eOv90j/l/fbeX9Su2j4VFKwDo6K1olb7XOKArXWorRMgChHCFBlEgaIogwgyhOQOyc2c7Pd95yTIoyEJarXrrZy1zgr35txzv7PP
3t/59jaWlGxo9Z7Uv+oHkuequx1XjTvYXe0Oeaq7An96X4NR35P67KAndrmuJ+zUuENS3RX8qF0BqR9JfVbbG3p2bFSkpsuRqof2R+8KCCxwjSAqXdZHB5BZdEl1f+yfVS4rWf2JLLEGpLY7/BN9ourhp7MGwZRUdQV66Zifwi/Wv7Ok5iFCtAjkbXQWLbLRJ4sWKVokH08VfaToI0UfyWeB/x8f2dsREPbKHLqW33HMPlegYJ1TEI+U37dkT7st1VBxJ7u98n2PT2o67TUw1XxxuyW7MY4vP9RpyKkerzR0+3MC3powcplyd9yQc4OWVONv04lILBqWw12G7HdpPfENQF4aMqR90pKDAPFDvymSiMnEm4BU3DcLskpOi+zrsOWI2xCRpEx7Q7K9eVV8wbhE40nZ+8CQsnsmQFiyq82UJwshNa6qwy+1DwEEbeRNRLbdNWApU8rR92M+Wi+bI+cEUtZmScuErSZdtKIy+DoEEAlJpERGl2Iy9iYkrhcB2d1myLIdFUklZXghLBPLYTyREn8oLo8BZnQpKs+WgnJhOKCAbwkI0R9xWxIIxyWWSMqcGZflAEAkU5ICkOVAXLx2WPrnwtL4iFZLqXsLdlK8wYS6DsdTsmjHZcV0xO/E5eenQal4sEUge/DAzVFTApG43B635fM7puxoMWQ1EMELRJl7e4tfdmJ7OiZtZZHKDlO+uGtha/R2Ds2H5fPbppS2+qUM20enP5D2q4J5hI54us8nrdiaa09tuThkKod0ACwGq/w8YqkV0jGPdK7KzTFTLj8JyIVBU37DAmiR6dWIstZPj/VYznPYbWYFs6mP7AL6nmlLTSgpmjqpeiKRwNbgXvq6f8aWy4Mr+nptXEri8CX1rLoPE6rxCWkc0FRQsEVowm8RMQ29llwY0D7wYiUstS6/eJ2kOKEILOFT1+QNNW5Qjxued6TuoV/sSErsUEyOgU+O99lytNtU+XU2MswZNQTz71ZLmZTt1rgl25oteQOHNAIhaRrwycRiSA522fIl7v82psf9OGzCd2wxAcQfjMjOVkNK71lya8yS7/ttbM0fHTYnEBIV+WIJjphIJqW+iysyEQExLFyb3XCi8g3GHMJ39B9eH2gH2XVYYoSTEsMWEdjVJwSZkhU7AhbGPBv4ZFMgtMZhTP58mUQl0g6+2A7rNA5oskomE+re/na/3HiqnZjAzuH7igdg2wd4donPEnC64ZnbsGq2EM4KhBHDfTeCJCaRnldh+boZTAqCI4uS5E72GvLFHUN2tJqILGyXGZGzALGt2VRnTinG7oNlTvaZcnbIkaYRR04hwmiNLREa6f0STNqCl5zq9Yt7KiC9MwHxvHKk82VA3NNBefQqqK7dU0EZmLXVp0fd09/1zASlA1ajsxPY3izRslYfyVUNIB1/hRXeGg+kbZuSUCQm4ajuEXRGhQmfCYT1NTu/47hUgtsFi844sGTu+kteGVAOhv1lLKgmHF+O4TT14xQ2VD/QgVPZ45OLCNvvug0wq77PXnHfkJtPfOq5Zpze3K5cqm1LQMaW40KioyOz82VvDO1Hd+E3Zff1ffZd+PvXJ371Xevv2m/eDwgm+HVcW2RiJS60ECfcm5YIMTItQvt4r44W3mdo7sbfd0a1RfpmP9DW3HhqSjCakKGFqAo9rng7ooVOqwAuOiAsUzHnqT5Dcc8OENjxbp+4Jg25PORTAN/LIlwdZWFdl7YEt4Zb0qx0CiVBUoVyOTTJkuHgXlKWrIh0IoIYrjtxQpN/MmpuMzC5KR7HOqUeO1dJgrsOun/l035BUvtlFMII4KjKrj6xZGDOEZs6QRFZCqCi0vosIN/16HNmy0CUDh3wgh2D8sof1bSuTlC2FO47chorzjghLUcfIeFRl1AOjCzAQrAY22ufAxZGpL2LHqnHdgRJ3TjeLfDFS29Y0fp/PKbswfZkO865YhVRaVANsATJrQl6hovbskX4gJaLJuQAaFlxg7YAJ8yV16x/GZ2UwnkfDsH3clZSPftmJl0/ORU6xxJsOSxSKFhVec5X8OVkufrG1Z/BtrVNOkp71HbSQQv7l468QPbD8XiK7sOpma2vfxEtcXdCk58Tjupj4EMAIXl1IHub94dlGWHohajJ9AUjIq/9ITnteXuqcjuujWhan1kNK01S6PbktAjJa2QRiROaLxiTOSMm82ZMXuNTCTSE8RmPfy16FJDHGshzr2bhQssTOYHQ1NQWbI0er8phKrDKsnsGuIW8koJFkN/ihRxLPXtpYFWNH0UW+BXEFO9nOvOadwrf3SCnPpVSCI56n1QgbA+opNtSVmE73h+UY70UTUG5PxkAAM26K3ZM7r8Mi2s6onr7VERanoekBhWFbA6c2yLrgJzt96ozg6sqhSon27IdBYgLw/qMYd6Sq5EUD8D5szlwXiC9aYssWjGZ8sVkBlsyjc8IKgJspzz6H51OQAbUIpe5NWqo+xNLyIE6LTnkDqCkoQ/IWR8Vf3YHLhjIqh2SeV9QFvyO+mRVgO30oyAmR+SAyErbbByKeisH5kLKp+gfPIVVgrbEeomxNfHMVa73kcb+FSRKfiUVy1p9a1tDIKwWqfHreMQ97ahrnkcs8hDIM5QrNivc5LVIxlmbNnHWjUBYL2FreaaB8fBrGtZWYjGH586W0omNFjkDZ/26BXkNJt/eYsps2ln/BwicexB6hI3ahLKS/YrK8kSlHptp17wWyTjr43mE54uQdEyF5AE+zdBbH8lszR444gxKEYroHuljn0BujGqLdEG1vTcQFZop5rzpnk4lMxZhSNZApVG/xCGoqeYoI/jiO+nzhzzzzkAyPnJt2CsHXT75tssn9a5VWTA1j2SAcPVUbLTGPM6hvekw5YvvwV/Ybv/+AYCcRwli1wMUe1FH5ZkypyhepKEvqEiOL2x7rh1VZ3baKXnfhe1ko97NpCMFF2o2OiuBkCforKylaWZNqZoIS6CUjitQ77x3dl1ViEB6pjXAH1HC2lIxj0qL+/slsn33C12GuDK8KieQ2d9EHkylznqJbim5OkKxbKirWSh8gsoc/8x5Pag8chz5ZDOdmzVqKPdO9NHUqBaqVXIevjhT60jKInyEoXod6UQDEimWL/n9ebyM3EHnZWWytMWHGitDOgXRrfVuwTxC1OeRDqw1qHhvIAo17qhq4lEk3FRrO5HNlbf5ZWpFc8dTHP1kYwKgUnvpi8vsarpYg4Wo5zbJ+LJaJBOKL5aDqi7yQ7/O/PkSOion2w+nrUEhbwEqjY0nK+vw/I4WuQLAmcZssB+1lc2qznnFM1e9lj5s0J7KB5C5NeJknVx2VB02I3y4ENbbmANdH0V1Cdus6mY59GtOZi1Eb2YKfhvzFoLRaYU+mfPNlVfFF6I5872kkDk+CJBCXpRvTBHIlig+nzk/5PfFrSluTT5/KvpI0UeKPpLPAn/wkWpXsOev8TPSrr8IEPzU+GKNO1xw0W2rJi90fEldb+wfVZ1WXP/uubDi7J8xTv0cvdLtNDWMfeKfoxNIrVv+XtcbHiaYmu5Ps01r/2WhblD+dtATOV3XE12
C6WOVnVai8uHH6/8FnrFldFmRebwAAAAASUVORK5CYII="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAf8SURBVGhD7ZvZUhtXEIZ5g1Ru8ii5S57Td6lKqpKL3NjGrBJoB8S+LzZgMPsuCTCLgJP+WmplUJhFFgWu1HTV1FgzZ+b8p9e/D+OOjiYpFou/jo6M/JZJp1PpVCqfTaWKqRc4GjAKhcJPI0NDf+Wz2ctsOu0yqdSLHgpkWkAUstnl2elpnTw1MPDihwJJp9N/A2IwmXxxALbojnw+/4v8uH8tTTSA5DKZP/GJ1zCHd86OzODg0GtrA0AdhGgMxBOdsUaagyPWSKyRsIQZ+0jsI7GPhGng/+MjA4mE4whaMZQzyrhHxCgKH+nv7XXJ/n6dXFi+K+RyLj04+AgM9xkHCO7lZYywv8jMLzSh8eL5uTk3Njqqk1xeXLibmxtl+0a2ATE+NuZWlpcVhLQl7u7uzu3u7Ci4KP4SCAT1wmeRo8ND19XZ6S4ESLVadYm+Ptfb3e2Scu7r6XE729s6Ts0mYBCAfHj/3vXL/f6e3kCTBgJhgqXFRX1puVRyW1++KIiH+3u3v7fndnd33aePHxVIpVJ2Dw8Pbntry+3v7+szl5eXCnBPxnJMiNYA/pSGfIGYNq6urlTNpbMzd35+7u4FBBNWKhVXKVfc5saGK46M6MTcr5TLqjWkenvryvK7JIsA1Iz0TgkfU/kCQfVzs7Pu+vrKzc/Pu863b133hw86GRNge373dHW5jysrCgDwmCIlvoSgnffyHGMwozlzSxoZTCbcUD6vpmElOCPH9fW1uxcNTU1O6nUcEwedF9DTU1Pq1HMzMwrk+PhYtTU1MeFm5B7j8bmnOkpfjYB+bXVVX4gpTDCT9/fn9XWdvHkcvmTiHT9aLDZSQaQ8AmrQkzPG5GHkUCJHQ1js/fXr11oIS25BIzquDmhLTMIkaI9x5BO0y9mvvw6MGh7CvjNT0wqEfEII46j4CqsjGgDUJb6BeZCJ8XH1iWtxdBzXfITnhwuF1kzDikhUOC2eT7RYEiOpmbqZiDHcQwNoi+dw3CvRBiaaFGD4CMIiaiH8eOfBVyNmmoN6TiAyWJnsJukLAca1hEyKEwICQUsA47B84vUVtMK9SNWX1WD3C1E/svH5s5qExGVJDpt3vnvneiSEFxcWNFcAEhPh6H1kXQE5JKYg2ogczGI1KxIQBgEGWzMJkxJBRMiGJLDVT5/c2tqaJjNAcq9x5poc3FuXMWgNZzdgfnUn0FlBjyYWJKGZYAKKnh1EBX5BBvZeZxyhjrAANBlU/EKrLymZDIscHBxoAcN/jHMUh4c1bAlNtGj3MA++gyzMzYdW4baAMNmZ1CAEE7JqLxAyrd0LowOhQHgBno6Q0KxoWVG8oxpL5iWhEQ0DdQLFv2frqR7/ads0AOGFN2LzbSnpTKCJTqIFp0X2hA6QwCyDKleRUM8Jm1sRZ6X8h9HLUI3gYISybW8pyRFwmAJBG0QV181McBdAEq5aoeUI2zoNZWhMysEqqT044MnJiYIgqaEtsz8+QdgSQSblUtktLS0p1w0C4wuE0CVLwsROZWIjOzYBWZMVe50QsPgCZhiXSIKdWSk4EUrAO1suerxM/jCghYuXkS/grSSoEQlZL7Nvzg9MZqBg82iJ7OqXVUO3Ny0yrHyrmWTFQS9sBsU7eK5tZ7VGKczZDIBOLGD9uOk3pfgo/Yh3DCAwG+aDmxBtURcQGr68HFP4Hd6J0MRSPayJHMuyURYUCASHW1lecaenp0qOtIWoH2dyjetowHyG8ZBq5Pjo6Ene8U2mwcloohDCl4QF7+BsYQmLfwqIlgOfZqqldoLBAIFbIKycNG6tJtqw6xaqUAYj0LSbkCQjU5yDCl9oywm5QZjAVs7Z0vmwaKQgKX5dOAdNuGkQE9KO8vy6ECdS/rJkWBbYUl/DA6zCgDAhnJVVcTaNUHVhclGEpGicJTJVNCDWZFEzSNPUGc7V6m3DNIyl8HE2EkVpIHyzQpjMXDzr5zehpjEg9LYAoO7wQuvkaCmVh0iYw0uNlX3Z3FQqgAapSQj1yc9PQoGEmQYg5jtMsrRYowdrq2u1ciAgTSO0Jm0D8XPWZiA4KELzbvRhYqzmQ1TjtoEYyWGVkB3jJF4gODcmQeAm1mhZl0cV5vmW8wgvNh9ht4jVEoaciQDEC4RCd3x0XLtez7hGNWvmWm0fCC8im9aOf4PVgFhusH4GLmMUwGgleaZt00xIy8gWRCZV+2N16az0SCM4LOkeIdlZmDIxiQyhUWsbCLRRt57qRMcSGr2tESZWjJBlrX3gnrF9vj9oeQ+tObMCxOqF11lJYJBqJqRCIzTjjZD2ZOdJ0WrLCQ370u1bOPISsifNFpEBgzexvTN+s2/mVb83kmjGW9oNAASTYltbpXcfjAnpXQBEOwGnpRFHyDfWhHFGe6R7byRFDl9vNrRoOZdqSh6A+DAxK4MWsOLDg0OdaEdKP78BQEKjJECQJN402qxRjwzEQpGUTOxTebWW1Cfxdv3mtOQVihz3ADJdZ2q2ELYmgohS6P6I9S9PcQiuYXcA07eYb3CdFhUSjemIrKDmKrSviUJ6bcOvuW8BjLWrtkPQ1kZNFDDPMSa0nXiOSaK8IwbSEmeNotLnGhObJjZNmC/FPhL7SOwjYRr4j4/IBwWF7+IzUvnU/PsAIoT29+/iU+OhTObndDJZfW0w9jn6H3xYEHWXuFVHjDJegeRyuR/lmARMK98FRZkg6pjGf1kYTyR+GM7n38gfePbl4VvpUe7k/GLHP5Xyh9ap1UdoAAAAAElFTkSuQmCC"},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAApdSURBVGhD7VtpdxPnFeYf9PRLf0q/tT8oTRtCIGYJqy0vccKS4ABpEwJmCUtYAzSsJcRmDYUsZQ1hSdhiJM287yyakXT7PHc0wjiSRsYU5/RozpljaTSe93nv8tznXsszZkw4qoODf64MDq4v5XInwu7er4Jc7myw7H9/1mEAwB8qAwNbS339XinXK0FP7qWeCoQgot6+q7JylYQA4XX3vPRTgQQ9vTsIwi7rfukA0k3PiHK5P9nunkoIV0yHJepAglzfFsYEwEwzkO6eUQbmdFqDa88ImKIdIE/DoWORiTHZsUjHIlk81YmRTox0YiTLAv8/MeIvWiw8vVayEp/5C3Hf4iVt65y2eMTOXyDeWwuh4nKSXz0khaEPxKPITsFAz1iAs/Pm6+LuwNuS/2CNFFe91xrwOFWYCcRgMefzA2
I/Xi8uFiwVixJEkbiD74i3ZGmy44WLxBneJOboUXH7B8Rdt04iEbFXrohZ8FZbVmkNhLt7512p4KH+D7ek8MZsiZ88kUqpJC4t0DVXLECYufMkuHxZ7ysCmOntwyuR0qVL4r42Uy1laFUCb6IGWwLRhQ4e0odGDx5IeOGCgpByWeJvv5Xgu+/EO3FCDBaKHj1SIOHFixL+54pU8bqcz0vp4r8lwr3e1avifbJBrdcoo5oDWbxUDKwRGyMVuKJ8965UsFgVIKRSlRivg19+kfDMGXE++jhZmGB+/lnKuM731SCQMjbgP34sMUD5Wz8Vr4mrmgKxCE53z14FYvftF/vKq+LOniOlsTERLEBzu3CVmfOmmGPHpQQQLoP31b+KA9fQOqWvvxbzF/zenC4xsK5H9yxdNkmLwJ+FNWvFHPqn2O07xGzYKM6mTRJ7nlRhIYPdcYfu2nVSgOXcvXvF27JVXJjf2bNHLVK6dUutZbZs0Xv1OQzyBmCaWwQBaEZGdGdwhv7U13BNtVqtXzenz8gYAPzqvjhWMLzODEo/twBGKmifWYHafXe5WHCBg13yoeH1G2r2KmMDlqElinjvgDf0vg3JfT4C1unrl6rrSoSzuAp99Xvvi1m+QkgHjciwdfougT9nvaEm5WH27hOLdKzcvy9BoSD5jcPiI0MsuMP+7TWxNZe4mzeLh9gRcE6IIHXx2jJO9uH3h4bETsY1KVGRL6KHD6UCl5iBQd1RhIfXzQ5ALlKSbBpbTyK+x0IWMVbB6zLJb9Nmsdu2J3GDDDINSkRzi9A1CKzg6jW1hnfsmFhkiZumaqWi1xws6Hy2S4OYceCCgZmiBuB8/C4XTw/GmssMbJDCjYHw4dghzc8jRNCa12cpk5JFzaFD4qLmGKSqQUq7yKwS3OX+4yMxNSa1b3aJC2Y2uC/A9QBuNKg/rEmTJDTQO9LVgFmLQ3jAl6fEGx0V/8xZsf86Kd6pU+KfPafvPXxmzp3Xn/5ZvNcTn301Ivb4cQ12AvOagMicBrCO2Jmvg9D2qWVo5sj3JQKh6RmGEjnIDAQliU/fBzzxGe6LkeY8FBAs2UosZVZfsqG/8zN9YPz99+LAv6zCeiKOCogJZyPIbsVKZdb6Z7gvD2bmUTxwUDww8QsDUkaR050x/RjMWCxAbHDf9sAB8Ujjtc883FfYtVuBMIZYEqYEhKLI3508sAx9oSKJGgT+pkRgelbgAheEpQUtTU0snN//eeKa06dfjGvcXbukbK1EKGKaeti1QSrbkyeThUBqhlkC5nSQGeQeD9lUALgiZEIBJaBVoGYGq+6clMxhX39/YgmYnC4xBw8mpEZrgC1dWg7ER64oPXgoHjLLXbNGHLIqQNfVXJOhYctgNdw5F2WA0hUgOLtjp4R37iSuIqkhkBmIVGUG7OkjjWPUF9UjNVAWMeKuXNlyjtscCFL3CXjEu3ZNotu3ldbJnDy0sOG6C5lQD0JaDoAZzJo94A4fUjH9He/ePXEpFZ9Hj9i3B1E/rJo7dhwJb94UC1o3EMcuS3mDcq7u42K0JFmYSv7cOXHBrM1kYnaMYAeUi+7y5eJSFHPHTEPqznbH5XAphbPN6HGyCY0P4NnEpM9wAxU67lWwTPN2wbY18E2FTLOf47OAAf3h38U/fFi1CwO4XTCZFjF0D6voogknr9FS43atlkAzxiOCLCDdt2XJLIuQvAyqZ/jTT6rSS5CI6RmibQiQCXbt2qdkBXcUdyRqLrz1o2qSF2IRpmKEFNQdjj2RGL1NDFDx3XsqoJnGRbjCW1gTwwRSk5UR0ruRAGpWb1p3ejQ1tAUXzFN9zwZL0koobtHtO3rdA5d485Nsop4toAfmEaC7cykheD090zrVgF0zgZjR07pgAX2NjwXpc+oUWkUXRD3xV6wSb2RU7JEjCkAt+OixhHhfQq0pHcd59JgEEFl2/BSh3WmANs81IHnIPW8WCh12ZaDIaREePgpdABYlg8Z6pfkRgRRZNibVYNGX2lZCHuoO0cPGN3+QGEEYYzJQhRpTi6Daest6oGFXa8/jIG31OtsMvPfZ86xfn7SgKBXNAjjTNSmQAD2vh0D1kC38WeZUgAsyRhYgO1j6IQUMiqJeR/FjX6wFEWBVPN24oZV7cuK5ZpHUNWMIVhfBSqlYrAVrHcjCRJmP5xEf4lrfox4ZyEntFDGaaDa4ybRIPVjZ1TUKVlpkPBAEJg+LlkOBMMuGhxMrXbqMWUpN4U3InLaB5MkXM9HbsKJC6EQ/3n7qmhoQDmx8DHN4UJtovWHB+3SbXmOr0Uy7ZgJhjChfYPITHj4iIZg2OHJUymghJrrGwA1kVN5vlOjQjjDL0AkqkJNfTg0IH0JNwvFCeqatpAYrLUI1B45Q/YKRhM5BUKeSuElEtIeNPLdF6sGKGlLsG5ACRhV5CKYI2fOMRdiIo+6oPERWpY22ctEXXyRA9qPvbdJWZLqmHqzrP5Gga14y7IW54zt3EyBUYIyDudg5yr+6YFxnx4UDsCsPu3NnEjdTongA8Wd3JZoUrUJaa+zuPdrfuAsQHxz2MT7IwjUZqRYAMB4G44nJDfOoI6hDMHxx0Fhr0du2Tcz7q8XDrMw/f17nJTx0FLF9uzbaaiGwp+rZmk4h8LBWOMknzXRuY9dQ9GBRC1Nzl+mCaYugvct9jC3PXxAPTFqAezi+VGvAcrprFkcAKoJtfRCZMuu6Dyc5Z+Vos7ZDgtCMgSjiGILTRHZ0VGcWFO6wsUINUmt8840Ofy3cRKVWhuqnhdLpEn9P9W/bMVJLRQ+1gXMRByNMHUcxIBlseJjPYQ7S1a9lDysr5/C6EBjY1EhMrQkRZTF9pHx4LmGku27WPiAG+J0Tjiv969fFYDRRzwhuBCluoUGYKZwaNZqbjQfVWjy30w6QP/gHgokmpwXTtiLr7ztZ4rntP4e1AzjjGzuZ7UTbYKb41aAOkPZn8VM09WRd2nFNxzVZMdOJkU6MdGIkywK/ihHbkxv5TXyNFF+2/m0AiXr6Nka90/PN72ekYtTf/0doz5jfe56sX1/k/cnX0bu7hwW9ybR+HZ1ApK/v91Gu/yLBRNNkmfq/LEh//+/i3oG1+JeFhzB5BOuUX+b5XznR72LWCgrMAAAAAElFTkSuQmCC"},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAsFSURBVGhD7Vp3VFRnFve/PTl7TuJmVxPXWBERkTJUKTMqDGBvsXeigh0VRewFOxaMosauGEQRu0Zj7KisBVGjAhobgthLInbv3t8dZ4Iwb2aIrnj2zD3nHZ3He9/3+2793TtTqlQh8VAHatw09Zer/LT7VD4BKSpf7X8+xmWA4eQdWNZVE7RGpQl84qoOJNVHvgQIQKjU2gxP/0YkIPy0H/0SIAwiCSBKAoB+z1KummC1yi/gtZiiBDRhAMI
AEkrKHAUPXsrZT3ukpLUBQOwf/ilWIAV80qqRwhFq1YhVI+ayttVHrD5i9RFzGvj/8ZEabn5k5+ZLLr4BRhmdq1pLTj7+ZOfqSzU9NORqIeuzKI9UdfIiW5U3uWkCqVWn7tS6Sw9yrxNsAAOGZ++upsq1PGRzdXAzatstlBq37kTODMoSM5kEoj/1tNlzqVuvAeRRtz7dvnOXXr16RXUbtiAHrzqyCU7fd3AUzf1hKfkFNaV2IWEEOXj4KFVzqf3+QGp6qCmgcStZ9HjaKSpv50zZOTn0JD9fTl7BXiVmquTgTtt37ZbnPFhTtf0byv+37NhFpStUpyqOHmTDWq3lVVexXVHUCNRdwd6Vps+eJ4tmXrxE6zdvo/z8p/TixQva8fMe+nnvflqwdCVVdfKky1euynMbt+2gvQcOyf9v5ObSxq076Kfde+nQkVTq0W8wVXf1MaohRSAOnhqq16gl3bv/gJ49f05pp89S1qXLDOIlm+Y1XfrtMl3PvkGr1yZTSO9w2fgi3zt7PoOuXL0mnx89fkwXMi/StevZlJN7k8KHjVI0lSKQ6uyc46fMECDR02bR38pUpHK2TpR9I4cePnrM6vakr6s50r+rO9OCJSvo0uUrctLPylY2mCZ5yzb6rEwl+qaGiirWdJN3nLzrGTWPCY3UoXbs+TO+n09DRo6nsPAh1G/IcAGR//QpDYwaTQOGjpToCGrWhsZPnUH9+XPP/hE0bnKMwa9C2MnxXHjkKBo6arxo2bF23SLmUQQCB4xPTJIFCwoipqCsWb+RgY4t8hxMaEy6hPaXVGBxZoUKA5u2pgbfdpBTQg6nHiNvbSN2zGv0+PHv8ncfbWPS1G8mz4UOGCLPbd6+k3wDm9Ct23fo7r17kk8atupIQc3bCgBjydBk1NRiFf6rSk1W6QTZYMK0mfTFN7Z07kIm3eZN+g4eRvsOHmZwjenz8tVo7KTp8lx/NiH8KedmHuXm5XHYu4gvRfP7zdt3M5rkTAKprvIhRA+iASbByXEiRIBecnkze3c/+duDhw8p79Yt2Qg5A0CePXsmvhUxYpy8cvXadVmzcMZVBALTwLGQHSHzFi2jMlUdqCs7H+TN69dyDwuOip5Kj9iJIQhllATUJOSOwjJx+myycS6abY0CQerW1G/Op7st6yxfnUj/rGwvmRROHDNnPrXoEEJ//7qKhDAi60JmFnUO7Uel2XRVuOYgZGvxOjBFxx59xH++5Tplz9naWO1R1Ahe6BsRRdNj51Grzt1p6aoEWp24nhI5ShavWE3L4tfQ2uTNhKhZFp8gWRfPrE3eRIl8rduwmVb+uJbiFi9nEBECDFpyUajGJn0EL35ZyY4mTJ1p0PDvf/xBT5484Suf030+3bl7VzR37/59+Yw6hL/hOb2sYECVHNxMFj+zNAB1JHK0LmpQW+zYgUEB9DTguz4DqVf4UGrQsr1spP+brYs3RU+fJe/BdKAIpuhAsYDs/GWv+AgcGaGNxJTFxRAyZeb3UiTxN1wVOKWPnawL55lzF7w/EJTwkROmyIJ7uKraOHrJyRCyAU1a0XMuiG/evKEmbTobosWZ2Rs0MJXBQX7kwvhBTDOKgaD4JbNDQuU48Vc2tWjJytWy0f5Dh6l8DRfJnGBwyBNf2ThSs3Zd2bHjaVDUGAH+XqZBOvaoG0zeAbrxJ0yDRDd11lyDM7bsGCLlXc9JkAAXLY/naOvB0eJCZTn/6NmcEhiTUYNkBQ3gqvmWn0SOiabTZ38VEDBJ1NiJAs7LvwEXv3GUtHGL1Be9AFTMnDhxZhxKiXQrAgEPDY8cKYUuLf0MIZUXlJSjqdSmS09JXvpCZuPsxb7hLswdhXLbTh19hJzPyNQ5sgKZVgSCrAg2/uDBQ1kIpPnosRMUx2kd5BiUD1FjbEaLDZHG4bCoukhuvQdGikmLbxo+FcAgMgKZ+GBDbIwTA4SSigtvVIOdFJqCA5saLJvNI1gA/oFTmtscG+FZaAKJ0Nzz70yezQ18sRic1tkH/xa9Ci6G+tShe2+aNW+hcJfaAQ0VfcJihqZ/ULIomwiaKXLx/YKnhiYmxcSKT4HBo8dx5Pffu9ODTyzkvgWM7DfuW9A+oDW4lp0tZf/chQxuP3tKVcVmMMdwDmfIibR0vq/cIxdLI3DMrdytQQDiFPc2Z349T+mcR14zMYJ06tnXEA0AgrwCOZByVPKPJdow+6UA6gz4BaR730FUrpqTRA56lLT0s3IfKV0fqqUr2DJfHSH3d+3ZzxSihkSZ/gLQYocvXoDNE9ZtkIX7DBom/SscF6Y4ffac3Nez8/g1STRnwWLa9cs+uQ9TwmmRd0ApY+cvkgqNcmFsQmAyfAsCARdFzQAYsHJkW4i2aRvq9rbllBsm5DaTKBdff04FRUcVFgBJlqXRhCOzHjt5ilKPnzQwMHBSRA4qrU9gY5r4lgztPZhCXvUakC+PKUJ6D5Q1TqafVnRgi4HcyMmlDI4UEKGMrIv0lNtOSMuO34mzIoOidxnGRRGyjosfPsM/QJohqcdPKGZlC4DofAQjBSyMYliZqy0iSA9EH74w5eQZc+Q+iDQ+w7n104IDKUeKPw2w1FmhkT+BuNP8JcsFSExsnAABT+k1KFLuoRorRY7FGunKzfOXFe2E8oF9nTx1uohGEO4btmyX++AmaLRwDR4+Ru4lrt9koA3FTGgIX52zYgw1a+5CmsdzMoTizbxb7wCBw8JsxzmjQjp27yP+gJnIyPGT5R76IfhMsRqsP02jA6IketNg5oEoQf/7/PkLw7APWpoYM1tenx33gyKbt9g0INBqDsXgFu2kJz6fkfWORnD6Nl1D5R5qEwaB0BL8BIkNgj4HwP6iRnRRE8rUD20j2DjUrY+a+i3aU1VuMcBbY/nEEF1npzMBgGC0BYkaM/H9nTWMm2jUGtgYs4+TbzPr6OhpwuKgkctvh3hoxvVTIQBZlbBOgPSLGG50EqBY9MC0QP8/L28rkwDIsNHR1LRtF5mPJW3aSi9f/jmawmxMPy2CpuC0qCcwDQpkQpJOq8gnxsZWikBA97BpbNwiQ69S2Fkzsy5Jw4W+uEnbzoTMC0GVRu4AodJValfDjAWkGyAt9pFqzCOQSQvKVSZEa5mND+JpIqaIYG3lONNis2Mn0uTRn3bvEWcED5k0I1ZakfQzuh4IEsydICbWFgPBaTw5FFO5yGH20a5bmKga/oEEhcUcPOvIPBV1B4J2A3N48FY8EzFi7DsHwVzF1LcaJsMXuQEnhCMW5hCwP3ypa1h/OXkYjyaQvnEP72kaNJcvCVAEUaERzqa+qTAJxJKvOAASPlWwbwEYcA59W4HKbK61MNvXWMI50WYojaQsed8sZ7V0kQ/x3AfRiBXIh9BAsfjI/2LDv9TXWIF8TA1YfcSctq0Jzeoj5n3EN+Dwp/EzUl/tpwGE2dSqT+OnxuogT2cf7cuSBiM/R3dVB8SX+M/RAcTRr9E/VH5BaSX+A32AcdNqv3DXBC1kQHn8y/AXzLpfufj4f7Trv1gk6Hi2+zmHAAAAAElFTkSuQmCC"},function(t,e){t
.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAu8SURBVGhD7Vv5cxNXEuY/2Npf8uMeVcveCTnAQAgQbAsS2A3J1oZsllyEYCpcBpvDJzY2wYT7xjixgcBCuCEc4QgQlpsAgUAAAwuWZka3JVmWddnSt93vWcaALrMUTm3pVU3JGo1m+nV//fXXjejS5YGlK/f2e6W0cXVmkeVoRqHpBL2effyHme55/9Fmhq7E9NTA0oaNumJHk67YCl2R+YkewhBdSeNTmdPrbwypCEFXbEFmofGJH8KQzGLrFjYis0B74gZENt1lULGjb0aBFtIVdY4n2gzRFdk2MCY6Ixztn9kls9ByioHZ6YakU4qmDGmXnV1SHnmAq1IeeTBLUx5JeSQRc6cwksLI/y9GXppqRJ8pRmQUxNa16fnymn7Tkte+SWVNzxwVvSdrpOo1jFxgwEcLFQwqvmeMrsiIvlM19JioiocPnaEia5EB78xRkR7H4PsUWjwZIHZdoGLpTgsmVFrwSrEKu8uPlqAfb5Sr6N+64z5TNOTVmFC9347XyIjRS4wAmnH6Rxd65ShJqb+4HulLD/r7JxrdNIzvbzei29i70OwBeP0h9Jui4LkJCvpMVtE9W8E3F93iukGFBgyezt8BDlzw4Lej9OQpBT0nKXg5j70aPVwxDdGRXnh+goplX9nFTW9rPuw564Yv0IJgC3D4khffXnZj7SEH0ughdWafMGTfd24cv+oRfxvrg/TegyOXmnDmWgNyqqx4kUIcLYNiGtJvmoa/zTTC6Q4iEAzhSl0Ad0xBNLeEEQqB/g5AtXqx7YQb2ZUcCjpnDuCGEoTeGhTv3d4QbmkBKBYPTGRU0Rc2CpXaMUN656qYt8UCZ2MQC7bZ8IsRBjwzVg/N5qMHhJFGwHx6jB7dxilYc9COuya/wNOvPlQoNKrwyJ6zjfjlCD2eG6/HCxTGtEkqBlBGsbeTZlYGYtZiFSt22zHjX1ZM/syMfAKk2xOk8IRRvNaCwrV2ZBEwh31iwLytVnpvQy5dN5c2wOv7202YsNKIwjUW8gbfx0Ze1ggrHTCke7aKLccYH2E6CBTiNYyWlnt/AyHsOOFAyVrloeuam9tf10yfUzzp++NXmtE792GcxMQIu/DNWSr+OceE3CqT2OG5Wg+GTFcIAy1obPLjTfLEX0oMxBuKuG7yZ/K6/d814K+lCmwNIdQ3BASfDKfP36ogAyh80cgwbtawC//4sYoyCg2v+YSVrlkG1KrNsDm8yPtcxcmrbgwpNeI3oxTM2WIT1xWsNuOZcSrMzhDM9T48O86AbuMV+r4VHywwIT2/Ax5hQHGq9ZuqUIYQibWEMLRM7shUH2gNBWB2+PESccrQMhUuTzMsTj/S8yRnmB3N8AeakU+Gla6XmzFYfOKeDzJunNBw+mo4fa1R3KDmQD3+NEYh8En3hyiH+Vx6ngGzN5nhbuKUDVMqm9AzRyPjVOIOJrn2K4SF261RUziqIf0pJK/TDq3kfl5fHnPjD6MNxKSSRVfstmHEAg2//lBPKWwQ72+qXoxbYULXUQZRc/ja/tMUEYoxK2yYXG3HyEUmqkkd5BEuYnk1ZmJWG0YuVLHhqANb/+3EjpMurD/swMajTuw65RLv+bM9px306sROOreTzvFnm445sfpgPYHdSFyikpdiD4LigpW/+DvyxPxtkubZ9R5vAE2+ILx8+IOwU1ZYnQE4iIH5vThPB1+HMKcwyCAXXiA6iCeOEsqANKLkso3SkG9/8OLFXAMGFaniyMhXMLFSw5TPjXh7Nj2IgBz5rDdV3QVbJfWv2GUR4Xpshhy57EN3uiFzDKd271yFimGTeNiSnTZRJPkzPl7I1jBnswT2yt3W/92QHlQfKjbXixsev+qjjJA7Y6XGEiFApTgcDuHducbWbNEEYfWYqGHJDmnIthOPKTQVm6xU/Jqx55yHvCB3/WdKZQYtLya1Z4mwmDlHLpTc8/QYFe/PU7DuGyumr2XD408sE2KEd8eykNmTPcGhYZws3cUsSvWHvMGp3CvHgDqT1CF3jD6sIyM/WmQkVtUL/mFKeCSMMLOm55NWJdAxFniXTHDlG6z4sU7iIhxqwcyNNmHcq6TKStdb8NVpF+opgyKLjVpOFfztTzlkdMQYJsf0COvQwhoV56434Ic7XqoZfro3V1C5zlx3Y9RixoLEDHuOGbU7yULOntwqMw5daGj7Tq2hEQOI+lnhd0ihsSuHlhnhImHEBtgbgjh/0yNoffRSE4WHPMXKPsoOGUO9yCjWqsPnGIncnJhaTd+JIRPZsLgY6U+FizNjGIFQV0hhoocLjNAN4/U17XfMIO1J4WXpGUs4JzSEL+A+hQ/eZaz4Rh7MD+IHcrjSJiVvbFKG8M65ZPNrtKP97rk+fbzMiMq9DqE9BpdoMTGRtGaNXDggT1ZRzpoHDz7fPkTsiUU7JPm5PQFB9+zJRA14Qo8wea09aEOtoUn0LarVB6X1uKk04QadH7XY3FZV0wigMzdINr30nyZqvu439JF4JEJe3K3xUm0B6m38uGbw46rehxBraVpjl2tt2dDekNPXvYKFk/FGQo+wq3dS6vGaVKlQX8OZo1JBM+DKXe7sgJGLLUJx8bVdsxQU1Mh2k7vA34/WiyyLHNzXxDIsbvoyD2w/LmM+7XNOQ2ZbqhskA38kr/AaPteCt2absYVE02df1+PoZSkt68x+VO5zoeaQGzUH3aja34Alu5xULgjAUSYESRuSvVKhmqFSM61SgTPgh7tSRg6rMCN7VWvdace8MnD3L7srIMpGNHZNaMi24/ea8PO3fNS9+XCBXj0+SfcfLLSIzHl/vpF6HBJD22SXd+JqI9UfI14rM2PiKjOdCeMylYpYAE7aEKOtCTcNbtxWG3FLkVMBXh8uog6fcMMM2m28LIq8vjrdIPpixgeLZl7naxtE5Y5GjAkNiWAkp5KaJGqU+tCNumfXEVhlaNiQiChmwC7eKTHFQprfcz3KrmSPAKeueWIObpI2ZFp1dLC2N4R3v+agFEvLaa7ChvSiPndqtQzXoQtuMUvpUPXli9tnzfjlCil6Tl0aR4w1iHg/7BEFe89y6YfQJiwL+Ji+ToaL24xYIjqhRyJgPXDeJdKx+oALVV87qbWU4ifiEY77i8Skl2gUwWvMMln2ufjN+lIasv6wU2DmkTwSMUSKIh4vRA5JrRFDXiZ+eZUGNNz/Bmkk8Ua5JuQhi++FO2TmrdpX/+geiYC1YoOK10oN+McskozlemoxJaFFDOHdZy3m7AijVvGK1lKqeRWVe6QhC7YTbmKwa8LQRAzJrdLwXLZsI7jpulInDXl7DmGB3M8u5x3z4lZTSEaBMwLwIac4P3ODTYyvHik0EUMmV6k085AP5FlahFk/3WwlFSeVvd4sATxuBU2FWmUhG7L5mMyk/NVUl6JMi2IWPVZaPEPr+pEeXx6
Rw5fy9Rrem2fC3K127D7TQNPFe0J6BmVIZFp05Y5HsKcQU+QRzrLtJ+V4gvkk2tgqpiEs996bb0LVXjsVL7lLOUOLrJCcu55xoYzc/e5chWaqcngzieicd82CiCv18+PraMYiDRm91Ewz+g7MWdnqnNa5mXx0iASRX4wair+wCDHN6owpvHeOHhdvygcd/t4tuIe/v4gY9txN0i53ZdPF9+BOMNY/FEQFK++GG6bztW4x++DZejp1/gxAJii+GafmYBrs3VKlcOLKynP4vgRmvqZ0nWRT6Uw5fexD8qHDDRbH7WXaNacbp+aDGoJTU1eo0bjSgnM3Gmk0YRYZwfhiTnmdeKR6fz3KqRPkqRGnc7x/qYibvsn8E0dk4Nef+5bWZku2qzJ12Tguion6oIRNeDKaM9FDkrnHYzEkmQcluiZlSIc7vUQufVyfp0KTCk0iLKUwksJICiOJPPAQRjLyTSd/Ej8jzSz6iRiiK7J/IX9q3Hk/vBbtxMASe1pmgbG5s36GHsGK+Dl6RrFl3ZDZ4c79OTobMqTC9fOB0+0X2ZjO+kV4239ZyJzm+NnAGY7KgSUOc2aRMZhRqLWkP8HjvyEEP4klh8phAAAAAElFTkSuQmCC"},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAuBSURBVGhD7VsLcFTVGY4ICNm0iuPY1rZU8TFVO1Pqc9Rq66NWi33Z2qmjjo+2ztiiTqtVEaMQ3uRB5CEkRlCioEWMLxAFeUl4KEYQoSAIAvKKIEKye+8595779fvP2Q0J7iYbwgSnkztzZrO7Z+/5zn/+//u//99NTs5BV5Q39jITK51kuhfPM7mFi0yscFl7jAYYUazwRJP7+FQTK44jVgTECtt1WCBR7AmCKF6LvAlcXECMbPdhgZjcomkOxIh2B5DadE6UV3KpiY00R8oSDUB4JFOONAgBkxPmFi52jtn+ftF4TQGyqANIo1PosMjBPtlhkQ6LtMRTHT7S4SMdPtKSBf6PfCRnCCAjN4N+yePr3Sg5ZU6noYA8z0LrZMcjOQW88WBE3xiJ+AXlSFz4JKJjKbJzkxo3j8KqkwAcABw9DMFJo1B/cQX8H40Humeng5sHwoVM3gjE/z0L+qpKhD0KoXbug2c0gp6PA52Hud12Hgz/98/DGzgPuucoqEsmwgeQmLUWkWwik/Wy1iNHD0VwymiEvKm/aBN0Tn+Em76AqVcIOw/hIo9x0FK0hH5hFQLO0z1GwpxQjIh/m+dW8b0HOQbCcKArgWc4qswWkQ9wIfXAW7wlEKzaiaDyA5i4AlSI6D+roavWQA1ZaBcJ1tbCyLzKFQheW+eAfLqXYFbCTFuNxJsfI7xmCo9ucFqfyQykyzCEJ9Man9fD+Nzrkq3AqlpEmvYJDczqWvgbd8OMfw/+1ZUWhLyGFTsQrd1tgWCvB7NyJxKf1CLYvBfhDS8CRw1qHZCIH/D6vo6wth7q7pm0zn3c+SPQm/YAX3h8PhAhjyrKyYc3ZAH0mlqE9Cfk3I/ghCJ7TOGkD+znQs6RY5TP4JjhaY8ns0V4nolL6Pn9ZkPf/BL830yB98fnEX4RRxTXUH9+0e5QXzQR3umj4d09A8EN0+D3eQ7eP163FgkWfgrVZzI0X7dzb6lC2HO0A3NQSGcGwh0kxi515y5mT44wpH+k/uajV/Yu9t/MhQ6eFwR2nryuk4/Wma+YbKkgeyDdh0OfOgbhWePtLgWInrMBulcRonW74e+LwzttNPxvFUN/dxTCMznvuilu3rMrOa8E2FYHXbvf8ok5awKC08daOkjHLc1HTReaMOdha1JZQPUVX2E41uyAt2Mv6q+fCjWDEXICF815AOqu1+08/w8v8Hk+sHkf1FaGu/zN4d8zE2HvckTcZPYWkTOkCcNuQxCs3gUTGITfL0X4zRE2AhrMvoXR0H0wgh/wvT1x6M++RJA3DBGjLtr8JUKP/kRg+qaX7Gf0+s9hupKFD2LcjBaJug1H0Gs09KyP7Q1UwXwC6w915WS76zAyUIPnQx9Pp+77CgI6sbUaQxlkU0O+UOQOeU0+nwLu3TvTsW1WzsrdCFX729zOTclSfrgfF3iUZn4M/kOzoc6liRmqEcF5fK5X7oC6/BnLpDZMOTfsSmY+pxz42WREfZ5HcH4FAaZPmJkt0mUo1PUvcNG34DPJecXV8McthSojgY18B6pkMVT5cvtclVQj8fT7UJyjyvlcxpPLoR9fAn/QAuvsAqy5rN180rMp/yFoOmkqZHW9B13vuxH3Eezcj2DbPgS1jBA+b3ivLsGwt/wKXbrEpovm5EDLMkDO+5ZX3DlX/RdB10EIjyu0I8gbjsQ1lfBIduqscdaRG97rWoD997ooqu/3pjuuZnRJq4Bg2hqnOYQZGdqGINVHzC3ipPfJYty1vCeDf9ff9aoFknh49uEAwpzyl9esifHqOhsRdmdUX0EvEh4Tooki6LPZDLQCKhmaBFx3vztSPe7dw3A0RxXA/9urLgtPWuGy5zEjbLQoNpvk8meS1OiMwpzq/CcRdhGtwmjqPQF1hQsRv3EaLUiAbToaqrTouCKYE0sQWZIbgJBg1P1vJrkhgjqvHGGsAN66XTYvqTW7oEYsgrqwAgEztvCPSMhDA0JhZEjF4gdRDh2UO9LUJ+q2Kqh3tzgQJLXg9lccuOOL4N/yEtTE9xHuqnOkJ8cioIRnznyCG2GeySAbMzsrmbHuT9Shc9YjXLwFwVZSeZIhbT6ZvR7qoqesTLQ7lQUsaIJi9Chyh6J8TH0mseIz6hVahYydzjLN6hFzUimCPfUuxZMv9LxPSFDzrTgOaaV06dwuYqOGYS9a9mxm5Yrl1DNT6V/pZWKqz5q+vSmalVQfnDIGwaljuUselV2cFhA9kYUyt6AYRZJbos6scdrkrOJkVPN2ly0tLvWNzLVgSWBS98iGWvpcsvPcfMNXbiYpO9NovEvJT5dOQtBvLvy+MxCyrLBV3+Go9Ew3qnkukGlEqWovGdrBvbMciX0Zt3SfTp+2zll546jTICSGLoCq2QbNukV/sht6Awcf1crt8GsYCRc+5cpNC6QAiTuqHJB3PoUR52wM9JB9hGcdsVqTK9i4B2bZVpj3tvHxM4qUyEaTd/nTB6KHQOK3O1lp3lhv+Scb/2g+auwOB1rNITeOX0txQ5aUmxsRSOQWKw3Oo0UaoulB1LPksEmwajXnPpR0XHHelAOn7w40m32FnBITlrkM+juWi5LUSEhS75qltIpcZ5YBpz0BNYaiadBc6OkEIBbkUUaPzgUKFgIDOfLnw/zrLRimi9apePGRRkDiV4sMlMpOCup8mOotDkivccBVzzXUNe7FpldKs2qSYphLGkjDri1bZPwyl1dYhEdzN8HMpxPO24RonzQeePWusH6ge5dB9SxGQsSxOOsMtiS+w/AVdv7F
s65Qq97MTaR34KyB+Jv3ILFqG7yPttvHULoCcp0rPsKbHyWyMh/q1mTUVLxv62LxjeC8CgsuMX+DSw1pCK5lIEkfqfvVZJvSQ/qJFoW+JHk05wiQVPiyPfHPWRafLlqcdNBB0EmL6JksTdKUEi1GTVNnZdKS3ZBhI+7ehrBcTYAMYL9kgYuaB0QeSqQQyG9dJOmpH1ofaz2hNXbWK4QvpLaRLlF/mEVftYgAV0/XOCDsIDjpSAveON0BKVveBiBJZ1XS+cmfCzNwAcJH58Fs3dfUIjx3YVLLqLLoZUmiE4H9V1cF6MJqZ6U0DNuyjxCIXCIBJU7ERWXIYk2Oputwq9Kk/g1VcKDZJ22te5L55xHyyiEDSTrr/jurkDi5BN4PxyDRiyG5YntTIIwcTcVmw/SD7TBs9lniIhC//xxnkbvfcPLgkCySBOL1edb6hwtTkpr01OQ6g71U6wtUY/lv25d0qdTKySOQ8B280AG57WU3ty1AEtcJEMcLRtqcZFbrlHe+xqYM6xtR9utI67Lgz585IAs53x+9xL3OWtpGXtZARGmxmRux+RIvrXZl423Toc8vg/77DOinamCku5jyHap3KbQtsKWbER7NxUQQWUH9GEJ2Hq0D25ZFa4BIO+HHZfAHvG13KVeqh9ZQJpDy1aQa6DteZiE1HiF7qhbIL8VyUoSJgCaHkPz8ZI8luHiiY+GsLcIbqGtd30wuq+IpiBTVuLqR3cEzKKYJVig86MTCauFG5xvTPrK+IYJKGDaasxE+LWRzlbzPStBq2qyBcDcSit68DfDZ+/B/yvIhJulfmJLOJt868OjUtznnQxc9egfbE2zqOfFM7rhpesNGDKFI91G6SK0vsIg6ZK/VUrKY82ARLF8YsMTQV1bCZ7dR/VpSAEGKahdO+V6pbXfpW19G8JPyA+GcQS4235bI5isOafjJFwSN6xb7nY34SNKCqQ7BIWvWLMoAe94COIvapW0FVrZg2jiv5Y5RGxfIprhqUY9ke5PDMa/DItl1ntvJLxqD6TiajqNpKcQ7fKTDR7LwkZHVX4ufkZrcrwuQWPFkxNhQOQK03oTidffiC/jj6+BI/+7Z/Ry9e2HlEf85ugDBscN6mFhJDfLYITxCx9TwLwvoMfxY/s/EBP7PxE4TG6E5wvYc/wNOyYrDiZA2fwAAAABJRU5ErkJggg=="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACIAAABdCAYAAADAM7TWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAp2SURBVGhD7VtnUxtZFuUfbO2X/Sn7bfdfTe2MZ21sDxhsMoM9OI8NNs4Yg8HkHIQCSiiQoxAgkETOd855rZa1DEqDB6a2dKu6hNStfqdvOPfcVyIr65RVGKb+fbd/8mlR12hnYZuzr7hz1FDQYf/TjyiMgv6Jf5T3jtWUdHu2sbgUdTgu9FBA7gFEabfH82DEJ8UAUNhmu/BDASlqt78liIJW64UD0B86q7zP+6/CNuvxZXkiCqSoy/2ypNMJT1yeNwgmq7DDMcDEvIy8iF0zC2/6MkBiqjPjkdM5mfFIxiPJeCqTI5kcyeRIMg/8/+RIfotV8nAUJNC1Ba02dc1tHKl6JiUeyWmyyK3mEXXTh912edzjkOJ2WxRMET7Pw/mfmsxyG7q3rMOGa+xS2WVPCDhlYcSnpqBudkzJC4NXivB3eGtHDvb35GcsxkV5M4J8PeyVLs+clLZb5dcBl8jRkXgXVyUH4FLxSkKPMAwVnXahzaysy9W6IVnf2pX9wyPJ+2KWG59NcuuLRW42msS5sKquK2yxSAm8RXMsBuSHOoP81GhWR6JQJQRy47NZWkdn1E39oS0ZmVsBiEM5Oj4R19K6uLFQr3cBi5hkNbwlcnIitvlV8eIcLbi9J/aFgLh8azLmC0iNcQzAtRCnXDVEX9HlkK3dfTmAmxeCW7KyuQMQxwIcsrKxLQEsbpxZlupBt1p4ZWNHlsL4HF6j7R4ciR/vV0MbALUrH0YmJRf5lhaQXLi80TapgDQ5puW7WoNcqx+Wtc1tLHCMxLTI1XqDZDcYpQe5sQJQBS0j8p+Pw1LcOqKAWOGd72oHcc2wCiO/cwcVlRYQeuRJr0PaRmflI56kBsn4Bsf2Hj10LO/N4/Ienz/td0kFErTRPiXvLRNSg6T+jAdQeRUIS9WQW97hWnqD92HO3TljooybIzeQXEMTC+qGcnKsvcKOEKYT5IJuximfvDOgSk5dd4hcitrJUfTP6iFPlApSKl+ivgv093tG5SWeija5HFRuDyAJt3d2paLDKiVtI1IGj1TiuhqDR11nQ1KXYnLc3DuQTVxHPnnQOyr3kHOkg7PIMGHVEMx/641w6YRaoMk+LVfqhsWPpAxtbskrAPSiGko77CjTYfmM87S3qI5sVFx490CCW9sqj7KRI00I36M+V3qh0YmKfMFEPEa1lIPE+ERBVkUkPCEQ3C1cw3PMH76/3WxRnBHaQT4hRARWi/ygsczzUAhsAymH5meEZtyvcUK3dx5VYpLqSJgIjJ9x0Xp4bAcggE5eIAdysBAXG19a+5onkRz6Aq/wfEo8wqcpAzsGUao0w7Rffvw0LNfhXrJoG0juUa8TpWoAOKN6vxTclCrwCUPEnnMdockHyEd9o1I9PC6vzRPyBBWWh8/SKl/S+xujV1qdM/IYiw6ML4hh0iem6SXpG1uQwQmfmAGQ7wfGF8XC14lFvNc+47khXN8Dr9UY3Ooh2Bjj9Z2EycovXoEnmhwazdN2EYK9ff04kI2dPQmDNTd392I+31ehOkGp0wiInkzU/JLKAEqATzatGjzIl1zcsAglzOMOGlz1oEteoWx/YcsHs+rncnBdw8iY+l4LujdZ9ZsBcfuDeDKzKj9SNVv8UnBDWwwhJAlq56zq73qLBqTNOa3y5lxA+CQNdi00Y8shLK49mS4RDiEJTlBBD3qcqIYRlQckrJuNFmmyavxjnFoCsG8Qmnormh9Yki2ezZAcwGrpRwLTSGpMRjLnY1QJS/dag0nuQ831emblndGTMFG1PbQkW1d8OspCsie/wNDkws0tEZ3CvsNSZk4sB8MK2HJoU4F8AlDZ6NgErau5P1Q1d5B8zAMuzKckwdWBIefRVWkktU+2KbmJ88XoLbXovpYZPyqI5KbZMgRVm2tWfunW+kzaQKhD36Crji8FZG4NwgbUTebUjaz5tH/0f5KQjEmPsXooGxzzK9HvLAZCioVPU7sOLG5oyK7l8AD7B/sK+WJqJahoneKYXtKV/emnZNUwl8gd95HEJDeK63jXJ80RTS7a5S6OQvAGF+cTJ7rhaVB5kfDm4/Vc5UswLNV4Ei/25pxveC3BsswTDWEpNb3Yi9Rsk+CIvZYgniFsnZ55JR05VsTLibSB3IZL2UXzEfPfHSr5vlYCZ5dmaFzaNnoPB7JUPJk0R5hw3VDoi+thJY4CG18P3/qGLK6F5emAO
0pWrJqPZo3WZ1ZDSjClGp6EhMZYOxY1cbOOmWYBmsMHXuCrLqCfoYT15I0FMoF2wAdJZdxM6hG62gxtQasecMo10DhvzhllDh6hPYFHtFI1K8nwGtqD5sLcewXjJj/XD71PpSWMdDofjowUFDc5ILk7kaGbXqE9gOqq7HUp0dSJMLowhtI4CXZ6F6VvYkl6cXSP+6TVPQ8GPjuBk4ZGB/K83ylXocbZzqnKdY/c47hh1LpsLPNGKTjmjw0IKLaNsyopKRBDxCMcwqfRY2ZB95zg9tD+aY8RGnrvIRofZ5ymyJTnRQtg+ZZ3OdWWBm0O3+PuQdpzDWOrA1kLbwr7hW8thGoJqV0BlSODmNyo2vGkFMx1kRnIMrusPHcTxEZpQJtE3yI7nytHqvodagjPaTLJdQzV0WQFEF0UM2wtER6huFaSAWLpRWQC5GhybiCvkKy5EfXFhfVkpUd0IFyYqp3G1k95wIp6jQGL5oCwiicZk+aInqzP+pzyIwQOtSiFDqWBHppYICMICY3ahOXK40MkXBw1SAl/KDR6jtjmllGOC9KDMuzCaxiy4DQQMikZlfYcwxaJjkAoNWn9mH/otXMBUaWptif0QxNJemioQajSqF8OMc9QzbFzU3w3O7X+Q545d2g+mjzYarBgq8ImZShTX4TQdCB8em7a0HwYMdSgjbLmwh3IF9oXjBzx5puUc+TFkEtu4Om4kcubzUeAVGKbge6nyzvdc2pBbbLTQsDXXoSEVocQxaP5tIBks29g0WsxydoA8cztqFyUtdpZhFXF7AoRyBBmYtpb01jcRngmECotxvd7TPsDXu0pP5i8YM9RacTGHsmKu4u61VrGo7tFcyA9coXOngRiws4jjXxCCkg5Wam0uGiHe1ZWMKMoi9k341s/Pmep1lmn1CDF7UvaSy6G/NAF9HXsPI5F9kl+RTuIp3fP9AjZkDeMGkAE0E3NmFm4m3gPYpqqjdtRHKymlrXNnFHsPmtsalEMOxXYkLnVIJ9CPQgnQT5kyh5hd2QpTvrX1L4I5xd2TV0U82ac3Lix54N6o21g046bOzxHgfQB4dLtBGXP3Uf2pLQHLH6BAxHLj+483bpVDgBMNTrrBAC/wtyiVwTDwpmIMxArhbtG3ClKJKQTVk0qepMgyRmn5xYuyl5DcPoOwbnmmlQ0ZyqAk90n6W5Asht8q/MZIGlPet/K9cnukwlNJjSZHEnmgUyOJPNQhkd+lyPYcOv/S/yMtLjD+dcAUtLlel6Cn58nS6Y/+3xWUbf3n4Wt9sNi9bvni/8Zur6m+jl6cbuz6qF16XJ/jq5+oN9r/3tJj9tCMCVdlxOm6L8s5LdO/q20b6yyrMfjL2hzHBS0248gii/s+A2BSyo4A38rQgAAAABJRU5ErkJggg=="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeCAYAAAA7MK6iAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKTWlDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVN3WJP3Fj7f92UPVkLY8LGXbIEAIiOsCMgQWaIQkgBhhBASQMWFiApWFBURnEhVxILVCkidiOKgKLhnQYqIWotVXDjuH9yntX167+3t+9f7vOec5/zOec8PgBESJpHmomoAOVKFPDrYH49PSMTJvYACFUjgBCAQ5svCZwXFAADwA3l4fnSwP/wBr28AAgBw1S4kEsfh/4O6UCZXACCRAOAiEucLAZBSAMguVMgUAMgYALBTs2QKAJQAAGx5fEIiAKoNAOz0ST4FANipk9wXANiiHKkIAI0BAJkoRyQCQLsAYFWBUiwCwMIAoKxAIi4EwK4BgFm2MkcCgL0FAHaOWJAPQGAAgJlCLMwAIDgCAEMeE80DIEwDoDDSv+CpX3CFuEgBAMDLlc2XS9IzFLiV0Bp38vDg4iHiwmyxQmEXKRBmCeQinJebIxNI5wNMzgwAABr50cH+OD+Q5+bk4eZm52zv9MWi/mvwbyI+IfHf/ryMAgQAEE7P79pf5eXWA3DHAbB1v2upWwDaVgBo3/ldM9sJoFoK0Hr5i3k4/EAenqFQyDwdHAoLC+0lYqG9MOOLPv8z4W/gi372/EAe/tt68ABxmkCZrcCjg/1xYW52rlKO58sEQjFu9+cj/seFf/2OKdHiNLFcLBWK8ViJuFAiTcd5uVKRRCHJleIS6X8y8R+W/QmTdw0ArIZPwE62B7XLbMB+7gECiw5Y0nYAQH7zLYwaC5EAEGc0Mnn3AACTv/mPQCsBAM2XpOMAALzoGFyolBdMxggAAESggSqwQQcMwRSswA6cwR28wBcCYQZEQAwkwDwQQgbkgBwKoRiWQRlUwDrYBLWwAxqgEZrhELTBMTgN5+ASXIHrcBcGYBiewhi8hgkEQcgIE2EhOogRYo7YIs4IF5mOBCJhSDSSgKQg6YgUUSLFyHKkAqlCapFdSCPyLXIUOY1cQPqQ28ggMor8irxHMZSBslED1AJ1QLmoHxqKxqBz0XQ0D12AlqJr0Rq0Hj2AtqKn0UvodXQAfYqOY4DRMQ5mjNlhXIyHRWCJWBomxxZj5Vg1Vo81Yx1YN3YVG8CeYe8IJAKLgBPsCF6EEMJsgpCQR1hMWEOoJewjtBK6CFcJg4Qxwicik6hPtCV6EvnEeGI6sZBYRqwm7iEeIZ4lXicOE1+TSCQOyZLkTgohJZAySQtJa0jbSC2kU6Q+0hBpnEwm65Btyd7kCLKArCCXkbeQD5BPkvvJw+S3FDrFiOJMCaIkUqSUEko1ZT/lBKWfMkKZoKpRzame1AiqiDqfWkltoHZQL1OHqRM0dZolzZsWQ8ukLaPV0JppZ2n3aC/pdLoJ3YMeRZfQl9Jr6Afp5+mD9HcMDYYNg8dIYigZaxl7GacYtxkvmUymBdOXmchUMNcyG5lnmA+Yb1VYKvYqfBWRyhKVOpVWlX6V56pUVXNVP9V5qgtUq1UPq15WfaZGVbNQ46kJ1Bar1akdVbupNq7OUndSj1DPUV+jvl/9gvpjDbKGhUaghkijVGO3xhmNIRbGMmXxWELWclYD6yxrmE1iW7L57Ex2Bfsbdi97TFNDc6pmrGaRZp3mcc0BDsax4PA52ZxKziHODc57LQMtPy2x1mqtZq1+rTfaetq+2mLtcu0W7eva73VwnUCdLJ31Om0693UJuja6UbqFutt1z+o+02PreekJ9cr1Dund0Uf1bfSj9Rfq79bv0R83MDQINpAZbDE4Y/DMkGPoa5hpuNHwhOGoEctoupHEaKPRSaMnuCbuh2fjNXgXPmasbxxirDTeZdxrPGFiaTLbpMSkxeS+Kc2Ua5pmutG003TMzMgs3KzYrMnsjjnVnGueYb7ZvNv8jYWlRZzFSos2i8eW2pZ8ywWWTZb3rJhWPlZ5VvVW16xJ1lzrLOtt1ldsUBtXmwybOpvLtqitm63Edptt3xTiFI8p0in1U27aMez87ArsmuwG7Tn2YfYl9m32zx3MHBId1jt0O3xydHXMdm
xwvOuk4TTDqcSpw+lXZxtnoXOd8zUXpkuQyxKXdpcXU22niqdun3rLleUa7rrStdP1o5u7m9yt2W3U3cw9xX2r+00umxvJXcM970H08PdY4nHM452nm6fC85DnL152Xlle+70eT7OcJp7WMG3I28Rb4L3Le2A6Pj1l+s7pAz7GPgKfep+Hvqa+It89viN+1n6Zfgf8nvs7+sv9j/i/4XnyFvFOBWABwQHlAb2BGoGzA2sDHwSZBKUHNQWNBbsGLww+FUIMCQ1ZH3KTb8AX8hv5YzPcZyya0RXKCJ0VWhv6MMwmTB7WEY6GzwjfEH5vpvlM6cy2CIjgR2yIuB9pGZkX+X0UKSoyqi7qUbRTdHF09yzWrORZ+2e9jvGPqYy5O9tqtnJ2Z6xqbFJsY+ybuIC4qriBeIf4RfGXEnQTJAntieTE2MQ9ieNzAudsmjOc5JpUlnRjruXcorkX5unOy553PFk1WZB8OIWYEpeyP+WDIEJQLxhP5aduTR0T8oSbhU9FvqKNolGxt7hKPJLmnVaV9jjdO31D+miGT0Z1xjMJT1IreZEZkrkj801WRNberM/ZcdktOZSclJyjUg1plrQr1zC3KLdPZisrkw3keeZtyhuTh8r35CP5c/PbFWyFTNGjtFKuUA4WTC+oK3hbGFt4uEi9SFrUM99m/ur5IwuCFny9kLBQuLCz2Lh4WfHgIr9FuxYji1MXdy4xXVK6ZHhp8NJ9y2jLspb9UOJYUlXyannc8o5Sg9KlpUMrglc0lamUycturvRauWMVYZVkVe9ql9VbVn8qF5VfrHCsqK74sEa45uJXTl/VfPV5bdra3kq3yu3rSOuk626s91m/r0q9akHV0IbwDa0b8Y3lG19tSt50oXpq9Y7NtM3KzQM1YTXtW8y2rNvyoTaj9nqdf13LVv2tq7e+2Sba1r/dd3vzDoMdFTve75TsvLUreFdrvUV99W7S7oLdjxpiG7q/5n7duEd3T8Wej3ulewf2Re/ranRvbNyvv7+yCW1SNo0eSDpw5ZuAb9qb7Zp3tXBaKg7CQeXBJ9+mfHvjUOihzsPcw83fmX+39QjrSHkr0jq/dawto22gPaG97+iMo50dXh1Hvrf/fu8x42N1xzWPV56gnSg98fnkgpPjp2Snnp1OPz3Umdx590z8mWtdUV29Z0PPnj8XdO5Mt1/3yfPe549d8Lxw9CL3Ytslt0utPa49R35w/eFIr1tv62X3y+1XPK509E3rO9Hv03/6asDVc9f41y5dn3m978bsG7duJt0cuCW69fh29u0XdwruTNxdeo94r/y+2v3qB/oP6n+0/rFlwG3g+GDAYM/DWQ/vDgmHnv6U/9OH4dJHzEfVI0YjjY+dHx8bDRq98mTOk+GnsqcTz8p+Vv9563Or59/94vtLz1j82PAL+YvPv655qfNy76uprzrHI8cfvM55PfGm/K3O233vuO+638e9H5ko/ED+UPPR+mPHp9BP9z7nfP78L/eE8/sl0p8zAAAAIGNIUk0AAHolAACAgwAA+f8AAIDpAAB1MAAA6mAAADqYAAAXb5JfxUYAAAeJSURBVHjarJZ5bBzVHcc/b+bN3pv1eteOj9gkxJRcIiUiKnUJRyUiUAMNTRuqIipFokWoaSkU1Oufilb8VVF6SCQUQQOlEVdJawSiDUVpMSRuHEKMmrOuE8d2fK3tXe81M++9/mHvYjuGItGvNHqamffe93f/fsIYwyw2lEql+3zfvxZICiEAWLhWUDn3MddJx3E6I5HIL4GDAMIYg1Lqzqmpqad938eyLIQQ857/RfxRD4DWGgDHcUgmk3dLKR8XWuu1mUzmfd/3CQQCVQLLsqqEC0nFzI/q5VrrjySurL7vI6UknU5fJQuFwk9d1yUYDLIQcwmFEFXJY/H4vH0TExMshsqZCrGUEtd1yefzPxfj4+Ojvu+npZTzNF1o8spB27LY9dsn6enpwRhYtXoVO+/5Jq5bxvP8izSuCFuB1hop5bQYGRkpCSGCc0k+zM+RSBghYOP1t3H4wEFA0b75Bjpff55isUipVL7I/HPjYA6KEvCAoDGmSqC0pjaZ5MOwZu1axvIGXylWr14NCMLhCOFw5KK9Y2NjuK670G2+nBHIzPvhSMnRd48yPjaCEwwhECAgEHCwgMnJKQLSQlqQGc/QdeggpXIZ1/XBGAwGpdSMkGvWEI1GcV13vv+Hh4ezQLxiYiltEokadtz9XZ7Z+yItLS0YDBiB0j5CWNQ3LScYjoCBUnGa0aFzCMCy7WpQFQpFAo7NH5/dzZXr1zE8MjZX45wYGhrKCiGqxJYlsG3JaGaK6UKZSCiImb3MkRKE4fs/eYQjPSfRWvPZq67g4R9/B9fz8H01IyTg+wqtFalElIDj4M9aoEIsK84Xs3mpNSjl0dJYRzAUXtTH4YBNoVhGK0UkKLmkpXHRfYV8jmwuj+t51Wypkg8ODs4zdSWKk8kku5/4HY/teoLGllYwIARYQnD87DjTJYUxhnhY8qmWWqS0aWqoA2PIZCZY2XYpP7h/J0oZXM9dmN85OTfMtdZV4nK5THNzM9dcewPp+jqgkpOCL29fQSwaBSCby3G+/zye7/PUH/aRnS5QGBji9h3bSaVqGRwaXphKMzWhkuAf5LAAI5icnGLLlpv44pab+Djo7e3lhY43aVnRxmB6KdtuvRmEwPf9RffP0xij0Uh0ogHhwNjoCHZ+CIwLdgQdbUJFkhgJwgeKeSjnaGxsYNdTexnNTBCOxmhM17Bu1Qqyk5OLajtLrDEGBAqCtZiaGiKnXyL0zqPYZ9/Cys92hSDoujRq2dXo+vV49ZvwUleypLGBo+dK/PlvR2hd1sTA4AXu/vptrF7VRl9fPwv6y1xiMEaj7BiBRA31HV/D69iLVwvFulWoVB2mmMXKDRA4PUa46xVs+QrBZlCXLCO6cStnDgfIZIvkQstJpBJ8ZevNlAt5jNawIJrnmVprg0jXk3ztTk49v5fIt/Zhr74B7UQwgHELmOlxzOhp7L63CfbuJ9zbSWzgPPr0b/hCArruTPJ090liV9zBylWX0zcNViyEXRxFqNJMSjCnbPb29mY9JxFvNad4+f528tv+xJe+egvZgWFQHsKyEJZE2BJtObgiiJcbI3S+i7ozHQS6nyQYBWJAFIg4TIeuJF+/mXLTjZTrPo2KLEH4Grs0juXlEDBTQKLRGCf//g77Ctfzo02bKfWfRXtqTk8uz4ppIYUgFHBo2bSVRwdiPPd2D/evm6BdnaE2CDrpYdtdpE51oRM/o5zeQLHuWspLr8NNr8eNX4qwZ00dEmVGRRPHR3yc6X5iS1NMj4xWC8oHrc0nmawhna5jz549PPzQr5gMt7H9cBvPfe9etka7cN94BqcE+RRYAoK5I0T7jqCDj1Je0kQpdTVuw0bEiRMnstJx4lFHcN2tO1i39nJe+v1u8tM5MhMTKKWR0iYej7EkFmVgYIBf/Ho3jz/bgYymiUbC/PDeHWy743YGz4+ie/ZRe+IF0n1/xSmAFweTABkCxwLpgzbkxLFjx7LK9+PNzQ3845//Ytvt3
+D69nU88O27WLq0nkAgQD5fYGh4mMOHj/KXAwfpPtZLqLaB9o1XcN9d2/l8+3rO9g9QKJYoB5O4bpnwuU6Sva+S6nuVyKSHHwZVA3YI7BA50d3dndNax5TSXLaylUPvnWHngw/R29NDsqGWVHIJ+WKZkbFJ1OQ0dkMrm9o/w43XbGD7LdeRSkQ5859+jJjpapZlYbSmKJdQ0hbh4feo/XcHqd6XiY1MoGzwEuTEoUOHikqpkDGgtWJFazOesXntzS46u95lZDSDdCSJRIJljfVctryJDevaWNnawMDgBTKTORxHLpzysCwbIQRuoIayEyeYOUNt3+ukTr9IfOB4UXR2do57nldbCSLP94mEAixrakBjUSiWUUph2xaRUABLwIWRMbK5PFLKi0bfxadNUMEaypEGnPwF6s6+XhAHDhzo8Dxvy9yaWh1ljcGyLazZd6VNdRj8sBr8kQIYjXYikGh+S+zfv3+DUqrb9/2PJf0nhW3b2Lb9OWmMOWKMuUcp9VhlQFs4/H0SzL1rZrSyHjDGvC1nP+7SWr+vlHrQGLPRGBP7f2ophCgIIbqBR4wxbwgh+O8ATH/iUkiU/EcAAAAASUVORK5CYII="},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeCAYAAAA7MK6iAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAX7SURBVEhLpVdpbFRVFH7GIIigiJSCZWkh2BZSDC0xBjAGmyAWISprYoLID7eQEFDD9ocQ/GFlaUs3GNppOzClrZRtGiilLIVETGUVhLAIgUCBgkClb3/389w7d2in86ZUPcnpzJt73/fds9xzTpWuCoAerOHbOfrerxrtnVOg5SdA2xAH3TMS9v75cA58uZad96XL7f9fcHxVmrl9WoXpS70P7whYG2KgZfcn0teDmjMQWlY/YGMcWFESrPLxl1hgxvctF3f3kxD/Ttjt35OdwOwt+uYkG8VxcPKILGdAG2GEDoSxIRYoIC1LgF2adstsWLZcwnVNWmu+GGv6JzTBF08WtrOuq0peQH5/oCoFZnm6H7fQU0JHF/P0psWOP01DQT+o3I1uwLlx0Nb3hbqmF5HEuu8h1XOI3BMLOzDzAlpuJUmKSDGq5ow2y1ItFDzDyqzXoG+dCCPwObSNiVDX9aFDvCRjT4dqv5fCA99QGNvS6yVNuLDmM4mG/50mlh8Dtf2LXEMW/tQT2rpXoGW+APtsCSU6oJenw9gxE9apQujFY0Sihb3LlZNTnmh75m6lV56TlEExqmdU8pi6unf9qzCqpsI8vJwsfZcOMghO8zkwtRna2t5wLgfEIQz/RDpYn8j3SY2cGKAyFVbDismSkkjrF71pFSXbpmsiDSRLX4R9sUqAc2EPr9AfG+z+BRjV08Fa74I1n4fGY+5msVR4BkAvG3dM0iqKVvFhpXCFy2ahFDtj2yRYjVmApUn6cGF/XSKXzxbx52GJiDWpzl1elgy9fmmGwtijvhSbx+KedtjYpgRCLuVZ7Nw9FaKCc20/PZ+Rz0Gxr+6FTgknDuCCheJB0LdN9irWocUfwfvGM4oDKc9kbxqY0SIImPoAWsEwiinFf8eMoPul2Jd2iN/dMJEfC9Uz+rRi7fu60aTAd9zQUdU1PWHWzJfQBH6t7mlMW1crMAPz5AodqvWetDoy3ry6qZtGPVawayrU7OhFIKT8KlknCyQ0EdN1EsD8Dq/tRfHPlis83pcp8weH1/QQDnFZnkQowS4Tvhih3GXZMXCafpPQQWGtd+DcbIBz56T8JSjmkRVBb7hhcaXrqLhlX4RSluolY+miPhHATH8I+0KFsCxMmAPr6EpodP1E13LDkqronuTON5GlamYPKo9tMXRuHIH6Yzd6LxZGZQZZfVSu0BplebCCuWc1rwta3hAoqJ0HNStKcvEGUDCcYtibSuImCU1X+XimKCoaz43M7gQ0lLL6qlyl9RO5IhndMFWKu1WcAsWomZ/NqIlHbOKJkRcP5/avcO6dfXqNuBjVn0D9QREJJywnr/AqFhL73JZg0+iISWrnUb54km4q7OTGcXZxss3TPGwTNQOj4gM6virh2sRpOkEZng+zfjHM/QvgUNFoL2bdwqjEKCSekrcbRMnUSydcj2iFPO4UC140jN2fkhU+Cdu5sAcXg/OYW/gIE74E8tisVYLY3DVrGUrjabFDknFy3g7JrUb1xwLYasyBeWiJ+M7U+2AtN8HMJ6JoOFcClP1vyQ4VGT4rNxZm0ShV/yVrhCB+eMrbxygZ0+TQuNJxM1ceS/uPckFG3QXGnrniu3lgER2OOo43labNUcHmQC3U9Ypya0sGU51+P1+QhsQ8uGQJfh4NvWO14VZTheJx462Rg4b6r+4bT4WCspcT8ioWpd6rObxGx9DQ8J7FrtcmSMqgMMa6a+WT/HxGigQgC9a9TB2KlLdIPhTUfyeuUTSy9sr7vFUUrxt1C2dLunAhI543d06/At8Qd0BuPf8MzVih5yjKLeVzOHaNhVb7zWpJ4y7GgxspTkV6LR8M+LjiBtgV5SMUd69RmMCcugWryahukqJzMWs+86NiTHBc6WSEjVAitHNprPUOhr09/Ymxb4G7ezsTdmxphuGfcBClSQQ0SDRxUWjC2ii5m+LOKxIvDvANg+FN+dvZnpGv/3ksMYj0H0U/vHSas3NKoVmSelrbPPKR7kkSrU0Q8yJTRP8xeEZes7aMb3D2zFzJGrOHy1ejiKL8A1Que4OelJ6EAAAAAElFTkSuQmCC"},function(t,e){t.exports="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeCAYAAAA7MK6iAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKTWlDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVN3WJP3Fj7f92UPVkLY8LGXbIEAIiOsCMgQWaIQkgBhhBASQMWFiApWFBURnEhVxILVCkidiOKgKLhnQYqIWotVXDjuH9yntX167+3t+9f7vOec5/zOec8PgBESJpHmomoAOVKFPDrYH49PSMTJvYACFUjgBCAQ5svCZwXFAADwA3l4fnSwP/wBr28AAgBw1S4kEsfh/4O6UCZXACCRAOAiEucLAZBSAMguVMgUAMgYALBTs2QKAJQAAGx5fEIiAKoNAOz0ST4FANipk9wXANiiHKkIAI0BAJkoRyQCQLsAYFWBUiwCwMIAoKxAIi4EwK4BgFm2MkcCgL0FAHaOWJAPQGAAgJlCLMwAIDgCAEMeE80DIEwDoDDSv+CpX3CFuEgBAMDLlc2XS9IzFLiV0Bp38vDg4iHiwmyxQmEXKRBmCeQinJebIxNI5wNMzgwAABr50cH+OD+Q5+bk4eZm52zv9MWi/mvwbyI+IfHf/ryMAgQAEE7P79pf5eXWA3DHAbB1v2upWwDaVgBo3/ldM9sJoFoK0Hr5
i3k4/EAenqFQyDwdHAoLC+0lYqG9MOOLPv8z4W/gi372/EAe/tt68ABxmkCZrcCjg/1xYW52rlKO58sEQjFu9+cj/seFf/2OKdHiNLFcLBWK8ViJuFAiTcd5uVKRRCHJleIS6X8y8R+W/QmTdw0ArIZPwE62B7XLbMB+7gECiw5Y0nYAQH7zLYwaC5EAEGc0Mnn3AACTv/mPQCsBAM2XpOMAALzoGFyolBdMxggAAESggSqwQQcMwRSswA6cwR28wBcCYQZEQAwkwDwQQgbkgBwKoRiWQRlUwDrYBLWwAxqgEZrhELTBMTgN5+ASXIHrcBcGYBiewhi8hgkEQcgIE2EhOogRYo7YIs4IF5mOBCJhSDSSgKQg6YgUUSLFyHKkAqlCapFdSCPyLXIUOY1cQPqQ28ggMor8irxHMZSBslED1AJ1QLmoHxqKxqBz0XQ0D12AlqJr0Rq0Hj2AtqKn0UvodXQAfYqOY4DRMQ5mjNlhXIyHRWCJWBomxxZj5Vg1Vo81Yx1YN3YVG8CeYe8IJAKLgBPsCF6EEMJsgpCQR1hMWEOoJewjtBK6CFcJg4Qxwicik6hPtCV6EvnEeGI6sZBYRqwm7iEeIZ4lXicOE1+TSCQOyZLkTgohJZAySQtJa0jbSC2kU6Q+0hBpnEwm65Btyd7kCLKArCCXkbeQD5BPkvvJw+S3FDrFiOJMCaIkUqSUEko1ZT/lBKWfMkKZoKpRzame1AiqiDqfWkltoHZQL1OHqRM0dZolzZsWQ8ukLaPV0JppZ2n3aC/pdLoJ3YMeRZfQl9Jr6Afp5+mD9HcMDYYNg8dIYigZaxl7GacYtxkvmUymBdOXmchUMNcyG5lnmA+Yb1VYKvYqfBWRyhKVOpVWlX6V56pUVXNVP9V5qgtUq1UPq15WfaZGVbNQ46kJ1Bar1akdVbupNq7OUndSj1DPUV+jvl/9gvpjDbKGhUaghkijVGO3xhmNIRbGMmXxWELWclYD6yxrmE1iW7L57Ex2Bfsbdi97TFNDc6pmrGaRZp3mcc0BDsax4PA52ZxKziHODc57LQMtPy2x1mqtZq1+rTfaetq+2mLtcu0W7eva73VwnUCdLJ31Om0693UJuja6UbqFutt1z+o+02PreekJ9cr1Dund0Uf1bfSj9Rfq79bv0R83MDQINpAZbDE4Y/DMkGPoa5hpuNHwhOGoEctoupHEaKPRSaMnuCbuh2fjNXgXPmasbxxirDTeZdxrPGFiaTLbpMSkxeS+Kc2Ua5pmutG003TMzMgs3KzYrMnsjjnVnGueYb7ZvNv8jYWlRZzFSos2i8eW2pZ8ywWWTZb3rJhWPlZ5VvVW16xJ1lzrLOtt1ldsUBtXmwybOpvLtqitm63Edptt3xTiFI8p0in1U27aMez87ArsmuwG7Tn2YfYl9m32zx3MHBId1jt0O3xydHXMdmxwvOuk4TTDqcSpw+lXZxtnoXOd8zUXpkuQyxKXdpcXU22niqdun3rLleUa7rrStdP1o5u7m9yt2W3U3cw9xX2r+00umxvJXcM970H08PdY4nHM452nm6fC85DnL152Xlle+70eT7OcJp7WMG3I28Rb4L3Le2A6Pj1l+s7pAz7GPgKfep+Hvqa+It89viN+1n6Zfgf8nvs7+sv9j/i/4XnyFvFOBWABwQHlAb2BGoGzA2sDHwSZBKUHNQWNBbsGLww+FUIMCQ1ZH3KTb8AX8hv5YzPcZyya0RXKCJ0VWhv6MMwmTB7WEY6GzwjfEH5vpvlM6cy2CIjgR2yIuB9pGZkX+X0UKSoyqi7qUbRTdHF09yzWrORZ+2e9jvGPqYy5O9tqtnJ2Z6xqbFJsY+ybuIC4qriBeIf4RfGXEnQTJAntieTE2MQ9ieNzAudsmjOc5JpUlnRjruXcorkX5unOy553PFk1WZB8OIWYEpeyP+WDIEJQLxhP5aduTR0T8oSbhU9FvqKNolGxt7hKPJLmnVaV9jjdO31D+miGT0Z1xjMJT1IreZEZkrkj801WRNberM/ZcdktOZSclJyjUg1plrQr1zC3KLdPZisrkw3keeZtyhuTh8r35CP5c/PbFWyFTNGjtFKuUA4WTC+oK3hbGFt4uEi9SFrUM99m/ur5IwuCFny9kLBQuLCz2Lh4WfHgIr9FuxYji1MXdy4xXVK6ZHhp8NJ9y2jLspb9UOJYUlXyannc8o5Sg9KlpUMrglc0lamUycturvRauWMVYZVkVe9ql9VbVn8qF5VfrHCsqK74sEa45uJXTl/VfPV5bdra3kq3yu3rSOuk626s91m/r0q9akHV0IbwDa0b8Y3lG19tSt50oXpq9Y7NtM3KzQM1YTXtW8y2rNvyoTaj9nqdf13LVv2tq7e+2Sba1r/dd3vzDoMdFTve75TsvLUreFdrvUV99W7S7oLdjxpiG7q/5n7duEd3T8Wej3ulewf2Re/ranRvbNyvv7+yCW1SNo0eSDpw5ZuAb9qb7Zp3tXBaKg7CQeXBJ9+mfHvjUOihzsPcw83fmX+39QjrSHkr0jq/dawto22gPaG97+iMo50dXh1Hvrf/fu8x42N1xzWPV56gnSg98fnkgpPjp2Snnp1OPz3Umdx590z8mWtdUV29Z0PPnj8XdO5Mt1/3yfPe549d8Lxw9CL3Ytslt0utPa49R35w/eFIr1tv62X3y+1XPK509E3rO9Hv03/6asDVc9f41y5dn3m978bsG7duJt0cuCW69fh29u0XdwruTNxdeo94r/y+2v3qB/oP6n+0/rFlwG3g+GDAYM/DWQ/vDgmHnv6U/9OH4dJHzEfVI0YjjY+dHx8bDRq98mTOk+GnsqcTz8p+Vv9563Or59/94vtLz1j82PAL+YvPv655qfNy76uprzrHI8cfvM55PfGm/K3O233vuO+638e9H5ko/ED+UPPR+mPHp9BP9z7nfP78L/eE8/sl0p8zAAAAIGNIUk0AAHolAACAgwAA+f8AAIDpAAB1MAAA6mAAADqYAAAXb5JfxUYAAAf4SURBVHjanJdbjF1VGcd/a+3bufTMmUtnpjOdaadM22lLoaUUIkhBI5akKiIGQRJIMIGExEQTE3zwEiXBxEcTowYTjBceMEETRSWC0AJGyq1CW6ZlSq90bp2ZzpyZc2avuw9Tatv0IPFL1t77YWX/1vdf//WttUQIgQ9jdLrOqfEpAhBLSRxHJHGE8x5jHCMnx1Y673c4z05L2Jo38lq1XHwpTeIXqstKb/SvWL6QJjFTs/Noa6mWSzRyhRSClmVFNqzuPM+KuUwIIYijCCkEs7V67/j07G3a2DuitHBTtbq8vb29lWpZ8upr+2lYf0tNqe9NzNbfP31m9vlV3e1/zLLkpSSOFqNI0izEhRmPzzQYPTNDfVEls7XGzrmF+r0d1dJtQ6u7O67o62bBw3unFYdPTzIyOomqL7Kmq8qq7lYKaUxtoc7U9Cz1euNQOYufHujtfKpczPbHUUSpmDG0avnlwSfG5yqvHzxy16I2D29as2L71rV9RHHM5HSNF949xZ8PjPHv0Rqj8zm5tpSkoD2JGOxYxpX9nWx
[minified JavaScript bundle truncated: two webpack modules that export base64-encoded PNG icons as data: URIs, followed by the minified tctip entry module — a transpiled ES6 class exposing formatConfig(), getTctipDom(), stat() and init(config), which runs its setup on DOM ready; binary payloads omitted]
checker.go
package processor

import (
	"github.com/maxzerbini/ovo/cluster"
	"log"
	"time"
)

const (
	CheckerPeriod  = 5 // seconds between health checks
	ErrorThreshold = 3 // consecutive failed pings before a node is declared faulty
)

// Checker periodically pings the relative nodes of the cluster and marks a
// node as inactive once it fails ErrorThreshold consecutive pings.
type Checker struct {
	topology       *cluster.ClusterTopology
	outcomingQueue *OutCommandQueue
	doneChan       chan bool
	nodeError      map[string]int
	partitioner    *Partitioner
}

func NewChecker(topology *cluster.ClusterTopology, outcomingQueue *OutCommandQueue, partitioner *Partitioner) *Checker {
	return &Checker{topology: topology, outcomingQueue: outcomingQueue, doneChan: make(chan bool), nodeError: make(map[string]int), partitioner: partitioner}
}

// Stop signals Do to return.
func (ckr *Checker) Stop() {
	ckr.doneChan <- true
}

// Do runs the periodic check loop until Stop is called.
func (ckr *Checker) Do() {
	tickChan := time.NewTicker(time.Second * CheckerPeriod).C
	log.Printf("Start checking cluster node (relatives)...\r\n")
	for {
		select {
		case <-tickChan:
			ckr.checkNodes()
		case <-ckr.doneChan:
			return
		}
	}
}

// checkNodes pings every relative node, counting consecutive failures and
// resetting the counter on success; nodes that reach ErrorThreshold are
// reported as faulty.
func (ckr *Checker) checkNodes() {
	nodes := ckr.topology.GetRelatives()
	for _, nd := range nodes {
		if _, ok := ckr.nodeError[nd.Node.Name]; !ok {
			ckr.nodeError[nd.Node.Name] = 0
		}
		if err := ckr.outcomingQueue.Caller.Ping(cluster.GetCurrentNode().Node.Name, nd.Node); err != nil {
			ckr.nodeError[nd.Node.Name]++
		} else {
			ckr.nodeError[nd.Node.Name] = 0
		}
	}
	for name, count := range ckr.nodeError {
		if count >= ErrorThreshold {
			ckr.notifyFaultNotification(name)
		}
	}
}

// notifyFaultNotification marks the node as inactive, notifies the rest of
// the cluster of the fault and starts a data repartition in the background.
func (ckr *Checker) notifyFaultNotification(name string) {
	node, _ := ckr.topology.GetNodeByName(name)
	node.Node.State = cluster.Inactive
	node.UpdateDate = time.Now()
	node.Node.HashRange = make([]int, 0)
	nodes := ckr.topology.GetClusterNodes()
	ckr.topology.AddNode(node) // repartition index
	ckr.outcomingQueue.Caller.RemoveClient(name)
	for _, nd := range nodes {
		if nd.Node.Name != name {
			if _, err := ckr.outcomingQueue.Caller.RegisterNode(node, nd.Node); err != nil {
				log.Printf("Call node %s fail\r\n", nd.Node.Name)
			} else {
				log.Printf("Notify node %s of fault of node %s\r\n", nd.Node.Name, name)
			}
		}
	}
	ckr.nodeError[name] = 0
	go ckr.partitioner.MoveData()
}
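The Do loop above is the common Go ticker-plus-done-channel pattern. Below is a minimal, self-contained sketch of that pattern under illustrative names (pinger and run are not part of the ovo API); unlike the original loop, it also stops the ticker on exit so its resources are released. Note that Stop's unbuffered send blocks until the loop receives it, so stopping doubles as a synchronization point.

package main

import (
	"fmt"
	"time"
)

// pinger is an illustrative stand-in for Checker: a periodic job plus a
// done channel used to stop it.
type pinger struct {
	done chan bool
}

// run invokes check once per period until a value arrives on done,
// stopping the ticker on exit.
func (p *pinger) run(period time.Duration, check func()) {
	ticker := time.NewTicker(period)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			check()
		case <-p.done:
			return
		}
	}
}

func main() {
	p := &pinger{done: make(chan bool)}
	go p.run(50*time.Millisecond, func() { fmt.Println("checking relatives...") })
	time.Sleep(120 * time.Millisecond) // let a couple of ticks fire
	p.done <- true                     // same mechanism as Checker.Stop
}
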
maxMirror.go
package array3

// maxMirror returns the length of the largest "mirror" section in nums: the
// longest group of contiguous values that also appears somewhere in the
// array in reverse order (the two groups may overlap).
func maxMirror(nums []int) int {
	ml := 0
	for i := 0; i < len(nums); i++ {
		for j := len(nums) - 1; j >= i+1; j-- {
			if nums[i] == nums[j] {
				k := 0
				for ; i+k < len(nums) && j-k >= 0; k++ {
					if nums[i+k] != nums[j-k] {
						break
					}
				}
				if k > ml {
					ml = k
				}
			}
		}
	}
	return ml
}
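A small table-driven test sketch for maxMirror; the cases and their expected values are illustrative, following the usual definition of the exercise (a mirror section is a group of contiguous values that appears elsewhere in reverse):

package array3

import "testing"

func TestMaxMirror(t *testing.T) {
	cases := []struct {
		nums []int
		want int
	}{
		{[]int{1, 2, 3, 8, 9, 3, 2, 1}, 3}, // {1,2,3} reappears reversed as {3,2,1}
		{[]int{1, 2, 1, 4}, 3},             // {1,2,1} reads the same in both directions
		{[]int{7, 1, 4, 9, 7, 4, 1}, 2},    // {1,4} mirrors {4,1}
		{nil, 0},                           // no elements, no mirror
	}
	for _, c := range cases {
		if got := maxMirror(c.nums); got != c.want {
			t.Errorf("maxMirror(%v) = %d, want %d", c.nums, got, c.want)
		}
	}
}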
|
bitcoin_ca_ES.ts
<TS language="ca_ES" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Feu clic dret per a editar l'adreça o l'etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Crea una nova adreça</translation>
</message>
<message>
<source>&New</source>
<translation>&Nova</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia l'adreça seleccionada al porta-retalls del sistema</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Copia</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Tanca</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Elimina l'adreça seleccionada actualment de la llista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporta les dades de la pestanya actual a un fitxer</translation>
</message>
<message>
<source>&Export</source>
<translation>&Exporta</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Elimina</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Trieu l'adreça on enviar les monedes</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Trieu l'adreça on rebre les monedes</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Tria</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Adreces d'enviament</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Adreces de recepció</translation>
</message>
<message>
<source>These are your pigycoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Aquestes són les vostres adreces de pigycoin per enviar els pagaments. Sempre reviseu l'import i l'adreça del destinatari abans de transferir monedes.</translation>
</message>
<message>
<source>These are your pigycoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Aquestes són les vostres adreces pigycoin per rebre pagaments. Es recomana utilitzar una adreça nova de recepció per a cada transacció.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Copia l'adreça</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Copia l'eti&queta</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Edita</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Exporta la llista d'adreces</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Fitxer separat per comes (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>L'exportació ha fallat</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>S'ha produït un error en desar la llista d'adreces a %1. Torneu-ho a provar.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Adreça</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sense etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Diàleg de contrasenya</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Introduïu una contrasenya</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nova contrasenya</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repetiu la nova contrasenya</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introduïu la contrasenya nova al moneder.<br/>Utilitzeu una contrasenya de <b>deu o més caràcters aleatoris</b>, o <b>vuit o més paraules</b>.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Encripta el moneder</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Aquesta operació requereix la contrasenya del moneder per a desbloquejar-lo.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Desbloqueja el moneder</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Aquesta operació requereix la contrasenya del moneder per desencriptar-lo.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Desencripta el moneder</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Canvia la contrasenya</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Introduïu la contrasenya antiga i la contrasenya nova al moneder.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Confirma l'encriptació del moneder</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR pigycoinS</b>!</source>
<translation>Avís: si encripteu el vostre moneder i perdeu la contrasenya, <b>PERDREU TOTS ELS VOSTRES pigycoinS</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Esteu segur que voleu encriptar el vostre moneder?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Moneder encriptat</translation>
</message>
<message>
<source>%1 will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your pigycoins from being stolen by malware infecting your computer.</source>
<translation>Ara es tancarà el %1 per finalitzar el procés d'encriptació. Recordeu que encriptar el vostre moneder no garanteix que les vostres pigycoins no puguin ser robades per programari maliciós que infecti l'ordinador.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANT: Qualsevol còpia de seguretat que hàgiu realitzat del fitxer del moneder hauria de ser reemplaçada pel fitxer encriptat del moneder generat recentment. Per motius de seguretat, les còpies de seguretat anteriors del fitxer del moneder no encriptat esdevindran inservibles tan aviat com comenceu a utilitzar el nou moneder encriptat.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>L'encriptació del moneder ha fallat</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>L'encriptació del moneder ha fallat per un error intern. El moneder no ha estat encriptat.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Les contrasenyes introduïdes no coincideixen.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>El desbloqueig del moneder ha fallat</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contrasenya introduïda per a desencriptar el moneder és incorrecta.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>La desencriptació del moneder ha fallat</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>La contrasenya del moneder ha estat modificada correctament.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Avís: Les lletres majúscules estan activades!</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>IP/Netmask</source>
<translation>IP / Màscara de xarxa</translation>
</message>
<message>
<source>Banned Until</source>
<translation>Bandejat fins</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Signa el &missatge...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>S'està sincronitzant amb la xarxa...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Panorama general</translation>
</message>
<message>
<source>Node</source>
<translation>Node</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Mostra el panorama general del moneder</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaccions</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Cerca a l'historial de transaccions</translation>
</message>
<message>
<source>E&xit</source>
<translation>S&urt</translation>
</message>
<message>
<source>Quit application</source>
<translation>Surt de l'aplicació</translation>
</message>
<message>
<source>&About %1</source>
<translation>Qu&ant al %1</translation>
</message>
<message>
<source>Show information about %1</source>
<translation>Mostra informació sobre el %1</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Quant a &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Mostra informació sobre Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opcions...</translation>
</message>
<message>
<source>Modify configuration options for %1</source>
<translation>Modifica les opcions de configuració de %1</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Encripta el moneder...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Realitza una còpia de seguretat del moneder...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Canvia la contrasenya...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>Adreces d'e&nviament...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>Adreces de &recepció...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Obre un &URI...</translation>
</message>
<message>
<source>Click to disable network activity.</source>
<translation>Feu clic per inhabilitar l'activitat de la xarxa.</translation>
</message>
<message>
<source>Network activity disabled.</source>
<translation>S'ha inhabilitat l'activitat de la xarxa.</translation>
</message>
<message>
<source>Click to enable network activity again.</source>
<translation>Feu clic per tornar a habilitar l'activitat de la xarxa.</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>S'estan reindexant els blocs al disc...</translation>
</message>
<message>
<source>Send coins to a pigycoin address</source>
<translation>Envia monedes a una adreça pigycoin</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Realitza una còpia de seguretat del moneder a una altra ubicació</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Canvia la contrasenya d'encriptació del moneder</translation>
</message>
<message>
<source>&Debug window</source>
<translation>&Finestra de depuració</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Obre la consola de diagnòstic i depuració</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Verifica el missatge...</translation>
</message>
<message>
<source>pigycoin</source>
<translation>pigycoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Moneder</translation>
</message>
<message>
<source>&Send</source>
<translation>&Envia</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Rep</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Mostra / Amaga</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Mostra o amaga la finestra principal</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Encripta les claus privades pertanyents al moneder</translation>
</message>
<message>
<source>Sign messages with your pigycoin addresses to prove you own them</source>
<translation>Signeu els missatges amb les vostres adreces de pigycoin per provar que les posseïu</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified pigycoin addresses</source>
<translation>Verifiqueu els missatges per assegurar-vos que han estat signats amb una adreça pigycoin específica.</translation>
</message>
<message>
<source>&File</source>
<translation>&Fitxer</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Configuració</translation>
</message>
<message>
<source>&Help</source>
<translation>&Ajuda</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Barra d'eines de les pestanyes</translation>
</message>
<message>
<source>Request payments (generates QR codes and pigycoin: URIs)</source>
<translation>Sol·licita pagaments (genera codis QR i pigycoin: URI)</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Mostra la llista d'adreces d'enviament i etiquetes utilitzades</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Mostra la llista d'adreces de recepció i etiquetes utilitzades</translation>
</message>
<message>
<source>Open a pigycoin: URI or payment request</source>
<translation>Obre un URI de pigycoin: o una sol·licitud de pagament</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>Opcions de la &línia d'ordres</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to pigycoin network</source>
<translation><numerusform>%n connexió activa a la xarxa pigycoin</numerusform><numerusform>%n connexions actives a la xarxa pigycoin</numerusform></translation>
</message>
<message>
<source>Indexing blocks on disk...</source>
<translation>S'estan indexant els blocs al disc...</translation>
</message>
<message>
<source>Processing blocks on disk...</source>
<translation>S'estan processant els blocs al disc...</translation>
</message>
<message numerus="yes">
<source>Processed %n block(s) of transaction history.</source>
<translation><numerusform>S'ha processat %n bloc de l'historial de transacció.</numerusform><numerusform>S'han processat %n blocs de l'historial de transacció.</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 darrere</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>El darrer bloc rebut ha estat generat fa %1.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Les transaccions posteriors encara no seran visibles.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Warning</source>
<translation>Avís</translation>
</message>
<message>
<source>Information</source>
<translation>Informació</translation>
</message>
<message>
<source>Up to date</source>
<translation>Al dia</translation>
</message>
<message>
<source>Show the %1 help message to get a list with possible pigycoin command-line options</source>
<translation>Mostra el missatge d'ajuda del %1 per obtenir una llista amb les possibles opcions de línia d'ordres de pigycoin</translation>
</message>
<message>
<source>%1 client</source>
<translation>Client de %1</translation>
</message>
<message>
<source>Catching up...</source>
<translation>S'està posant al dia...</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Data: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Import: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Tipus: %1
</translation>
</message>
<message>
<source>Label: %1
</source>
<translation>Etiqueta: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Adreça: %1
</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Transacció enviada</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transacció entrant</translation>
</message>
<message>
<source>HD key generation is <b>enabled</b></source>
<translation>La generació de la clau HD és <b>habilitada</b></translation>
</message>
<message>
<source>HD key generation is <b>disabled</b></source>
<translation>La generació de la clau HD és <b>inhabilitada</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>El moneder està <b>encriptat</b> i actualment <b>desbloquejat</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>El moneder està <b>encriptat</b> i actualment <b>bloquejat</b></translation>
</message>
<message>
<source>A fatal error occurred. pigycoin can no longer continue safely and will quit.</source>
<translation>S'ha produït un error fatal. pigycoin no pot continuar amb seguretat i finalitzarà.</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Selecció de moneda</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Quantitat:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Import:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Comissió:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polsim:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Comissió posterior:</translation>
</message>
<message>
<source>Change:</source>
<translation>Canvi:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>(des)selecciona-ho tot</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Mode arbre</translation>
</message>
<message>
<source>List mode</source>
<translation>Mode llista</translation>
</message>
<message>
<source>Amount</source>
<translation>Import</translation>
</message>
<message>
<source>Received with label</source>
<translation>Rebut amb l'etiqueta</translation>
</message>
<message>
<source>Received with address</source>
<translation>Rebut amb l'adreça</translation>
</message>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Confirmacions</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmat</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copia l'adreça</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia l'etiqueta</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copia l'import</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Copia l'ID de transacció</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Bloqueja sense gastar</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Desbloqueja sense gastar</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Copia la quantitat</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Copia la comissió</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Copia la comissió posterior</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Copia els bytes</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Copia el polsim</translation>
</message>
<message>
<source>Copy change</source>
<translation>Copia el canvi</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 bloquejada)</translation>
</message>
<message>
<source>yes</source>
<translation>sí</translation>
</message>
<message>
<source>no</source>
<translation>no</translation>
</message>
<message>
<source>This label turns red if any recipient receives an amount smaller than the current dust threshold.</source>
<translation>Aquesta etiqueta es torna vermella si algun destinatari rep un import inferior al llindar de polsim actual.</translation>
</message>
<message>
<source>Can vary +/- %1 satoshi(s) per input.</source>
<translation>Pot variar en +/- %1 satoshi(s) per entrada.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sense etiqueta)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>canvi de %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(canvi)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Edita l'adreça</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>L'etiqueta associada amb aquesta entrada de llista d'adreces</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>L'adreça associada amb aquesta entrada de llista d'adreces. Només es pot modificar per a les adreces d'enviament.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adreça</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nova adreça de recepció</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nova adreça d'enviament</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Edita l'adreça de recepció</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Edita l'adreça d'enviament</translation>
</message>
<message>
<source>The entered address "%1" is not a valid pigycoin address.</source>
<translation>L'adreça introduïda «%1» no és una adreça de pigycoin vàlida.</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>L'adreça introduïda «%1» ja és present a la llibreta d'adreces.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>No s'ha pogut desbloquejar el moneder.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Ha fallat la generació d'una clau nova.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Es crearà un nou directori de dades.</translation>
</message>
<message>
<source>name</source>
<translation>nom</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>El directori ja existeix. Afegiu %1 si voleu crear un directori nou en aquesta ubicació.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>El camí ja existeix i no és cap directori.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>No es pot crear el directori de dades aquí.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>version</source>
<translation>versió</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-bit)</translation>
</message>
<message>
<source>About %1</source>
<translation>Quant al %1</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Opcions de línia d'ordres</translation>
</message>
<message>
<source>Usage:</source>
<translation>Ús:</translation>
</message>
<message>
<source>command-line options</source>
<translation>Opcions de la línia d'ordres</translation>
</message>
<message>
<source>UI Options:</source>
<translation>Opcions d'interfície d'usuari:</translation>
</message>
<message>
<source>Choose data directory on startup (default: %u)</source>
<translation>Trieu el directori de dades a l'inici (per defecte: %u)</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Defineix la llengua, per exemple «de_DE» (per defecte: la definida pel sistema)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Inicia minimitzat</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>Defineix els certificats arrel SSL per a la sol·licitud de pagament (per defecte: els del sistema)</translation>
</message>
<message>
<source>Show splash screen on startup (default: %u)</source>
<translation>Mostra la pantalla de benvinguda a l'inici (per defecte: %u)</translation>
</message>
<message>
<source>Reset all settings changed in the GUI</source>
<translation>Reinicialitza tots els canvis de configuració fets des de la interfície gràfica</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Us donem la benvinguda</translation>
</message>
<message>
<source>Welcome to %1.</source>
<translation>Us donem la benvinguda a %1.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where %1 will store its data.</source>
<translation>Com que és la primera vegada que s'executa el programa, podeu triar on %1 emmagatzemarà les dades.</translation>
</message>
<message>
<source>%1 will download and store a copy of the pigycoin block chain. At least %2GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>%1 baixarà i emmagatzemarà una còpia de la cadena de blocs de pigycoin. Com a mínim %2GB de dades s'emmagatzemaran en aquest directori, i augmentarà al llarg del temps. El moneder també s'emmagatzemarà en aquest directori.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Utilitza el directori de dades per defecte</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Utilitza un directori de dades personalitzat:</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Error: el directori de dades «%1» especificat no pot ser creat.</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
<translation><numerusform>%n GB d'espai lliure disponible</numerusform><numerusform>%n GB d'espai lliure disponible</numerusform></translation>
</message>
<message numerus="yes">
<source>(of %n GB needed)</source>
<translation><numerusform>(de %n GB necessari)</numerusform><numerusform>(de %n GB necessaris)</numerusform></translation>
</message>
</context>
<context>
<name>ModalOverlay</name>
<message>
<source>Form</source>
<translation>Formulari</translation>
</message>
<message>
<source>Unknown...</source>
<translation>Desconegut...</translation> | </message>
<message>
<source>Progress</source>
<translation>Progrés</translation>
</message>
<message>
<source>calculating...</source>
<translation>s'està calculant...</translation>
</message>
<message>
<source>Estimated time left until synced</source>
<translation>Temps estimat restant fins sincronitzat</translation>
</message>
<message>
<source>Hide</source>
<translation>Amaga</translation>
</message>
<message>
<source>Unknown. Syncing Headers (%1)...</source>
<translation>Desconegut. Sincronització de les capçaleres (%1)...</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>Obre un URI</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Obre una sol·licitud de pagament des d'un URI o un fitxer</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Selecciona un fitxer de sol·licitud de pagament</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Seleccioneu el fitxer de sol·licitud de pagament per obrir</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opcions</translation>
</message>
<message>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<source>Automatically start %1 after logging in to the system.</source>
<translation>Inicieu %1 automàticament després d'entrar en el sistema.</translation>
</message>
<message>
<source>&Start %1 on system login</source>
<translation>&Inicia %1 en l'entrada al sistema</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Mida de la memòria cau de la base de &dades</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Nombre de fils de &verificació d'scripts</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Accepta connexions de fora</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Permet connexions entrants</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>Adreça IP del proxy (p. ex. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Exit in the menu.</source>
<translation>Minimitza en comptes de sortir de l'aplicació quan es tanca la finestra. Quan s'habilita aquesta opció, l'aplicació només es tancarà quan se seleccioni Surt al menú.</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>URL de terceres parts (p. ex. un explorador de blocs) que apareixen a la pestanya de transaccions com a elements del menú contextual. %s a l'URL és reemplaçat pel resum de la transacció. Els diferents URL se separen amb una barra vertical |.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>URL de transaccions de terceres parts</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Opcions de línia d'ordres actives que sobreescriuen les opcions anteriors:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Restableix totes les opcions del client als valors per defecte.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&Restableix les opcions</translation>
</message>
<message>
<source>&Network</source>
<translation>&Xarxa</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = auto, <0 = deixa tants nuclis lliures)</translation>
</message>
<message>
<source>W&allet</source>
<translation>&Moneder</translation>
</message>
<message>
<source>Expert</source>
<translation>Expert</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>Activa les funcions de &control de les monedes</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation>Si inhabiliteu la despesa d'un canvi sense confirmar, el canvi d'una transacció no pot ser utilitzat fins que la transacció no tingui com a mínim una confirmació. Això també afecta com es calcula el vostre balanç.</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Gasta el canvi sense confirmar</translation>
</message>
<message>
<source>Automatically open the pigycoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Obre el port del client de pigycoin al router de forma automàtica. Això només funciona quan el router implementa UPnP i l'opció està activada.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Port obert amb &UPnP</translation>
</message>
<message>
<source>Connect to the pigycoin network through a SOCKS5 proxy.</source>
<translation>Connecta a la xarxa pigycoin a través d'un proxy SOCKS5.</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>&Connecta a través d'un proxy SOCKS5 (proxy per defecte):</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>&IP del proxy:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port del proxy (per exemple 9050)</translation>
</message>
<message>
<source>Used for reaching peers via:</source>
<translation>Utilitzat per arribar als iguals mitjançant:</translation>
</message>
<message>
<source>Shows, if the supplied default SOCKS5 proxy is used to reach peers via this network type.</source>
<translation>Mostra si el proxy SOCKS5 per defecte proporcionat s'utilitza per arribar als iguals mitjançant aquest tipus de xarxa.</translation>
</message>
<message>
<source>IPv4</source>
<translation>IPv4</translation>
</message>
<message>
<source>IPv6</source>
<translation>IPv6</translation>
</message>
<message>
<source>Tor</source>
<translation>Tor</translation>
</message>
<message>
<source>Connect to the pigycoin network through a separate SOCKS5 proxy for Tor hidden services.</source>
<translation>Connecta a la xarxa pigycoin a través d'un proxy SOCKS5 separat per als serveis ocults de Tor.</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services:</source>
<translation>Utilitza un proxy SOCKS5 a part per a arribar als iguals a través dels serveis ocults de Tor:</translation>
</message>
<message>
<source>&Window</source>
<translation>&Finestra</translation>
</message>
<message>
<source>&Hide the icon from the system tray.</source>
<translation>Ama&ga la icona de la safata del sistema.</translation>
</message>
<message>
<source>Hide tray icon</source>
<translation>Amaga la icona de la safata</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostra només la icona de la barra en minimitzar la finestra.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimitza a la safata en comptes de la barra de tasques</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimitza en tancar</translation>
</message>
<message>
<source>&Display</source>
<translation>&Pantalla</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Llengua de la interfície d'usuari:</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting %1.</source>
<translation>Aquí es pot definir la llengua de la interfície d'usuari. Aquest paràmetre tindrà efecte en reiniciar el %1.</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unitats per mostrar els imports en:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Trieu la unitat de subdivisió per defecte per mostrar a la interfície i en enviar monedes.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Si voleu mostrar les funcions de control de monedes o no.</translation>
</message>
<message>
<source>&OK</source>
<translation>&D'acord</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Cancel·la</translation>
</message>
<message>
<source>default</source>
<translation>per defecte</translation>
</message>
<message>
<source>none</source>
<translation>cap</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Confirmeu el restabliment de les opcions</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Cal reiniciar el client per activar els canvis.</translation>
</message>
<message>
<source>Client will be shut down. Do you want to proceed?</source>
<translation>S'aturarà el client. Voleu procedir?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Amb aquest canvi cal un reinici del client.</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>L'adreça proxy introduïda és invalida.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulari</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the pigycoin network after a connection is established, but this process has not completed yet.</source>
<translation>La informació mostrada pot no estar al dia. El vostre moneder se sincronitza automàticament amb la xarxa pigycoin un cop s'ha establert la connexió, però aquest procés encara no s'ha completat.</translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Només lectura:</translation>
</message>
<message>
<source>Available:</source>
<translation>Disponible:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>El balanç que podeu gastar actualment</translation>
</message>
<message>
<source>Pending:</source>
<translation>Pendent:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Total de transaccions que encara han de confirmar-se i que encara no compten en el balanç que es pot gastar</translation>
</message>
<message>
<source>Immature:</source>
<translation>Immadur:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Balanç minat que encara no ha madurat</translation>
</message>
<message>
<source>Balances</source>
<translation>Balances</translation>
</message>
<message>
<source>Total:</source>
<translation>Total:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>El balanç total actual</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
<translation>El vostre balanç actual en adreces de només lectura</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Que es pot gastar:</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Transaccions recents</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Transaccions sense confirmar a adreces de només lectura</translation>
</message>
<message>
<source>Mined balance in watch-only addresses that has not yet matured</source>
<translation>Balanç minat en adreces de només lectura que encara no ha madurat</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Balanç total actual en adreces de només lectura</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>Payment request error</source>
<translation>Error de la sol·licitud de pagament</translation>
</message>
<message>
<source>Cannot start pigycoin: click-to-pay handler</source>
<translation>No es pot iniciar pigycoin: controlador click-to-pay</translation>
</message>
<message>
<source>URI handling</source>
<translation>Gestió d'URI</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>L'URL de recuperació de la sol·licitud de pagament no és vàlida: %1</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Adreça de pagament no vàlida %1</translation>
</message>
<message>
<source>URI cannot be parsed! This can be caused by an invalid pigycoin address or malformed URI parameters.</source>
<translation>L'URI no pot ser analitzat! Això pot ser a causa d'una adreça de pigycoin no vàlida o per paràmetres URI amb mal format.</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Gestió de fitxers de les sol·licituds de pagament</translation>
</message>
<message>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>No es pot llegir el fitxer de la sol·licitud de pagament. Això pot ser causat per un fitxer de sol·licitud de pagament no vàlid.</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>La sol·licitud de pagament s'ha rebutjat</translation>
</message>
<message>
<source>Payment request network doesn't match client network.</source>
<translation>La xarxa de la sol·licitud de pagament no coincideix amb la xarxa del client.</translation>
</message>
<message>
<source>Payment request expired.</source>
<translation>La sol·licitud de pagament ha vençut.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>La sol·licitud de pagament no està inicialitzada.</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>No s'accepten sol·licituds de pagament no verificades a scripts de pagament personalitzats.</translation>
</message>
<message>
<source>Invalid payment request.</source>
<translation>Sol·licitud de pagament no vàlida.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>L'import de pagament sol·licitat %1 és massa petit (es considera polsim).</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Reemborsament de %1</translation>
</message>
<message>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>La sol·licitud de pagament %1 és massa gran (%2 bytes, permès %3 bytes).</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Error en comunicar amb %1: %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>No es pot analitzar la sol·licitud de pagament!</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Mala resposta del servidor %1</translation>
</message>
<message>
<source>Network request error</source>
<translation>Error en la sol·licitud de xarxa</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Pagament reconegut</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>User Agent</source>
<translation>Agent d'usuari</translation>
</message>
<message>
<source>Node/Service</source>
<translation>Node/Servei</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Import</translation>
</message>
<message>
<source>Enter a pigycoin address (e.g. %1)</source>
<translation>Introduïu una adreça de pigycoin (p. ex. %1)</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>None</source>
<translation>Cap</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 i %2</translation>
</message>
</context>
<context>
<name>QObject::QObject</name>
<message>
<source>Error: %1</source>
<translation>Error: %1</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>De&sa la imatge...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>&Copia la imatge</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>Desa el codi QR</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>Imatge PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>Client version</source>
<translation>Versió del client</translation>
</message>
<message>
<source>&Information</source>
<translation>&Informació</translation>
</message>
<message>
<source>Debug window</source>
<translation>Finestra de depuració</translation>
</message>
<message>
<source>General</source>
<translation>General</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Utilitzant BerkeleyDB versió</translation>
</message>
<message>
<source>Datadir</source>
<translation>Datadir</translation>
</message>
<message>
<source>Startup time</source>
<translation>Temps d'inici</translation>
</message>
<message>
<source>Network</source>
<translation>Xarxa</translation>
</message>
<message>
<source>Name</source>
<translation>Nom</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Nombre de connexions</translation>
</message>
<message>
<source>Block chain</source>
<translation>Cadena de blocs</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Nombre de blocs actuals</translation>
</message>
<message>
<source>Memory Pool</source>
<translation>Reserva de memòria</translation>
</message>
<message>
<source>Current number of transactions</source>
<translation>Nombre actual de transaccions</translation>
</message>
<message>
<source>Memory usage</source>
<translation>Ús de memòria</translation>
</message>
<message>
<source>Received</source>
<translation>Rebut</translation>
</message>
<message>
<source>Sent</source>
<translation>Enviat</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Iguals</translation>
</message>
<message>
<source>Banned peers</source>
<translation>Iguals bandejats</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Seleccioneu un igual per mostrar informació detallada.</translation>
</message>
<message>
<source>Whitelisted</source>
<translation>A la llista blanca</translation>
</message>
<message>
<source>Direction</source>
<translation>Direcció</translation>
</message>
<message>
<source>Version</source>
<translation>Versió</translation>
</message>
<message>
<source>Starting Block</source>
<translation>Bloc d'inici</translation>
</message>
<message>
<source>Synced Headers</source>
<translation>Capçaleres sincronitzades</translation>
</message>
<message>
<source>Synced Blocks</source>
<translation>Blocs sincronitzats</translation>
</message>
<message>
<source>User Agent</source>
<translation>Agent d'usuari</translation>
</message>
<message>
<source>Services</source>
<translation>Serveis</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Puntuació de bandeig</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Temps de connexió</translation>
</message>
<message>
<source>Last Send</source>
<translation>Darrer enviament</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Darrera recepció</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Temps de ping</translation>
</message>
<message>
<source>The duration of a currently outstanding ping.</source>
<translation>La durada d'un ping actualment pendent.</translation>
</message>
<message>
<source>Ping Wait</source>
<translation>Espera de ping</translation>
</message>
<message>
<source>Time Offset</source>
<translation>Diferència horària</translation>
</message>
<message>
<source>Last block time</source>
<translation>Últim temps de bloc</translation>
</message>
<message>
<source>&Open</source>
<translation>&Obre</translation>
</message>
<message>
<source>&Console</source>
<translation>&Consola</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>Trà&nsit de la xarxa</translation>
</message>
<message>
<source>&Clear</source>
<translation>Nete&ja</translation>
</message>
<message>
<source>Totals</source>
<translation>Totals</translation>
</message>
<message>
<source>In:</source>
<translation>Dins:</translation>
</message>
<message>
<source>Out:</source>
<translation>Fora:</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Fitxer de registre de depuració</translation>
</message>
<message>
<source>Clear console</source>
<translation>Neteja la consola</translation>
</message>
<message>
<source>1 &hour</source>
<translation>1 &hora</translation>
</message>
<message>
<source>1 &day</source>
<translation>1 &dia</translation>
</message>
<message>
<source>1 &week</source>
<translation>1 &setmana</translation>
</message>
<message>
<source>1 &year</source>
<translation>1 &any</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Utilitzeu les fletxes amunt i avall per navegar per l'historial, i <b>Ctrl-L</b> per netejar la pantalla.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Escriviu <b>help</b> per a obtenir un llistat de les ordres disponibles.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>(node id: %1)</source>
<translation>(id del node: %1)</translation>
</message>
<message>
<source>via %1</source>
<translation>a través de %1</translation>
</message>
<message>
<source>never</source>
<translation>mai</translation>
</message>
<message>
<source>Inbound</source>
<translation>Entrant</translation>
</message>
<message>
<source>Outbound</source>
<translation>Sortint</translation>
</message>
<message>
<source>Yes</source>
<translation>Sí</translation>
</message>
<message>
<source>No</source>
<translation>No</translation>
</message>
<message>
<source>Unknown</source>
<translation>Desconegut</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>Im&port:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Missatge:</translation>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation>Reutilitza una de les adreces de recepció utilitzades anteriorment. La reutilització d'adreces pot comportar problemes de seguretat i privadesa. No ho utilitzeu llevat que torneu a generar una sol·licitud de pagament feta abans.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>R&eutilitza una adreça de recepció anterior (no recomanat)</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the pigycoin network.</source>
<translation>Un missatge opcional que s'adjuntarà a la sol·licitud de pagament, que es mostrarà quan s'obri la sol·licitud. Nota: El missatge no s'enviarà amb el pagament per la xarxa pigycoin.</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation>Una etiqueta opcional que s'associarà amb la nova adreça receptora.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Utilitzeu aquest formulari per sol·licitar pagaments. Tots els camps són <b>opcionals</b>.</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>Un import opcional per sol·licitar. Deixeu-ho en blanc o zero per no sol·licitar cap import específic.</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Esborra tots els camps del formulari.</translation>
</message>
<message>
<source>Clear</source>
<translation>Neteja</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Historial de pagaments sol·licitats</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Sol·licitud de pagament</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Mostra la sol·licitud seleccionada (fa el mateix que el doble clic a una entrada)</translation>
</message>
<message>
<source>Show</source>
<translation>Mostra</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Esborra les entrades seleccionades de la llista</translation>
</message>
<message>
<source>Remove</source>
<translation>Esborra</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia l'etiqueta</translation>
</message>
<message>
<source>Copy message</source>
<translation>Copia el missatge</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copia l'import</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>Codi QR</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>Copia l'&URI</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>Copia l'&adreça</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>De&sa la imatge...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Sol·licita un pagament a %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Informació de pagament</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adreça</translation>
</message>
<message>
<source>Amount</source>
<translation>Import</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Message</source>
<translation>Missatge</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>L'URI resultant és massa llarg; proveu de reduir el text de l'etiqueta o del missatge.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Error en codificar l'URI en un codi QR.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Message</source>
<translation>Missatge</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sense etiqueta)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(sense missatge)</translation>
</message>
<message>
<source>(no amount requested)</source>
<translation>(no s'ha sol·licitat import)</translation>
</message>
<message>
<source>Requested</source>
<translation>Sol·licitat</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Envia monedes</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>Característiques de control de les monedes</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Entrades...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>seleccionat automàticament</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Fons insuficients!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Quantitat:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Bytes:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Import:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Comissió:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Comissió posterior:</translation>
</message>
<message>
<source>Change:</source>
<translation>Canvi:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Si s'activa això, però l'adreça de canvi està buida o bé no és vàlida, el canvi s'enviarà a una adreça generada de nou.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Personalitza l'adreça de canvi</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Comissió de transacció:</translation>
</message>
<message>
<source>Choose...</source>
<translation>Tria...</translation>
</message>
<message>
<source>collapse fee-settings</source>
<translation>redueix els paràmetres de comissió</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>per kilobyte</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "total at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Si la comissió personalitzada es defineix a 1000 satoshis i la transacció és de només 250 bytes, llavors «per kilobyte» només es paguen 250 satoshis en una comissió, mentre que amb la de «total com a mínim» es pagarien 1000 satoshis. Per a transaccions superiors al kilobyte, en tots dos casos es paga per kilobyte.</translation>
</message>
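<!-- Editor's note, a hedged worked example of the fee arithmetic described in the
     message above, reusing the illustrative numbers from the source string:
       custom fee 1000 satoshis, transaction size 250 bytes
       per kilobyte:   1000 * (250 / 1000) = 250 satoshis paid
       total at least: max(1000, 250)      = 1000 satoshis paid
     For a 2000-byte transaction both modes pay by kilobyte: 2000 satoshis. -->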
<message>
<source>Hide</source>
<translation>Amaga</translation>
</message>
<message>
<source>total at least</source>
<translation>total com a mínim</translation>
</message>
<message>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks. But be aware that this can end up in a never confirming transaction once there is more demand for pigycoin transactions than the network can process.</source>
<translation>No hi ha cap problema en pagar només la comissió mínima sempre que hi hagi menys volum de transacció que espai en els blocs. Però tingueu present que això pot acabar en una transacció que mai es confirmi una vegada hi hagi més demanda de transaccions de pigycoins que la xarxa pugui processar.</translation>
</message>
<message>
<source>(read the tooltip)</source>
<translation>(llegiu l'indicador de funció)</translation>
</message>
<message>
<source>Recommended:</source>
<translation>Recomanada:</translation>
</message>
<message>
<source>Custom:</source>
<translation>Personalitzada:</translation>
</message>
<message>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(No s'ha inicialitzat encara la comissió intel·ligent. Normalment pren uns pocs blocs...)</translation>
</message>
<message>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<source>fast</source>
<translation>ràpid</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Envia a múltiples destinataris al mateix temps</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Afegeix &destinatari</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Esborra tots els camps del formulari.</translation>
</message>
<message>
<source>Dust:</source>
<translation>Polsim:</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Neteja-ho &tot</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balanç:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Confirma l'acció d'enviament</translation>
</message>
<message>
<source>S&end</source>
<translation>E&nvia</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Copia la quantitat</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copia l'import</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Copia la comissió</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Copia la comissió posterior</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Copia els bytes</translation>
</message>
<message>
<source>Copy dust</source>
<translation>Copia el polsim</translation>
</message>
<message>
<source>Copy change</source>
<translation>Copia el canvi</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 a %2</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Esteu segur que ho voleu enviar?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>afegit com a comissió de transacció</translation>
</message>
<message>
<source>Total Amount %1</source>
<translation>Import total %1</translation>
</message>
<message>
<source>or</source>
<translation>o</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Confirma l'enviament de monedes</translation>
</message>
<message>
<source>The recipient address is not valid. Please recheck.</source>
<translation>L'adreça del destinatari no és vàlida. Torneu-la a comprovar.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>L'import a pagar ha de ser major que 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>L'import supera el vostre balanç.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>El total excedeix el vostre balanç quan s'hi inclou la comissió de transacció de %1.</translation>
</message>
<message>
<source>Duplicate address found: addresses should only be used once each.</source>
<translation>S'ha trobat una adreça duplicada: les adreces només s'haurien d'utilitzar una vegada cada una.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>La creació de la transacció ha fallat!</translation>
</message>
<message>
<source>A fee higher than %1 is considered an absurdly high fee.</source>
<translation>Una comissió superior a %1 es considera una comissió absurdament alta.</translation>
</message>
<message>
<source>Payment request expired.</source>
<translation>La sol·licitud de pagament ha vençut.</translation>
</message>
<message>
<source>Pay only the required fee of %1</source>
<translation>Paga només la comissió necessària de %1</translation>
</message>
<message>
<source>Warning: Invalid pigycoin address</source>
<translation>Avís: adreça pigycoin no vàlida</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Avís: adreça de canvi desconeguda</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sense etiqueta)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Q&uantitat:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>Paga &a:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Escull una adreça feta servir anteriorment</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Això és un pagament normal.</translation>
</message>
<message>
<source>The pigycoin address to send the payment to</source>
<translation>L'adreça pigycoin on enviar el pagament</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Enganxa l'adreça del porta-retalls</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Elimina aquesta entrada</translation>
</message>
<message>
<source>The fee will be deducted from the amount being sent. The recipient will receive less pigycoins than you enter in the amount field. If multiple recipients are selected, the fee is split equally.</source>
<translation>La comissió es deduirà de l'import que s'enviarà. El destinatari rebrà menys pigycoins que les que introduïu al camp d'import. Si se seleccionen múltiples destinataris, la comissió es dividirà per igual.</translation>
</message>
<message>
<source>S&ubtract fee from amount</source>
<translation>S&ubstreu la comissió de l'import</translation>
</message>
<message>
<source>Message:</source>
<translation>Missatge:</translation>
</message>
<message>
<source>This is an unauthenticated payment request.</source>
<translation>Aquesta és una sol·licitud de pagament no autenticada.</translation>
</message>
<message>
<source>This is an authenticated payment request.</source>
<translation>Aquesta és una sol·licitud de pagament autenticada.</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Introduïu una etiqueta per a aquesta adreça per afegir-la a la llista d'adreces utilitzades</translation>
</message>
<message>
<source>A message that was attached to the pigycoin: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the pigycoin network.</source>
<translation>Un missatge que s'ha adjuntat a l'URI de pigycoin i que s'emmagatzemarà amb la transacció per a la vostra referència. Nota: el missatge no s'enviarà a través de la xarxa pigycoin.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Paga a:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memo:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introduïu una etiqueta per a aquesta adreça per afegir-la a la llibreta d'adreces</translation>
</message>
</context>
<context>
<name>SendConfirmationDialog</name>
<message>
<source>Yes</source>
<translation>Sí</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>No apagueu l'ordinador fins que no desaparegui aquesta finestra.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signatures - Signa / verifica un missatge</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>&Signa el missatge</translation>
</message>
<message>
<source>You can sign messages/agreements with your addresses to prove you can receive pigycoins sent to them. Be careful not to sign anything vague or random, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Podeu signar missatges/acords amb les vostres adreces per provar que podeu rebre les pigycoins que s'hi enviïn. Aneu amb compte de no signar res que sigui vague o aleatori, ja que els atacs de pesca poden provar d'enganyar-vos perquè els signeu la vostra identitat. Només signeu declaracions completament detallades amb què estigueu d'acord.</translation>
</message>
<message>
<source>The pigycoin address to sign the message with</source>
<translation>L'adreça pigycoin amb què signar el missatge</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Escull una adreça feta servir anteriorment</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Enganxa l'adreça del porta-retalls</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Introduïu aquí el missatge que voleu signar</translation>
</message>
<message>
<source>Signature</source>
<translation>Signatura</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Copia la signatura actual al porta-retalls del sistema</translation>
</message>
<message>
<source>Sign the message to prove you own this pigycoin address</source>
<translation>Signa el missatge per provar que ets propietari d'aquesta adreça pigycoin</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Signa el &missatge</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Neteja tots els camps de signatura del missatge</translation>
</message>
<message>
<source>Clear &All</source>
<translation>Neteja-ho &tot</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Verifica el missatge</translation>
</message>
<message>
<source>Enter the receiver's address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack. Note that this only proves the signing party receives with the address, it cannot prove sendership of any transaction!</source>
<translation>Introduïu l'adreça del receptor, el missatge (assegureu-vos de copiar exactament els salts de línia, espais, tabuladors, etc.) i la signatura de sota per verificar el missatge. Aneu amb compte de no llegir en la signatura més del que hi ha al missatge signat mateix, per evitar ser enganyats per un atac d'intermediari. Tingueu en compte que això només prova que la part signant rep amb l'adreça; no prova l'enviament de cap transacció!</translation>
</message>
<message>
<source>The pigycoin address the message was signed with</source>
<translation>L'adreça pigycoin amb què va ser signat el missatge</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified pigycoin address</source>
<translation>Verifiqueu el missatge per assegurar-vos que s'ha signat amb l'adreça pigycoin especificada</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>Verifica el &missatge</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Neteja tots els camps de verificació de missatge</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Feu clic a «Signa el missatge» per a generar una signatura</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>L'adreça introduïda no és vàlida.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Comproveu l'adreça i torneu-ho a provar.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>L'adreça introduïda no referencia a cap clau.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>El desbloqueig del moneder ha estat cancel·lat.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>La clau privada per a l'adreça introduïda no està disponible.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>La signatura del missatge ha fallat.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Missatge signat.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>La signatura no s'ha pogut descodificar.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Comproveu la signatura i torneu-ho a provar.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>La signatura no coincideix amb el resum del missatge.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Ha fallat la verificació del missatge.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Missatge verificat.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Obert fins a %1</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/fora de línia</translation>
</message>
<message>
<source>abandoned</source>
<translation>abandonada</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/sense confirmar</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 confirmacions</translation>
</message>
<message>
<source>Status</source>
<translation>Estat</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, encara no ha estat emès correctament</translation>
</message>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<source>Source</source>
<translation>Font</translation>
</message>
<message>
<source>Generated</source>
<translation>Generada</translation>
</message>
<message>
<source>From</source>
<translation>De</translation>
</message>
<message>
<source>unknown</source>
<translation>desconegut</translation>
</message>
<message>
<source>To</source>
<translation>A</translation>
</message>
<message>
<source>own address</source>
<translation>adreça pròpia</translation>
</message>
<message>
<source>watch-only</source>
<translation>només lectura</translation>
</message>
<message>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<source>Credit</source>
<translation>Crèdit</translation>
</message>
<message>
<source>not accepted</source>
<translation>no acceptat</translation>
</message>
<message>
<source>Debit</source>
<translation>Dèbit</translation>
</message>
<message>
<source>Total debit</source>
<translation>Dèbit total</translation>
</message>
<message>
<source>Total credit</source>
<translation>Crèdit total</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Comissió de transacció</translation>
</message>
<message>
<source>Net amount</source>
<translation>Import net</translation>
</message>
<message>
<source>Message</source>
<translation>Missatge</translation>
</message>
<message>
<source>Comment</source>
<translation>Comentari</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>ID de la transacció</translation>
</message>
<message>
<source>Merchant</source>
<translation>Mercader</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Les monedes generades han de madurar %1 blocs abans de poder ser gastades. Quan genereu aquest bloc, es farà saber a la xarxa per tal d'afegir-lo a la cadena de blocs. Si no pot fer-se lloc a la cadena, el seu estat canviarà a «no acceptat» i no es podrà gastar. Això pot passar ocasionalment si un altre node genera un bloc en un marge de segons respecte al vostre.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Informació de depuració</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transacció</translation>
</message>
<message>
<source>Inputs</source>
<translation>Entrades</translation>
</message>
<message>
<source>Amount</source>
<translation>Import</translation>
</message>
<message>
<source>true</source>
<translation>cert</translation>
</message>
<message>
<source>false</source>
<translation>fals</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Aquest panell mostra una descripció detallada de la transacció</translation>
</message>
<message>
<source>Details for %1</source>
<translation>Detalls per %1</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<source>Type</source>
<translation>Tipus</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Obert fins a %1</translation>
</message>
<message>
<source>Offline</source>
<translation>Fora de línia</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Sense confirmar</translation>
</message>
<message>
<source>Abandoned</source>
<translation>Abandonada</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Confirmant (%1 de %2 confirmacions recomanades)</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmat (%1 confirmacions)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>En conflicte</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Immadur (%1 confirmacions, serà disponible després de %2)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Aquest bloc no ha estat rebut per cap altre node i probablement no serà acceptat!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Generat però no acceptat</translation>
</message>
<message>
<source>Received with</source>
<translation>Rebuda amb</translation>
</message>
<message>
<source>Received from</source>
<translation>Rebuda de</translation>
</message>
<message>
<source>Sent to</source>
<translation>Enviada a</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Pagament a un mateix</translation>
</message>
<message>
<source>Mined</source>
<translation>Minada</translation>
</message>
<message>
<source>watch-only</source>
<translation>només lectura</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sense etiqueta)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Estat de la transacció. Desplaceu-vos sobre aquest camp per mostrar el nombre de confirmacions.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Data i hora en què la transacció va ser rebuda.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Tipus de transacció.</translation>
</message>
<message>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Si està implicada o no una adreça només de lectura en la transacció.</translation>
</message>
<message>
<source>User-defined intent/purpose of the transaction.</source>
<translation>Intenció/propòsit de la transacció definida per l'usuari.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Import extret o afegit del balanç.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Tot</translation>
</message>
<message>
<source>Today</source>
<translation>Avui</translation>
</message>
<message>
<source>This week</source>
<translation>Aquesta setmana</translation>
</message>
<message>
<source>This month</source>
<translation>Aquest mes</translation>
</message>
<message>
<source>Last month</source>
<translation>El mes passat</translation>
</message>
<message>
<source>This year</source>
<translation>Enguany</translation>
</message>
<message>
<source>Range...</source>
<translation>Rang...</translation>
</message>
<message>
<source>Received with</source>
<translation>Rebuda amb</translation>
</message>
<message>
<source>Sent to</source>
<translation>Enviada a</translation>
</message>
<message>
<source>To yourself</source>
<translation>A un mateix</translation>
</message>
<message>
<source>Mined</source>
<translation>Minada</translation>
</message>
<message>
<source>Other</source>
<translation>Altres</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Introduïu una adreça o una etiqueta per cercar</translation>
</message>
<message>
<source>Min amount</source>
<translation>Import mínim</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copia l'adreça</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia l'etiqueta</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copia l'import</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Copia l'ID de transacció</translation>
</message>
<message>
<source>Copy raw transaction</source>
<translation>Copia la transacció crua</translation>
</message>
<message>
<source>Copy full transaction details</source>
<translation>Copia els detalls complets de la transacció</translation>
</message>
<message>
<source>Edit label</source>
<translation>Edita l'etiqueta</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Mostra detalls de la transacció</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Exporta l'historial de transaccions</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Fitxer separat per comes (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmat</translation>
</message>
<message>
<source>Watch-only</source>
<translation>Només de lectura</translation>
</message>
<message>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<source>Type</source>
<translation>Tipus</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Adreça</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>L'exportació ha fallat</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>S'ha produït un error en provar de desar l'historial de transaccions a %1.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exportació amb èxit</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>L'historial de transaccions s'ha desat correctament a %1.</translation>
</message>
<message>
<source>Range:</source>
<translation>Rang:</translation>
</message>
<message>
<source>to</source>
<translation>a</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Unitat en què mostrar els imports. Feu clic per seleccionar una altra unitat.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>No s'ha carregat cap moneder.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Envia monedes</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Exporta</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exporta les dades de la pestanya actual a un fitxer</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Còpia de seguretat del moneder</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Dades del moneder (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Ha fallat la còpia de seguretat</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>S'ha produït un error en provar de desar les dades del moneder a %1.</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>La còpia de seguretat s'ha realitzat correctament</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>S'han desat les dades del moneder correctament a %1.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Opcions:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Especifica el directori de dades</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Connecta a un node per obtenir adreces d'iguals, i desconnecta</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Especifiqueu la vostra adreça pública</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Accepta ordres de la línia d'ordres i ordres JSON-RPC</translation>
</message>
<message>
<source>If <category> is not supplied or if <category> = 1, output all debugging information.</source>
<translation>Si no es proporciona <category> o si <category> = 1, treu a la sortida tota la informació de depuració.</translation>
</message>
<message>
<source>Prune configured below the minimum of %d MiB. Please use a higher number.</source>
<translation>Poda configurada per sota el mínim de %d MiB. Utilitzeu un nombre superior.</translation>
</message>
<message>
<source>Prune: last wallet synchronisation goes beyond pruned data. You need to -reindex (download the whole blockchain again in case of pruned node)</source>
<translation>Poda: la darrera sincronització del moneder va més enllà de les dades podades. Cal que activeu -reindex (baixeu tota la cadena de blocs de nou en cas de node podat)</translation>
</message>
<message>
<source>Rescans are not possible in pruned mode. You will need to use -reindex which will download the whole blockchain again.</source>
<translation>Els rescanejos no són possible en el mode de poda. Caldrà que utilitzeu -reindex, que tornarà a baixar la cadena de blocs sencera.</translation>
</message>
<message>
<source>Error: A fatal internal error occurred, see debug.log for details</source>
<translation>Error: s'ha produït un error intern fatal. Vegeu debug.log per a més detalls</translation>
</message>
<message>
<source>Fee (in %s/kB) to add to transactions you send (default: %s)</source>
<translation>Comissió (en %s/kB) per afegir a les transaccions que envieu (per defecte: %s)</translation>
</message>
<message>
<source>Pruning blockstore...</source>
<translation>S'està podant la cadena de blocs...</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Executa en segon pla com a programa dimoni i accepta ordres</translation>
</message>
<message>
<source>Unable to start HTTP server. See debug log for details.</source>
<translation>No s'ha pogut iniciar el servidor HTTP. Vegeu debug.log per a més detalls.</translation>
</message>
<message>
<source>pigycoin Core</source>
<translation>pigycoin Core</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Vincula a una adreça específica i sempre escolta-hi. Utilitza la notació [host]:port per IPv6</translation>
</message>
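<!-- Editor's note, hedged illustration of the [host]:port notation mentioned in the
     message above; the addresses and the port number are assumptions for illustration:
       bind=0.0.0.0:9333     IPv4, all interfaces
       bind=[::1]:9333       IPv6 loopback, square brackets required around the host
     A sketch of the notation only, not a definitive reference. -->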
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Elimina totes les transaccions del moneder i recupera només aquelles parts de la cadena de blocs mitjançant -rescan a l'inici</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executa una ordre quan una transacció del moneder canviï (%s en cmd es canvia per TxID)</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Defineix el nombre de fils de verificació d'scripts (%u a %d, 0 = auto, <0 = deixa tants nuclis lliures, per defecte: %d)</translation>
</message>
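<!-- Editor's note, hedged illustration of the thread-count semantics described in the
     message above (the option name par is the usual Bitcoin Core spelling and is an
     assumption here; the values are illustrative):
       par=0     auto-detect the number of cores
       par=4     use 4 script verification threads
       par=-2    leave 2 cores free and use the rest -->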
<message>
<source><category> can be:</source>
<translation><category> pot ser:</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Opcions de la creació de blocs:</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Opcions de connexió:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>S'ha detectat una base de dades de blocs corrupta</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Opcions de depuració/proves:</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>No carreguis el moneder i inhabilita les crides RPC del moneder</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Voleu reconstruir la base de dades de blocs ara?</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Error en inicialitzar la base de dades de blocs</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Error inicialitzant l'entorn de la base de dades del moneder %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Error en carregar la base de dades de blocs</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Error en obrir la base de dades de blocs</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Error: Espai al disc baix!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>No s'ha pogut escoltar en cap port. Feu servir -listen=0 si voleu això.</translation>
</message>
<message>
<source>Importing...</source>
<translation>S'està important...</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>No s'ha trobat el bloc de gènesi o és incorrecte. El directori de dades de la xarxa és incorrecte?</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Adreça -onion no vàlida: '%s'</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>No hi ha prou descriptors de fitxers disponibles.</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Només connecta als nodes de la xarxa <net> (ipv4, ipv6 o onion)</translation>
</message>
<message>
<source>Prune cannot be configured with a negative value.</source>
<translation>La poda no es pot configurar amb un valor negatiu.</translation>
</message>
<message>
<source>Prune mode is incompatible with -txindex.</source>
<translation>El mode de poda és incompatible amb -txindex.</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Defineix la mida de la memòria cau de la base de dades en megabytes (%d a %d, per defecte: %d)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Defineix la mida màxima del bloc en bytes (per defecte: %d)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Especifica un fitxer de moneder (dins del directori de dades)</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation>Utilitza UPnP per a mapejar el port d'escolta (per defecte: %u)</translation>
</message>
<message>
<source>Use the test chain</source>
<translation>Utilitza la cadena de proves</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>S'estan verificant els blocs...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>S'està verificant el moneder...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>El moneder %s resideix fora del directori de dades %s</translation>
</message>
<message>
<source>Wallet debugging/testing options:</source>
<translation>Opcions de depuració/proves del moneder:</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart %s to complete</source>
<translation>Cal reescriure el moneder: reinicieu %s per a completar-ho</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Opcions de moneder:</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
<translation>Permet les connexions JSON-RPC d'una font específica. Vàlid per a <ip> són una IP individual (p. ex., 1.2.3.4), una xarxa / màscara de xarxa (p. ex., 1.2.3.4/255.255.255.0) o una xarxa/CIDR (p. ex., 1.2.3.4/24). Es pot especificar aquesta opció moltes vegades</translation>
</message>
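<!-- Editor's note, hedged illustration of the three accepted <ip> forms described in
     the message above (the option name rpcallowip is the usual Bitcoin Core spelling
     and is an assumption here; the addresses come from the source string):
       rpcallowip=1.2.3.4                  a single IP
       rpcallowip=1.2.3.4/255.255.255.0    a network/netmask
       rpcallowip=1.2.3.4/24               a network/CIDR
     The option can be given several times, one source per line. -->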
<message>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
<translation>Vincula l'adreça donada i posa a la llista blanca els iguals que s'hi connectin. Feu servir la notació [host]:port per a IPv6</translation>
</message>
<message>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation>Vincula a l'adreça donada per a escoltar les connexions JSON-RPC. Feu servir la notació [host]:port per a IPv6. Aquesta opció pot ser especificada moltes vegades (per defecte: vincula a totes les interfícies)</translation>
</message>
<message>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation>Crea fitxers nous amb els permisos per defecte del sistema, en comptes de l'umask 077 (només efectiu amb la funcionalitat de moneder inhabilitada)</translation>
</message>
<message>
<source>Discover own IP addresses (default: 1 when listening and no -externalip or -proxy)</source>
<translation>Descobreix les adreces IP pròpies (per defecte: 1 quan s'escolta i sense -externalip o -proxy)</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Error: ha fallat l'escolta de connexions entrants (listen ha retornat l'error %s)</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Executa l'ordre quan es rebi un avís rellevant o veiem una forquilla molt llarga (%s en cmd és reemplaçat per un missatge)</translation>
</message>
<message>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation>Si no s'especifica una paytxfee (comissió de transacció de pagament), inclogueu suficient comissió per tal que les transaccions comencin a confirmar-se en una mitjana de n blocs (per defecte: %u)</translation>
</message>
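<!-- Editor's note, hedged sketch of the behaviour described in the message above:
     when paytxfee is unset, the fee is estimated from a confirmation target
     (txconfirmtarget is the usual Bitcoin Core spelling and is an assumption here;
     both values are illustrative):
       txconfirmtarget=6    aim to start confirming within about 6 blocks
       paytxfee=0.0001      setting this instead fixes the fee rate directly -->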
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation>Import no vàlid per a -maxtxfee=<amount>: '%s' (cal que sigui com a mínim la comissió de minrelay de %s per evitar que les comissions s'encallin)</translation>
</message>
<message>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation>Mida màxima de les dades en les transaccions de l'operador de dades que retransmetem i minem (per defecte: %u)</translation>
</message>
<message>
<source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source>
<translation>Genera a l'atzar credencials per a cada connexió proxy. Això habilita l'aïllament del flux de Tor (per defecte: %u)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Defineix la mida màxima de transaccions d'alta prioritat / baixa comissió en bytes (per defecte: %d)</translation>
</message>
<message>
<source>The transaction amount is too small to send after the fee has been deducted</source>
<translation>L'import de la transacció és massa petit per enviar-la després que se'n dedueixi la comissió</translation>
</message>
<message>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation>Els iguals en la llista blanca no poden ser bandejats per DoS i es transmetran sempre llurs transaccions, fins i tot si ja són a la mempool. Això és útil, p. ex., per a una passarel·la</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to go back to unpruned mode. This will redownload the entire blockchain</source>
<translation>Cal que torneu a construir la base de dades fent servir -reindex per tornar al mode no podat. Això tornarà a baixar la cadena de blocs sencera</translation>
</message>
<message>
<source>(default: %u)</source>
<translation>(per defecte: %u)</translation>
</message>
<message>
<source>Accept public REST requests (default: %u)</source>
<translation>Accepta sol·licituds REST públiques (per defecte: %u)</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Connecta a través del proxy SOCKS5</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Error en llegir la base de dades, tancant.</translation>
</message>
<message>
<source>Information</source>
<translation>Informació</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation>Import no vàlid per a -paytxfee=<amount>: «%s» (ha de ser com a mínim %s)</translation>
</message>
<message>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation>S'ha especificat una màscara de xarxa no vàlida a -whitelist: «%s»</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation>Manté com a màxim <n> transaccions no connectables en memòria (per defecte: %u)</translation>
</message>
<message>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation>Cal especificar un port amb -whitebind: «%s»</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Opcions de transmissió del node:</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>Opcions del servidor RPC:</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Envia informació de traça/depuració a la consola en comptes del fitxer debug.log</translation>
</message>
<message>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation>Envia les transaccions com a transaccions de comissió zero sempre que sigui possible (per defecte: %u) </translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Mostra totes les opcions de depuració (ús: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Redueix el fitxer debug.log durant l'inici del client (per defecte: 1 quan no -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Ha fallat la signatura de la transacció</translation>
</message>
<message>
<source>The transaction amount is too small to pay the fee</source>
<translation>L'import de la transacció és massa petit per pagar-ne la comissió</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Això és programari experimental.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Import de la transacció massa petit</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>Transacció massa gran per a la política de comissions</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>La transacció és massa gran</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>No s'ha pogut vincular a %s en aquest ordinador (la vinculació ha retornat l'error %s)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Nom d'usuari per a connexions JSON-RPC</translation>
</message>
<message>
<source>Warning</source>
<translation>Avís</translation>
</message>
<message>
<source>Warning: unknown new rules activated (versionbit %i)</source>
<translation>Avís: regles noves desconegudes activades (versionbit %i)</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Se suprimeixen totes les transaccions del moneder...</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Contrasenya per a connexions JSON-RPC</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Executa l'ordre quan el millor bloc canviï (%s en cmd es reemplaça per un resum de bloc)</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permet consultes DNS per a -addnode, -seednode i -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>S'estan carregant les adreces...</translation>
</message>
<message>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation>(1 = manté les metadades de les tx, p. ex., propietari del compte i informació de sol·licitud del pagament, 2 = prescindeix de les metadades de les tx)</translation>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source>
<translation>Com d'exhaustiva és la verificació de blocs del -checkblocks (0-4, per defecte: %u)</translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Manté un índex complet de transaccions, utilitzat per la crida rpc getrawtransaction (per defecte: %u)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Nombre de segons necessaris perquè els iguals de comportament qüestionable puguin tornar a connectar-se (per defecte: %u)</translation>
</message>
<message>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation>Informació de sortida de la depuració (per defecte: %u, proporcionar <category> és opcional)</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation>Utilitza un proxy SOCKS5 separat per a arribar als iguals a través de serveis ocults de Tor (per defecte: %s)</translation>
</message>
<message>
<source>(default: %s)</source>
<translation>(per defecte: %s)</translation>
</message>
<message>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation>Demana sempre les adreces dels iguals a través de consultes DNS (per defecte: %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Quants blocs per comprovar a l'inici (per defecte: %u, 0 = tots)</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>Inclou l'adreça IP a la sortida de depuració (per defecte: %u)</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Adreça -proxy no vàlida: '%s'</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation>Escolta les connexions JSON-RPC en <port> (per defecte: %u o testnet: %u)</translation>
</message>
<message>
<source>Listen for connections on <port> (default: %u or testnet: %u)</source>
<translation>Escolta les connexions en <port> (per defecte: %u o testnet: %u)</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Manté com a màxim <n> connexions a iguals (per defecte: %u)</translation>
</message>
<message>
<source>Make the wallet broadcast transactions</source>
<translation>Fes que el moneder faci difusió de les transaccions</translation>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation>Memòria intermèdia màxima de recepció per connexió, <n>*1000 bytes (per defecte: %u)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation>Memòria intermèdia màxima d'enviament per connexió, <n>*1000 bytes (per defecte: %u)</translation>
</message>
<message>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation>Posa davant de la sortida de depuració una marca horària (per defecte: %u)</translation>
</message>
<message>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation>Retransmet i mina les transaccions de l'operador (per defecte: %u)</translation>
</message>
<message>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation>Retransmet multisig no P2SH (per defecte: %u)</translation>
</message>
<message>
<source>Set key pool size to <n> (default: %u)</source>
<translation>Defineix la mida clau disponible a <n> (per defecte: %u)</translation>
</message>
<message>
<source>Set the number of threads to service RPC calls (default: %d)</source>
<translation>Defineix el nombre de fils a crides de servei RPC (per defecte: %d)</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Especifica el fitxer de configuració (per defecte: %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Especifica el temps d'espera de la connexió en milisegons (mínim: 1, per defecte: %d)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>Especifica el fitxer pid (per defecte: %s)</translation>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation>Gasta el canvi no confirmat en enviar les transaccions (per defecte: %u)</translation>
</message>
<message>
<source>Starting network threads...</source>
<translation>S'estan iniciant els fils de la xarxa...</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Llindar per a desconnectar els iguals de comportament qüestionable (per defecte: %u)</translation>
</message>
<message>
<source>Transaction amounts must not be negative</source>
<translation>Els imports de la transacció no han de ser negatius</translation>
</message>
<message>
<source>Transaction must have at least one recipient</source>
<translation>La transacció ha de tenir com a mínim un destinatari</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Xarxa desconeguda especificada a -onlynet: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Balanç insuficient</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>S'està carregant l'índex de blocs...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Afegeix un node per a connectar-s'hi i intenta mantenir-hi la connexió oberta</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>S'està carregant el moneder...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>No es pot reduir la versió del moneder</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>No es pot escriure l'adreça per defecte</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>S'està reescanejant...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Ha acabat la càrrega</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
</TS> | </message>
<message>
<source>Last block time</source>
<translation>Últim temps de bloc</translation> |
init.rs | use crate::{
entity_archetypes::{self, InitResourceConst, InitResourceMuts},
prelude::*,
utils::WorldRng,
};
use arrayvec::ArrayVec;
use rand::{prelude::SmallRng, Rng, SeedableRng};
use tracing::{debug, error, trace};
/// The world should already be initialized with a GameConfig
pub(crate) fn init_world_entities(storage: &mut World) {
debug!("• initializing world");
let config = UnwrapView::<ConfigKey, GameConfig>::from_world(storage);
let radius = config.room_radius;
debug!("Reset position storage");
let mut entities_by_pos = storage.unsafe_view::<WorldPosition, EntityComponent>();
entities_by_pos.clear();
entities_by_pos
.table
.extend(
storage
.view::<Axial, RoomComponent>()
.iter()
.map(|(roomid, _)| (roomid, Default::default())),
)
.expect("entities_by_pos init");
let bounds = Hexagon {
center: Axial::new(radius as i32, radius as i32),
radius: radius as i32,
};
let rooms = storage
.view::<Axial, RoomComponent>()
.iter()
.map(|a| Room(a.0))
.collect::<smallvec::SmallVec<[_; 128]>>();
init_rooms(
&rooms,
bounds,
FromWorldMut::from_world_mut(storage),
FromWorld::from_world(storage),
);
debug!("✓ initializing world");
}
pub type InitRoomMut = (
InsertEntityView,
InitResourceMuts,
UnwrapViewMut<ResourceKey, WorldRng>,
);
pub type InitRoomRef<'a> = (
InitResourceConst<'a>,
View<'a, WorldPosition, EntityComponent>,
View<'a, WorldPosition, TerrainComponent>,
);
/// Initialize the rooms' entities
pub(crate) fn init_rooms(
rooms: &[Room],
bounds: Hexagon,
(mut insert_entity, resource_mut, mut rng): InitRoomMut,
(resource_ref, world_entities, world_terrain): InitRoomRef,
) {
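    // Allocate two resource entities per room up front; rooms where allocation
    // fails are skipped with an error logged.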
rooms
.iter()
.copied()
.filter_map(|room| {
let ids = [unsafe { insert_entity.insert_entity() }, unsafe {
insert_entity.insert_entity()
}];
for id in &ids[..] {
if let Err(err) = id {
error!(
err = tracing::field::debug(err),
"Failed to allocate resource entity"
);
return None;
}
}
let ids = ids.map(|id| id.unwrap());
Some((room, SmallRng::from_rng(&mut *rng).unwrap(), ids))
})
.for_each(|(room, mut rng, entities)| {
trace!("initializing room #{}", room.0);
            // we don't update the positions table here, so keep track of newly added positions ourselves
let mut seen_pos = ArrayVec::<_, 3>::new();
for id in entities {
let pos = loop {
let pos =
uncontested_pos(room, &bounds, &world_entities, &world_terrain, &mut rng);
if !seen_pos.contains(&pos) {
seen_pos.push(pos);
break pos;
}
};
entity_archetypes::init_resource_energy(id, pos, resource_mut, resource_ref);
trace!("initializing resource #{} done", id);
}
trace!("initializing room #{} done", room.0);
});
}
fn uncontested_pos<T: crate::tables::TableRow + Send + Sync + Default>(
room: Room,
bounds: &Hexagon,
positions_table: &crate::tables::morton_hierarchy::MortonMortonTable<T>,
terrain_table: &<TerrainComponent as Component<WorldPosition>>::Table,
rng: &mut impl Rng,
) -> WorldPosition {
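    // Rejection sampling: draw random axial coordinates inside the room's
    // bounding square and return the first in-bounds, walkable, unoccupied tile.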
const TRIES: usize = 10_000;
let from = bounds.center - Axial::new(bounds.radius, bounds.radius);
let to = bounds.center + Axial::new(bounds.radius, bounds.radius);
// positions might not be available at this time
let room_positions = positions_table.table.at(room.0);
let room_terrain = terrain_table
.table
.at(room.0)
.expect("Given room is missing from terrain table");
for _ in 0..TRIES {
let x = rng.gen_range(from.q..to.q);
let y = rng.gen_range(from.r..to.r);
let pos = Axial::new(x, y);
trace!("checking pos {:?}", pos);
if !bounds.contains(pos) {
trace!("point {:?} is out of bounds {:?}", pos, bounds);
continue;
}
if let Some(TerrainComponent(terrain)) = room_terrain.get(pos) {
if terrain.is_walkable()
&& room_positions
.map(|table| !table.contains_key(pos))
// if positions haven't been registered then assume that there are no entities in this room
.unwrap_or(true)
{
return WorldPosition { room: room.0, pos };
}
}
}
panic!(
"Failed to find an uncontested_pos in {:?} {:?} in {} iterations",
from, to, TRIES
);
}
#[cfg(test)]
mod tests {
use super::*;
use test_log::test;
#[test] | let mut exc = SimpleExecutor;
let mut world = World::new(crate::executor::GameConfig {
room_radius: 10,
..Default::default()
});
// smoke test: can the game be even initialized?
futures_lite::future::block_on(exc.initialize());
futures_lite::future::block_on(exc.init_world(&mut world));
}
} | fn can_init_the_game() { |
unix.go | // +build linux freebsd darwin
// +build 386 amd64
// MIT License
//
// Copyright (c) 2016-2018 GenesisKernel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package daylight
import (
"syscall"
"github.com/GenesisKernel/go-genesis/packages/converter"
log "github.com/sirupsen/logrus"
)
// KillPid is killing process by PID
func KillPid(pid string) error | {
err := syscall.Kill(converter.StrToInt(pid), syscall.SIGTERM)
if err != nil {
log.WithFields(log.Fields{"pid": pid, "signal": syscall.SIGTERM}).Error("Error killing process with pid")
return err
}
return nil
} |
|
blog-dashboard-navbar-breadcrumb.component.ts | // Copyright 2021 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// | // distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Component for the navbar breadcrumb of the blog dashboard.
*/
import { Component, OnDestroy, OnInit } from '@angular/core';
import { downgradeComponent } from '@angular/upgrade/static';
import { Subscription } from 'rxjs';
import { BlogDashboardPageService } from 'pages/blog-dashboard-page/services/blog-dashboard-page.service';
@Component({
selector: 'oppia-blog-dashboard-navbar-breadcrumb',
templateUrl: './blog-dashboard-navbar-breadcrumb.component.html'
})
export class BlogDashboardNavbarBreadcrumbComponent
implements OnInit, OnDestroy {
activeTab: string;
title: string;
directiveSubscriptions = new Subscription();
constructor(
private blogDashboardPageService: BlogDashboardPageService,
) {}
ngOnInit(): void {
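    // Keep the breadcrumb in sync with the dashboard: re-read the active tab
    // whenever the view updates and refresh the title when the nav title changes.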
this.activeTab = this.blogDashboardPageService.activeTab;
this.directiveSubscriptions.add(
this.blogDashboardPageService.updateViewEventEmitter.subscribe(
() => {
this.activeTab = this.blogDashboardPageService.activeTab;
}
)
);
this.directiveSubscriptions.add(
this.blogDashboardPageService.updateNavTitleEventEmitter.subscribe(
(title) => {
this.title = title;
}
)
);
}
ngOnDestroy(): void {
return this.directiveSubscriptions.unsubscribe();
}
}
angular.module('oppia').directive('oppiaBlogDashboardNavbarBreadcrumb',
downgradeComponent({
component: BlogDashboardNavbarBreadcrumbComponent
})); | // http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software |
close_auction_cycle.rs | use super::*;
use agsol_token_metadata::state::Data as MetadataStateData;
use solana_program::clock::UnixTimestamp;
use solana_program::sysvar::rent::Rent;
use std::str::FromStr;
const METADATA_DATA_START_POS: usize = 65;
// NOTE: The user could be made to pay for this account's creation by locking its fee alongside their bid
// at the time of bidding and using that locked fee now.
// NOTE: With the current calculation method we may shortchange the auction owner by at most 19 lamports due to rounding.
// This may be improved.
// NOTE: We might introduce a "grace period" before a new auction cycle starts, in which bidding is disabled,
// so that a user trying to bid in the last second does not accidentally bid on the next auctioned asset.
/// Closes auction cycle
///
/// Creates a holding account for the won asset for the user with the highest bid.
/// The cost of this account's creation is deducted from the highest bid.
///
/// Then, distributes the deducted highest bid in the following fashion:
///
/// - 95% to the auction owner
///
/// - 5% to the contract admin
pub fn close_auction_cycle(
program_id: &Pubkey,
accounts: &[AccountInfo],
auction_id: AuctionId,
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
// misc
let payer_account = next_account_info(account_info_iter)?;
// contract state accounts
let auction_pool_account = next_account_info(account_info_iter)?;
let secondary_pool_account = next_account_info(account_info_iter)?;
let auction_root_state_account = next_account_info(account_info_iter)?;
let current_auction_cycle_state_account = next_account_info(account_info_iter)?;
let next_auction_cycle_state_account = next_account_info(account_info_iter)?;
// user accounts
let top_bidder_account = next_account_info(account_info_iter)?;
// contract signer pda
let contract_pda = next_account_info(account_info_iter)?;
// external programs
let rent_program = next_account_info(account_info_iter)?;
let system_program = next_account_info(account_info_iter)?;
let token_program = next_account_info(account_info_iter)?;
if !payer_account.is_signer {
msg!("admin signature is missing");
return Err(ProgramError::MissingRequiredSignature);
}
// Check cross-program invocation addresses
assert_rent_program(rent_program.key)?;
assert_system_program(system_program.key)?;
assert_token_program(token_program.key)?;
// Check account ownership
// User accounts:
// payer_account
// top_bidder_account
// Pda accounts:
// contract_pda
// Accounts created in this instruction:
// next_auction_cycle_state_account
// check root and cycle states
SignerPda::check_owner(
&auction_root_state_seeds(&auction_id),
program_id,
program_id,
auction_root_state_account,
)?;
let mut auction_root_state = AuctionRootState::read(auction_root_state_account)?;
let cycle_num_bytes = auction_root_state
.status
.current_auction_cycle
.to_le_bytes();
SignerPda::check_owner(
&auction_cycle_state_seeds(auction_root_state_account.key, &cycle_num_bytes),
program_id,
program_id,
current_auction_cycle_state_account,
)?;
let mut current_auction_cycle_state =
AuctionCycleState::read(current_auction_cycle_state_account)?;
// Check auction status (frozen, active, able to end cycle)
let clock = Clock::get()?;
let current_timestamp = clock.unix_timestamp;
check_status(
&auction_root_state,
        &current_auction_cycle_state,
current_timestamp,
AuctionInteraction::CloseCycle,
)?;
// If there were no bids, just reset auction cycle
let most_recent_bid_option = current_auction_cycle_state.bid_history.get_last_element();
if let Some(most_recent_bid) = most_recent_bid_option {
if top_bidder_account.key != &most_recent_bid.bidder_pubkey {
return Err(AuctionContractError::TopBidderAccountMismatch.into());
}
auction_root_state.available_funds = auction_root_state
.available_funds
.checked_add(most_recent_bid.bid_amount)
.ok_or(AuctionContractError::ArithmeticError)?;
auction_root_state.all_time_treasury = auction_root_state
.all_time_treasury
.checked_add(most_recent_bid.bid_amount)
.ok_or(AuctionContractError::ArithmeticError)?;
auction_root_state.write(auction_root_state_account)?;
} else {
// check pool pdas
SignerPda::check_owner(
&auction_pool_seeds(),
program_id,
program_id,
auction_pool_account,
)?;
SignerPda::check_owner(
&secondary_pool_seeds(),
program_id,
program_id,
secondary_pool_account,
)?;
increment_idle_streak(
&auction_id,
&mut current_auction_cycle_state,
&mut auction_root_state,
auction_root_state_account,
current_auction_cycle_state_account,
auction_pool_account,
secondary_pool_account,
)?;
return Ok(());
}
let contract_pda_seeds = contract_pda_seeds();
let contract_signer_pda =
SignerPda::new_checked(&contract_pda_seeds, program_id, contract_pda)?;
if let TokenConfig::Nft(ref nft_data) = auction_root_state.token_config {
let metadata_program = next_account_info(account_info_iter)?;
// master accounts
let master_edition_account = next_account_info(account_info_iter)?;
let master_metadata_account = next_account_info(account_info_iter)?;
let master_mint_account = next_account_info(account_info_iter)?;
let master_holding_account = next_account_info(account_info_iter)?;
// Check account ownership
if *master_edition_account.owner != META_ID {
return Err(AuctionContractError::InvalidAccountOwner.into());
}
assert_token_account_owner(master_holding_account, contract_pda.key)?;
assert_mint_authority(master_mint_account, master_edition_account.key)?;
// Check cross-program invocation addresses
assert_metaplex_program(metadata_program.key)?;
// Check pda addresses
// Not checking the following pdas since these are checked (and owned) by metaplex
// master_edition_account
// master_metadata_account
let next_edition = auction_root_state.status.current_auction_cycle;
SignerPda::check_owner(
&master_mint_seeds(&auction_id),
program_id,
&TOKEN_ID,
master_mint_account,
)?;
SignerPda::check_owner(
&master_holding_seeds(&auction_id),
program_id,
&TOKEN_ID,
master_holding_account,
)?;
SignerPda::check_owner(
&metadata_seeds(master_mint_account.key),
&META_ID,
&META_ID,
master_metadata_account,
)?;
// check nft validity
if &nft_data.master_edition != master_edition_account.key {
return Err(AuctionContractError::MasterEditionMismatch.into());
}
if auction_root_state.status.current_auction_cycle != next_edition {
return Err(AuctionContractError::ChildEditionNumberMismatch.into());
}
// change master metadata so that child can inherit it
// if last cycle is being closed, set increments to 0 (#0 and 0.jpg)
if !nft_data.is_repeating {
msg!("Updating metadata account");
let mut new_master_metadata = try_from_slice_unchecked::<MetadataStateData>(
&master_metadata_account.data.borrow_mut()[METADATA_DATA_START_POS..],
)
.unwrap();
increment_uri(
&mut new_master_metadata.uri,
is_last_auction_cycle(&auction_root_state),
)?;
let change_master_metadata_ix = meta_instruction::update_metadata_accounts(
*metadata_program.key,
*master_metadata_account.key,
*contract_pda.key,
None,
Some(new_master_metadata),
None,
);
invoke_signed(
&change_master_metadata_ix,
&[master_metadata_account.clone(), contract_pda.clone()],
&[&contract_signer_pda.signer_seeds()],
)?;
}
}
// Reset auction cycle
if is_last_auction_cycle(&auction_root_state) {
// check pool pdas
SignerPda::check_owner(
&auction_pool_seeds(),
program_id,
program_id,
auction_pool_account,
)?;
SignerPda::check_owner(
&secondary_pool_seeds(),
program_id,
program_id,
secondary_pool_account,
)?;
auction_root_state.status.is_finished = true;
auction_root_state.available_funds = auction_root_state
.available_funds
.checked_add(Rent::get()?.minimum_balance(0))
.ok_or(AuctionContractError::ArithmeticError)?;
let mut auction_pool = AuctionPool::read(auction_pool_account)?;
let mut secondary_pool = AuctionPool::read(secondary_pool_account)?;
auction_pool.remove(&auction_id);
secondary_pool.try_insert_sorted(auction_id)?;
auction_pool.write(auction_pool_account)?;
secondary_pool.write(secondary_pool_account)?;
} else {
// Check next cycle state account
let next_cycle_num_bytes = (auction_root_state
.status
.current_auction_cycle
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?)
.to_le_bytes();
let next_auction_cycle_state_seeds =
auction_cycle_state_seeds(auction_root_state_account.key, &next_cycle_num_bytes);
let next_cycle_state_pda = SignerPda::new_checked(
&next_auction_cycle_state_seeds,
program_id,
next_auction_cycle_state_account,
)?;
// Create next cycle state account
create_state_account(
payer_account,
next_auction_cycle_state_account,
next_cycle_state_pda.signer_seeds(),
program_id,
system_program,
AuctionCycleState::MAX_SERIALIZED_LEN,
)?;
let end_time = clock
.unix_timestamp
.checked_add(auction_root_state.auction_config.cycle_period)
.ok_or(AuctionContractError::ArithmeticError)?;
let next_auction_cycle_state = AuctionCycleState {
bid_history: BidHistory::new(),
end_time,
};
next_auction_cycle_state.write(next_auction_cycle_state_account)?;
auction_root_state.status.current_auction_cycle = auction_root_state
.status
.current_auction_cycle
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?;
}
auction_root_state.status.current_idle_cycle_streak = 0;
auction_root_state.unclaimed_rewards = auction_root_state
.unclaimed_rewards
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?;
auction_root_state.write(auction_root_state_account)?;
Ok(())
}
fn increment_idle_streak(
auction_id: &AuctionId,
current_auction_cycle_state: &mut AuctionCycleState,
auction_root_state: &mut AuctionRootState,
auction_root_state_account: &AccountInfo,
current_auction_cycle_state_account: &AccountInfo,
primary_pool_account: &AccountInfo,
secondary_pool_account: &AccountInfo,
) -> Result<(), ProgramError> {
current_auction_cycle_state.end_time = current_auction_cycle_state
.end_time
.checked_add(auction_root_state.auction_config.cycle_period)
.ok_or(AuctionContractError::ArithmeticError)?;
auction_root_state.status.current_idle_cycle_streak = auction_root_state
.status
.current_idle_cycle_streak
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?;
// If the auction was idle for a period longer than ALLOWED_AUCTION_IDLE_PERIOD
// or for more than ALLOWED_CONSECUTIVE_IDLE_CYCLES number of cycles
// then move it to the secondary pool automatically
// Bidding on these moved auctions will "reactivate" them
if auction_root_state.auction_config.cycle_period
* UnixTimestamp::from(auction_root_state.status.current_idle_cycle_streak)
> crate::ALLOWED_AUCTION_IDLE_PERIOD
|| auction_root_state.status.current_idle_cycle_streak
> crate::ALLOWED_CONSECUTIVE_IDLE_CYCLES
{
let mut primary_pool = AuctionPool::read(primary_pool_account)?;
let mut secondary_pool = AuctionPool::read(secondary_pool_account)?;
primary_pool.remove(auction_id);
secondary_pool.try_insert_sorted(*auction_id)?;
primary_pool.write(primary_pool_account)?;
secondary_pool.write(secondary_pool_account)?;
}
current_auction_cycle_state.write(current_auction_cycle_state_account)?;
auction_root_state.write(auction_root_state_account)?;
Ok(())
}
pub fn increment_name(
string: &mut String,
is_last_cycle: bool,
) -> Result<(), AuctionContractError> {
let mut last_pos = 32;
let mut first_pos = 32;
let str_bytes = string.as_bytes();
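    // Scan backwards over the fixed 32-byte name buffer: `last_pos` ends up at
    // the start of the zero padding and `first_pos` right after '#', so that
    // string[first_pos..last_pos] is the decimal edition number to bump.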
for i in (0..32).rev() {
if str_bytes[i] == 0 {
last_pos = i;
}
// "#".as_bytes() == [35]
if str_bytes[i] == 35 {
first_pos = i + 1;
break;
}
}
if last_pos == 0 || last_pos < first_pos || first_pos == 0 {
return Err(AuctionContractError::MetadataManipulationError);
}
let integer = u64::from_str(&string[first_pos..last_pos]).unwrap();
string.truncate(last_pos);
if is_last_cycle {
string.replace_range(first_pos..last_pos, &0.to_string());
} else {
let incremented_integer = integer
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?;
string.replace_range(first_pos..last_pos, &(incremented_integer).to_string());
};
Ok(())
}
pub fn increment_uri(uri: &mut String, is_last_cycle: bool) -> Result<(), AuctionContractError> {
let uri_len = uri.len();
let mut last_pos = uri_len;
let mut dot_pos = uri_len;
let mut slash_pos = uri_len;
let str_bytes = uri.as_bytes();
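    // Scan backwards: `last_pos` ends up at the start of the zero padding,
    // `dot_pos` at the extension separator and `slash_pos` right after the last
    // '/', so that uri[slash_pos..dot_pos] is the decimal file stem to bump.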
for i in (0..uri_len).rev() {
if str_bytes[i] == 0 {
last_pos = i;
}
// ".".as_bytes() == [46]
if str_bytes[i] == 46 {
dot_pos = i;
}
// "/".as_bytes() == [47]
if str_bytes[i] == 47 {
slash_pos = i + 1;
break;
}
}
if last_pos == 0 || dot_pos == 0 || slash_pos == 0 || dot_pos < slash_pos {
return Err(AuctionContractError::MetadataManipulationError);
}
let integer = u64::from_str(&uri[slash_pos..dot_pos])
.map_err(|_| AuctionContractError::MetadataManipulationError)?;
uri.truncate(last_pos);
if is_last_cycle {
uri.replace_range(slash_pos..dot_pos, &0.to_string());
} else {
let incremented_integer = integer
.checked_add(1)
.ok_or(AuctionContractError::ArithmeticError)?;
uri.replace_range(slash_pos..dot_pos, &(incremented_integer).to_string());
};
Ok(())
}
#[cfg(test)]
mod cycle_increment_tests {
use super::{increment_name, increment_uri};
const MAX_NAME_LENGTH: usize = 32;
//const MAX_SYMBOL_LENGTH: usize = 10;
const MAX_URI_LENGTH: usize = 200;
fn puff_string(string: &mut String, length: usize) -> String |
#[test]
fn test_name_increments() {
// name increments
let mut puffed_name = puff_string(&mut "puffed name #123".to_string(), MAX_NAME_LENGTH);
assert_eq!(puffed_name.len(), MAX_NAME_LENGTH);
increment_name(&mut puffed_name, false).unwrap();
assert_eq!(puffed_name, "puffed name #124".to_string());
let mut long_name = "aaaa bbbb cccc dddd eeee fff #14".to_string();
assert_eq!(long_name.len(), MAX_NAME_LENGTH);
increment_name(&mut long_name, false).unwrap();
assert_eq!(long_name, "aaaa bbbb cccc dddd eeee fff #15".to_string());
}
#[test]
fn test_uri_increments() {
// uri increments
let mut puffed_uri = puff_string(
&mut "puffed/uri/some.path/123.jpg".to_string(),
MAX_URI_LENGTH,
);
assert_eq!(puffed_uri.len(), MAX_URI_LENGTH);
increment_uri(&mut puffed_uri, false).unwrap();
assert_eq!(puffed_uri, "puffed/uri/some.path/124.jpg".to_string());
let mut long_uri = String::new();
for _ in 0..19 {
// 10 long slice
long_uri.push_str("asdf.qwer/");
}
let mut long_uri_expected = long_uri.clone();
long_uri.push_str("123456.jpg");
assert_eq!(long_uri.len(), MAX_URI_LENGTH);
increment_uri(&mut long_uri, false).unwrap();
long_uri_expected.push_str("123457.jpg");
assert_eq!(long_uri, long_uri_expected);
}
#[test]
fn test_last_cycle_increments() {
// last cycle increments
let mut long_name = "aaaa bbbb cccc dddd eeee fff #14".to_string();
assert_eq!(long_name.len(), MAX_NAME_LENGTH);
increment_name(&mut long_name, true).unwrap();
assert_eq!(long_name, "aaaa bbbb cccc dddd eeee fff #0".to_string());
let mut long_uri = String::new();
for _ in 0..19 {
// 10 long slice
long_uri.push_str("asdf.qwer/");
}
let mut long_uri_expected = long_uri.clone();
long_uri.push_str("123456.jpg");
assert_eq!(long_uri.len(), MAX_URI_LENGTH);
increment_uri(&mut long_uri, true).unwrap();
long_uri_expected.push_str("0.jpg");
assert_eq!(long_uri, long_uri_expected);
}
}
| {
let mut array_of_zeroes = vec![];
while array_of_zeroes.len() < length - string.len() {
array_of_zeroes.push(0u8);
}
string.clone() + std::str::from_utf8(&array_of_zeroes).unwrap()
} |
m2m.py | # _*_ coding: utf-8 _*_
"""
Created by Allen7D on 2018/6/17.
"""
from sqlalchemy import Column, Integer, String, ForeignKey
from app.models.base import Base
from app.models.image import Image
__author__ = 'Allen7D'
class Theme2Product(Base):
__tablename__ = 'theme_product'
    theme_id = Column(Integer, ForeignKey('theme.id'), primary_key=True, comment='theme foreign key')
    product_id = Column(Integer, ForeignKey('product.id'), primary_key=True, comment='product foreign key')
class Product2Image(Ba | name__ = 'product_image'
id = Column(Integer, primary_key=True, autoincrement=True)
    img_id = Column(Integer, ForeignKey('image.id'), nullable=False, comment='foreign key to the image table')
    order = Column(Integer, nullable=False, comment='image sort order')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='foreign key, product id')
def keys(self):
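        # Hide raw ids and the order column; expose the resolved img_url instead.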
self.hide('id', 'img_id', 'product_id', 'order').append('img_url')
return self.fields
@property
def img_url(self):
return Image.get_img_by_id(id=self.img_id).url
class Product2Property(Base):
__tablename__ = 'product_property'
id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(30), comment='detail property name')
    detail = Column(String(255), nullable=False, comment='detail property value')
    product_id = Column(Integer, ForeignKey('product.id'), nullable=False, comment='foreign key, product id')
class Order2Product(Base):
__tablename__ = 'order_product'
    order_id = Column(Integer, primary_key=True, comment='composite primary key, order id')
    product_id = Column(Integer, primary_key=True, comment='composite primary key, product id')
    count = Column(Integer, nullable=False, comment='product quantity')
def __init__(self, order_id=None, product_id=None, count=None):
self.order_id = order_id
self.product_id = product_id
self.count = count
super(Order2Product, self).__init__()
| se):
__table |
email-attachment-view.js | define(function(require) {
'use strict';
const EmailAttachmentModel = require('oroemail/js/app/models/email-attachment-model');
const BaseView = require('oroui/js/app/views/base/view');
const EmailAttachmentView = BaseView.extend({
model: EmailAttachmentModel,
inputName: '',
events: {
'click [data-role="remove"]': 'removeClick'
},
listen: {
'change:fileName model': 'fileNameChange',
'change:type model': 'typeChange',
'change:icon model': 'iconChange'
},
/**
* @inheritDoc
*/
constructor: function EmailAttachmentView(options) {
EmailAttachmentView.__super__.constructor.call(this, options);
},
getTemplateFunction: function() {
if (!this.template) {
this.template = require('tpl-loader!oroemail/templates/email-attachment/email-attachment-item.html');
}
return EmailAttachmentView.__super__.getTemplateFunction.call(this);
},
getTemplateData: function() {
const data = EmailAttachmentView.__super__.getTemplateData.call(this);
data.cid = this.model.cid;
data.inputName = this.inputName;
return data; | },
fileSelect: function() {
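        // Hide the row, open the native file picker, and once a file is chosen
        // record its name, type and a matching icon on the model, then show the
        // row (and the collection view) again.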
const self = this;
const $fileInput = this.$('input[type="file"]');
this.$el.hide();
$fileInput.on('change', function() {
const value = $fileInput.val().replace(/^.*[\\\/]/, '');
if (value) {
self.model.set('fileName', value);
self.model.set('type', 3);
const extension = value.substr(value.lastIndexOf('.') + 1);
let icon = self.fileIcons['default'];
if (extension && self.fileIcons[extension]) {
icon = self.fileIcons[extension];
}
self.model.set('icon', icon);
self.$el.show();
self.collectionView.show();
}
});
$fileInput.click();
},
fileNameChange: function() {
this.$('.attachment-item__filename')
.html(this.model.get('fileName'))
.attr('title', this.model.get('fileName'));
},
typeChange: function() {
this.$('input.attachment-type').val(this.model.get('type'));
},
iconChange: function() {
this.$('.attachment-item .fa').addClass(this.model.get('icon'));
}
});
return EmailAttachmentView;
}); | },
removeClick: function() {
this.model.trigger('destroy', this.model); |
css-vars.ts | import constants from './style-constants';
function formatVariableName(jsName: string) {
return `--${jsName.replace(/[A-Z]/g, c => `-${c.toLowerCase()}`)}`;
}
export default function setCssVars() {
for (const varName in constants) {
document.documentElement.style.setProperty(formatVariableName(varName), constants[varName]);
} | } |
|
mysql.go | package gorm
import (
"fmt"
"github.com/andycai/axe-fiber/library/database"
"gorm.io/driver/mysql"
"gorm.io/gorm"
)
func NewMySQL(conf *database.Config) *gorm.DB | {
db, err := gorm.Open(mysql.Open(conf.DSN), &gorm.Config{})
if err != nil {
panic(fmt.Errorf("connect server failed, err: %v\n", err))
}
return db
} |
|
parameterized_vec_vec_int_op.rs | use std::fmt;
use std::marker::PhantomData;
use crate::engine::*;
#[derive(Debug)]
pub struct ParameterizedVecVecIntegerOperator<Op> {
pub lhs: BufferRef<i64>,
pub rhs: BufferRef<i64>,
pub output: BufferRef<i64>,
pub parameter: i64,
pub op: PhantomData<Op>,
}
impl<'a, Op: ParameterizedIntegerOperation + fmt::Debug> VecOperator<'a> for ParameterizedVecVecIntegerOperator<Op> {
    fn execute(&mut self, stream: bool, scratchpad: &mut Scratchpad<'a>) -> Result<(), QueryError> {
let mut output = scratchpad.get_mut(self.output);
let lhs = scratchpad.get(self.lhs);
let rhs = scratchpad.get(self.rhs);
if stream { output.clear(); }
for (l, r) in lhs.iter().zip(rhs.iter()) {
output.push(Op::perform(*l, *r, self.parameter));
}
Ok(())
}
fn init(&mut self, _: usize, batch_size: usize, scratchpad: &mut Scratchpad<'a>) {
scratchpad.set(self.output, Vec::with_capacity(batch_size));
}
fn inputs(&self) -> Vec<BufferRef<Any>> { vec![self.lhs.any(), self.rhs.any()] }
fn outputs(&self) -> Vec<BufferRef<Any>> { vec![self.output.any()] }
fn | (&self, _: usize) -> bool { true }
fn can_stream_output(&self, _: usize) -> bool { true }
fn allocates(&self) -> bool { true }
fn display_op(&self, alternate: bool) -> String {
Op::display(self.lhs, self.rhs, self.parameter, alternate)
}
}
pub trait ParameterizedIntegerOperation {
fn perform(lhs: i64, rhs: i64, param: i64) -> i64;
fn display(lhs: BufferRef<i64>, rhs: BufferRef<i64>, param: i64, alternate: bool) -> String;
}
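/// Packs two integer columns into a single value as `lhs + (rhs << param)`,
/// e.g. to combine two grouping columns into one composite i64 key.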
#[derive(Debug)]
pub struct BitShiftLeftAdd;
impl ParameterizedIntegerOperation for BitShiftLeftAdd {
fn perform(lhs: i64, rhs: i64, param: i64) -> i64 { lhs + (rhs << param) }
fn display(lhs: BufferRef<i64>, rhs: BufferRef<i64>, param: i64, alternate: bool) -> String {
if alternate {
format!("{} + ({} << {})", lhs, rhs, param)
} else {
format!("{} + ({} << $shift)", lhs, rhs)
}
}
}
| can_stream_input |
users_windows.go | /*
** Zabbix
** Copyright (C) 2001-2021 Zabbix SIA
**
** This program is free software; you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation; either version 2 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program; if not, write to the Free Software
** Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
**/
package users
import (
"zabbix.com/pkg/pdh"
)
func (p *Plugin) getUsersNum() (num int, err error) {
| value, err = pdh.GetCounterInt64(pdh.CounterPath(pdh.ObjectTerminalServices, pdh.CounterTotalSessions))
if err != nil || value == nil {
return
}
return int(*value), nil
} | var value *int64
|
write.go | // Package write renders a PDF cross reference table to a PDF file.
package write
import (
"bufio"
"bytes"
"encoding/hex"
"fmt"
"os"
"sort"
"strings"
"github.com/wallentx/pdfcpu-fork/crypto"
"github.com/wallentx/pdfcpu-fork/filter"
"github.com/wallentx/pdfcpu-fork/log"
"github.com/wallentx/pdfcpu-fork/types"
"github.com/pkg/errors"
)
// PDFFile generates a PDF file for the cross reference table contained in PDFContext.
func PDFFile(ctx *types.PDFContext) error {
fileName := ctx.Write.DirName + ctx.Write.FileName
log.Info.Printf("writing to %s\n", fileName)
file, err := os.Create(fileName)
if err != nil {
return errors.Wrapf(err, "can't create %s\n%s", fileName, err)
}
ctx.Write.Writer = bufio.NewWriter(file)
defer func() {
// The underlying bufio.Writer has already been flushed.
// Processing error takes precedence.
if err != nil {
file.Close()
return
}
// Do not miss out on closing errors.
err = file.Close()
}()
err = handleEncryption(ctx)
if err != nil {
return err
}
	// Since we support PDF Collections (since V1.7) for file attachments
	// we need to always generate V1.7 PDF files.
err = writeHeader(ctx.Write, types.V17)
if err != nil {
return err
}
log.Debug.Printf("offset after writeHeader: %d\n", ctx.Write.Offset)
	// Write the root object (aka the document catalog) and the page tree.
err = writeRootObject(ctx)
if err != nil {
return err
}
log.Debug.Printf("offset after writeRootObject: %d\n", ctx.Write.Offset)
// Write document information dictionary.
err = writeDocumentInfoDict(ctx)
if err != nil {
return err
}
log.Debug.Printf("offset after writeInfoObject: %d\n", ctx.Write.Offset)
	// Write off-spec additional streams as declared in the PDF trailer.
if ctx.AdditionalStreams != nil {
_, _, err = writeDeepObject(ctx, ctx.AdditionalStreams)
if err != nil {
return err
}
}
err = writeEncryptDict(ctx)
if err != nil {
return err
}
// Mark redundant objects as free.
	// e.g. duplicate resources, compressed objects, linearization dicts.
deleteRedundantObjects(ctx)
err = writeXRef(ctx)
if err != nil {
return err
}
// Write pdf trailer.
_, err = writeTrailer(ctx.Write)
if err != nil {
return err
}
err = setFileSizeOfWrittenFile(ctx.Write, file)
if err != nil {
return err
}
if ctx.Read != nil {
ctx.Write.BinaryImageSize = ctx.Read.BinaryImageSize
ctx.Write.BinaryFontSize = ctx.Read.BinaryFontSize
logWriteStats(ctx)
}
return nil
}
// Write root entry to disk.
func | (ctx *types.PDFContext, dict *types.PDFDict, dictName, entryName string, statsAttr int) error {
obj, err := writeEntry(ctx, dict, dictName, entryName)
if err != nil {
return err
}
if obj != nil {
ctx.Stats.AddRootAttr(statsAttr)
}
return nil
}
// Write root entry to object stream.
func writeRootEntryToObjStream(ctx *types.PDFContext, dict *types.PDFDict, dictName, entryName string, statsAttr int) error {
ctx.Write.WriteToObjectStream = true
err := writeRootEntry(ctx, dict, dictName, entryName, statsAttr)
if err != nil {
return err
}
return stopObjectStream(ctx)
}
// Write page tree.
func writePages(ctx *types.PDFContext, rootDict *types.PDFDict) error {
// Page tree root (the top "Pages" dict) must be indirect reference.
indRef := rootDict.IndirectRefEntry("Pages")
if indRef == nil {
return errors.New("writePages: missing indirect obj for pages dict")
}
// Manipulate page tree as needed for splitting, trimming or page extraction.
if ctx.Write.ExtractPages != nil && len(ctx.Write.ExtractPages) > 0 {
p := 0
_, err := trimPagesDict(ctx, indRef, &p)
if err != nil {
return err
}
}
// Embed all page tree objects into objects stream.
ctx.Write.WriteToObjectStream = true
// Write page tree.
err := writePagesDict(ctx, indRef, 0)
if err != nil {
return err
}
return stopObjectStream(ctx)
}
func writeRootObject(ctx *types.PDFContext) error {
// => 7.7.2 Document Catalog
xRefTable := ctx.XRefTable
catalog := *xRefTable.Root
objNumber := int(catalog.ObjectNumber)
genNumber := int(catalog.GenerationNumber)
log.Debug.Printf("*** writeRootObject: begin offset=%d *** %s\n", ctx.Write.Offset, catalog)
// Ensure corresponding and accurate name tree object graphs.
if !ctx.Write.ReducedFeatureSet() {
err := ctx.XRefTable.BindNameTrees()
if err != nil {
return err
}
}
var dict *types.PDFDict
dict, err := xRefTable.DereferenceDict(catalog)
if err != nil {
return err
}
if dict == nil {
return errors.Errorf("writeRootObject: unable to dereference root dict")
}
dictName := "rootDict"
if ctx.Write.ReducedFeatureSet() {
log.Debug.Println("writeRootObject: exclude complex entries on split,trim and page extraction.")
dict.Delete("Names")
dict.Delete("Dests")
dict.Delete("Outlines")
dict.Delete("OpenAction")
dict.Delete("AcroForm")
dict.Delete("StructTreeRoot")
dict.Delete("OCProperties")
}
err = writePDFDictObject(ctx, objNumber, genNumber, *dict)
if err != nil {
return err
}
log.Debug.Printf("writeRootObject: %s\n", dict)
log.Debug.Printf("writeRootObject: new offset after rootDict = %d\n", ctx.Write.Offset)
err = writeRootEntry(ctx, dict, dictName, "Version", types.RootVersion)
if err != nil {
return err
}
err = writePages(ctx, dict)
if err != nil {
return err
}
for _, e := range []struct {
entryName string
statsAttr int
}{
{"Extensions", types.RootExtensions},
{"PageLabels", types.RootPageLabels},
{"Names", types.RootNames},
{"Dests", types.RootDests},
{"ViewerPreferences", types.RootViewerPrefs},
{"PageLayout", types.RootPageLayout},
{"PageMode", types.RootPageMode},
{"Outlines", types.RootOutlines},
{"Threads", types.RootThreads},
{"OpenAction", types.RootOpenAction},
{"AA", types.RootAA},
{"URI", types.RootURI},
{"AcroForm", types.RootAcroForm},
{"Metadata", types.RootMetadata},
} {
err = writeRootEntry(ctx, dict, dictName, e.entryName, e.statsAttr)
if err != nil {
return err
}
}
err = writeRootEntryToObjStream(ctx, dict, dictName, "StructTreeRoot", types.RootStructTreeRoot)
if err != nil {
return err
}
for _, e := range []struct {
entryName string
statsAttr int
}{
{"MarkInfo", types.RootMarkInfo},
{"Lang", types.RootLang},
{"SpiderInfo", types.RootSpiderInfo},
{"OutputIntents", types.RootOutputIntents},
{"PieceInfo", types.RootPieceInfo},
{"OCProperties", types.RootOCProperties},
{"Perms", types.RootPerms},
{"Legal", types.RootLegal},
{"Requirements", types.RootRequirements},
{"Collection", types.RootCollection},
{"NeedsRendering", types.RootNeedsRendering},
} {
err = writeRootEntry(ctx, dict, dictName, e.entryName, e.statsAttr)
if err != nil {
return err
}
}
log.Debug.Printf("*** writeRootObject: end offset=%d ***\n", ctx.Write.Offset)
return nil
}
func writeTrailerDict(ctx *types.PDFContext) error {
log.Debug.Printf("writeTrailerDict begin\n")
w := ctx.Write
xRefTable := ctx.XRefTable
_, err := w.WriteString("trailer")
if err != nil {
return err
}
err = w.WriteEol()
if err != nil {
return err
}
dict := types.NewPDFDict()
dict.Insert("Size", types.PDFInteger(*xRefTable.Size))
dict.Insert("Root", *xRefTable.Root)
if xRefTable.Info != nil {
dict.Insert("Info", *xRefTable.Info)
}
if ctx.Encrypt != nil && ctx.EncKey != nil {
dict.Insert("Encrypt", *ctx.Encrypt)
}
if xRefTable.ID != nil {
dict.Insert("ID", *xRefTable.ID)
}
_, err = w.WriteString(dict.PDFString())
if err != nil {
return err
}
log.Debug.Printf("writeTrailerDict end\n")
return nil
}
func writeXRefSubsection(ctx *types.PDFContext, start int, size int) error {
log.Debug.Printf("writeXRefSubsection: start=%d size=%d\n", start, size)
w := ctx.Write
_, err := w.WriteString(fmt.Sprintf("%d %d%s", start, size, w.Eol))
if err != nil {
return err
}
var lines []string
for i := start; i < start+size; i++ {
entry := ctx.XRefTable.Table[i]
if entry.Compressed {
return errors.New("writeXRefSubsection: compressed entries present")
}
var s string
if entry.Free {
s = fmt.Sprintf("%010d %05d f%2s", *entry.Offset, *entry.Generation, w.Eol)
} else {
var off int64
writeOffset, found := ctx.Write.Table[i]
if found {
off = writeOffset
}
s = fmt.Sprintf("%010d %05d n%2s", off, *entry.Generation, w.Eol)
}
lines = append(lines, fmt.Sprintf("%d: %s", i, s))
_, err = w.WriteString(s)
if err != nil {
return err
}
}
log.Debug.Printf("\n%s\n", strings.Join(lines, ""))
log.Debug.Printf("writeXRefSubsection: end\n")
return nil
}
func deleteRedundantObject(ctx *types.PDFContext, objNr int) {
if ctx.Write.ExtractPageNr == 0 &&
(ctx.Optimize.IsDuplicateFontObject(objNr) || ctx.Optimize.IsDuplicateImageObject(objNr)) {
ctx.DeleteObject(objNr)
}
if ctx.IsLinearizationObject(objNr) || ctx.Optimize.IsDuplicateInfoObject(objNr) ||
ctx.Read.IsObjectStreamObject(objNr) || ctx.Read.IsXRefStreamObject(objNr) {
ctx.DeleteObject(objNr)
}
}
func deleteRedundantObjects(ctx *types.PDFContext) {
if ctx.Optimize == nil {
return
}
xRefTable := ctx.XRefTable
log.Debug.Printf("deleteRedundantObjects begin: Size=%d\n", *xRefTable.Size)
for i := 0; i < *xRefTable.Size; i++ {
// Missing object remains missing.
entry, found := xRefTable.Find(i)
if !found {
continue
}
// Free object
if entry.Free {
continue
}
// Object written
if ctx.Write.HasWriteOffset(i) {
// Resources may be cross referenced from different objects
// eg. font descriptors may be shared by different font dicts.
// Try to remove this object from the list of the potential duplicate objects.
log.Debug.Printf("deleteRedundantObjects: remove duplicate obj #%d\n", i)
delete(ctx.Optimize.DuplicateFontObjs, i)
delete(ctx.Optimize.DuplicateImageObjs, i)
delete(ctx.Optimize.DuplicateInfoObjects, i)
continue
}
// Object not written
if ctx.Read.Linearized {
// Since there is no type entry for stream dicts associated with linearization dicts
// we have to check every PDFStreamDict that has not been written.
if _, ok := entry.Object.(types.PDFStreamDict); ok {
if *entry.Offset == *xRefTable.OffsetPrimaryHintTable {
xRefTable.LinearizationObjs[i] = true
log.Debug.Printf("deleteRedundantObjects: primaryHintTable at obj #%d\n", i)
}
if xRefTable.OffsetOverflowHintTable != nil &&
*entry.Offset == *xRefTable.OffsetOverflowHintTable {
xRefTable.LinearizationObjs[i] = true
log.Debug.Printf("deleteRedundantObjects: overflowHintTable at obj #%d\n", i)
}
}
}
deleteRedundantObject(ctx, i)
}
log.Debug.Println("deleteRedundantObjects end")
}
func sortedWritableKeys(ctx *types.PDFContext) []int {
var keys []int
for i, e := range ctx.Table {
if e.Free || ctx.Write.HasWriteOffset(i) {
keys = append(keys, i)
}
}
sort.Ints(keys)
return keys
}
// After inserting the last object write the cross reference table to disk.
func writeXRefTable(ctx *types.PDFContext) error {
err := ctx.EnsureValidFreeList()
if err != nil {
return err
}
keys := sortedWritableKeys(ctx)
objCount := len(keys)
log.Debug.Printf("xref has %d entries\n", objCount)
_, err = ctx.Write.WriteString("xref")
if err != nil {
return err
}
err = ctx.Write.WriteEol()
if err != nil {
return err
}
start := keys[0]
size := 1
for i := 1; i < len(keys); i++ {
if keys[i]-keys[i-1] > 1 {
err = writeXRefSubsection(ctx, start, size)
if err != nil {
return err
}
start = keys[i]
size = 1
continue
}
size++
}
err = writeXRefSubsection(ctx, start, size)
if err != nil {
return err
}
err = writeTrailerDict(ctx)
if err != nil {
return err
}
err = ctx.Write.WriteEol()
if err != nil {
return err
}
_, err = ctx.Write.WriteString("startxref")
if err != nil {
return err
}
err = ctx.Write.WriteEol()
if err != nil {
return err
}
_, err = ctx.Write.WriteString(fmt.Sprintf("%d", ctx.Write.Offset))
if err != nil {
return err
}
return ctx.Write.WriteEol()
}
// int64ToBuf returns a byte slice with length byteCount representing integer i.
func int64ToBuf(i int64, byteCount int) (buf []byte) {
j := 0
var b []byte
for k := i; k > 0; {
b = append(b, byte(k&0xff))
k >>= 8
j++
}
// Swap byte order
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
b[i], b[j] = b[j], b[i]
}
if j < byteCount {
buf = append(bytes.Repeat([]byte{0}, byteCount-j), b...)
} else {
buf = b
}
return
}
func createXRefStream(ctx *types.PDFContext, i1, i2, i3 int) ([]byte, *types.PDFArray, error) {
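	// Each xref stream entry is three fields of i1/i2/i3 bytes: entry type
	// (0 = free, 1 = uncompressed, 2 = compressed), then offset/object stream
	// number, then generation/object stream index.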
log.Debug.Println("createXRefStream begin")
xRefTable := ctx.XRefTable
var (
buf []byte
arr types.PDFArray
)
var keys []int
for i, e := range xRefTable.Table {
if e.Free || ctx.Write.HasWriteOffset(i) {
keys = append(keys, i)
}
}
sort.Ints(keys)
objCount := len(keys)
log.Debug.Printf("createXRefStream: xref has %d entries\n", objCount)
start := keys[0]
size := 0
for i := 0; i < len(keys); i++ {
j := keys[i]
entry := xRefTable.Table[j]
var s1, s2, s3 []byte
if entry.Free {
// unused
log.Debug.Printf("createXRefStream: unused i=%d nextFreeAt:%d gen:%d\n", j, int(*entry.Offset), int(*entry.Generation))
s1 = int64ToBuf(0, i1)
s2 = int64ToBuf(*entry.Offset, i2)
s3 = int64ToBuf(int64(*entry.Generation), i3)
} else if entry.Compressed {
// in use, compressed into object stream
log.Debug.Printf("createXRefStream: compressed i=%d at objstr %d[%d]\n", j, int(*entry.ObjectStream), int(*entry.ObjectStreamInd))
s1 = int64ToBuf(2, i1)
s2 = int64ToBuf(int64(*entry.ObjectStream), i2)
s3 = int64ToBuf(int64(*entry.ObjectStreamInd), i3)
} else {
off, found := ctx.Write.Table[j]
if !found {
return nil, nil, errors.Errorf("createXRefStream: missing write offset for obj #%d\n", i)
}
// in use, uncompressed
log.Debug.Printf("createXRefStream: used i=%d offset:%d gen:%d\n", j, int(off), int(*entry.Generation))
s1 = int64ToBuf(1, i1)
s2 = int64ToBuf(off, i2)
s3 = int64ToBuf(int64(*entry.Generation), i3)
}
log.Debug.Printf("createXRefStream: written: %x %x %x \n", s1, s2, s3)
buf = append(buf, s1...)
buf = append(buf, s2...)
buf = append(buf, s3...)
if i > 0 && (keys[i]-keys[i-1] > 1) {
arr = append(arr, types.PDFInteger(start))
arr = append(arr, types.PDFInteger(size))
start = keys[i]
size = 1
continue
}
size++
}
arr = append(arr, types.PDFInteger(start))
arr = append(arr, types.PDFInteger(size))
log.Debug.Println("createXRefStream end")
return buf, &arr, nil
}
func writeXRefStream(ctx *types.PDFContext) error {
log.Debug.Println("writeXRefStream begin")
xRefTable := ctx.XRefTable
xRefStreamDict := types.NewPDFXRefStreamDict(ctx)
xRefTableEntry := types.NewXRefTableEntryGen0(*xRefStreamDict)
// Reuse free objects (including recycled objects from this run).
var objNumber int
objNumber, err := xRefTable.InsertAndUseRecycled(*xRefTableEntry)
if err != nil {
return err
}
// After the last insert of an object.
err = xRefTable.EnsureValidFreeList()
if err != nil {
return err
}
xRefStreamDict.Insert("Size", types.PDFInteger(*xRefTable.Size))
offset := ctx.Write.Offset
i2Base := int64(*ctx.Size)
if offset > i2Base {
i2Base = offset
}
i1 := 1 // 0, 1 or 2 always fit into 1 byte.
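	// Size the second field to the minimal number of bytes that can represent
	// the largest value it must hold (a byte offset or an object number).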
i2 := func(i int64) (byteCount int) {
for i > 0 {
i >>= 8
byteCount++
}
return byteCount
}(i2Base)
i3 := 2 // scale for max objectstream index <= 0x ff ff
wArr := types.PDFArray{types.PDFInteger(i1), types.PDFInteger(i2), types.PDFInteger(i3)}
xRefStreamDict.Insert("W", wArr)
// Generate xRefStreamDict data = xref entries -> xRefStreamDict.Content
content, indArr, err := createXRefStream(ctx, i1, i2, i3)
if err != nil {
return err
}
xRefStreamDict.Content = content
xRefStreamDict.Insert("Index", *indArr)
// Encode xRefStreamDict.Content -> xRefStreamDict.Raw
err = filter.EncodeStream(&xRefStreamDict.PDFStreamDict)
if err != nil {
return err
}
log.Debug.Printf("writeXRefStream: xRefStreamDict: %s\n", xRefStreamDict)
err = writePDFStreamDictObject(ctx, objNumber, 0, xRefStreamDict.PDFStreamDict)
if err != nil {
return err
}
w := ctx.Write
err = w.WriteEol()
if err != nil {
return err
}
_, err = w.WriteString("startxref")
if err != nil {
return err
}
err = w.WriteEol()
if err != nil {
return err
}
_, err = w.WriteString(fmt.Sprintf("%d", offset))
if err != nil {
return err
}
err = w.WriteEol()
if err != nil {
return err
}
log.Debug.Println("writeXRefStream end")
return nil
}
func writeEncryptDict(ctx *types.PDFContext) error {
// Bail out unless we really have to write encrypted.
if ctx.Encrypt == nil || ctx.EncKey == nil {
return nil
}
indRef := *ctx.Encrypt
objNumber := int(indRef.ObjectNumber)
genNumber := int(indRef.GenerationNumber)
var dict *types.PDFDict
dict, err := ctx.DereferenceDict(indRef)
if err != nil {
return err
}
return writePDFObject(ctx, objNumber, genNumber, dict.PDFString())
}
func setupEncryption(ctx *types.PDFContext) error {
var err error
dict := crypto.NewEncryptDict(ctx.EncryptUsingAES, ctx.EncryptUsing128BitKey, ctx.UserAccessPermissions)
ctx.E, err = crypto.SupportedEncryption(ctx, dict)
if err != nil {
return err
}
if ctx.ID == nil {
return errors.New("encrypt: missing ID")
}
var id []byte
id, err = ctx.IDFirstElement()
if err != nil {
return err
}
ctx.E.ID = id
//fmt.Printf("opw before: length:%d <%s>\n", len(ctx.E.O), ctx.E.O)
ctx.E.O, err = crypto.O(ctx)
if err != nil {
return err
}
//fmt.Printf("opw after: length:%d <%s> %0X\n", len(ctx.E.O), ctx.E.O, ctx.E.O)
//fmt.Printf("upw before: length:%d <%s>\n", len(ctx.E.U), ctx.E.U)
ctx.E.U, ctx.EncKey, err = crypto.U(ctx)
if err != nil {
return err
}
//fmt.Printf("upw after: length:%d <%s> %0X\n", len(ctx.E.U), ctx.E.U, ctx.E.U)
//fmt.Printf("encKey = %0X\n", ctx.EncKey)
dict.Update("U", types.PDFHexLiteral(hex.EncodeToString(ctx.E.U)))
dict.Update("O", types.PDFHexLiteral(hex.EncodeToString(ctx.E.O)))
xRefTableEntry := types.NewXRefTableEntryGen0(*dict)
// Reuse free objects (including recycled objects from this run).
var objNumber int
objNumber, err = ctx.InsertAndUseRecycled(*xRefTableEntry)
if err != nil {
return err
}
ctx.Encrypt = types.NewPDFIndirectRef(objNumber, 0)
return nil
}
func updateEncryption(ctx *types.PDFContext) error {
d, err := ctx.EncryptDict()
if err != nil {
return err
}
if ctx.Mode == types.ADDPERMISSIONS {
//fmt.Printf("updating permissions to: %v\n", ctx.UserAccessPermissions)
ctx.E.P = int(ctx.UserAccessPermissions)
d.Update("P", types.PDFInteger(ctx.E.P))
// and moving on, U is dependent on P
}
// Change user or owner password.
//fmt.Println("change upw or opw")
if ctx.UserPWNew != nil {
//fmt.Printf("change upw from <%s> to <%s>\n", ctx.UserPW, *ctx.UserPWNew)
ctx.UserPW = *ctx.UserPWNew
}
if ctx.OwnerPWNew != nil {
//fmt.Printf("change opw from <%s> to <%s>\n", ctx.OwnerPW, *ctx.OwnerPWNew)
ctx.OwnerPW = *ctx.OwnerPWNew
}
//fmt.Printf("opw before: length:%d <%s>\n", len(ctx.E.O), ctx.E.O)
ctx.E.O, err = crypto.O(ctx)
if err != nil {
return err
}
//fmt.Printf("opw after: length:%d <%s> %0X\n", len(ctx.E.O), ctx.E.O, ctx.E.O)
d.Update("O", types.PDFHexLiteral(hex.EncodeToString(ctx.E.O)))
//fmt.Printf("upw before: length:%d <%s>\n", len(ctx.E.U), ctx.E.U)
ctx.E.U, ctx.EncKey, err = crypto.U(ctx)
if err != nil {
return err
}
//fmt.Printf("upw after: length:%d <%s> %0X\n", len(ctx.E.U), ctx.E.U, ctx.E.U)
//fmt.Printf("encKey = %0X\n", ctx.EncKey)
d.Update("U", types.PDFHexLiteral(hex.EncodeToString(ctx.E.U)))
return nil
}
func handleEncryption(ctx *types.PDFContext) error {
if ctx.Mode == types.ENCRYPT || ctx.Mode == types.DECRYPT {
if ctx.Mode == types.DECRYPT {
// Remove encryption.
ctx.EncKey = nil
} else {
err := setupEncryption(ctx)
if err != nil {
return err
}
}
} else if ctx.UserPWNew != nil || ctx.OwnerPWNew != nil || ctx.Mode == types.ADDPERMISSIONS {
err := updateEncryption(ctx)
if err != nil {
return err
}
}
	// For encrypted files, keep writing object and xref streams only if the input already used xref streams.
if ctx.Encrypt != nil && ctx.EncKey != nil && !ctx.Read.UsingXRefStreams {
ctx.WriteObjectStream = false
ctx.WriteXRefStream = false
}
return nil
}
func writeXRef(ctx *types.PDFContext) error {
if ctx.WriteXRefStream {
// Write cross reference stream and generate objectstreams.
return writeXRefStream(ctx)
}
// Write cross reference table section.
return writeXRefTable(ctx)
}
func setFileSizeOfWrittenFile(w *types.WriteContext, f *os.File) error {
// Get file info for file just written but flush first to get correct file size.
err := w.Flush()
if err != nil {
return err
}
fileInfo, err := f.Stat()
if err != nil {
return err
}
w.FileSize = fileInfo.Size()
return nil
}
| writeRootEntry |
stp_string.go | // Code generated by "stringer -type=STP"; DO NOT EDIT.
package unifi
import "strconv"
const (
_STP_name_0 = "StpDisabledStpBlockingStpListeningStpLearningStpForwarding"
_STP_name_1 = "StpUnknown"
)
var (
_STP_index_0 = [...]uint8{0, 11, 22, 34, 45, 58}
)
func (i STP) String() string { | return _STP_name_1
default:
return "STP(" + strconv.FormatInt(int64(i), 10) + ")"
}
} | switch {
case 0 <= i && i <= 4:
return _STP_name_0[_STP_index_0[i]:_STP_index_0[i+1]]
case i == 9: |
crawl.py | from bs4 import BeautifulSoup as bs
import requests
from pytube import YouTube
base = "https://www.youtube.com/results?search_query="
qstring = "tin tức"
r = requests.get(base+qstring)
page = r.text
soup=bs(page,'html.parser') | vids = soup.findAll('a',attrs={'class':'yt-uix-tile-link'})
videolist=[]
for v in vids:
tmp = 'https://www.youtube.com' + v['href']
videolist.append(tmp)
print(videolist)
count=0
for item in videolist:
# increment counter:
count+=1
# initiate the class:
yt = YouTube(item)
# have a look at the different formats available:
#formats = yt.get_videos()
# grab the video:
video = yt.get_videos('mp4', '360p')
# set the output file name:
yt.set_filename('Video_'+str(count))
# download the video:
video.download('./') | |
client.rs | use spaceframe_grpc::hello_world::{greeter_client::GreeterClient, HelloRequest};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> | {
let mut client = GreeterClient::connect("http://[::1]:50051").await?;
let request = tonic::Request::new(HelloRequest {
name: "Tonic".into(),
});
let response = client.say_hello(request).await?;
println!("RESPONSE={:?}", response);
Ok(())
} |
|
lib.rs | #![no_std]
const FNV_OFFSET_BASIS_32: u32 = 0x811c9dc5;
const FNV_OFFSET_BASIS_64: u64 = 0xcbf29ce484222325;
const FNV_PRIME_32: u32 = 0x01000193;
const FNV_PRIME_64: u64 = 0x00000100000001B3;
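// ASCII upper- and lower-case letters differ only in bit 0x20; with `case` set,
// any byte that has this bit set gets it cleared, folding a-z to A-Z for a
// case-insensitive hash (intended for alphabetic input).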
const ASCII_CASE_MASK: u8 = 0b0010_0000;
/// Computes the 64-bit FNV-1a hash of the given slice, or of up to `limit` bytes if provided.
/// If `limit` is zero or exceeds the slice length, the slice length is used instead.
pub const fn | (bytes: &[u8], limit: Option<usize>, case: bool) -> u64 {
let mut hash = FNV_OFFSET_BASIS_64;
let mut i = 0;
let len = match limit {
Some(v) => {
if v <= bytes.len() && v > 0 {
v
} else {
bytes.len()
}
}
None => bytes.len(),
};
while i < len {
let value = if case && (bytes[i] & ASCII_CASE_MASK == ASCII_CASE_MASK) {
bytes[i] ^ ASCII_CASE_MASK
}
else {
bytes[i]
};
hash ^= value as u64;
hash = hash.wrapping_mul(FNV_PRIME_64);
i += 1;
}
hash
}
/// Computes the 32-bit fnv1a hash of the given slice, or of its first `limit` bytes if provided.
/// If the limit is zero or exceeds the slice length, the full slice length is used instead.
pub const fn fnv1a_hash_32(bytes: &[u8], limit: Option<usize>, case: bool) -> u32 {
let mut hash = FNV_OFFSET_BASIS_32;
let mut i = 0;
let len = match limit {
Some(v) => {
if v <= bytes.len() && v > 0 {
v
} else {
bytes.len()
}
}
None => bytes.len(),
};
while i < len {
let value = if case && (bytes[i] & ASCII_CASE_MASK == ASCII_CASE_MASK) {
bytes[i] ^ ASCII_CASE_MASK
}
else {
bytes[i]
};
hash ^= value as u32;
hash = hash.wrapping_mul(FNV_PRIME_32);
i += 1;
}
hash
}
/// Computes the 32-bit fnv1a hash and XORs its upper and lower 16 bits.
/// This results in a 16-bit hash value.
/// Hashes up to `limit` bytes if provided, otherwise the whole slice.
/// If the limit is zero or exceeds the slice length, the full slice length is used instead.
#[inline(always)]
pub const fn fnv1a_hash_16_xor(bytes: &[u8], limit: Option<usize>) -> u16 {
let bytes = fnv1a_hash_32(bytes, limit, false).to_ne_bytes();
let upper: u16 = u16::from_ne_bytes([bytes[0], bytes[1]]);
let lower: u16 = u16::from_ne_bytes([bytes[2], bytes[3]]);
upper ^ lower
}
/// Computes 64-bit fnv1a hash from a str.
#[inline(always)]
pub const fn fnv1a_hash_str_64(input: &str) -> u64 {
fnv1a_hash_64(input.as_bytes(), None, false)
}
/// Computes 32-bit fnv1a hash from a str.
#[inline(always)]
pub const fn fnv1a_hash_str_32(input: &str) -> u32 {
fnv1a_hash_32(input.as_bytes(), None, false)
}
/// Computes 16-bit fnv1a hash from a str using XOR folding.
#[inline(always)]
pub const fn fnv1a_hash_str_16_xor(input: &str) -> u16 {
fnv1a_hash_16_xor(input.as_bytes(), None)
}
#[test]
fn fnv1a_test_case_comparison() {
let bytes = [b'A', b'B'];
assert_eq!(fnv1a_hash_64(&bytes, None, false), fnv1a_hash_64(&bytes, None, true));
assert_eq!(fnv1a_hash_32(&bytes, None, false), fnv1a_hash_32(&bytes, None, true));
let bytes = [b'a', b'B'];
assert_ne!(fnv1a_hash_64(&bytes, None, false), fnv1a_hash_64(&bytes, None, true));
assert_ne!(fnv1a_hash_32(&bytes, None, false), fnv1a_hash_32(&bytes, None, true));
let bytes = [b'a', b'B'];
let comparison = [b'A', b'B'];
assert_eq!(fnv1a_hash_64(&bytes, None, true), fnv1a_hash_64(&comparison, None, true));
assert_eq!(fnv1a_hash_32(&bytes, None, true), fnv1a_hash_32(&comparison, None, true));
} | fnv1a_hash_64 |
CppHeaderParser.py | #!/usr/bin/python
#
# Author: Jashua R. Cloutier (contact via https://bitbucket.org/senex)
# Project: http://senexcanis.com/open-source/cppheaderparser/
#
# Copyright (C) 2011, Jashua R. Cloutier
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Jashua R. Cloutier nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission. Stories,
# blog entries etc making reference to this project may mention the
# name Jashua R. Cloutier in terms of project originator/creator etc.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
#
# The CppHeaderParser.py script is written in Python 2.4 and released to
# the open source community for continuous improvements under the BSD
# 2.0 new license, which can be found at:
#
# http://www.opensource.org/licenses/bsd-license.php
#
"""Parse C++ header files and generate a data structure
representing the class
"""
import ply.lex as lex
import os
import sys
import re
import inspect
def lineno():
"""Returns the current line number in our program."""
return inspect.currentframe().f_back.f_lineno
version = __version__ = "2.7"
tokens = [
'NUMBER',
'FLOAT_NUMBER',
'TEMPLATE_NAME',
'NAME',
'OPEN_PAREN',
'CLOSE_PAREN',
'OPEN_BRACE',
'CLOSE_BRACE',
'OPEN_SQUARE_BRACKET',
'CLOSE_SQUARE_BRACKET',
'COLON',
'SEMI_COLON',
'COMMA',
'TAB',
'BACKSLASH',
'PIPE',
'PERCENT',
'EXCLAMATION',
'CARET',
'COMMENT_SINGLELINE',
'COMMENT_MULTILINE',
'PRECOMP_MACRO',
'PRECOMP_MACRO_CONT',
'ASTERISK',
'AMPERSTAND',
'EQUALS',
'MINUS',
'PLUS',
'DIVIDE',
'CHAR_LITERAL',
'STRING_LITERAL',
'NEW_LINE',
'SQUOTE',
]
t_ignore = " \r.?@\f"
t_NUMBER = r'[0-9][0-9XxA-Fa-f]*'
t_FLOAT_NUMBER = r'[-+]?[0-9]*\.[0-9]+([eE][-+]?[0-9]+)?'
t_TEMPLATE_NAME = r'CppHeaderParser_template_[0-9]+'
t_NAME = r'[<>A-Za-z_~][A-Za-z0-9_]*'
t_OPEN_PAREN = r'\('
t_CLOSE_PAREN = r'\)'
t_OPEN_BRACE = r'{'
t_CLOSE_BRACE = r'}'
t_OPEN_SQUARE_BRACKET = r'\['
t_CLOSE_SQUARE_BRACKET = r'\]'
t_SEMI_COLON = r';'
t_COLON = r':'
t_COMMA = r','
t_TAB = r'\t'
t_BACKSLASH = r'\\'
t_PIPE = r'\|'
t_PERCENT = r'%'
t_CARET = r'\^'
t_EXCLAMATION = r'!'
t_PRECOMP_MACRO = r'\#.*'
t_PRECOMP_MACRO_CONT = r'.*\\\n'
def t_COMMENT_SINGLELINE(t):
r'\/\/.*\n'
global doxygenCommentCache
if t.value.startswith("///") or t.value.startswith("//!"):
if doxygenCommentCache:
doxygenCommentCache += "\n"
if t.value.endswith("\n"):
doxygenCommentCache += t.value[:-1]
else:
doxygenCommentCache += t.value
t.lexer.lineno += len([a for a in t.value if a=="\n"])
t_ASTERISK = r'\*'
t_MINUS = r'\-'
t_PLUS = r'\+'
t_DIVIDE = r'/(?!/)'
t_AMPERSTAND = r'&'
t_EQUALS = r'='
t_CHAR_LITERAL = "'.'"
t_SQUOTE = "'"
#found at http://wordaligned.org/articles/string-literals-and-regular-expressions
#TODO: This does not work with the string "bla \" bla"
t_STRING_LITERAL = r'"([^"\\]|\\.)*"'
#Found at http://ostermiller.org/findcomment.html
def t_COMMENT_MULTILINE(t):
r'/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/'
global doxygenCommentCache
if t.value.startswith("/**") or t.value.startswith("/*!"):
#not sure why, but we get double new lines
v = t.value.replace("\n\n", "\n")
#strip prefixing whitespace
v = re.sub("\n[\s]+\*", "\n*", v)
doxygenCommentCache += v
t.lexer.lineno += len([a for a in t.value if a=="\n"])
def t_NEWLINE(t):
r'\n+'
t.lexer.lineno += len(t.value)
def t_error(v):
print(( "Lex error: ", v ))
lex.lex()
# Controls error_print
print_errors = 1
# Controls warning_print
print_warnings = 1
# Controls debug_print
debug = 0
# Controls trace_print
debug_trace = 0
def error_print(arg):
if print_errors: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def warning_print(arg):
if print_warnings: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def debug_print(arg):
global debug
if debug: print(("[%4d] %s"%(inspect.currentframe().f_back.f_lineno, arg)))
def trace_print(*arg):
global debug_trace
if debug_trace:
sys.stdout.write("[%s] "%(inspect.currentframe().f_back.f_lineno))
for a in arg: sys.stdout.write("%s "%a)
sys.stdout.write("\n")
supportedAccessSpecifier = [
'public',
'protected',
'private',
'public slots',
'protected slots',
'private slots',
'public Q_SLOTS',
'protected Q_SLOTS',
'private Q_SLOTS',
'signals',
'Q_SIGNALS',
]
#Symbols to ignore, usually special macros
ignoreSymbols = [
'Q_OBJECT',
'Q_PROPERTY()',
'Q_DECLARE_FLAGS()',
'Q_INVOKABLE',
]
doxygenCommentCache = ""
#Track what was added in what order and at what depth
parseHistory = []
def is_namespace(nameStack):
"""Determines if a namespace is being specified"""
if len(nameStack) == 0:
return False
if nameStack[0] == "namespace":
return True
return False
def is_enum_namestack(nameStack):
"""Determines if a namestack is an enum namestack"""
if len(nameStack) == 0:
return False
if nameStack[0] == "enum":
return True
if len(nameStack) > 1 and nameStack[0] == "typedef" and nameStack[1] == "enum":
return True
return False
def is_fundamental(s):
for a in s.split():
if a not in ["size_t", "struct", "union", "unsigned", "signed", "bool", "char", "short", "int", "float", "double", "long", "void", "*"]: return False
return True
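# Rough illustration of the check above (type strings chosen here, not
# from the original): strings built only from C fundamentals pass,
# anything else fails.
#   is_fundamental("unsigned int *") -> True
#   is_fundamental("std::string") -> False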
def is_function_pointer_stack(stack):
"""Count how many non-nested paranthesis are in the stack. Useful for determining if a stack is a function pointer"""
paren_depth = 0
paren_count = 0
star_after_first_paren = False
last_e = None
for e in stack:
if e == "(":
paren_depth += 1
elif e == ")" and paren_depth > 0:
paren_depth -= 1
if paren_depth == 0:
paren_count += 1
elif e == "*" and last_e == "(" and paren_count == 0 and paren_depth == 1:
star_after_first_paren = True
last_e = e
if star_after_first_paren and paren_count == 2:
return True
else:
return False
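# Rough illustration (token stack chosen here, not from the original):
# for a declaration like "void (*fp)(int);" a '*' directly follows the
# first '(' and exactly two top-level paren groups close, which is the
# pattern detected above.
#   is_function_pointer_stack(['void', '(', '*', 'fp', ')', '(', 'int', ')', ';']) -> True
#   is_function_pointer_stack(['int', 'x', ';']) -> False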
def is_method_namestack(stack):
r = False
if '(' not in stack: r = False
elif stack[0] == 'typedef': r = False # TODO deal with typedef function prototypes
#elif '=' in stack and stack.index('=') < stack.index('(') and stack[stack.index('=')-1] != 'operator': r = False #disabled July6th - allow all operators
elif 'operator' in stack: r = True # allow all operators
elif '{' in stack and stack.index('{') < stack.index('('): r = False # struct that looks like a method/class
elif '(' in stack and ')' in stack:
if '{' in stack and '}' in stack: r = True
elif stack[-1] == ';':
if is_function_pointer_stack(stack):
r = False
else:
r = True
elif '{' in stack: r = True # ideally we catch both braces... TODO
else: r = False
#Test for case of property set to something with parens such as "static const int CONST_A = (1 << 7) - 1;"
if r and "(" in stack and "=" in stack and 'operator' not in stack:
if stack.index("=") < stack.index("("): r = False
return r
def is_property_namestack(nameStack):
r = False
if '(' not in nameStack and ')' not in nameStack: r = True
elif "(" in nameStack and "=" in nameStack and nameStack.index("=") < nameStack.index("("): r = True
#See if we are a function pointer
if not r and is_function_pointer_stack(nameStack): r = True
return r
def detect_lineno(s):
"""Detect the line number for a given token string"""
try:
rtn = s.lineno()
if rtn != -1:
return rtn
except: pass
global curLine
return curLine
def filter_out_attribute_keyword(stack):
"""Strips __attribute__ and its parenthetical expression from the stack"""
if "__attribute__" not in stack: return stack
try:
debug_print("Stripping __attribute__ from %s"% stack)
attr_index = stack.index("__attribute__")
attr_end = attr_index + 1 #Assuming not followed by a parenthetical expression, which won't happen
#Find final paren
if stack[attr_index + 1] == '(':
paren_count = 1
for i in range(attr_index + 2, len(stack)):
elm = stack[i]
if elm == '(':
paren_count += 1
elif elm == ')':
paren_count -= 1
if paren_count == 0:
attr_end = i + 1
break
new_stack = stack[0:attr_index] + stack[attr_end:]
debug_print("stripped stack is %s"% new_stack)
return new_stack
except:
return stack
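# Rough illustration (token stack chosen here, not from the original):
# the keyword and its balanced parenthetical are dropped in one pass.
#   filter_out_attribute_keyword(['int', '__attribute__', '(', '(', 'packed', ')', ')', 'x'])
#   -> ['int', 'x']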
class TagStr(str):
"""Wrapper for a string that allows us to store the line number associated with it"""
lineno_reg = {}
def __new__(cls,*args,**kw):
new_obj = str.__new__(cls,*args)
if "lineno" in kw:
TagStr.lineno_reg[id(new_obj)] = kw["lineno"]
return new_obj
def __del__(self):
try:
del TagStr.lineno_reg[id(self)]
except: pass
def lineno(self):
return TagStr.lineno_reg.get(id(self), -1)
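# Rough illustration (values chosen here, not from the original): the
# line number rides along with the string via the id-keyed registry.
#   s = TagStr("foo", lineno=12)
#   s.lineno() -> 12
#   TagStr("bar").lineno() -> -1 (nothing registered for that string)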
class CppParseError(Exception): pass
class CppClass(dict):
"""Takes a name stack and turns it into a class
Contains the following Keys:
self['name'] - Name of the class
self['doxygen'] - Doxygen comments associated with the class if they exist
self['inherits'] - List of Classes that this one inherits where the values
are of the form {"access": Anything in supportedAccessSpecifier
"class": Name of the class
self['methods'] - Dictionary where keys are from supportedAccessSpecifier
and values are a lists of CppMethod's
self['properties'] - Dictionary where keys are from supportedAccessSpecifier
and values are lists of CppVariable's
self['enums'] - Dictionary where keys are from supportedAccessSpecifier and
values are lists of CppEnum's
self['structs'] - Dictionary where keys are from supportedAccessSpecifier and
values are lists of nested Struct's
An example of how this could look is as follows:
#self =
{
'name': ""
'inherits':[]
'methods':
{
'public':[],
'protected':[],
'private':[]
},
'properties':
{
'public':[],
'protected':[],
'private':[]
},
'enums':
{
'public':[],
'protected':[],
'private':[]
}
}
"""
def get_all_methods(self):
r = []
for typ in supportedAccessSpecifier: r += self['methods'][typ]
return r
def get_all_method_names( self ):
r = []
for typ in supportedAccessSpecifier: r += self.get_method_names(typ) # returns list
return r
def get_all_pure_virtual_methods( self ):
r = {}
for typ in supportedAccessSpecifier: r.update(self.get_pure_virtual_methods(typ)) # returns dict
return r
def get_method_names( self, type='public' ): return [ meth['name'] for meth in self['methods'][ type ] ]
def get_pure_virtual_methods( self, type='public' ):
r = {}
for meth in self['methods'][ type ]:
if meth['pure_virtual']: r[ meth['name'] ] = meth
return r
def __init__(self, nameStack, curTemplate):
self['nested_classes'] = []
self['parent'] = None
self['abstract'] = False
self._public_enums = {}
self._public_structs = {}
self._public_typedefs = {}
self._public_forward_declares = []
self['namespace'] = ""
debug_print( "Class: %s"%nameStack )
debug_print( "Template: %s"%curTemplate)
if (len(nameStack) < 2):
nameStack.insert(1, "")#anonymous struct
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if "::" in "".join(nameStack):
#Re-Join class paths (ex ['class', 'Bar', ':', ':', 'Foo'] -> ['class', 'Bar::Foo'])
try:
new_nameStack = []
for name in nameStack:
if len(new_nameStack) == 0:
new_nameStack.append(name)
elif name == ":" and new_nameStack[-1].endswith(":"):
new_nameStack[-1] += name
elif new_nameStack[-1].endswith("::"):
new_nameStack[-2] += new_nameStack[-1] + name
del new_nameStack[-1]
else:
new_nameStack.append(name)
trace_print("Convert from namestack\n %s\nto\n%s"%(nameStack, new_nameStack))
nameStack = new_nameStack
except: pass
# Handle final specifier
self["final"] = False
try:
final_index = nameStack.index("final")
# Don't trip up the rest of the logic
del nameStack[final_index]
self["final"] = True
trace_print("final")
except: pass
self["name"] = nameStack[1]
self["line_number"] = detect_lineno(nameStack[0])
#Handle template classes
if len(nameStack) > 3 and nameStack[2].startswith("<"):
open_template_count = 0
param_separator = 0
found_first = False
i = 0
for elm in nameStack:
if '<' in elm :
open_template_count += 1
found_first = True
elif '>' in elm:
open_template_count -= 1
if found_first and open_template_count == 0:
self["name"] = "".join(nameStack[1:i + 1])
break;
i += 1
elif ":" in nameStack:
self['name'] = nameStack[ nameStack.index(':') - 1 ]
inheritList = []
if nameStack.count(':') == 1:
nameStack = nameStack[nameStack.index(":") + 1:]
while len(nameStack):
tmpStack = []
tmpInheritClass = {"access":"private", "virtual": False}
if "," in nameStack:
tmpStack = nameStack[:nameStack.index(",")]
nameStack = nameStack[nameStack.index(",") + 1:]
else:
tmpStack = nameStack
nameStack = []
# Convert template classes to one name in the last index
for i in range(0, len(tmpStack)):
if '<' in tmpStack[i]:
tmpStack2 = tmpStack[:i-1]
tmpStack2.append("".join(tmpStack[i-1:]))
tmpStack = tmpStack2
break
if len(tmpStack) == 0:
break;
elif len(tmpStack) == 1:
tmpInheritClass["class"] = tmpStack[0]
elif len(tmpStack) == 2:
tmpInheritClass["access"] = tmpStack[0]
tmpInheritClass["class"] = tmpStack[1]
elif len(tmpStack) == 3 and "virtual" in tmpStack:
tmpInheritClass["access"] = tmpStack[1] if tmpStack[1] != "virtual" else tmpStack[0]
tmpInheritClass["class"] = tmpStack[2]
tmpInheritClass["virtual"] = True
else:
warning_print( "Warning: can not parse inheriting class %s"%(" ".join(tmpStack)))
if '>' in tmpStack: pass # allow skip templates for now
else: raise NotImplementedError
if 'class' in tmpInheritClass: inheritList.append(tmpInheritClass)
elif nameStack.count(':') == 2: self['parent'] = self['name']; self['name'] = nameStack[-1]
elif nameStack.count(':') > 2 and nameStack[0] in ("class", "struct"):
tmpStack = nameStack[nameStack.index(":") + 1:]
superTmpStack = [[]]
for tok in tmpStack:
if tok == ',':
superTmpStack.append([])
else:
superTmpStack[-1].append(tok)
for tmpStack in superTmpStack:
tmpInheritClass = {"access":"private"}
if len(tmpStack) and tmpStack[0] in supportedAccessSpecifier:
tmpInheritClass["access"] = tmpStack[0]
tmpStack = tmpStack[1:]
inheritNSStack = []
while len(tmpStack) > 3:
if tmpStack[0] == ':': break;
if tmpStack[1] != ':': break;
if tmpStack[2] != ':': break;
inheritNSStack.append(tmpStack[0])
tmpStack = tmpStack[3:]
if len(tmpStack) == 1 and tmpStack[0] != ':':
inheritNSStack.append(tmpStack[0])
tmpInheritClass["class"] = "::".join(inheritNSStack)
inheritList.append(tmpInheritClass)
self['inherits'] = inheritList
if curTemplate:
self["template"] = curTemplate
trace_print("Setting template to '%s'"%self["template"])
methodAccessSpecificList = {}
propertyAccessSpecificList = {}
enumAccessSpecificList = {}
structAccessSpecificList = {}
typedefAccessSpecificList = {}
forwardAccessSpecificList = {}
for accessSpecifier in supportedAccessSpecifier:
methodAccessSpecificList[accessSpecifier] = []
propertyAccessSpecificList[accessSpecifier] = []
enumAccessSpecificList[accessSpecifier] = []
structAccessSpecificList[accessSpecifier] = []
typedefAccessSpecificList[accessSpecifier] = []
forwardAccessSpecificList[accessSpecifier] = []
self['methods'] = methodAccessSpecificList
self['properties'] = propertyAccessSpecificList
self['enums'] = enumAccessSpecificList
self['structs'] = structAccessSpecificList
self['typedefs'] = typedefAccessSpecificList
self['forward_declares'] = forwardAccessSpecificList
def show(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self["final"]: rtn += " final"
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
if "inherits" in list(self.keys()):
rtn += " Inherits: "
for inheritClass in self["inherits"]:
if inheritClass["virtual"]: rtn += "virtual "
rtn += "%s %s, "%(inheritClass["access"], inheritClass["class"])
rtn += "\n"
rtn += " {\n"
for accessSpecifier in supportedAccessSpecifier:
rtn += " %s\n"%(accessSpecifier)
#Enums
if (len(self["enums"][accessSpecifier])):
rtn += " <Enums>\n"
for enum in self["enums"][accessSpecifier]:
rtn += " %s\n"%(repr(enum))
#Properties
if (len(self["properties"][accessSpecifier])):
rtn += " <Properties>\n"
for property in self["properties"][accessSpecifier]:
rtn += " %s\n"%(repr(property))
#Methods
if (len(self["methods"][accessSpecifier])):
rtn += " <Methods>\n"
for method in self["methods"][accessSpecifier]:
rtn += "\t\t" + method.show() + '\n'
rtn += " }\n"
print(rtn)
def __str__(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self["final"]: rtn += " final"
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
if "inherits" in list(self.keys()) and len(self["inherits"]):
rtn += "Inherits: "
for inheritClass in self["inherits"]:
if inheritClass.get("virtual", False): rtn += "virtual "
rtn += "%s %s, "%(inheritClass["access"], inheritClass["class"])
rtn += "\n"
rtn += "{\n"
for accessSpecifier in supportedAccessSpecifier:
rtn += "%s\n"%(accessSpecifier)
#Enums
if (len(self["enums"][accessSpecifier])):
rtn += " // Enums\n"
for enum in self["enums"][accessSpecifier]:
rtn += " %s\n"%(repr(enum))
#Properties
if (len(self["properties"][accessSpecifier])):
rtn += " // Properties\n"
for property in self["properties"][accessSpecifier]:
rtn += " %s\n"%(repr(property))
#Methods
if (len(self["methods"][accessSpecifier])):
rtn += " // Methods\n"
for method in self["methods"][accessSpecifier]:
rtn += " %s\n"%(repr(method))
rtn += "}\n"
return rtn
class CppUnion( CppClass ):
"""Takes a name stack and turns it into a union
Contains the following Keys:
self['name'] - Name of the union
self['doxygen'] - Doxygen comments associated with the union if they exist
self['members'] - List of members the union has
An example of how this could look is as follows:
#self =
{
'name': ""
'members': []
}
"""
def __init__(self, nameStack):
CppClass.__init__(self, nameStack, None)
self["name"] = "union " + self["name"]
self["members"] = self["properties"]["public"]
def transform_to_union_keys(self):
print("union keys: %s"%list(self.keys()))
for key in ['inherits', 'parent', 'abstract', 'namespace', 'typedefs', 'methods']:
del self[key]
def show(self):
"""Convert class to a string"""
print(self)
def __str__(self):
"""Convert class to a string"""
namespace_prefix = ""
if self["namespace"]: namespace_prefix = self["namespace"] + "::"
rtn = "%s %s"%(self["declaration_method"], namespace_prefix + self["name"])
if self['abstract']: rtn += ' (abstract)\n'
else: rtn += '\n'
if 'doxygen' in list(self.keys()): rtn += self["doxygen"] + '\n'
if 'parent' in list(self.keys()) and self['parent']: rtn += 'parent class: ' + self['parent'] + '\n'
rtn += "{\n"
for member in self["members"]:
rtn += " %s\n"%(repr(member))
rtn += "}\n"
return rtn
class _CppMethod( dict ):
def _params_helper1( self, stack ):
# deal with "throw" keyword
if 'throw' in stack: stack = stack[ : stack.index('throw') ]
## remove GCC keyword __attribute__(...) and preserve returns ##
cleaned = []
hit = False; hitOpen = 0; hitClose = 0
for a in stack:
if a == '__attribute__': hit = True
if hit:
if a == '(': hitOpen += 1
elif a == ')': hitClose += 1
if a==')' and hitOpen == hitClose:
hit = False
else:
cleaned.append( a )
stack = cleaned
# also deal with attribute((const)) function prefix #
# TODO this needs to be better #
if len(stack) > 5:
a = ''.join(stack)
if a.startswith('((__const__))'): stack = stack[ 5 : ]
elif a.startswith('__attribute__((__const__))'): stack = stack[ 6 : ]
stack = stack[stack.index('(') + 1: ]
if not stack: return []
if len(stack)>=3 and stack[0]==')' and stack[1]==':': # is this always a constructor?
self['constructor'] = True
return []
stack.reverse(); _end_ = stack.index(')'); stack.reverse()
stack = stack[ : len(stack)-(_end_+1) ]
if '(' not in stack: return stack # safe to return, no defaults that init a class
# transforms ['someclass', '(', '0', '0', '0', ')'] into "someclass(000)"
r = []; hit=False
for a in stack:
if a == '(': hit=True
elif a == ')': hit=False
if hit or a == ')': r[-1] = r[-1] + a
else: r.append( a )
return r
def _params_helper2( self, params ):
for p in params:
p['method'] = self # save reference in variable to parent method
if '::' in p['type']:
ns = p['type'].split('::')[0]
if ns not in Resolver.NAMESPACES and ns in Resolver.CLASSES:
p['type'] = self['namespace'] + p['type']
else: p['namespace'] = self[ 'namespace' ]
class CppMethod( _CppMethod ):
"""Takes a name stack and turns it into a method
Contains the following Keys:
self['rtnType'] - Return type of the method (ex. "int")
self['name'] - Name of the method (ex. "getSize")
self['doxygen'] - Doxygen comments associated with the method if they exist
self['parameters'] - List of CppVariables
"""
def show(self):
r = ['method name: %s (%s)' %(self['name'],self['debug']) ]
if self['returns']: r.append( 'returns: %s'%self['returns'] )
if self['parameters']: r.append( 'number arguments: %s' %len(self['parameters']))
if self['pure_virtual']: r.append( 'pure virtual: %s'%self['pure_virtual'] )
if self['constructor']: r.append( 'constructor' )
if self['destructor']: r.append( 'destructor' )
return '\n\t\t '.join( r )
def __init__(self, nameStack, curClass, methinfo, curTemplate):
debug_print( "Method: %s"%nameStack )
debug_print( "Template: %s"%curTemplate )
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if "operator" in nameStack:
self["rtnType"] = " ".join(nameStack[:nameStack.index('operator')])
self["name"] = "".join(nameStack[nameStack.index('operator'):nameStack.index('(')])
else:
self["rtnType"] = " ".join(nameStack[:nameStack.index('(') - 1])
self["name"] = " ".join(nameStack[nameStack.index('(') - 1:nameStack.index('(')])
if self["rtnType"].startswith("virtual"):
self["rtnType"] = self["rtnType"][len("virtual"):].strip()
if len(self["rtnType"]) == 0 or self["name"] == curClass:
self["rtnType"] = "void"
self["rtnType"] = self["rtnType"].replace(' : : ', '::' )
self["rtnType"] = self["rtnType"].replace(" <","<")
self["rtnType"] = self["rtnType"].replace(" >",">").replace(">>", "> >").replace(">>", "> >")
self["rtnType"] = self["rtnType"].replace(" ,",",")
for spec in ["const", "final", "override"]:
self[spec] = False
for i in reversed(nameStack):
if i == spec:
self[spec] = True
break
elif i == ")":
break
self.update( methinfo )
self["line_number"] = detect_lineno(nameStack[0])
#Filter out initializer lists used in constructors
try:
paren_depth_counter = 0
for i in range(0, len(nameStack)):
elm = nameStack[i]
if elm == "(":
paren_depth_counter += 1
if elm == ")":
paren_depth_counter -=1
if paren_depth_counter == 0 and nameStack[i+1] == ':':
debug_print("Stripping out initializer list")
nameStack = nameStack[:i+1]
break
except: pass
paramsStack = self._params_helper1( nameStack )
debug_print( "curTemplate: %s"%curTemplate)
if curTemplate:
self["template"] = curTemplate
debug_print( "SET self['template'] to `%s`"%self["template"])
params = []
#See if there is a doxygen comment for the variable
doxyVarDesc = {}
if "doxygen" in self:
doxyLines = self["doxygen"].split("\n")
lastParamDesc = ""
for doxyLine in doxyLines:
if " @param " in doxyLine or " \param " in doxyLine:
try:
#Strip out the param
doxyLine = doxyLine[doxyLine.find("param ") + 6:]
(var, desc) = doxyLine.split(" ", 1)
doxyVarDesc[var] = desc.strip()
lastParamDesc = var
except: pass
elif " @return " in doxyLine or " \return " in doxyLine:
lastParamDesc = ""
# not handled for now
elif lastParamDesc:
try:
doxyLine = doxyLine.strip()
if " " not in doxyLine:
lastParamDesc = ""
continue
doxyLine = doxyLine[doxyLine.find(" ") + 1:]
doxyVarDesc[lastParamDesc] += " " + doxyLine
except: pass
#Create the variable now
while (len(paramsStack)):
# Find commas that are not nested in <>'s like template types
open_template_count = 0
param_separator = 0
i = 0
for elm in paramsStack:
if '<' in elm :
open_template_count += 1
elif '>' in elm:
open_template_count -= 1
elif elm == ',' and open_template_count == 0:
param_separator = i
break
i += 1
if param_separator:
param = CppVariable(paramsStack[0:param_separator], doxyVarDesc=doxyVarDesc)
if len(list(param.keys())): params.append(param)
paramsStack = paramsStack[param_separator + 1:]
else:
param = CppVariable(paramsStack, doxyVarDesc=doxyVarDesc)
if len(list(param.keys())): params.append(param)
break
self["parameters"] = params
#self._params_helper2( params ) # mods params inplace
def __str__(self):
filter_keys = ("parent", "defined", "operator", "returns_reference")
cpy = dict((k,v) for (k,v) in list(self.items()) if k not in filter_keys)
return "%s"%cpy
class _CppVariable(dict):
def _name_stack_helper( self, stack ):
stack = list(stack)
if '=' not in stack: # TODO refactor me
# check for array[n] and deal with funny array syntax: "int myvar:99"
array = []
while stack and stack[-1].isdigit(): array.append( stack.pop() )
if array: array.reverse(); self['array'] = int(''.join(array))
if stack and stack[-1].endswith(':'): stack[-1] = stack[-1][:-1]
while stack and not stack[-1]: stack.pop() # can be empty
return stack
def init(self):
#assert self['name'] # allow unnamed variables, methods like this: "void func(void);"
a = []
self['aliases'] = []; self['parent'] = None; self['typedef'] = None
for key in 'constant reference pointer static typedefs class fundamental unresolved'.split():
self[ key ] = 0
for b in self['type'].split():
if b == '__const__': b = 'const'
a.append( b )
self['type'] = ' '.join( a )
class CppVariable( _CppVariable ):
"""Takes a name stack and turns it into a method
Contains the following Keys:
self['type'] - Type for the variable (ex. "const string &")
self['name'] - Name of the variable (ex. "numItems")
self['namespace'] - Namespace containing the enum
self['desc'] - Description of the variable if part of a method (optional)
self['doxygen'] - Doxygen comments associated with the method if they exist
self['defaultValue'] - Default value of the variable, this key will only
exist if there is a default value
self['extern'] - True if its an extern, false if not
"""
Vars = []
def __init__(self, nameStack, **kwargs):
debug_print("trace %s"%nameStack)
if len(nameStack) and nameStack[0] == "extern":
self['extern'] = True
del nameStack[0]
else:
self['extern'] = False
_stack_ = nameStack
if "[" in nameStack: #strip off array informatin
arrayStack = nameStack[nameStack.index("["):]
if nameStack.count("[") > 1:
debug_print("Multi dimensional array")
debug_print("arrayStack=%s"%arrayStack)
nums = [x for x in arrayStack if x.isdigit()] # a list, not filter(): it is iterated again by join() below
# Calculate size by multiplying all dimensions
p = 1
for n in nums:
p *= int(n)
#Multi dimensional array
self["array_size"] = p
self["multi_dimensional_array"] = 1
self["multi_dimensional_array_size"] = "x".join(nums)
else:
debug_print("Array")
if len(arrayStack) == 3:
self["array_size"] = arrayStack[1]
nameStack = nameStack[:nameStack.index("[")]
self["array"] = 1
else:
self["array"] = 0
nameStack = self._name_stack_helper( nameStack )
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
debug_print( "Variable: %s"%nameStack )
self["line_number"] = detect_lineno(nameStack[0])
self["function_pointer"] = 0
if (len(nameStack) < 2): # +++
if len(nameStack) == 1: self['type'] = nameStack[0]; self['name'] = ''
else: error_print(_stack_); assert 0
elif is_function_pointer_stack(nameStack): #function pointer
self["type"] = " ".join(nameStack[:nameStack.index("(") + 2] + nameStack[nameStack.index(")") :])
self["name"] = " ".join(nameStack[nameStack.index("(") + 2 : nameStack.index(")")])
self["function_pointer"] = 1
elif ("=" in nameStack):
self["type"] = " ".join(nameStack[:nameStack.index("=") - 1])
self["name"] = nameStack[nameStack.index("=") - 1]
self["defaultValue"] = " ".join(nameStack[nameStack.index("=") + 1:]) # deprecate camelCase in dicts
self['default'] = " ".join(nameStack[nameStack.index("=") + 1:])
elif is_fundamental(nameStack[-1]) or nameStack[-1] in ['>', '<' , ':', '.']:
#Un named parameter
self["type"] = " ".join(nameStack)
self["name"] = ""
else: # common case
self["type"] = " ".join(nameStack[:-1])
self["name"] = nameStack[-1]
self["type"] = self["type"].replace(" :",":")
self["type"] = self["type"].replace(": ",":")
self["type"] = self["type"].replace(" <","<")
self["type"] = self["type"].replace(" >",">").replace(">>", "> >").replace(">>", "> >")
self["type"] = self["type"].replace(" ,",",")
#Optional doxygen description
try:
self["desc"] = kwargs["doxyVarDesc"][self["name"]]
except: pass
self.init()
CppVariable.Vars.append( self ) # save and resolve later
def __str__(self):
keys_white_list = ['constant','name','reference','type','static','pointer','desc', 'line_number', 'extern']
cpy = dict((k,v) for (k,v) in list(self.items()) if k in keys_white_list)
if "array_size" in self: cpy["array_size"] = self["array_size"]
return "%s"%cpy
class _CppEnum(dict):
def resolve_enum_values( self, values ):
"""Evaluates the values list of dictionaries passed in and figures out what the enum value
for each enum is editing in place:
Example:
From: [{'name': 'ORANGE'},
{'name': 'RED'},
{'name': 'GREEN', 'value': '8'}]
To: [{'name': 'ORANGE', 'value': 0},
{'name': 'RED', 'value': 1},
{'name': 'GREEN', 'value': 8}]
"""
t = int; i = 0
names = [ v['name'] for v in values ]
for v in values:
if 'value' in v:
a = v['value'].strip()
# Remove single quotes from single quoted chars (unless part of some expression)
if len(a) == 3 and a[0] == "'" and a[2] == "'":
a = v['value'] = a[1]
if a.lower().startswith("0x"):
try:
i = a = int(a , 16)
except:pass
elif a.isdigit():
i = a = int( a )
elif a in names:
for other in values:
if other['name'] == a:
v['value'] = other['value']
break
elif '"' in a or "'" in a: t = str # only if there are quotes it this a string enum
else:
try:
a = i = ord(a)
except: pass
#Allow access of what is in the file pre-convert if converted
if v['value'] != str(a):
v['raw_value'] = v['value']
v['value'] = a
else: v['value'] = i
try:
v['value'] = v['value'].replace(" < < ", " << ").replace(" > > ", " >> ")
except: pass
i += 1
return t
class CppEnum(_CppEnum):
"""Takes a name stack and turns it into an Enum
Contains the following Keys:
self['name'] - Name of the enum (ex. "ItemState")
self['namespace'] - Namespace containing the enum
self['values'] - List of values where the values are a dictionary of the
form {"name": name of the key (ex. "PARSING_HEADER"),
"value": Specified value of the enum, this key will only exist
if a value for a given enum value was defined
}
"""
def __init__(self, nameStack):
global doxygenCommentCache
if len(doxygenCommentCache):
self["doxygen"] = doxygenCommentCache
doxygenCommentCache = ""
if len(nameStack) == 3 and nameStack[0] == "enum":
debug_print("Created enum as just name/value")
self["name"] = nameStack[1]
self["instances"]=[nameStack[2]]
if len(nameStack) < 4 or "{" not in nameStack or "}" not in nameStack:
#Not enough stuff for an enum
debug_print("Bad enum")
return
valueList = []
self["line_number"] = detect_lineno(nameStack[0])
#Figure out what values it has
valueStack = nameStack[nameStack.index('{') + 1: nameStack.index('}')]
while len(valueStack):
tmpStack = []
if "," in valueStack:
tmpStack = valueStack[:valueStack.index(",")]
valueStack = valueStack[valueStack.index(",") + 1:]
else:
tmpStack = valueStack
valueStack = []
d = {}
if len(tmpStack) == 1: d["name"] = tmpStack[0]
elif len(tmpStack) >= 3 and tmpStack[1] == "=":
d["name"] = tmpStack[0]; d["value"] = " ".join(tmpStack[2:])
elif len(tmpStack) == 2 and tmpStack[1] == "=":
debug_print( "WARN-enum: parser missed value for %s"%tmpStack[0] )
d["name"] = tmpStack[0]
if d: valueList.append( d )
if len(valueList):
self['type'] = self.resolve_enum_values( valueList ) # returns int for standard enum
self["values"] = valueList
else:
warning_print( 'WARN-enum: empty enum %s'%nameStack )
return
#Figure out if it has a name
preBraceStack = nameStack[:nameStack.index("{")]
postBraceStack = nameStack[nameStack.index("}") + 1:]
self["typedef"] = False
if (len(preBraceStack) == 2 and "typedef" not in nameStack):
self["name"] = preBraceStack[1]
elif len(postBraceStack) and "typedef" in nameStack:
self["name"] = " ".join(postBraceStack)
self["typedef"] = True
else: warning_print( 'WARN-enum: nameless enum %s'%nameStack )
#See if there are instances of this
if "typedef" not in nameStack and len(postBraceStack):
self["instances"] = []
for var in postBraceStack:
if "," in var:
continue
self["instances"].append(var)
self["namespace"] = ""
class CppStruct(dict):
Structs = []
def __init__(self, nameStack):
if len(nameStack) >= 2: self['type'] = nameStack[1]
else: self['type'] = None
self['fields'] = []
self.Structs.append( self )
global curLine
self["line_number"] = curLine
C99_NONSTANDARD = {
'int8' : 'signed char',
'int16' : 'short int',
'int32' : 'int',
'int64' : 'int64_t', # this can be: long int (64bit), or long long int (32bit)
'uint' : 'unsigned int',
'uint8' : 'unsigned char',
'uint16' : 'unsigned short int',
'uint32' : 'unsigned int',
'uint64' : 'uint64_t', # depends on host bits
}
def standardize_fundamental( s ):
if s in C99_NONSTANDARD: return C99_NONSTANDARD[ s ]
else: return s
class Resolver(object):
C_FUNDAMENTAL = 'size_t unsigned signed bool char wchar short int float double long void'.split()
C_FUNDAMENTAL += 'struct union enum'.split()
SubTypedefs = {} # TODO deprecate?
NAMESPACES = []
CLASSES = {}
STRUCTS = {}
def initextra(self):
self.typedefs = {}
self.typedefs_order = []
self.classes_order = []
self.structs = Resolver.STRUCTS
self.structs_order = []
self.namespaces = Resolver.NAMESPACES # save all namespaces
self.curStruct = None
self.stack = [] # full name stack, good idea to keep both stacks? (simple stack and full stack)
self._classes_brace_level = {} # class name : level
self._structs_brace_level = {} # struct type : level
self._method_body = None
self._forward_decls = []
self._template_typenames = [] # template<typename XXX>
def current_namespace(self): return self.cur_namespace(True)
def cur_namespace(self, add_double_colon=False):
rtn = ""
i = 0
while i < len(self.nameSpaces):
rtn += self.nameSpaces[i]
if add_double_colon or i < len(self.nameSpaces) - 1: rtn += "::"
i+=1
return rtn
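# Rough illustration (namespace stack chosen here, not from the original):
#   with self.nameSpaces = ['Outer', 'Inner']:
#   cur_namespace() -> 'Outer::Inner'
#   cur_namespace(True) -> 'Outer::Inner::'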
def guess_ctypes_type( self, string ):
pointers = string.count('*')
string = string.replace('*','')
a = string.split()
if 'unsigned' in a: u = 'u'
else: u = ''
if 'long' in a and 'double' in a: b = 'longdouble' # there is no ctypes.c_ulongdouble (this is a 64bit float?)
elif a.count('long') == 2 and 'int' in a: b = '%sint64' %u
elif a.count('long') == 2: b = '%slonglong' %u
elif 'long' in a: b = '%slong' %u
elif 'double' in a: b = 'double' # no udouble in ctypes
elif 'short' in a: b = '%sshort' %u
elif 'char' in a: b = '%schar' %u
elif 'wchar' in a: b = 'wchar'
elif 'bool' in a: b = 'bool'
elif 'float' in a: b = 'float'
elif 'int' in a: b = '%sint' %u
elif 'int8' in a: b = 'int8'
elif 'int16' in a: b = 'int16'
elif 'int32' in a: b = 'int32'
elif 'int64' in a: b = 'int64'
elif 'uint' in a: b = 'uint'
elif 'uint8' in a: b = 'uint8'
elif 'uint16' in a: b = 'uint16'
elif 'uint32' in a: b = 'uint32'
elif 'uint64' in a: b = 'uint64'
elif 'size_t' in a: b = 'size_t'
elif 'void' in a: b = 'void_p'
elif string in 'struct union'.split(): b = 'void_p' # what should be done here? don't trust struct, it could be a class, no need to expose via ctypes
else: b = 'void_p'
if not pointers: return 'ctypes.c_%s' %b
else:
x = ''
for i in range(pointers): x += 'ctypes.POINTER('
x += 'ctypes.c_%s' %b
x += ')' * pointers
return x
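# Rough illustration (type strings chosen here, not from the original):
# pointers become nested ctypes.POINTER wrappers around the base guess.
#   guess_ctypes_type('unsigned char *') -> 'ctypes.POINTER(ctypes.c_uchar)'
#   guess_ctypes_type('long long') -> 'ctypes.c_longlong'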
def resolve_type( self, string, result ): # recursive
'''
keeps track of useful things like: how many pointers, number of typedefs, is fundamental or a class, etc...
'''
## be careful with templates, what is inside <something*> can be a pointer but the overall type is not a pointer
## these come before a template
s = string.split('<')[0]
result[ 'constant' ] += s.split().count('const')
result[ 'static' ] += s.split().count('static')
result[ 'mutable' ] = 'mutable' in s.split()
## these come after a template
s = string.split('>')[-1]
result[ 'pointer' ] += s.count('*')
result[ 'reference' ] += s.count('&')
x = string; alias = False
for a in '* & const static mutable'.split(): x = x.replace(a,'')
for y in x.split():
if y not in self.C_FUNDAMENTAL: alias = y; break
#if alias == 'class':
# result['class'] = result['name'] # forward decl of class
# result['forward_decl'] = True
if alias == '__extension__': result['fundamental_extension'] = True
elif alias:
result['aliases'].append( alias )
if alias in C99_NONSTANDARD:
result['type'] = C99_NONSTANDARD[ alias ]
result['typedef'] = alias
result['typedefs'] += 1
elif alias in self.typedefs:
result['typedefs'] += 1
result['typedef'] = alias
self.resolve_type( self.typedefs[alias], result )
elif alias in self.classes:
klass = self.classes[alias]; result['fundamental'] = False
result['class'] = klass
result['unresolved'] = False
else: result['unresolved'] = True
else:
result['fundamental'] = True
result['unresolved'] = False
def finalize_vars(self):
for s in CppStruct.Structs: # vars within structs can be ignored if they do not resolve
for var in s['fields']: var['parent'] = s['type']
#for c in self.classes.values():
# for var in c.get_all_properties(): var['parent'] = c['name']
## RESOLVE ##
for var in CppVariable.Vars:
self.resolve_type( var['type'], var )
#if 'method' in var and var['method']['name'] == '_notifyCurrentCamera': print(var); assert 0
# then find concrete type and best guess ctypes type #
for var in CppVariable.Vars:
if not var['aliases']: #var['fundamental']:
var['ctypes_type'] = self.guess_ctypes_type( var['type'] )
else:
var['unresolved'] = False # below may test to True
if var['class']:
var['ctypes_type'] = 'ctypes.c_void_p'
else:
assert var['aliases']
tag = var['aliases'][0]
klass = None
nestedEnum = None
nestedStruct = None
nestedTypedef = None
if 'method' in var and 'parent' in list(var['method'].keys()):
klass = var['method']['parent']
if tag in var['method']['parent']._public_enums:
nestedEnum = var['method']['parent']._public_enums[ tag ]
elif tag in var['method']['parent']._public_structs:
nestedStruct = var['method']['parent']._public_structs[ tag ]
elif tag in var['method']['parent']._public_typedefs:
nestedTypedef = var['method']['parent']._public_typedefs[ tag ]
if '<' in tag: # should also contain '>'
var['template'] = tag # do not resolve templates
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif nestedEnum:
enum = nestedEnum
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['enum'] = var['method']['path'] + '::' + enum['name']
var['fundamental'] = True
elif nestedStruct:
var['ctypes_type'] = 'ctypes.c_void_p'
var['raw_type'] = var['method']['path'] + '::' + nestedStruct['type']
var['fundamental'] = False
elif nestedTypedef:
var['fundamental'] = is_fundamental( nestedTypedef )
if not var['fundamental']:
var['raw_type'] = var['method']['path'] + '::' + tag
else:
_tag = tag
if '::' in tag and tag.split('::')[0] in self.namespaces: tag = tag.split('::')[-1]
con = self.concrete_typedef( _tag )
if con:
var['concrete_type'] = con
var['ctypes_type'] = self.guess_ctypes_type( var['concrete_type'] )
elif tag in self.structs:
trace_print( 'STRUCT', var )
var['struct'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
var['raw_type'] = self.structs[tag]['namespace'] + '::' + tag
elif tag in self._forward_decls:
var['forward_declared'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
elif tag in self.global_enums:
enum = self.global_enums[ tag ]
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['enum'] = enum['namespace'] + enum['name']
var['fundamental'] = True
elif var['parent']:
warning_print( 'WARN unresolved %s'%_tag)
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif tag.count('::')==1:
trace_print( 'trying to find nested something in', tag )
a = tag.split('::')[0]
b = tag.split('::')[-1]
if a in self.classes: # a::b is most likely something nested in a class
klass = self.classes[ a ]
if b in klass._public_enums:
trace_print( '...found nested enum', b )
enum = klass._public_enums[ b ]
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
try:
if 'method' in var: var['enum'] = var['method']['path'] + '::' + enum['name']
else: # class property
var['unresolved'] = True
except:
var['unresolved'] = True
var['fundamental'] = True
else: var['unresolved'] = True # TODO klass._public_xxx
elif a in self.namespaces: # a::b can also be a nested namespace
if b in self.global_enums:
enum = self.global_enums[ b ]
trace_print(enum)
trace_print(var)
assert 0
elif b in self.global_enums: # falling back, this is a bit ugly
enum = self.global_enums[ b ]
assert a in enum['namespace'].split('::')
if enum['type'] is int:
var['ctypes_type'] = 'ctypes.c_int'
var['raw_type'] = 'int'
elif enum['type'] is str:
var['ctypes_type'] = 'ctypes.c_char_p'
var['raw_type'] = 'char*'
var['fundamental'] = True
else: # boost::gets::crazy
trace_print('NAMESPACES', self.namespaces)
trace_print( a, b )
trace_print( '---- boost gets crazy ----' )
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif 'namespace' in var and self.concrete_typedef(var['namespace']+tag):
#print( 'TRYING WITH NS', var['namespace'] )
con = self.concrete_typedef( var['namespace']+tag )
if con:
var['typedef'] = var['namespace']+tag
var['type'] = con
if 'struct' in con.split():
var['raw_type'] = var['typedef']
var['ctypes_type'] = 'ctypes.c_void_p'
else:
self.resolve_type( var['type'], var )
var['ctypes_type'] = self.guess_ctypes_type( var['type'] )
elif '::' in var:
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
elif tag in self.SubTypedefs: # TODO remove SubTypedefs
if 'property_of_class' in var or 'property_of_struct' in var:
trace_print( 'class:', self.SubTypedefs[ tag ], 'tag:', tag )
var['typedef'] = self.SubTypedefs[ tag ] # class name
var['ctypes_type'] = 'ctypes.c_void_p'
else:
trace_print( "WARN-this should almost never happen!" )
trace_print( var ); trace_print('-'*80)
var['unresolved'] = True
elif tag in self._template_typenames:
var['typename'] = tag
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True # TODO, how to deal with templates?
elif tag.startswith('_'): # assume starting with underscore is not important for wrapping
warning_print( 'WARN unresolved %s'%_tag)
var['ctypes_type'] = 'ctypes.c_void_p'
var['unresolved'] = True
else:
trace_print( 'WARN: unknown type', var )
assert 'property_of_class' in var or 'property_of_struct' in var # only allow this case
var['unresolved'] = True
## if not resolved and is a method param, not going to wrap these methods ##
if var['unresolved'] and 'method' in var: var['method']['unresolved_parameters'] = True
# create stripped raw_type #
p = '* & const static mutable'.split() # +++ new July7: "mutable"
for var in CppVariable.Vars:
if 'raw_type' not in var:
raw = []
for x in var['type'].split():
if x not in p: raw.append( x )
var['raw_type'] = ' '.join( raw )
#if 'AutoConstantEntry' in var['raw_type']: print(var); assert 0
if var['class']:
if '::' not in var['raw_type']:
if not var['class']['parent']:
var['raw_type'] = var['class']['namespace'] + '::' + var['raw_type']
elif var['class']['parent'] in self.classes:
parent = self.classes[ var['class']['parent'] ]
var['raw_type'] = parent['namespace'] + '::' + var['class']['name'] + '::' + var['raw_type']
else:
var['unresolved'] = True
elif '::' in var['raw_type'] and var['raw_type'].split('::')[0] not in self.namespaces:
var['raw_type'] = var['class']['namespace'] + '::' + var['raw_type']
else:
var['unresolved'] = True
elif 'forward_declared' in var and 'namespace' in var:
if '::' not in var['raw_type']:
var['raw_type'] = var['namespace'] + var['raw_type']
elif '::' in var['raw_type'] and var['raw_type'].split('::')[0] in self.namespaces:
pass
else: trace_print('-'*80); trace_print(var); raise NotImplementedError
## need full name space for classes in raw type ##
if var['raw_type'].startswith( '::' ):
#print(var)
#print('NAMESPACE', var['class']['namespace'])
#print( 'PARENT NS', var['class']['parent']['namespace'] )
#assert 0
var['unresolved'] = True
if 'method' in var: var['method']['unresolved_parameters'] = True
#var['raw_type'] = var['raw_type'][2:]
# Take care of #defines and #pragmas etc
trace_print("Processing precomp_macro_buf: %s"%self._precomp_macro_buf)
for m in self._precomp_macro_buf:
macro = m.replace("<CppHeaderParser_newline_temp_replacement>\\n", "\n")
try:
if macro.lower().startswith("#define"):
trace_print("Adding #define %s"%macro)
self.defines.append(macro.split(" ", 1)[1].strip())
elif macro.lower().startswith("#if") or macro.lower().startswith("#endif") or macro.lower().startswith("#else"):
self.conditionals.append(macro)
elif macro.lower().startswith("#pragma"):
trace_print("Adding #pragma %s"%macro)
self.pragmas.append(macro.split(" ", 1)[1].strip())
elif macro.lower().startswith("#include"):
trace_print("Adding #include %s"%macro)
self.includes.append(macro.split(" ", 1)[1].strip())
else:
debug_print("Cant detect what to do with precomp macro '%s'"%macro)
except: pass
self._precomp_macro_buf = None
def concrete_typedef( self, key ):
if key not in self.typedefs:
#print( 'FAILED typedef', key )
return None
while key in self.typedefs:
prev = key
key = self.typedefs[ key ]
if '<' in key or '>' in key: return prev # stop at template
if key.startswith('std::'): return key # stop at std lib
return key
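# Rough illustration (typedef chains chosen here, not from the original):
# chains are followed to the end, stopping early at templates and at the
# first std:: type.
#   with self.typedefs = {'MyInt': 'IntAlias', 'IntAlias': 'int'}:
#   concrete_typedef('MyInt') -> 'int'
#   with self.typedefs = {'Str': 'std::string'}:
#   concrete_typedef('Str') -> 'std::string'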
class _CppHeader( Resolver ):
def finalize(self):
self.finalize_vars()
# finalize classes and method returns types
for cls in list(self.classes.values()):
for meth in cls.get_all_methods():
if meth['pure_virtual']: cls['abstract'] = True
if not meth['returns_fundamental'] and meth['returns'] in C99_NONSTANDARD:
meth['returns'] = C99_NONSTANDARD[meth['returns']]
meth['returns_fundamental'] = True
elif not meth['returns_fundamental']: # describe the return type
con = None
if cls['namespace'] and '::' not in meth['returns']:
con = self.concrete_typedef( cls['namespace'] + '::' + meth['returns'] )
else: con = self.concrete_typedef( meth['returns'] )
if con:
meth['returns_concrete'] = con
meth['returns_fundamental'] = is_fundamental( con )
elif meth['returns'] in self.classes:
trace_print( 'meth returns class:', meth['returns'] )
meth['returns_class'] = True
elif meth['returns'] in self.SubTypedefs:
meth['returns_class'] = True
meth['returns_nested'] = self.SubTypedefs[ meth['returns'] ]
elif meth['returns'] in cls._public_enums:
enum = cls._public_enums[ meth['returns'] ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif meth['returns'] in self.global_enums:
enum = self.global_enums[ meth['returns'] ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif meth['returns'].count('::')==1:
trace_print( meth )
a,b = meth['returns'].split('::')
if a in self.namespaces:
if b in self.classes:
klass = self.classes[ b ]
meth['returns_class'] = a + '::' + b
elif '<' in b and '>' in b:
warning_print( 'WARN-can not return template: %s'%b )
meth['returns_unknown'] = True
elif b in self.global_enums:
enum = self.global_enums[ b ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
else: trace_print( a, b); trace_print( meth); meth['returns_unknown'] = True # +++
elif a in self.classes:
klass = self.classes[ a ]
if b in klass._public_enums:
trace_print( '...found nested enum', b )
enum = klass._public_enums[ b ]
meth['returns_enum'] = enum['type']
meth['returns_fundamental'] = True
if enum['type'] == int: meth['returns'] = 'int'
else: meth['returns'] = 'char*'
elif b in klass._public_forward_declares:
meth['returns_class'] = True
elif b in klass._public_typedefs:
typedef = klass._public_typedefs[ b ]
meth['returns_fundamental'] = is_fundamental( typedef )
else:
trace_print( meth ) # should be a nested class, TODO fix me.
meth['returns_unknown'] = True
elif '::' in meth['returns']:
trace_print('TODO namespace or extra nested return:', meth)
meth['returns_unknown'] = True
else:
trace_print( 'WARN: UNKNOWN RETURN', meth['name'], meth['returns'])
meth['returns_unknown'] = True
if meth["returns"].startswith(": : "):
meth["returns"] = meth["returns"].replace(": : ", "::")
for cls in list(self.classes.values()):
methnames = cls.get_all_method_names()
pvm = cls.get_all_pure_virtual_methods()
for d in cls['inherits']:
c = d['class']
a = d['access'] # do not depend on this to be 'public'
trace_print( 'PARENT CLASS:', c )
if c not in self.classes: trace_print('WARN: parent class not found')
if c in self.classes and self.classes[c]['abstract']:
p = self.classes[ c ]
for meth in p.get_all_methods(): #p["methods"]["public"]:
trace_print( '\t\tmeth', meth['name'], 'pure virtual', meth['pure_virtual'] )
if meth['pure_virtual'] and meth['name'] not in methnames: cls['abstract'] = True; break
def evaluate_struct_stack(self):
"""Create a Struct out of the name stack (but not its parts)"""
#print( 'eval struct stack', self.nameStack )
#if self.braceDepth != len(self.nameSpaces): return
struct = CppStruct(self.nameStack)
struct["namespace"] = self.cur_namespace()
self.structs[ struct['type'] ] = struct
self.structs_order.append( struct )
if self.curClass:
struct['parent'] = self.curClass
klass = self.classes[ self.curClass ]
klass['structs'][self.curAccessSpecifier].append( struct )
if self.curAccessSpecifier == 'public': klass._public_structs[ struct['type'] ] = struct
self.curStruct = struct
self._structs_brace_level[ struct['type'] ] = self.braceDepth
def parse_method_type( self, stack ):
trace_print( 'meth type info', stack )
if stack[0] in ':;' and stack[1] != ':': stack = stack[1:]
info = {
'debug': ' '.join(stack).replace(' : : ', '::' ).replace(' < ', '<' ).replace(' > ', '> ' ).replace(" >",">").replace(">>", "> >").replace(">>", "> >"),
'class':None,
'namespace':self.cur_namespace(add_double_colon=True),
}
for tag in 'defined pure_virtual operator constructor destructor extern template virtual static explicit inline friend returns returns_pointer returns_fundamental returns_class'.split(): info[tag]=False
header = stack[ : stack.index('(') ]
header = ' '.join( header )
header = header.replace(' : : ', '::' )
header = header.replace(' < ', '<' )
header = header.replace(' > ', '> ' )
header = header.strip()
if '{' in stack:
info['defined'] = True
self._method_body = self.braceDepth + 1
trace_print( 'NEW METHOD WITH BODY', self.braceDepth )
elif stack[-1] == ';':
info['defined'] = False
self._method_body = None # not a great idea to be clearing here
else: assert 0
if len(stack) > 3 and stack[-1] == ';' and stack[-2] == '0' and stack[-3] == '=':
info['pure_virtual'] = True
r = header.split()
name = None
if 'operator' in stack: # rare case op overload defined outside of class
op = stack[ stack.index('operator')+1 : stack.index('(') ]
op = ''.join(op)
if not op:
if " ".join(['operator', '(', ')', '(']) in " ".join(stack):
op = "()"
else:
trace_print( 'Error parsing operator')
return None
info['operator'] = op
name = 'operator' + op
a = stack[ : stack.index('operator') ]
elif r:
name = r[-1]
a = r[ : -1 ] # strip name
if name is None: return None
#if name.startswith('~'): name = name[1:]
while a and a[0] == '}': # strip - can have multiple } }
a = a[1:]
if '::' in name:
#klass,name = name.split('::') # methods can be defined outside of class
klass = name[ : name.rindex('::') ]
name = name.split('::')[-1]
info['class'] = klass
if klass in self.classes and not self.curClass:
#Class function defined outside the class
return None
# info['name'] = name
#else: info['name'] = name
if name.startswith('~'):
info['destructor'] = True
name = name[1:]
elif not a or (name == self.curClass and len(self.curClass)):
info['constructor'] = True
info['name'] = name
for tag in 'extern virtual static explicit inline friend'.split():
if tag in a: info[ tag ] = True; a.remove( tag ) # inplace
if 'template' in a:
a.remove('template')
b = ' '.join( a )
if '>' in b:
info['template'] = b[ : b.index('>')+1 ]
info['returns'] = b[ b.index('>')+1 : ] # find return type, could be incorrect... TODO
if '<typename' in info['template'].split():
typname = info['template'].split()[-1]
typname = typname[ : -1 ] # strip '>'
if typname not in self._template_typenames: self._template_typenames.append( typname )
else: info['returns'] = ' '.join( a )
else: info['returns'] = ' '.join( a )
info['returns'] = info['returns'].replace(' <', '<').strip()
## be careful with templates, do not count pointers inside template
info['returns_pointer'] = info['returns'].split('>')[-1].count('*')
if info['returns_pointer']: info['returns'] = info['returns'].replace('*','').strip()
info['returns_reference'] = '&' in info['returns']
if info['returns']: info['returns'] = info['returns'].replace('&','').strip()
a = []
for b in info['returns'].split():
if b == '__const__': info['returns_const'] = True
elif b == 'const': info['returns_const'] = True
else: a.append( b )
info['returns'] = ' '.join( a )
info['returns_fundamental'] = is_fundamental( info['returns'] )
return info
def evaluate_method_stack(self):
"""Create a method out of the name stack"""
if self.curStruct:
trace_print( 'WARN - struct contains methods - skipping' )
trace_print( self.stack )
assert 0
info = self.parse_method_type( self.stack )
if info:
if info[ 'class' ] and info['class'] in self.classes: # case where methods are defined outside of class
newMethod = CppMethod(self.nameStack, info['name'], info, self.curTemplate)
klass = self.classes[ info['class'] ]
klass[ 'methods' ][ 'public' ].append( newMethod )
newMethod['parent'] = klass
if klass['namespace']: newMethod['path'] = klass['namespace'] + '::' + klass['name']
else: newMethod['path'] = klass['name']
elif self.curClass: # normal case
newMethod = CppMethod(self.nameStack, self.curClass, info, self.curTemplate)
klass = self.classes[self.curClass]
klass['methods'][self.curAccessSpecifier].append(newMethod)
newMethod['parent'] = klass
if klass['namespace']: newMethod['path'] = klass['namespace'] + '::' + klass['name']
else: newMethod['path'] = klass['name']
else: #non class functions
debug_print("FREE FUNCTION")
newMethod = CppMethod(self.nameStack, None, info, self.curTemplate)
self.functions.append(newMethod)
global parseHistory
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "method", "item": newMethod})
else:
trace_print( 'free function?', self.nameStack )
self.stack = []
def _parse_typedef( self, stack, namespace='' ):
if not stack or 'typedef' not in stack: return
stack = list( stack ) # copy just to be safe
if stack[-1] == ';': stack.pop()
while stack and stack[-1].isdigit(): stack.pop() # throw away array size for now
idx = stack.index('typedef')
if stack[-1] == "]":
try:
name = namespace + "".join(stack[-4:])
# Strip off the array part so the rest of the parsing is better
stack = stack[:-3]
except:
name = namespace + stack[-1]
else:
name = namespace + stack[-1]
s = ''
for a in stack[idx+1:-1]:
if a == '{': break
if not s or s[-1] in ':<>' or a in ':<>': s += a # keep compact
else: s += ' ' + a # spacing
r = {'name':name, 'raw':s, 'type':s}
if not is_fundamental(s):
if 'struct' in s.split(): pass # TODO is this right? "struct ns::something"
elif '::' not in s: s = namespace + s # only add the current name space if no namespace given
r['type'] = s
if s: return r
def evaluate_typedef(self):
ns = self.cur_namespace(add_double_colon=True)
res = self._parse_typedef( self.stack, ns )
if res:
name = res['name']
self.typedefs[ name ] = res['type']
if name not in self.typedefs_order: self.typedefs_order.append( name )
def evaluate_property_stack(self):
"""Create a Property out of the name stack"""
global parseHistory
assert self.stack[-1] == ';'
debug_print( "trace" )
if self.nameStack[0] == 'typedef':
if self.curClass:
typedef = self._parse_typedef( self.stack )
name = typedef['name']
klass = self.classes[ self.curClass ]
klass[ 'typedefs' ][ self.curAccessSpecifier ].append( name )
if self.curAccessSpecifier == 'public': klass._public_typedefs[ name ] = typedef['type']
Resolver.SubTypedefs[ name ] = self.curClass
else: assert 0
elif self.curStruct or self.curClass:
if len(self.nameStack) == 1:
#See if we can de-anonymize the type
filteredParseHistory = [h for h in parseHistory if h["braceDepth"] == self.braceDepth]
if len(filteredParseHistory) and filteredParseHistory[-1]["item_type"] == "class":
self.nameStack.insert(0, filteredParseHistory[-1]["item"]["name"])
debug_print("DEANONYMOIZING %s to type '%s'"%(self.nameStack[1], self.nameStack[0]))
if "," in self.nameStack: #Maybe we have a variable list
#Figure out which part is the variable separator, remembering templates and function pointers
#First find the left-most comma outside of a '>' and ')'
leftMostComma = 0
for i in range(0, len(self.nameStack)):
name = self.nameStack[i]
if name in (">", ")"): leftMostComma = 0
if leftMostComma == 0 and name == ",": leftMostComma = i
# Is it really a list of variables?
if leftMostComma != 0:
trace_print("Multiple variables for namestack in %s. Separating processing"%self.nameStack)
orig_nameStack = self.nameStack[:]
orig_stack = self.stack[:]
type_nameStack = orig_nameStack[:leftMostComma-1]
for name in orig_nameStack[leftMostComma - 1::2]:
self.nameStack = type_nameStack + [name]
self.stack = orig_stack[:] # Not maintained for mucking, but on this path it doesn't matter
self.evaluate_property_stack()
return
newVar = CppVariable(self.nameStack)
newVar['namespace'] = self.current_namespace()
if self.curStruct:
self.curStruct[ 'fields' ].append( newVar )
newVar['property_of_struct'] = self.curStruct
elif self.curClass:
klass = self.classes[self.curClass]
klass["properties"][self.curAccessSpecifier].append(newVar)
newVar['property_of_class'] = klass['name']
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "variable", "item": newVar})
else:
debug_print( "Found Global variable" )
newVar = CppVariable(self.nameStack)
self.variables.append(newVar)
self.stack = [] # CLEAR STACK
def evaluate_class_stack(self):
"""Create a Class out of the name stack (but not its parts)"""
#don't support subclasses today
#print( 'eval class stack', self.nameStack )
parent = self.curClass
if self.braceDepth > len( self.nameSpaces) and parent:
trace_print( 'HIT NESTED SUBCLASS' )
self.accessSpecifierStack.append(self.curAccessSpecifier)
elif self.braceDepth != len(self.nameSpaces):
error_print( 'ERROR: WRONG BRACE DEPTH' )
return
# When dealing with typedefed structs, get rid of typedef keyword to handle later on
if self.nameStack[0] == "typedef":
del self.nameStack[0]
if len(self.nameStack) == 1:
self.anon_struct_counter += 1
# We can't handle more than 1 anonymous struct, so name them uniquely
self.nameStack.append("<anon-struct-%d>"%self.anon_struct_counter)
if self.nameStack[0] == "class":
self.curAccessSpecifier = 'private'
else:#struct
self.curAccessSpecifier = 'public'
debug_print("curAccessSpecifier changed/defaulted to %s"%self.curAccessSpecifier)
if self.nameStack[0] == "union":
newClass = CppUnion(self.nameStack)
self.anon_union_counter = [self.braceDepth, 2]
trace_print( 'NEW UNION', newClass['name'] )
else:
newClass = CppClass(self.nameStack, self.curTemplate)
trace_print( 'NEW CLASS', newClass['name'] )
newClass["declaration_method"] = self.nameStack[0]
self.classes_order.append( newClass ) # good idea to save ordering
self.stack = [] # fixes if class declared with ';' in closing brace
if parent:
newClass["namespace"] = self.classes[ parent ]['namespace'] + '::' + parent
newClass['parent'] = parent
self.classes[ parent ]['nested_classes'].append( newClass )
## supports nested classes with the same name ##
self.curClass = key = parent+'::'+newClass['name']
self._classes_brace_level[ key ] = self.braceDepth
elif newClass['parent']: # nested class defined outside of parent. A::B {...}
parent = newClass['parent']
newClass["namespace"] = self.classes[ parent ]['namespace'] + '::' + parent
self.classes[ parent ]['nested_classes'].append( newClass )
## supports nested classes with the same name ##
self.curClass = key = parent+'::'+newClass['name']
self._classes_brace_level[ key ] = self.braceDepth
else:
newClass["namespace"] = self.cur_namespace()
key = newClass['name']
self.curClass = newClass["name"]
self._classes_brace_level[ newClass['name'] ] = self.braceDepth
if not key.endswith("::") and not key.endswith(" ") and len(key) != 0:
if key in self.classes:
trace_print( 'ERROR name collision:', key )
self.classes[key].show()
trace_print('-'*80)
newClass.show()
assert key not in self.classes # namespace collision
self.classes[ key ] = newClass
global parseHistory
parseHistory.append({"braceDepth": self.braceDepth, "item_type": "class", "item": newClass})
def evaluate_forward_decl(self):
trace_print( 'FORWARD DECL', self.nameStack )
assert self.nameStack[0] in ('class', 'struct')
name = self.nameStack[-1]
if self.curClass:
klass = self.classes[ self.curClass ]
klass['forward_declares'][self.curAccessSpecifier].append( name )
if self.curAccessSpecifier == 'public': klass._public_forward_declares.append( name )
else: self._forward_decls.append( name )
class CppHeader( _CppHeader ):
"""Parsed C++ class header
Variables produced:
self.classes - Dictionary of classes found in a given header file where the
key is the name of the class
"""
IGNORE_NAMES = '__extension__'.split()
def show(self):
for className in list(self.classes.keys()):self.classes[className].show()
def __init__(self, headerFileName, argType="file", **kwargs):
"""Create the parsed C++ header file parse tree
headerFileName - Name of the file to parse OR actual file contents (depends on argType)
argType - Indicates how to interpret headerFileName as a file string or file name
kwargs - Supports the following keywords
"""
## reset global state ##
global doxygenCommentCache
doxygenCommentCache = ""
CppVariable.Vars = []
CppStruct.Structs = []
if (argType == "file"):
self.headerFileName = os.path.expandvars(headerFileName)
self.mainClass = os.path.split(self.headerFileName)[1][:-2]
headerFileStr = ""
elif argType == "string":
self.headerFileName = ""
self.mainClass = "???"
headerFileStr = headerFileName
else:
raise Exception("Arg type must be either file or string")
self.curClass = ""
# nested classes have parent::nested, but no extra namespace,
# this keeps the API compatible, TODO proper namespace for everything.
Resolver.CLASSES = {}
self.classes = Resolver.CLASSES
#Functions that are not part of a class
self.functions = []
self.pragmas = []
self.defines = []
self.includes = []
self.conditionals = []
self._precomp_macro_buf = [] #for internal purposes, will end up filling out pragmas and defines at the end
self.enums = []
self.variables = []
self.global_enums = {}
self.nameStack = []
self.nameSpaces = []
self.curAccessSpecifier = 'private' # private is default
self.curTemplate = None
self.accessSpecifierStack = []
self.accessSpecifierScratch = []
debug_print("curAccessSpecifier changed/defaulted to %s"%self.curAccessSpecifier)
self.initextra()
# Old namestacks for a given level
self.nameStackHistory = []
self.anon_struct_counter = 0
self.anon_union_counter = [-1, 0]
self.templateRegistry = []
if (len(self.headerFileName)):
fd = open(self.headerFileName)
headerFileStr = "".join(fd.readlines())
fd.close()
# Make sure supportedAccessSpecifier are sane
for i in range(0, len(supportedAccessSpecifier)):
if " " not in supportedAccessSpecifier[i]: continue
supportedAccessSpecifier[i] = re.sub("[ ]+", " ", supportedAccessSpecifier[i]).strip()
# Strip out template declarations
templateSectionsToSliceOut = []
try:
for m in re.finditer("template[\t ]*<[^>]*>", headerFileStr):
start = m.start()
# Search for the final '>' which may or may not be caught in the case of nested <>'s
for i in range(start, len(headerFileStr)):
if headerFileStr[i] == '<':
firstBracket = i
break
ltgtStackCount = 1
#Now look for final '>'
for i in range(firstBracket + 1, len(headerFileStr)):
if headerFileStr[i] == '<':
ltgtStackCount += 1
elif headerFileStr[i] == '>':
ltgtStackCount -= 1
if ltgtStackCount == 0:
end = i
break
templateSectionsToSliceOut.append((start, end))
# Now strip out all instances of the template
templateSectionsToSliceOut.reverse()
for tslice in templateSectionsToSliceOut:
# Replace the template symbol with a single symbol
template_symbol="CppHeaderParser_template_%d"%len(self.templateRegistry)
self.templateRegistry.append(headerFileStr[tslice[0]: tslice[1]+1])
newlines = headerFileStr[tslice[0]: tslice[1]].count("\n") * "\n" #Keep line numbers the same
headerFileStr = headerFileStr[:tslice[0]] + newlines + " " + template_symbol + " " + headerFileStr[tslice[1] + 1:]
except:
pass
# Change multi-line #defines and expressions to single lines maintaining line numbers
# Based from http://stackoverflow.com/questions/2424458/regular-expression-to-match-cs-multiline-preprocessor-statements
matches = re.findall(r'(?m)^(?:.*\\\r?\n)+.*$', headerFileStr)
is_define = re.compile(r'[ \t\v]*#[Dd][Ee][Ff][Ii][Nn][Ee]')
for m in matches:
#Keep the newlines so that linecount doesn't break
num_newlines = len([a for a in m if a=="\n"])
if is_define.match(m):
new_m = m.replace("\n", "<CppHeaderParser_newline_temp_replacement>\\n")
else:
# Just expression taking up multiple lines, make it take 1 line for easier parsing
new_m = m.replace("\\\n", " ")
if (num_newlines > 0):
new_m += "\n"*(num_newlines)
headerFileStr = headerFileStr.replace(m, new_m)
#Filter out Extern "C" statements. These are order dependent
matches = re.findall(re.compile(r'extern[\t ]+"[Cc]"[\t \n\r]*{', re.DOTALL), headerFileStr)
for m in matches:
#Keep the newlines so that linecount doesn't break
num_newlines = len([a for a in m if a=="\n"])
headerFileStr = headerFileStr.replace(m, "\n" * num_newlines)
headerFileStr = re.sub(r'extern[ ]+"[Cc]"[ ]*', "", headerFileStr)
#Filter out any ignore symbols that end with "()" to account for #define magic functions
for ignore in ignoreSymbols:
if not ignore.endswith("()"): continue
while True:
locStart = headerFileStr.find(ignore[:-1])
if locStart == -1:
break
locEnd = None
#Now walk till we find the last paren and account for sub parens
parenCount = 1
inQuotes = False
for i in range(locStart + len(ignore) - 1, len(headerFileStr)):
c = headerFileStr[i]
if not inQuotes:
if c == "(":
parenCount += 1
elif c == ")":
parenCount -= 1
elif c == '"':
inQuotes = True
if parenCount == 0:
locEnd = i + 1
break
else:
if c == '"' and headerFileStr[i-1] != '\\':
inQuotes = False
if locEnd:
#Strip it out but keep the linecount the same so line numbers are right
match_str = headerFileStr[locStart:locEnd]
debug_print("Striping out '%s'"%match_str)
num_newlines = len([a for a in match_str if a=="\n"])
headerFileStr = headerFileStr.replace(headerFileStr[locStart:locEnd], "\n"*num_newlines)
self.braceDepth = 0
lex.lex()
lex.input(headerFileStr)
global curLine
global curChar
curLine = 0
curChar = 0
try:
while True:
tok = lex.token()
if not tok: break
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
self.anon_union_counter[1] -= 1
tok.value = TagStr(tok.value, lineno=tok.lineno)
#debug_print("TOK: %s"%tok)
if tok.type == 'NAME' and tok.value in self.IGNORE_NAMES: continue
if tok.type != 'TEMPLATE_NAME':
self.stack.append( tok.value )
curLine = tok.lineno
curChar = tok.lexpos
if (tok.type in ('PRECOMP_MACRO', 'PRECOMP_MACRO_CONT')):
debug_print("PRECOMP: %s"%tok)
self._precomp_macro_buf.append(tok.value)
self.stack = []
self.nameStack = []
continue
if tok.type == 'TEMPLATE_NAME':
try:
templateId = int(tok.value.replace("CppHeaderParser_template_",""))
self.curTemplate = self.templateRegistry[templateId]
except: pass
if (tok.type == 'OPEN_BRACE'):
if len(self.nameStack) >= 2 and is_namespace(self.nameStack): # namespace {} with no name used in boost, this sets default?
if self.nameStack[1] == "__IGNORED_NAMESPACE__CppHeaderParser__":#Used in filtering extern "C"
self.nameStack[1] = ""
self.nameSpaces.append(self.nameStack[1])
ns = self.cur_namespace(); self.stack = []
if ns not in self.namespaces: self.namespaces.append( ns )
# Detect special condition of macro magic before class declaration so we
# can filter it out
if 'class' in self.nameStack and self.nameStack[0] != 'class':
classLocationNS = self.nameStack.index("class")
classLocationS = self.stack.index("class")
if "(" not in self.nameStack[classLocationNS:]:
debug_print("keyword 'class' found in unexpected location in nameStack, must be following #define magic. Process that before moving on")
origNameStack = self.nameStack
origStack = self.stack
#Process first part of stack which is probably #define macro magic and may cause issues
self.nameStack = self.nameStack[:classLocationNS]
self.stack = self.stack[:classLocationS]
try:
self.evaluate_stack()
except:
debug_print("Error processing #define magic... Oh well")
#Process rest of stack
self.nameStack = origNameStack[classLocationNS:]
self.stack = origStack[classLocationS:]
if len(self.nameStack) and not is_enum_namestack(self.nameStack):
self.evaluate_stack()
else:
self.nameStack.append(tok.value)
if self.stack and self.stack[0] == 'class': self.stack = []
self.braceDepth += 1
elif (tok.type == 'CLOSE_BRACE'):
if self.braceDepth == 0:
continue
if (self.braceDepth == len(self.nameSpaces)):
tmp = self.nameSpaces.pop()
self.stack = [] # clear stack when namespace ends?
if len(self.nameStack) and is_enum_namestack(self.nameStack):
self.nameStack.append(tok.value)
elif self.braceDepth < 10:
self.evaluate_stack()
else:
self.nameStack = []
self.braceDepth -= 1
#self.stack = []; print 'BRACE DEPTH', self.braceDepth, 'NS', len(self.nameSpaces)
if self.curClass: debug_print( 'CURBD %s'%self._classes_brace_level[ self.curClass ] )
if (self.braceDepth == 0) or (self.curClass and self._classes_brace_level[self.curClass]==self.braceDepth):
trace_print( 'END OF CLASS DEF' )
if self.accessSpecifierStack:
self.curAccessSpecifier = self.accessSpecifierStack[-1]
self.accessSpecifierStack = self.accessSpecifierStack[:-1]
if self.curClass and self.classes[ self.curClass ]['parent']: self.curClass = self.classes[ self.curClass ]['parent']
else: self.curClass = ""; #self.curStruct = None
self.stack = []
#if self.curStruct: self.curStruct = None
if self.braceDepth == 0 or (self.curStruct and self._structs_brace_level[self.curStruct['type']]==self.braceDepth):
trace_print( 'END OF STRUCT DEF' )
self.curStruct = None
if self._method_body and (self.braceDepth + 1) <= self._method_body:
self._method_body = None; self.stack = []; self.nameStack = []; trace_print( 'FORCE CLEAR METHBODY' )
if (tok.type == 'OPEN_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'OPEN_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'TAB'): pass
elif (tok.type == 'EQUALS'):
self.nameStack.append(tok.value)
elif (tok.type == 'COMMA'):
self.nameStack.append(tok.value)
elif (tok.type == 'BACKSLASH'):
self.nameStack.append(tok.value)
elif (tok.type == 'DIVIDE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PIPE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PERCENT'):
self.nameStack.append(tok.value)
elif (tok.type == 'CARET'):
self.nameStack.append(tok.value)
elif (tok.type == 'EXCLAMATION'):
self.nameStack.append(tok.value)
elif (tok.type == 'SQUOTE'): pass
elif (tok.type == 'NUMBER' or tok.type == 'FLOAT_NUMBER'):
self.nameStack.append(tok.value)
elif (tok.type == 'MINUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'PLUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'STRING_LITERAL'):
self.nameStack.append(tok.value)
elif (tok.type == 'NAME' or tok.type == 'AMPERSTAND' or tok.type == 'ASTERISK' or tok.type == 'CHAR_LITERAL'):
if tok.value in ignoreSymbols:
debug_print("Ignore symbol %s"%tok.value)
elif (tok.value == 'class'):
self.nameStack.append(tok.value)
elif tok.value in supportedAccessSpecifier:
if len(self.nameStack) and self.nameStack[0] in ("class", "struct", "union"):
self.nameStack.append(tok.value)
elif self.braceDepth == len(self.nameSpaces) + 1 or self.braceDepth == (len(self.nameSpaces) + len(self.curClass.split("::"))):
self.curAccessSpecifier = tok.value
self.accessSpecifierScratch.append(tok.value)
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
else:
self.nameStack.append(tok.value)
if self.anon_union_counter[0] == self.braceDepth:
self.anon_union_counter = [-1, 0]
elif (tok.type == 'COLON'):
#Don't want colon to be first in stack
if len(self.nameStack) == 0:
self.accessSpecifierScratch = []
continue
# Handle situation where access specifiers can be multi words such as "public slots"
jns = " ".join(self.accessSpecifierScratch + self.nameStack)
if jns in supportedAccessSpecifier:
self.curAccessSpecifier = jns
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
self.nameStack = []
else:
self.nameStack.append(tok.value)
self.accessSpecifierScratch = []
elif (tok.type == 'SEMI_COLON'):
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
debug_print("Creating anonymous union")
#Force the processing of an anonymous union
saved_namestack = self.nameStack[:]
saved_stack = self.stack[:]
self.nameStack = [""]
self.stack = self.nameStack + [";"]
self.nameStack = self.nameStack[0:1]
debug_print("pre eval anon stack")
self.evaluate_stack( tok.type )
debug_print("post eval anon stack")
self.nameStack = saved_namestack
self.stack = saved_stack
self.anon_union_counter = [-1, 0]
if (self.braceDepth < 10): self.evaluate_stack( tok.type )
self.stack = []
self.nameStack = []
except:
if (debug): raise
raise CppParseError("Not able to parse %s on line %d evaluating \"%s\"\nError around: %s"
% (self.headerFileName, tok.lineno, tok.value, " ".join(self.nameStack)))
self.finalize()
global parseHistory
parseHistory = []
# Delete some temporary variables
for key in ["_precomp_macro_buf", "nameStack", "nameSpaces", "curAccessSpecifier", "accessSpecifierStack",
"accessSpecifierScratch", "nameStackHistory", "anon_struct_counter", "anon_union_counter",
"_classes_brace_level", "_forward_decls", "stack", "mainClass", "curStruct", "_template_typenames",
"_method_body", "braceDepth", "_structs_brace_level", "typedefs_order", "curTemplate", "templateRegistry"]:
del self.__dict__[key]
def evaluate_stack(self, token=None):
"""Evaluates the current name stack"""
global doxygenCommentCache
self.nameStack = filter_out_attribute_keyword(self.nameStack)
self.stack = filter_out_attribute_keyword(self.stack)
nameStackCopy = self.nameStack[:]
debug_print( "Evaluating stack %s\n BraceDepth: %s (called from %d)" %(self.nameStack,self.braceDepth, inspect.currentframe().f_back.f_lineno))
#Handle special case of overloading operator ()
if "operator()(" in "".join(self.nameStack):
operator_index = self.nameStack.index("operator")
self.nameStack.pop(operator_index + 2)
self.nameStack.pop(operator_index + 1)
self.nameStack[operator_index] = "operator()"
if (len(self.curClass)):
debug_print( "%s (%s) "%(self.curClass, self.curAccessSpecifier))
else:
debug_print( "<anonymous> (%s) "%self.curAccessSpecifier)
#Filter special case of array with casting in it
try:
bracePos = self.nameStack.index("[")
parenPos = self.nameStack.index("(")
if bracePos == parenPos - 1:
endParen = self.nameStack.index(")")
self.nameStack = self.nameStack[:bracePos + 1] + self.nameStack[endParen + 1:]
debug_print("Filtered namestack to=%s"%self.nameStack)
except: pass
#if 'typedef' in self.nameStack: self.evaluate_typedef() # allows nested typedefs, probably a bad idea
if (not self.curClass and 'typedef' in self.nameStack and
(('struct' not in self.nameStack and 'union' not in self.nameStack) or self.stack[-1] == ";") and
not is_enum_namestack(self.nameStack)):
trace_print('STACK', self.stack)
self.evaluate_typedef()
return
elif (len(self.nameStack) == 0):
debug_print( "trace" )
debug_print( "(Empty Stack)" )
return
elif (self.nameStack[0] == "namespace"):
#Taken care of outside of here
pass
elif len(self.nameStack) == 2 and self.nameStack[0] == "friend":#friend class declaration
pass
elif len(self.nameStack) >= 2 and self.nameStack[0] == 'using' and self.nameStack[1] == 'namespace': pass # TODO
elif is_enum_namestack(self.nameStack):
debug_print( "trace" )
self.evaluate_enum_stack()
elif self._method_body and (self.braceDepth + 1) > self._method_body: trace_print( 'INSIDE METHOD DEF' )
elif is_method_namestack(self.stack) and not self.curStruct and '(' in self.nameStack:
debug_print( "trace" )
if self.braceDepth > 0:
if "{" in self.stack and self.stack[0] != '{' and self.stack[-1] == ';' and self.braceDepth == 1:
#Special case of a method defined outside a class that has a body
pass
else:
self.evaluate_method_stack()
else:
#Free function
self.evaluate_method_stack()
elif (len(self.nameStack) == 1 and len(self.nameStackHistory) > self.braceDepth
and (self.nameStackHistory[self.braceDepth][0][0:2] == ["typedef", "struct"] or
self.nameStackHistory[self.braceDepth][0][0:2] == ["typedef", "union"])):
# Look for the name of a typedef struct: typedef struct {...} StructName; or unions to get renamed
debug_print("found the naming of a union")
type_name_to_rename = self.nameStackHistory[self.braceDepth][1]
new_name = self.nameStack[0]
type_to_rename = self.classes[type_name_to_rename]
type_to_rename["name"] = self.nameStack[0]
#Now re-install it in its new location
self.classes[new_name] = type_to_rename
del self.classes[type_name_to_rename]
elif is_property_namestack(self.nameStack) and self.stack[-1] == ';':
debug_print( "trace" )
if self.nameStack[0] in ('class', 'struct') and len(self.stack) == 3: self.evaluate_forward_decl()
elif len(self.nameStack) >= 2 and (self.nameStack[0]=='friend' and self.nameStack[1]=='class'): pass
else: self.evaluate_property_stack() # catches class props and structs in a namespace
elif self.nameStack[0] in ("class", "struct", "union") or self.nameStack[0] == 'typedef' and self.nameStack[1] in ('struct', 'union'):
#Parsing a union can reuse much of the class parsing
debug_print( "trace" )
self.evaluate_class_stack()
elif not self.curClass:
debug_print( "trace" )
if is_enum_namestack(self.nameStack): self.evaluate_enum_stack()
elif self.curStruct and self.stack[-1] == ';': self.evaluate_property_stack() # this catches fields of global structs
self.nameStack = []
doxygenCommentCache = ""
elif (self.braceDepth < 1):
debug_print( "trace" )
#Ignore global stuff for now
debug_print( "Global stuff: %s"%self.nameStack )
self.nameStack = []
doxygenCommentCache = ""
elif (self.braceDepth > len(self.nameSpaces) + 1):
debug_print( "trace" )
self.nameStack = []
doxygenCommentCache = ""
try:
self.nameStackHistory[self.braceDepth] = (nameStackCopy, self.curClass)
except:
self.nameStackHistory.append((nameStackCopy, self.curClass))
self.nameStack = [] # it's a little confusing to have some if/else branches above return and others not, and then clearing the nameStack down here
doxygenCommentCache = ""
self.curTemplate = None
def evaluate_enum_stack(self):
"""Create an Enum out of the name stack"""
debug_print( "evaluating enum" )
newEnum = CppEnum(self.nameStack)
if len(list(newEnum.keys())):
if len(self.curClass):
newEnum["namespace"] = self.cur_namespace(False)
klass = self.classes[self.curClass]
klass["enums"][self.curAccessSpecifier].append(newEnum)
if self.curAccessSpecifier == 'public' and 'name' in newEnum: klass._public_enums[ newEnum['name'] ] = newEnum
else:
newEnum["namespace"] = self.cur_namespace(True)
self.enums.append(newEnum)
if 'name' in newEnum and newEnum['name']: self.global_enums[ newEnum['name'] ] = newEnum
#This enum has instances, turn them into properties
if "instances" in newEnum:
instanceType = "enum"
if "name" in newEnum:
instanceType = newEnum["name"]
for instance in newEnum["instances"]:
self.nameStack = [instanceType, instance]
self.evaluate_property_stack()
del newEnum["instances"]
def strip_parent_keys(self):
"""Strip all parent keys to prevent loops"""
obj_queue = [self]
while len(obj_queue):
obj = obj_queue.pop()
trace_print("pop %s type %s"%(obj, type(obj)))
try:
if "parent" in obj.keys():
del obj["parent"]
trace_print("Stripped parent from %s"%obj.keys())
except: pass
# Figure out what sub types are one of ours
try:
if not hasattr(obj, 'keys'):
obj = obj.__dict__
for k in obj.keys():
trace_print("-Try key %s"%(k))
trace_print("-type %s"%(type(obj[k])))
if k in ["nameStackHistory", "parent", "_public_typedefs"]: continue
if type(obj[k]) == list:
for i in obj[k]:
trace_print("push l %s"%i)
obj_queue.append(i)
elif type(obj[k]) == dict:
if len(obj):
trace_print("push d %s"%obj[k])
obj_queue.append(obj[k])
elif type(obj[k]) == type(type(0)):
if type(obj[k]) == int:
obj[k] = "int"
elif type(obj[k]) == str:
obj[k] = "string"
else:
obj[k] = "???"
trace_print("next key\n")
except:
trace_print("Exception")
def toJSON(self, indent=4):
"""Converts a parsed structure to JSON"""
import json
self.strip_parent_keys()
try:
del self.__dict__["classes_order"]
except: pass
return json.dumps(self.__dict__, indent=indent)
def __repr__(self):
rtn = {
"classes": self.classes,
"functions": self.functions,
"enums": self.enums,
"variables": self.variables,
}
return repr(rtn)
def __str__(self):
rtn = ""
for className in list(self.classes.keys()):
rtn += "%s\n"%self.classes[className]
if self.functions:
rtn += "// functions\n"
for f in self.functions:
rtn += "%s\n"%f
if self.variables:
rtn += "// variables\n"
for f in self.variables:
rtn += "%s\n"%f
if self.enums:
rtn += "// enums\n"
for f in self.enums:
rtn += "%s\n"%f
return rtn
test_swift_store.py
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests the Swift backend store"""
import copy
import fixtures
import hashlib
import httplib
import mock
import tempfile
import uuid
from oslo.config import cfg
from oslotest import moxstubout
import six
import stubout
import swiftclient
from glance_store._drivers.swift import store as swift
from glance_store._drivers.swift import utils as sutils
from glance_store import backend
from glance_store import BackendException
from glance_store.common import auth
from glance_store import exceptions
from glance_store.location import get_location_from_uri
from glance_store.openstack.common import context
from glance_store.openstack.common import units
from glance_store.tests import base
CONF = cfg.CONF
FAKE_UUID = lambda: str(uuid.uuid4())
Store = swift.Store
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
MAX_SWIFT_OBJECT_SIZE = FIVE_GB
SWIFT_PUT_OBJECT_CALLS = 0
SWIFT_CONF = {'swift_store_auth_address': 'localhost:8080',
'swift_store_container': 'glance',
'swift_store_user': 'user',
'swift_store_key': 'key',
'swift_store_retry_get_count': 1,
'default_swift_reference': 'ref1'
}
# We stub out as little as possible to ensure that the code paths
# between swift and swiftclient are tested
# thoroughly
def stub_out_swiftclient(stubs, swift_store_auth_version):
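"""Replace swiftclient.client functions with fakes backed by the
in-memory fixture containers/objects defined below."""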
fixture_containers = ['glance']
fixture_container_headers = {}
fixture_headers = {
'glance/%s' % FAKE_UUID: {
'content-length': FIVE_KB,
'etag': 'c2e5db72bd7fd153f53ede5da5a06de3'
}
}
fixture_objects = {'glance/%s' % FAKE_UUID:
six.StringIO("*" * FIVE_KB)}
def fake_head_container(url, token, container, **kwargs):
if container not in fixture_containers:
msg = "No container %s found" % container
raise swiftclient.ClientException(msg,
http_status=httplib.NOT_FOUND)
return fixture_container_headers
def fake_put_container(url, token, container, **kwargs):
fixture_containers.append(container)
def fake_post_container(url, token, container, headers, http_conn=None):
for key, value in six.iteritems(headers):
fixture_container_headers[key] = value
def fake_put_object(url, token, container, name, contents, **kwargs):
# PUT returns the ETag header for the newly-added object
# Large object manifest...
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS += 1
CHUNKSIZE = 64 * units.Ki
fixture_key = "%s/%s" % (container, name)
if fixture_key not in fixture_headers:
if kwargs.get('headers'):
etag = kwargs['headers']['ETag']
fixture_headers[fixture_key] = {'manifest': True,
'etag': etag}
return etag
if hasattr(contents, 'read'):
fixture_object = six.StringIO()
chunk = contents.read(CHUNKSIZE)
checksum = hashlib.md5()
while chunk:
fixture_object.write(chunk)
checksum.update(chunk)
chunk = contents.read(CHUNKSIZE)
etag = checksum.hexdigest()
else:
fixture_object = six.StringIO(contents)
etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
read_len = fixture_object.len
if read_len > MAX_SWIFT_OBJECT_SIZE:
msg = ('Image size:%d exceeds Swift max:%d' %
(read_len, MAX_SWIFT_OBJECT_SIZE))
raise swiftclient.ClientException(
msg, http_status=httplib.REQUEST_ENTITY_TOO_LARGE)
fixture_objects[fixture_key] = fixture_object
fixture_headers[fixture_key] = {
'content-length': read_len,
'etag': etag}
return etag
else:
msg = ("Object PUT failed - Object with key %s already exists"
% fixture_key)
raise swiftclient.ClientException(msg,
http_status=httplib.CONFLICT)
def fake_get_object(url, token, container, name, **kwargs):
# GET returns the tuple (list of headers, file object)
fixture_key = "%s/%s" % (container, name)
if fixture_key not in fixture_headers:
msg = "Object GET failed"
raise swiftclient.ClientException(msg,
http_status=httplib.NOT_FOUND)
byte_range = None
headers = kwargs.get('headers', dict())
if headers is not None:
headers = dict((k.lower(), v) for k, v in six.iteritems(headers))
if 'range' in headers:
byte_range = headers.get('range')
fixture = fixture_headers[fixture_key]
if 'manifest' in fixture:
# Large object manifest... we return a file containing
# all objects with prefix of this fixture key
chunk_keys = sorted([k for k in fixture_headers.keys()
if k.startswith(fixture_key) and
k != fixture_key])
result = six.StringIO()
for key in chunk_keys:
result.write(fixture_objects[key].getvalue())
else:
result = fixture_objects[fixture_key]
if byte_range is not None:
start = int(byte_range.split('=')[1].strip('-'))
result = six.StringIO(result.getvalue()[start:])
fixture_headers[fixture_key]['content-length'] = len(
result.getvalue())
return fixture_headers[fixture_key], result
def fake_head_object(url, token, container, name, **kwargs):
# HEAD returns the list of headers for an object
try:
fixture_key = "%s/%s" % (container, name)
return fixture_headers[fixture_key]
except KeyError:
msg = "Object HEAD failed - Object does not exist"
raise swiftclient.ClientException(msg,
http_status=httplib.NOT_FOUND)
def fake_delete_object(url, token, container, name, **kwargs):
# DELETE returns nothing
fixture_key = "%s/%s" % (container, name)
if fixture_key not in fixture_headers:
msg = "Object DELETE failed - Object does not exist"
raise swiftclient.ClientException(msg,
http_status=httplib.NOT_FOUND)
else:
del fixture_headers[fixture_key]
del fixture_objects[fixture_key]
def fake_http_connection(*args, **kwargs):
return None
def fake_get_auth(url, user, key, snet, auth_version, **kwargs):
if url is None:
return None, None
if 'http' in url and '://' not in url:
raise ValueError('Invalid url %s' % url)
# Check the auth version against the configured value
if swift_store_auth_version != auth_version:
msg = 'AUTHENTICATION failed (version mismatch)'
raise swiftclient.ClientException(msg)
return None, None
stubs.Set(swiftclient.client,
'head_container', fake_head_container)
stubs.Set(swiftclient.client,
'put_container', fake_put_container)
stubs.Set(swiftclient.client,
'post_container', fake_post_container)
stubs.Set(swiftclient.client,
'put_object', fake_put_object)
stubs.Set(swiftclient.client,
'delete_object', fake_delete_object)
stubs.Set(swiftclient.client,
'head_object', fake_head_object)
stubs.Set(swiftclient.client,
'get_object', fake_get_object)
stubs.Set(swiftclient.client,
'get_auth', fake_get_auth)
stubs.Set(swiftclient.client,
'http_connection', fake_http_connection)
class SwiftTests(object):
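"""Store tests shared by the auth v1/v2 test cases below (mixed into
subclasses of base.StoreBaseTest)."""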
@property
def swift_store_user(self):
return 'tenant:user1'
def test_get_size(self):
"""
Test that we can get the size of an object in the swift store
"""
uri = "swift://%s:key@auth_address/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = get_location_from_uri(uri)
image_size = self.store.get_size(loc)
self.assertEqual(image_size, 5120)
def test_validate_location_for_invalid_uri(self):
"""
Test that validate location raises when the location contains
any account reference.
"""
uri = "swift+config://store_1/glance/%s"
self.assertRaises(exceptions.BadStoreUri,
self.store.validate_location,
uri)
def test_validate_location_for_valid_uri(self):
"""
Test that validate location verifies that the location does not
contain any account reference
"""
uri = "swift://user:key@auth_address/glance/%s"
try:
self.assertIsNone(self.store.validate_location(uri))
except Exception:
self.fail('Location uri validation failed')
def test_get_size_with_multi_tenant_on(self):
"""Test that single tenant uris work with multi tenant on."""
uri = ("swift://%s:key@auth_address/glance/%s" %
(self.swift_store_user, FAKE_UUID))
self.config(swift_store_multi_tenant=True)
#NOTE(markwash): ensure the image is found
size = backend.get_size_from_backend(uri, context={})
self.assertEqual(size, 5120)
def test_get(self):
"""Test a "normal" retrieval of an image in chunks"""
uri = "swift://%s:key@auth_address/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = get_location_from_uri(uri)
(image_swift, image_size) = self.store.get(loc)
self.assertEqual(image_size, 5120)
expected_data = "*" * FIVE_KB
data = ""
for chunk in image_swift:
data += chunk
self.assertEqual(expected_data, data)
def test_get_with_retry(self):
"""
Test a retrieval where Swift does not get the full image in a single
request.
"""
uri = "swift://%s:key@auth_address/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = get_location_from_uri(uri)
ctxt = context.RequestContext()
(image_swift, image_size) = self.store.get(loc, context=ctxt)
resp_full = ''.join([chunk for chunk in image_swift.wrapped])
resp_half = resp_full[:len(resp_full) / 2]
image_swift.wrapped = swift.swift_retry_iter(resp_half, image_size,
self.store,
loc.store_location,
ctxt)
self.assertEqual(image_size, 5120)
expected_data = "*" * FIVE_KB
data = ""
for chunk in image_swift:
data += chunk
self.assertEqual(expected_data, data)
def test_get_with_http_auth(self):
"""
Test a retrieval from Swift with an HTTP authurl. This is
specified either via a Location header with swift+http:// or using
http:// in the swift_store_auth_address config value
"""
loc = get_location_from_uri("swift+http://%s:key@auth_address/"
"glance/%s" %
(self.swift_store_user, FAKE_UUID))
ctxt = context.RequestContext()
(image_swift, image_size) = self.store.get(loc, context=ctxt)
self.assertEqual(image_size, 5120)
expected_data = "*" * FIVE_KB
data = ""
for chunk in image_swift:
data += chunk
self.assertEqual(expected_data, data)
def test_get_non_existing(self):
"""
Test that trying to retrieve a swift that doesn't exist
raises an error
"""
loc = get_location_from_uri("swift://%s:key@authurl/glance/noexist" % (
self.swift_store_user))
self.assertRaises(exceptions.NotFound,
self.store.get,
loc)
def test_add(self):
"""Test that we can add an image via the swift backend"""
sutils.is_multiple_swift_store_accounts_enabled = \
mock.Mock(return_value=False)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = "swift+https://tenant%%3Auser1:key@localhost:8080/glance/%s"
expected_location = loc % (expected_image_id)
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
location, size, checksum, _ = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_swift_size, size)
self.assertEqual(expected_checksum, checksum)
# Expecting a single object to be created on Swift i.e. no chunking.
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
loc = get_location_from_uri(expected_location)
(new_image_swift, new_image_size) = self.store.get(loc)
new_image_contents = ''.join([chunk for chunk in new_image_swift])
new_image_swift_size = len(new_image_swift)
self.assertEqual(expected_swift_contents, new_image_contents)
self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_multi_store(self):
conf = copy.deepcopy(SWIFT_CONF)
conf['default_swift_reference'] = 'store_2'
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_image_id = str(uuid.uuid4())
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
loc = 'swift+config://store_2/glance/%s'
expected_location = loc % (expected_image_id)
location, size, checksum, arg = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
self.assertEqual(expected_location, location)
def test_add_auth_url_variations(self):
"""
Test that we can add an image via the swift backend with
a variety of different auth_address values
"""
sutils.is_multiple_swift_store_accounts_enabled = \
mock.Mock(return_value=True)
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
variations = {
'store_4': 'swift+config://store_4/glance/%s',
'store_5': 'swift+config://store_5/glance/%s',
'store_6': 'swift+config://store_6/glance/%s'
}
for variation, expected_location in variations.items():
image_id = str(uuid.uuid4())
expected_location = expected_location % image_id
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = \
hashlib.md5(expected_swift_contents).hexdigest()
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
conf['default_swift_reference'] = variation
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
location, size, checksum, _ = self.store.add(image_id, image_swift,
expected_swift_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_swift_size, size)
self.assertEqual(expected_checksum, checksum)
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
loc = get_location_from_uri(expected_location)
(new_image_swift, new_image_size) = self.store.get(loc)
new_image_contents = ''.join([chunk for chunk in new_image_swift])
new_image_swift_size = len(new_image_swift)
self.assertEqual(expected_swift_contents, new_image_contents)
self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_no_container_no_create(self):
"""
Tests that adding an image with a non-existing container
raises an appropriate exception
"""
conf = copy.deepcopy(SWIFT_CONF)
conf['swift_store_user'] = 'tenant:user'
conf['swift_store_create_container_on_put'] = False
conf['swift_store_container'] = 'noexist'
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
image_swift = six.StringIO("nevergonnamakeit")
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
# We check the exception text to ensure the container
# missing text is found in it, otherwise, we would have
# simply used self.assertRaises here
exception_caught = False
try:
self.store.add(str(uuid.uuid4()), image_swift, 0)
except BackendException as e:
exception_caught = True
self.assertIn("container noexist does not exist "
"in Swift", unicode(e))
self.assertTrue(exception_caught)
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)
def test_add_no_container_and_create(self):
"""
Tests that adding an image with a non-existing container
creates the container automatically if flag is set
"""
sutils.is_multiple_swift_store_accounts_enabled = \
mock.Mock(return_value=True)
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/noexist/%s'
expected_location = loc % (expected_image_id)
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
conf = copy.deepcopy(SWIFT_CONF)
conf['swift_store_user'] = 'tenant:user'
conf['swift_store_create_container_on_put'] = True
conf['swift_store_container'] = 'noexist'
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
location, size, checksum, _ = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_swift_size, size)
self.assertEqual(expected_checksum, checksum)
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
loc = get_location_from_uri(expected_location)
(new_image_swift, new_image_size) = self.store.get(loc)
new_image_contents = ''.join([chunk for chunk in new_image_swift])
new_image_swift_size = len(new_image_swift)
self.assertEqual(expected_swift_contents, new_image_contents)
self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_large_object(self):
"""
Tests adding a very large image. We simulate the large
object by setting store.large_object_size to a small number
and then verify that there have been a number of calls to
put_object()...
"""
sutils.is_multiple_swift_store_accounts_enabled = \
mock.Mock(return_value=True)
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/glance/%s'
expected_location = loc % (expected_image_id)
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
self.store = Store(self.conf)
self.store.configure()
orig_max_size = self.store.large_object_size
orig_temp_size = self.store.large_object_chunk_size
try:
self.store.large_object_size = 1024
self.store.large_object_chunk_size = 1024
location, size, checksum, _ = self.store.add(expected_image_id,
image_swift,
expected_swift_size)
finally:
self.store.large_object_chunk_size = orig_temp_size
self.store.large_object_size = orig_max_size
self.assertEqual(expected_location, location)
self.assertEqual(expected_swift_size, size)
self.assertEqual(expected_checksum, checksum)
# Expecting 6 objects to be created on Swift -- 5 chunks and 1
# manifest.
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 6)
loc = get_location_from_uri(expected_location)
(new_image_swift, new_image_size) = self.store.get(loc)
new_image_contents = ''.join([chunk for chunk in new_image_swift])
new_image_swift_size = len(new_image_contents)
self.assertEqual(expected_swift_contents, new_image_contents)
self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_large_object_zero_size(self):
"""
Tests that adding an image to Swift which has both an unknown size and
exceeds Swift's maximum limit of 5GB is correctly uploaded.
We avoid the overhead of creating a 5GB object for this test by
temporarily setting MAX_SWIFT_OBJECT_SIZE to 1KB, and then adding
an object of 5KB.
Bug lp:891738
"""
# Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
expected_image_id = str(uuid.uuid4())
loc = 'swift+config://ref1/glance/%s'
expected_location = loc % (expected_image_id)
image_swift = six.StringIO(expected_swift_contents)
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS = 0
# Temporarily set Swift MAX_SWIFT_OBJECT_SIZE to 1KB and add our image,
# explicitly setting the image_length to 0
self.store = Store(self.conf)
self.store.configure()
orig_max_size = self.store.large_object_size
orig_temp_size = self.store.large_object_chunk_size
global MAX_SWIFT_OBJECT_SIZE
orig_max_swift_object_size = MAX_SWIFT_OBJECT_SIZE
try:
MAX_SWIFT_OBJECT_SIZE = 1024
self.store.large_object_size = 1024
self.store.large_object_chunk_size = 1024
location, size, checksum, _ = self.store.add(expected_image_id,
image_swift, 0)
finally:
self.store.large_object_chunk_size = orig_temp_size
self.store.large_object_size = orig_max_size
MAX_SWIFT_OBJECT_SIZE = orig_max_swift_object_size
self.assertEqual(expected_location, location)
self.assertEqual(expected_swift_size, size)
self.assertEqual(expected_checksum, checksum)
# Expecting 7 calls to put_object -- 5 chunks, a zero chunk which is
# then deleted, and the manifest. Note the difference with above
# where the image_size is specified in advance (there's no zero chunk
# in that case).
self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 7)
loc = get_location_from_uri(expected_location)
(new_image_swift, new_image_size) = self.store.get(loc)
new_image_contents = ''.join([chunk for chunk in new_image_swift])
new_image_swift_size = len(new_image_contents)
self.assertEqual(expected_swift_contents, new_image_contents)
self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_already_existing(self):
"""
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
image_swift = six.StringIO("nevergonnamakeit")
self.assertRaises(exceptions.Duplicate,
self.store.add,
FAKE_UUID, image_swift, 0)
def _option_required(self, key):
conf = self.getConfig()
conf[key] = None
try:
self.config(**conf)
self.store = Store(self.conf)
return self.store.add == self.store.add_disabled
except Exception:
return False
def test_no_store_credentials(self):
"""
Tests that options without valid credentials disable the add method
"""
swift.SWIFT_STORE_REF_PARAMS = {'ref1': {'auth_address':
'authurl.com', 'user': '',
'key': ''}}
self.store = Store(self.conf)
self.store.configure()
self.assertEqual(self.store.add, self.store.add_disabled)
def test_no_auth_address(self):
"""
Tests that options without an auth address disable the add method
"""
swift.SWIFT_STORE_REF_PARAMS = {'ref1': {'auth_address':
'', 'user': 'user1',
'key': 'key1'}}
self.store = Store(self.conf)
self.store.configure()
self.assertEqual(self.store.add, self.store.add_disabled)
def test_delete(self):
"""
Test we can delete an existing image in the swift store
"""
uri = "swift://%s:key@authurl/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = get_location_from_uri(uri)
self.store.delete(loc)
self.assertRaises(exceptions.NotFound, self.store.get, loc)
def test_delete_with_reference_params(self):
"""
Test we can delete an existing image in the swift store
"""
uri = "swift+config://ref1/glance/%s" % (FAKE_UUID)
loc = get_location_from_uri(uri)
self.store.delete(loc)
self.assertRaises(exceptions.NotFound, self.store.get, loc)
def test_delete_non_existing(self):
"""
Test that trying to delete a swift that doesn't exist
raises an error
"""
loc = get_location_from_uri("swift://%s:key@authurl/glance/noexist" % (
self.swift_store_user))
self.assertRaises(exceptions.NotFound, self.store.delete, loc)
def test_read_acl_public(self):
"""
Test that we can set a public read acl.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = get_location_from_uri(uri)
ctxt = context.RequestContext()
store.set_acls(loc, public=True, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Read'],
".r:*,.rlistings")
def test_read_acl_tenants(self):
"""
Test that we can set read acl for tenants.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = get_location_from_uri(uri)
read_tenants = ['matt', 'mark']
ctxt = context.RequestContext()
store.set_acls(loc, read_tenants=read_tenants, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Read'],
'matt:*,mark:*')
def test_write_acls(self):
"""
Test that we can set write acl for tenants.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = get_location_from_uri(uri)
read_tenants = ['frank', 'jim']
ctxt = context.RequestContext()
store.set_acls(loc, write_tenants=read_tenants, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Write'],
'frank:*,jim:*')
class TestStoreAuthV1(base.StoreBaseTest, SwiftTests):
_CONF = cfg.CONF
def getConfig(self):
conf = SWIFT_CONF.copy()
conf['swift_store_auth_version'] = '1'
conf['swift_store_user'] = 'tenant:user1'
return conf
def setUp(self):
"""Establish a clean test environment"""
super(TestStoreAuthV1, self).setUp()
conf = self.getConfig()
conf_file = 'glance-swift.conf'
self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
conf.update({'swift_store_config_file': self.swift_config_file})
self.stubs = stubout.StubOutForTesting()
stub_out_swiftclient(self.stubs, conf['swift_store_auth_version'])
self.store = Store(self.conf)
self.config(**conf)
self.store.configure()
self.addCleanup(self.stubs.UnsetAll)
self.register_store_schemes(self.store)
swift.SWIFT_STORE_REF_PARAMS = sutils.SwiftParams().params
self.addCleanup(self.conf.reset)
class TestStoreAuthV2(TestStoreAuthV1):
def getConfig(self):
conf = super(TestStoreAuthV2, self).getConfig()
conf['swift_store_auth_version'] = '2'
conf['swift_store_user'] = 'tenant:user1'
return conf
def test_v2_with_no_tenant(self):
uri = "swift://failme:key@auth_address/glance/%s" % (FAKE_UUID)
loc = get_location_from_uri(uri)
self.assertRaises(exceptions.BadStoreUri,
self.store.get,
loc)
def test_v2_multi_tenant_location(self):
conf = self.getConfig()
conf['swift_store_multi_tenant'] = True
uri = "swift://auth_address/glance/%s" % (FAKE_UUID)
loc = get_location_from_uri(uri)
self.assertEqual('swift', loc.store_name)
class FakeConnection(object):
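"""Minimal stand-in for swiftclient.Connection that just records the
arguments it was constructed with."""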
def __init__(self, authurl, user, key, retries=5, preauthurl=None,
preauthtoken=None, snet=False, starting_backoff=1,
tenant_name=None, os_options=None, auth_version="1",
insecure=False, ssl_compression=True):
if os_options is None:
os_options = {}
self.authurl = authurl
self.user = user
self.key = key
self.preauthurl = preauthurl
self.preauthtoken = preauthtoken
self.snet = snet
self.tenant_name = tenant_name
self.os_options = os_options
self.auth_version = auth_version
self.insecure = insecure
class TestSingleTenantStoreConnections(base.StoreBaseTest):
_CONF = cfg.CONF
def setUp(self):
super(TestSingleTenantStoreConnections, self).setUp()
moxfixture = self.useFixture(moxstubout.MoxStubout())
self.stubs = moxfixture.stubs
self.stubs.Set(swiftclient, 'Connection', FakeConnection)
self.store = swift.SingleTenantStore(self.conf)
self.store.configure()
specs = {'scheme': 'swift',
'auth_or_store_url': 'example.com/v2/',
'user': 'tenant:user1',
'key': 'key1',
'container': 'cont',
'obj': 'object'}
self.location = swift.StoreLocation(specs)
self.addCleanup(self.conf.reset)
def test_basic_connection(self):
connection = self.store.get_connection(self.location)
self.assertEqual(connection.authurl, 'https://example.com/v2/')
self.assertEqual(connection.auth_version, '2')
self.assertEqual(connection.user, 'user1')
self.assertEqual(connection.tenant_name, 'tenant')
self.assertFalse(connection.snet)
self.assertEqual(connection.key, 'key1')
self.assertIsNone(connection.preauthurl)
self.assertIsNone(connection.preauthtoken)
self.assertFalse(connection.insecure)
self.assertEqual(connection.os_options,
{'service_type': 'object-store',
'endpoint_type': 'publicURL'})
def test_connection_with_no_trailing_slash(self):
self.location.auth_or_store_url = 'example.com/v2'
connection = self.store.get_connection(self.location)
self.assertEqual(connection.authurl, 'https://example.com/v2/')
def test_connection_insecure(self):
self.config(swift_store_auth_insecure=True)
self.store.configure()
connection = self.store.get_connection(self.location)
self.assertTrue(connection.insecure)
def test_connection_with_auth_v1(self):
self.config(swift_store_auth_version='1')
self.store.configure()
self.location.user = 'auth_v1_user'
connection = self.store.get_connection(self.location)
self.assertEqual(connection.auth_version, '1')
self.assertEqual(connection.user, 'auth_v1_user')
self.assertIsNone(connection.tenant_name)
def test_connection_invalid_user(self):
self.store.configure()
self.location.user = 'invalid:format:user'
self.assertRaises(exceptions.BadStoreUri,
self.store.get_connection, self.location)

    def test_connection_missing_user(self):
self.store.configure()
self.location.user = None
self.assertRaises(exceptions.BadStoreUri,
self.store.get_connection, self.location)

    def test_connection_with_region(self):
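        """swift_store_region is forwarded to swiftclient as region_name."""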
self.config(swift_store_region='Sahara')
self.store.configure()
connection = self.store.get_connection(self.location)
self.assertEqual(connection.os_options,
{'region_name': 'Sahara',
'service_type': 'object-store',
'endpoint_type': 'publicURL'})

    def test_connection_with_service_type(self):
self.config(swift_store_service_type='shoe-store')
self.store.configure()
connection = self.store.get_connection(self.location)
self.assertEqual(connection.os_options,
{'service_type': 'shoe-store',
'endpoint_type': 'publicURL'})

    def test_connection_with_endpoint_type(self):
self.config(swift_store_endpoint_type='internalURL')
self.store.configure()
connection = self.store.get_connection(self.location)
self.assertEqual(connection.os_options,
{'service_type': 'object-store',
'endpoint_type': 'internalURL'})

    def test_connection_with_snet(self):
self.config(swift_enable_snet=True)
self.store.configure()
connection = self.store.get_connection(self.location)
self.assertTrue(connection.snet)

    def test_bad_location_uri(self):
self.store.configure()
self.location.uri = 'http://bad_uri://'
self.assertRaises(exceptions.BadStoreUri,
self.location.parse_uri,
self.location.uri)

    def test_bad_location_uri_invalid_credentials(self):
self.store.configure()
self.location.uri = 'swift://bad_creds@uri/cont/obj'
self.assertRaises(exceptions.BadStoreUri,
self.location.parse_uri,
self.location.uri)

    def test_bad_location_uri_invalid_object_path(self):
self.store.configure()
self.location.uri = 'swift://user:key@uri/cont'
self.assertRaises(exceptions.BadStoreUri,
self.location.parse_uri,
self.location.uri)


class TestMultiTenantStoreConnections(base.StoreBaseTest):
def setUp(self):
super(TestMultiTenantStoreConnections, self).setUp()
moxfixture = self.useFixture(moxstubout.MoxStubout())
self.stubs = moxfixture.stubs
self.stubs.Set(swiftclient, 'Connection', FakeConnection)
self.context = context.RequestContext(
user='tenant:user1', tenant='tenant', auth_token='0123')
self.store = swift.MultiTenantStore(self.conf)
specs = {'scheme': 'swift',
'auth_or_store_url': 'example.com',
'container': 'cont',
'obj': 'object'}
self.location = swift.StoreLocation(specs)
self.addCleanup(self.conf.reset)

    def test_basic_connection(self):
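        """The multi-tenant store authenticates via the request context's
        endpoint and token (preauthurl/preauthtoken) rather than stored
        credentials.
        """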
self.store.configure()
connection = self.store.get_connection(self.location,
context=self.context)
self.assertIsNone(connection.authurl)
self.assertEqual(connection.auth_version, '2')
self.assertEqual(connection.user, 'tenant:user1')
self.assertEqual(connection.tenant_name, 'tenant')
self.assertIsNone(connection.key)
self.assertFalse(connection.snet)
self.assertEqual(connection.preauthurl, 'https://example.com')
self.assertEqual(connection.preauthtoken, '0123')
self.assertEqual(connection.os_options, {})

    def test_connection_with_snet(self):
self.config(swift_enable_snet=True)
self.store.configure()
connection = self.store.get_connection(self.location,
context=self.context)
self.assertTrue(connection.snet)


class FakeGetEndpoint(object):
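    """Stand-in for auth.get_endpoint.

    Returns a canned endpoint and records the catalog lookup parameters
    (service_type, endpoint_region, endpoint_type) for assertions.
    """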
def __init__(self, response):
self.response = response

    def __call__(self, service_catalog, service_type=None,
endpoint_region=None, endpoint_type=None):
self.service_type = service_type
self.endpoint_region = endpoint_region
self.endpoint_type = endpoint_type
return self.response


class TestCreatingLocations(base.StoreBaseTest):
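    """Tests for create_location in single- and multi-tenant stores."""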
_CONF = cfg.CONF

    def setUp(self):
super(TestCreatingLocations, self).setUp()
moxfixture = self.useFixture(moxstubout.MoxStubout())
self.stubs = moxfixture.stubs
conf = copy.deepcopy(SWIFT_CONF)
self.store = Store(self.conf)
self.config(**conf)
        reload(swift)  # NOTE: py2 builtin; py3 needs importlib.reload
self.addCleanup(self.conf.reset)

    def test_single_tenant_location(self):
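        """The configured swift reference supplies the scheme, endpoint and
        credentials of the generated location.
        """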
conf = copy.deepcopy(SWIFT_CONF)
conf['swift_store_container'] = 'container'
conf_file = "glance-swift.conf"
self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
conf.update({'swift_store_config_file': self.swift_config_file})
conf['default_swift_reference'] = 'ref1'
self.config(**conf)
        reload(swift)  # NOTE: py2 builtin; py3 needs importlib.reload
store = swift.SingleTenantStore(self.conf)
store.configure()
location = store.create_location('image-id')
self.assertEqual(location.scheme, 'swift+https')
self.assertEqual(location.swift_url, 'https://example.com')
self.assertEqual(location.container, 'container')
self.assertEqual(location.obj, 'image-id')
self.assertEqual(location.user, 'tenant:user1')
self.assertEqual(location.key, 'key1')

    def test_single_tenant_location_http(self):
conf_file = "glance-swift.conf"
test_dir = self.useFixture(fixtures.TempDir()).path
self.swift_config_file = self.copy_data_file(conf_file, test_dir)
self.config(swift_store_container='container',
default_swift_reference='ref2',
swift_store_config_file=self.swift_config_file)
swift.SWIFT_STORE_REF_PARAMS = sutils.SwiftParams().params
store = swift.SingleTenantStore(self.conf)
store.configure()
location = store.create_location('image-id')
self.assertEqual(location.scheme, 'swift+http')
self.assertEqual(location.swift_url, 'http://example.com')

    def test_multi_tenant_location(self):
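        """Multi-tenant locations use a per-image container and embed no
        credentials.
        """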
self.config(swift_store_container='container')
fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
ctxt = context.RequestContext(
user='user', tenant='tenant', auth_token='123',
service_catalog={})
store = swift.MultiTenantStore(self.conf)
store.configure()
location = store.create_location('image-id', context=ctxt)
self.assertEqual(location.scheme, 'swift+https')
self.assertEqual(location.swift_url, 'https://some_endpoint')
self.assertEqual(location.container, 'container_image-id')
self.assertEqual(location.obj, 'image-id')
self.assertIsNone(location.user)
self.assertIsNone(location.key)
self.assertEqual(fake_get_endpoint.service_type, 'object-store')

    def test_multi_tenant_location_http(self):
fake_get_endpoint = FakeGetEndpoint('http://some_endpoint')
self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
ctxt = context.RequestContext(
user='user', tenant='tenant', auth_token='123',
service_catalog={})
store = swift.MultiTenantStore(self.conf)
store.configure()
location = store.create_location('image-id', context=ctxt)
self.assertEqual(location.scheme, 'swift+http')
self.assertEqual(location.swift_url, 'http://some_endpoint')

    def test_multi_tenant_location_with_region(self):
self.config(swift_store_region='WestCarolina')
fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
ctxt = context.RequestContext(
user='user', tenant='tenant', auth_token='123',
service_catalog={})
store = swift.MultiTenantStore(self.conf)
store.configure()
store._get_endpoint(ctxt)
self.assertEqual(fake_get_endpoint.endpoint_region, 'WestCarolina')

    def test_multi_tenant_location_custom_service_type(self):
self.config(swift_store_service_type='toy-store')
fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
ctxt = context.RequestContext(
user='user', tenant='tenant', auth_token='123',
service_catalog={})
store = swift.MultiTenantStore(self.conf)
store.configure()
store._get_endpoint(ctxt)
self.assertEqual(fake_get_endpoint.service_type, 'toy-store')

    def test_multi_tenant_location_custom_endpoint_type(self):
self.config(swift_store_endpoint_type='InternalURL')
fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
ctxt = context.RequestContext(
user='user', tenant='tenant', auth_token='123',
service_catalog={})
store = swift.MultiTenantStore(self.conf)
store.configure()
store._get_endpoint(ctxt)
self.assertEqual(fake_get_endpoint.endpoint_type, 'InternalURL')


class TestChunkReader(base.StoreBaseTest):
_CONF = cfg.CONF

    def setUp(self):
super(TestChunkReader, self).setUp()
conf = copy.deepcopy(SWIFT_CONF)
store = Store(self.conf)
self.config(**conf)

    def test_read_all_data(self):
"""
Replicate what goes on in the Swift driver with the
repeated creation of the ChunkReader object
"""
CHUNKSIZE = 100
checksum = hashlib.md5()
data_file = tempfile.NamedTemporaryFile()
        data_file.write(b'*' * units.Ki)  # temp file is binary mode; write bytes
data_file.flush()
infile = open(data_file.name, 'rb')
bytes_read = 0
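        # Mirror the driver: wrap the same file object in a fresh
        # ChunkReader for every chunk and stop at the first empty read.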
while True:
cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
chunk = cr.read(CHUNKSIZE)
bytes_read += len(chunk)
if not chunk:
break
self.assertEqual(1024, bytes_read)
        infile.close()
        data_file.close()